sphinx-2.0.4-release/INSTALL
Please refer to <> section in doc/sphinx.txt or doc/sphinx.html.

sphinx-2.0.4-release/win/ (Visual Studio project files)
tests05.vcproj, search05.vcproj, searchd05.vcproj, indexer05.vcproj, indextool05.vcproj,
spelldump05.vcproj, libsphinx05.vcproj, testrt05.vcproj, tests08.vcproj, search08.vcproj,
searchd08.vcproj, indexer08.vcproj, indextool08.vcproj, spelldump08.vcproj,
libsphinx08.vcproj, testrt08.vcproj

sphinx-2.0.4-release/configure.ac
dnl Process this file with autoconf to produce a configure script.
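dnl
dnl Illustrative only (not part of the upstream script): a typical invocation of
dnl the generated configure, using option names that are declared further below
dnl via AC_ARG_WITH / AC_ARG_ENABLE; the exact flag set depends on your system.
dnl
dnl   ./configure --with-mysql --with-pgsql --enable-id64 --with-libstemmer
dnl   make -j4 install
dnl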
AC_PREREQ(2.59) AC_INIT([sphinx], [2.0.4], [shodan(at)sphinxsearch.com]) dnl -------------------------------------------------------------------------- SPHINX_CONFIGURE_PART([checking build environment]) AC_CONFIG_AUX_DIR([config]) AM_INIT_AUTOMAKE([-Wall -Werror foreign]) AM_MAINTAINER_MODE AC_CONFIG_SRCDIR([src/searchd.cpp]) AC_CONFIG_HEADER([config/config.h]) # hack to locate expat/iconv in /usr/local on BSD systems CPPFLAGS="$CPPFLAGS -I/usr/local/include" LIBS="$LIBS -L/usr/local/lib" dnl -------------------------------------------------------------------------- SPHINX_CONFIGURE_PART([checking for compiler programs]) AC_ARG_WITH([debug], AC_HELP_STRING([--with-debug], [compile slower debug version (default is disabled)]), [ac_cv_use_debug=$withval], [ac_cv_use_debug=no] ) AC_MSG_CHECKING([whether to compile debug version]) if test x$ac_cv_use_debug != xno; then SPHINX_CFLAGS="-Wall -g -D_FILE_OFFSET_BITS=64" SPHINX_INJECT_FLAGS="-D_FILE_OFFSET_BITS=64" AC_MSG_RESULT([yes]) else SPHINX_CFLAGS="-Wall -g -D_FILE_OFFSET_BITS=64 -O3 -DNDEBUG" SPHINX_INJECT_FLAGS="-D_FILE_OFFSET_BITS=64 -DNDEBUG" AC_MSG_RESULT([no]) fi dnl set flags for C compiler if there are no user overrides dnl inject required defines if there are if test x$ac_env_CFLAGS_set != xset; then CFLAGS=$SPHINX_CFLAGS else CFLAGS="$CFLAGS $SPHINX_INJECT_FLAGS" fi dnl set flags for C++ compiler if there are no user overrides dnl inject required defines if there are if test x$ac_env_CXXFLAGS_set != xset; then CXXFLAGS=$SPHINX_CFLAGS else CXXFLAGS="$CXXFLAGS $SPHINX_INJECT_FLAGS" fi AC_PROG_CC AC_PROG_CXX AC_PROG_RANLIB AC_COMPILE_IFELSE([ #ifdef __GNUC__ #if __GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ > 3) void main() {} #else syntax error #endif #endif ], [], [AC_MSG_ERROR([Gcc version error. Minspec is 3.4])]) AC_DEFINE_UNQUOTED([COMPILER],"$CC `$CC -dumpversion`",[Define to be the name of the compiler.]) AC_DEFINE_UNQUOTED([OS_UNAME],"`uname -a`",[Full name OS]) dnl -------------------------------------------------------------------------- SPHINX_CONFIGURE_PART([checking for header files]) # Checks for header files. AC_HEADER_STDC AC_HEADER_SYS_WAIT AC_CHECK_HEADERS([fcntl.h limits.h netdb.h netinet/in.h stdlib.h string.h sys/file.h sys/socket.h sys/time.h unistd.h pthread.h execinfo.h]) AC_CHECK_HEADER(expat.h,[have_expat_h=yes],[have_expat_h=no]) AC_CHECK_HEADER(iconv.h,[have_iconv_h=yes],[have_iconv_h=no]) AC_CHECK_HEADER(zlib.h,[have_zlib_h=yes],[have_zlib_h=no]) AC_CHECK_HEADER(sql.h,[have_sql_h=yes],[have_sql_h=no]) AC_CHECK_HEADER(syslog.h,[have_syslog_h=yes],[have_syslog_h=no]) dnl -------------------------------------------------------------------------- SPHINX_CONFIGURE_PART([checking for types]) # Checks for typedefs, structures, and compiler characteristics. AC_HEADER_STDBOOL AC_C_CONST AC_C_INLINE AC_TYPE_OFF_T AC_TYPE_SIZE_T AC_HEADER_TIME dnl -------------------------------------------------------------------------- SPHINX_CONFIGURE_PART([checking for library functions]) # Checks for library functions. 
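dnl Illustrative note (not part of the upstream script): the library and function
dnl checks in this section surface as HAVE_* macros in config/config.h, which the
dnl C++ sources then guard on. For example, src/sphinxexpr.cpp wraps its dynamic
dnl loader include in the result of the dlopen check performed below; dlfcn.h is
dnl the standard header for dlopen(), the exact include line is assumed here:
dnl
dnl   #ifdef HAVE_DLOPEN
dnl   #include <dlfcn.h>
dnl   #endif
dnl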
AC_FUNC_FORK AC_FUNC_MALLOC AC_FUNC_REALLOC AC_FUNC_SELECT_ARGTYPES AC_TYPE_SIGNAL AC_FUNC_STAT AC_FUNC_VPRINTF AC_SEARCH_LIBS([setsockopt],[socket]) AC_SEARCH_LIBS([gethostbyname],[nsl socket resolv]) AC_SEARCH_LIBS([XML_Parse],[expat],[have_libexpat=yes],[have_libexpat=no]) AC_SEARCH_LIBS([iconv],[iconv],[have_libiconv=yes],[have_libiconv=no]) AC_SEARCH_LIBS([inflate],[z],[have_lz=yes],[have_lz=no]) AC_SEARCH_LIBS([logf],[m]) AC_CHECK_FUNCS([dup2 gethostbyname gettimeofday memmove memset select socket strcasecmp strchr strerror strncasecmp strstr strtol logf pread]) AC_CHECK_FUNCS([backtrace backtrace_symbols]) # most systems require the program be linked with librt library to use # the function clock_gettime my_save_LIBS="$LIBS" LIBS="" AC_CHECK_LIB(rt,clock_gettime) LIBRT=$LIBS LIBS="$my_save_LIBS" AC_SUBST(LIBRT) LIBS="$LIBS $LIBRT" AC_CHECK_FUNCS(clock_gettime) SPHINX_CHECK_DEFINE(LOCK_EX,sys/file.h) SPHINX_CHECK_DEFINE(F_SETLKW,fcntl.h) # check for dlopen # FIXME! technically, only needed in searchd # but as UDF manager is curently in libsphinx, we link everything AC_CHECK_LIB(dl,dlopen) AC_CHECK_FUNCS(dlopen dlerror) dnl -------------------------------------------------------------------------- SPHINX_CONFIGURE_PART([configuring Sphinx]) dnl --- # check for pthreads dnl helper that runs a test program and checks for success pthread_prog=" #include #include void * thread_routine ( void * data ) { return data; } int main () { pthread_t thd; pthread_mutexattr_t mattr; pthread_once_t once_init = PTHREAD_ONCE_INIT; int data = 1; pthread_mutexattr_init ( &mattr ); return pthread_create ( &thd, NULL, thread_routine, &data ); } " AC_DEFUN([PTHREADS_TRY_RUNCOMPILE],[ if test x$cross_compiling = xno ; then AC_TRY_RUN($pthread_prog, [pthreads_try_run=yes], [pthreads_try_run=no], [pthreads_try_run=no]) if test x$pthreads_try_run = xyes ; then $1 fi else AC_COMPILE_IFELSE($pthread_prog, [pthreads_try_compile=yes], [pthreads_try_compile=no], [pthreads_try_compile=no]) if test x$pthreads_try_compile = xyes ; then $1 fi fi ]) AC_DEFUN([PTHREADS_TRY_RUNLINK],[ if test x$cross_compiling = xno ; then AC_TRY_RUN($pthread_prog, [pthreads_try_run=yes], [pthreads_try_run=no], [pthreads_try_run=no]) if test x$pthreads_try_run = xyes ; then $1 fi else AC_LINK_IFELSE($pthread_prog, [pthreads_try_link=yes], [pthreads_try_link=no], [pthreads_try_link=no]) if test x$pthreads_try_link = xyes ; then $1 fi fi ]) # check for needed cflags AC_CACHE_CHECK([for CFLAGS needed for pthreads], [sphinx_cv_pthreads_cflags], [ save_cflags=$CFLAGS for flag in none -kthread -pthread -pthreads -mt -mthreads -Kthread -threads; do CFLAGS=$save_cflags test "x$flag" != "xnone" && CFLAGS="$CFLAGS $flag" PTHREADS_TRY_RUNCOMPILE([ sphinx_cv_pthreads_cflags="$flag" break ]) done CFLAGS=$save_cflags ]) if test -n "$sphinx_cv_pthreads_cflags"; then have_pthreads=yes if test "x$sphinx_cv_pthreads_cflags" != "xnone"; then CPPFLAGS="$CPPFLAGS $sphinx_cv_pthreads_cflags" fi fi # check for needed libs AC_CACHE_CHECK([for LIBS needed for pthreads], [sphinx_cv_pthreads_libs], [ save_libs=$LIBS for lib in -lpthread -lpthreads -lc_r; do LIBS="$save_libs $lib" PTHREADS_TRY_RUNLINK([ sphinx_cv_pthreads_libs=$lib break ]) done LIBS=$save_libs ]) if test -n "$sphinx_cv_pthreads_libs"; then have_pthreads=yes LIBS="$LIBS $sphinx_cv_pthreads_libs" fi # final check AC_MSG_CHECKING([for pthreads]) if test x$have_pthreads = xyes; then if test x$cross_compiling = xno; then AC_MSG_RESULT([found]) else AC_MSG_RESULT([assumed as found (cross-compiling)]) fi 
else AC_MSG_ERROR([no working pthreads library found]) fi AC_CHECK_FUNCS([pthread_mutex_timedlock]) dnl --- # check if we should compile with MySQL support AC_ARG_WITH([mysql], AC_HELP_STRING([--with-mysql], [compile with MySQL support (default is enabled)]), [ac_cv_use_mysql=$withval], [ac_cv_use_mysql=yes] ) AC_MSG_CHECKING([whether to compile with MySQL support]) if test x$ac_cv_use_mysql != xno; then AC_MSG_RESULT([yes]) AC_CHECK_MYSQL([$ac_cv_use_mysql]) AC_DEFINE(USE_MYSQL,1,[Define to 1 if you want to compile with MySQL support]) AC_SUBST([MYSQL_LIBS]) AC_SUBST([MYSQL_CFLAGS]) else AC_MSG_RESULT([no]) fi AM_CONDITIONAL(USE_MYSQL, test x$ac_cv_use_mysql != xno) # check if we should statically link the mysql library AC_ARG_WITH([static-mysql], AC_HELP_STRING([--with-static-mysql], [link statically with MySQL library (default is no)]), [ac_cv_use_static_mysql=$withval], [ac_cv_use_static_mysql=no] ) AC_MSG_CHECKING([whether to link statically with MySQL support]) if test x$ac_cv_use_mysql != xno; then if test x$ac_cv_use_static_mysql != xno; then AC_CHECK_MYSQL([$ac_cv_use_static_mysql]) MYSQL_LIBS=`echo $MYSQL_LIBS | sed -e 's/\-Bdynamic/\-Bstatic/g'` MYSQL_LIBS="-Wl,-Bstatic $MYSQL_LIBS -Wl,-Bdynamic" AC_MSG_RESULT([yes]) else AC_MSG_RESULT([no]) fi fi # check if we should compile with PostgreSQL support AC_ARG_WITH([pgsql], AC_HELP_STRING([--with-pgsql], [compile with PostgreSQL support (default is disabled)]), [ac_cv_use_pgsql=$withval], [ac_cv_use_pgsql=no] ) AC_MSG_CHECKING([whether to compile with PostgreSQL support]) if test x$ac_cv_use_pgsql != xno; then AC_MSG_RESULT([yes]) AC_CHECK_PGSQL([$ac_cv_use_pgsql]) AC_DEFINE(USE_PGSQL,1,[Define to 1 if you want to compile with PostgreSQL support]) AC_SUBST([PGSQL_LIBS]) AC_SUBST([PGSQL_CFLAGS]) else AC_MSG_RESULT([no]) fi AM_CONDITIONAL(USE_PGSQL, test x$ac_cv_use_pgsql != xno) # add macports include directory if (echo $MYSQL_LIBS | grep -q -- -L/opt/local/lib); then MYSQL_CFLAGS="$MYSQL_CFLAGS -I/opt/local/include" fi # we can now set preprocessor flags for both C and C++ compilers CPPFLAGS="$CPPFLAGS $MYSQL_CFLAGS $PGSQL_CFLAGS" dnl --- AC_MSG_CHECKING([whether to use 64-bit document/word IDs]) sph_enable_id64=no AC_ARG_ENABLE([id64], [ --enable-id64 use 64-bit document and word IDs (default is no)], [sph_enable_id64=$enableval]) if test x$sph_enable_id64 != xno; then AC_DEFINE(USE_64BIT, 1, [64-bit document and word IDs]) AC_MSG_RESULT([yes]) else AC_DEFINE(USE_64BIT, 0, [64-bit document and word IDs]) AC_MSG_RESULT([no]) fi dnl --- AC_ARG_WITH([libstemmer], AC_HELP_STRING([--with-libstemmer], [compile with libstemmer support (default is disabled)]), [ac_cv_use_libstemmer=$withval], [ac_cv_use_libstemmer=no] ) AC_MSG_CHECKING([whether to compile with libstemmer support]) if test x$ac_cv_use_libstemmer != xno; then if test -d libstemmer_c && test -f libstemmer_c/include/libstemmer.h; then AC_MSG_RESULT([yes]) AC_DEFINE(USE_LIBSTEMMER, 1, [libstemmer support]) else AC_MSG_ERROR([missing libstemmer sources from libstemmer_c. Please download the C version of libstemmer library from http://snowball.tartarus.org/ and extract its sources over libstemmer_c/ subdirectory in order to build Sphinx with libstemmer support. 
]) fi else AC_MSG_RESULT([no]) AC_DEFINE(USE_LIBSTEMMER, 0, [libstemmer support]) fi AM_CONDITIONAL(USE_LIBSTEMMER, test x$ac_cv_use_libstemmer != xno) dnl --- got_expat=0 AC_MSG_CHECKING([for libexpat]) if test [ $have_expat_h = yes -a $have_libexpat = yes ]; then AC_DEFINE([USE_LIBEXPAT],1,[define to use expat XML library]) AC_MSG_RESULT([found]) got_expat=1 else AC_MSG_RESULT([not found]) AC_MSG_WARN([xmlpipe2 will NOT be available]) fi dnl --- AC_ARG_WITH([iconv], AC_HELP_STRING([--with-iconv], [compile with iconv support (default is autodetect)]), [ac_cv_use_iconv=$withval], [ac_cv_use_iconv=yes] ) AC_MSG_CHECKING([for libiconv]) if test [ $have_iconv_h = yes \ -a $have_libiconv = yes \ -a $got_expat -eq 1 \ -a $ac_cv_use_iconv != no ]; \ then AC_DEFINE([USE_LIBICONV],1,[define to use iconv library]) AC_MSG_RESULT([found]) AC_MSG_CHECKING([for iconv() arg types]) AC_LANG_PUSH([C++]) AC_COMPILE_IFELSE( [AC_LANG_PROGRAM([[ #include #include ]], [const char * inbuf; iconv_t cd; iconv ( cd, &inbuf, NULL, NULL, NULL ); ])], [iconv_inbuf_const=yes], [iconv_inbuf_const=no]) AC_LANG_POP([C++]) if test [ $iconv_inbuf_const = yes ]; then AC_DEFINE([ICONV_INBUF_CONST],1,[whether 2nd arg to iconv() is const ptr]) AC_MSG_RESULT([const char **]) else AC_DEFINE([ICONV_INBUF_CONST],0,[whether 2nd arg to iconv() is const ptr]) AC_MSG_RESULT([char **]) fi else if test [ $got_expat -eq 1 ]; then if test [ $ac_cv_use_iconv = no ]; then AC_MSG_RESULT([disabled]) else AC_MSG_RESULT([not found]) fi AC_MSG_WARN([xmlpipe2 will only support default encodings (latin-1, utf-8)]) else AC_MSG_RESULT([not required]) fi fi dnl --- if test [ $have_zlib_h = yes -a $have_lz = yes ]; then AC_DEFINE([USE_ZLIB],1,[define to use Zlib]) fi dnl --- AC_ARG_WITH([unixodbc], AC_HELP_STRING([--with-unixodbc], [compile with UnixODBC support (default is autodetect)]), [ac_cv_use_unixodbc=$withval], [ac_cv_use_unixodbc=yes] ) AC_MSG_CHECKING([for UnixODBC]) if test [ $ac_cv_use_unixodbc != no ]; then if test [ $have_sql_h = yes ]; then AC_SEARCH_LIBS([SQLConnect],[odbc iodbc],[have_libodbc=yes],[have_libodbc=no]) if test [ $have_libodbc = yes ]; then AC_DEFINE([USE_ODBC],1,[define to use ODBC library]) AC_MSG_RESULT([found]) else AC_MSG_RESULT([not found]) AC_MSG_WARN([ODBC source support will NOT be available]) fi fi else AC_MSG_RESULT([disabled]) fi dnl --- AC_ARG_WITH([syslog], AC_HELP_STRING([--with-syslog], [compile with possibility to use syslog for logging (default is no)]), [ac_cv_use_syslog=$withval], [ac_cv_use_syslog=no] ) AC_MSG_CHECKING([for Syslog]) if test [ $ac_cv_use_syslog != no ]; then if test [ $have_syslog_h = yes ]; then AC_DEFINE([USE_SYSLOG],1,[define to use POSIX Syslog for logging]) AC_MSG_RESULT([yes]) else AC_MSG_RESULT([no]) fi else AC_MSG_RESULT([disabled]) fi dnl --- AC_CACHE_CHECK([for unaligned RAM access],[sphinx_cv_unaligned_ram_access],[ AC_LANG_PUSH([C++]) AC_RUN_IFELSE( [AC_LANG_PROGRAM([[ #include #include ]], [[char * sBuf = new char [ 8*sizeof(int) ]; for ( int i=0; i<8*sizeof(int); i++ ) sBuf[i] = i; // check for crashes (SPARC) volatile int iRes = 0; for ( int i=0; i<(int)sizeof(int); i++ ) { int * pPtr = (int*)( sBuf+i ); iRes += *pPtr; } // check for correct values (ARM) iRes = *(int*)( sBuf+1 ); if (!( iRes==0x01020304 || iRes==0x04030201 )) return 1; // all seems ok return 0;]])], [sphinx_cv_unaligned_ram_access=yes], [sphinx_cv_unaligned_ram_access=no], [AC_MSG_RESULT([unknown (cross-compiling), assume no]) sphinx_cv_unaligned_ram_access=no]) ]) if test 
x$sphinx_cv_unaligned_ram_access = xyes ; then
	AC_DEFINE([UNALIGNED_RAM_ACCESS],1,[whether unaligned RAM access is possible])
else
	AC_DEFINE([UNALIGNED_RAM_ACCESS],0)
fi

# check endianness
AC_C_BIGENDIAN(
	AC_DEFINE(USE_LITTLE_ENDIAN, 0, [big-endian]),
	AC_DEFINE(USE_LITTLE_ENDIAN, 1, [little-endian]),
	AC_MSG_ERROR(unknown endianness not supported),
	AC_MSG_ERROR(universal endianness not supported)
)

dnl --------------------------------------------------------------------------

SPHINX_CONFIGURE_PART([generating configuration files])

if test "$prefix" = "NONE"; then
	my_op_prefix="/var"
else
	my_op_prefix="$localstatedir"
fi
CONFDIR=`eval echo "${my_op_prefix}"`
AC_SUBST(CONFDIR)

AC_CONFIG_FILES([Makefile src/Makefile libstemmer_c/Makefile doc/Makefile sphinx.conf.dist:sphinx.conf.in \
	sphinx-min.conf.dist:sphinx-min.conf.in])
AC_OUTPUT

dnl --------------------------------------------------------------------------

SPHINX_CONFIGURE_PART([configuration done])

echo "You can now run 'make install' to build and install Sphinx binaries."
echo "On a multi-core machine, try 'make -j4 install' to speed up the build."
echo
echo "Updates, articles, help forum, and commercial support, consulting, training,"
echo "and development services are available at http://sphinxsearch.com/"
echo
echo "Thank you for choosing Sphinx!"
echo

sphinx-2.0.4-release/example.sql
DROP TABLE IF EXISTS test.documents;
CREATE TABLE test.documents
(
	id INTEGER PRIMARY KEY NOT NULL AUTO_INCREMENT,
	group_id INTEGER NOT NULL,
	group_id2 INTEGER NOT NULL,
	date_added DATETIME NOT NULL,
	title VARCHAR(255) NOT NULL,
	content TEXT NOT NULL
);

REPLACE INTO test.documents ( id, group_id, group_id2, date_added, title, content ) VALUES
	( 1, 1, 5, NOW(), 'test one', 'this is my test document number one. also checking search within phrases.' ),
	( 2, 1, 6, NOW(), 'test two', 'this is my test document number two' ),
	( 3, 2, 7, NOW(), 'another doc', 'this is another group' ),
	( 4, 2, 8, NOW(), 'doc number four', 'this is to test groups' );

DROP TABLE IF EXISTS test.tags;
CREATE TABLE test.tags
(
	docid INTEGER NOT NULL,
	tagid INTEGER NOT NULL,
	UNIQUE(docid,tagid)
);

INSERT INTO test.tags VALUES (1,1), (1,3), (1,5), (1,7), (2,6), (2,4), (2,2), (3,15), (4,7), (4,40);

sphinx-2.0.4-release/src/sphinxexpr.cpp
//
// $Id: sphinxexpr.cpp 3129 2012-03-01 07:18:52Z tomat $
//

//
// Copyright (c) 2001-2012, Andrew Aksyonoff
// Copyright (c) 2008-2012, Sphinx Technologies Inc
// All rights reserved
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License.
You should have // received a copy of the GPL license along with this program; if you // did not, you can find it at http://www.gnu.org/ // #include "sphinx.h" #include "sphinxexpr.h" #include "sphinxudf.h" #include "sphinxutils.h" #include "sphinxint.h" #include #include #if !USE_WINDOWS #include #include #ifdef HAVE_DLOPEN #include #endif // HAVE_DLOPEN #endif // !USE_WINDOWS ////////////////////////////////////////////////////////////////////////// #ifndef M_LOG2E #define M_LOG2E 1.44269504088896340736 #endif #ifndef M_LOG10E #define M_LOG10E 0.434294481903251827651 #endif #if !USE_WINDOWS #ifndef HAVE_DLERROR #define dlerror() "" #endif // HAVE_DLERROR #endif // !USE_WINDOWS typedef int ( *UdfInit_fn ) ( SPH_UDF_INIT * init, SPH_UDF_ARGS * args, char * error ); typedef void ( *UdfDeinit_fn ) ( SPH_UDF_INIT * init ); /// loaded UDF library struct UdfLib_t { void * m_pHandle; ///< handle from dlopen() int m_iFuncs; ///< number of registered functions from this library }; /// registered UDF function struct UdfFunc_t { UdfLib_t * m_pLib; ///< library descriptor (pointer to library hash value) const CSphString * m_pLibName; ///< library name (pointer to library hash key) ESphAttr m_eRetType; ///< function type, currently FLOAT or INT UdfInit_fn m_fnInit; ///< per-query init function, mandatory UdfDeinit_fn m_fnDeinit; ///< per-query deinit function, optional void * m_fnFunc; ///< per-row worker function, mandatory int m_iUserCount; ///< number of active users currently working this function bool m_bToDrop; ///< scheduled for DROP; do not use }; /// UDF call site struct UdfCall_t { UdfFunc_t * m_pUdf; SPH_UDF_INIT m_tInit; SPH_UDF_ARGS m_tArgs; UdfCall_t(); ~UdfCall_t(); }; ////////////////////////////////////////////////////////////////////////// // GLOBALS ////////////////////////////////////////////////////////////////////////// // hack hack hack UservarIntSet_c * ( *g_pUservarsHook )( const CSphString & sUservar ); static bool g_bUdfEnabled = false; static CSphString g_sUdfDir; static CSphStaticMutex g_tUdfMutex; static SmallStringHash_T g_hUdfLibs; static SmallStringHash_T g_hUdfFuncs; ////////////////////////////////////////////////////////////////////////// // UDF CALL SITE ////////////////////////////////////////////////////////////////////////// UdfCall_t::UdfCall_t () { m_pUdf = NULL; m_tInit.func_data = NULL; m_tInit.is_const = false; m_tArgs.arg_count = 0; m_tArgs.arg_types = NULL; m_tArgs.arg_values = NULL; m_tArgs.arg_names = NULL; m_tArgs.str_lengths = NULL; } UdfCall_t::~UdfCall_t () { if ( m_pUdf ) { g_tUdfMutex.Lock (); m_pUdf->m_iUserCount--; g_tUdfMutex.Unlock (); } SafeDeleteArray ( m_tArgs.arg_types ); SafeDeleteArray ( m_tArgs.arg_values ); SafeDeleteArray ( m_tArgs.arg_names ); SafeDeleteArray ( m_tArgs.str_lengths ); } ////////////////////////////////////////////////////////////////////////// // EVALUATION ENGINE ////////////////////////////////////////////////////////////////////////// struct ExprLocatorTraits_t : public ISphExpr { CSphAttrLocator m_tLocator; int m_iLocator; ExprLocatorTraits_t ( const CSphAttrLocator & tLocator, int iLocator ) : m_tLocator ( tLocator ), m_iLocator ( iLocator ) {} virtual void GetDependencyColumns ( CSphVector & dColumns ) const { dColumns.Add ( m_iLocator ); } }; struct Expr_GetInt_c : public ExprLocatorTraits_t { Expr_GetInt_c ( const CSphAttrLocator & tLocator, int iLocator ) : ExprLocatorTraits_t ( tLocator, iLocator ) {} virtual float Eval ( const CSphMatch & tMatch ) const { return (float) tMatch.GetAttr ( m_tLocator ); } // 
FIXME! OPTIMIZE!!! we can go the short route here virtual int IntEval ( const CSphMatch & tMatch ) const { return (int)tMatch.GetAttr ( m_tLocator ); } virtual int64_t Int64Eval ( const CSphMatch & tMatch ) const { return (int64_t)tMatch.GetAttr ( m_tLocator ); } }; struct Expr_GetBits_c : public ExprLocatorTraits_t { Expr_GetBits_c ( const CSphAttrLocator & tLocator, int iLocator ) : ExprLocatorTraits_t ( tLocator, iLocator ) {} virtual float Eval ( const CSphMatch & tMatch ) const { return (float) tMatch.GetAttr ( m_tLocator ); } virtual int IntEval ( const CSphMatch & tMatch ) const { return (int)tMatch.GetAttr ( m_tLocator ); } virtual int64_t Int64Eval ( const CSphMatch & tMatch ) const { return (int64_t)tMatch.GetAttr ( m_tLocator ); } }; struct Expr_GetSint_c : public ExprLocatorTraits_t { Expr_GetSint_c ( const CSphAttrLocator & tLocator, int iLocator ) : ExprLocatorTraits_t ( tLocator, iLocator ) {} virtual float Eval ( const CSphMatch & tMatch ) const { return (float)(int)tMatch.GetAttr ( m_tLocator ); } virtual int IntEval ( const CSphMatch & tMatch ) const { return (int)tMatch.GetAttr ( m_tLocator ); } virtual int64_t Int64Eval ( const CSphMatch & tMatch ) const { return (int)tMatch.GetAttr ( m_tLocator ); } }; struct Expr_GetFloat_c : public ExprLocatorTraits_t { Expr_GetFloat_c ( const CSphAttrLocator & tLocator, int iLocator ) : ExprLocatorTraits_t ( tLocator, iLocator ) {} virtual float Eval ( const CSphMatch & tMatch ) const { return tMatch.GetAttrFloat ( m_tLocator ); } }; struct Expr_GetString_c : public ExprLocatorTraits_t { const BYTE * m_pStrings; Expr_GetString_c ( const CSphAttrLocator & tLocator, int iLocator ) : ExprLocatorTraits_t ( tLocator, iLocator ) {} virtual float Eval ( const CSphMatch & ) const { assert ( 0 ); return 0; } virtual void SetStringPool ( const BYTE * pStrings ) { m_pStrings = pStrings; } virtual int StringEval ( const CSphMatch & tMatch, const BYTE ** ppStr ) const { SphAttr_t iOff = tMatch.GetAttr ( m_tLocator ); if ( iOff>0 ) return sphUnpackStr ( m_pStrings + iOff, ppStr ); *ppStr = NULL; return 0; } }; struct Expr_GetMva_c : public ExprLocatorTraits_t { const DWORD * m_pMva; Expr_GetMva_c ( const CSphAttrLocator & tLocator, int iLocator ) : ExprLocatorTraits_t ( tLocator, iLocator ) {} virtual float Eval ( const CSphMatch & ) const { assert ( 0 ); return 0; } virtual void SetMVAPool ( const DWORD * pMva ) { m_pMva = pMva; } virtual const DWORD * MvaEval ( const CSphMatch & tMatch ) const { return tMatch.GetAttrMVA ( m_tLocator, m_pMva ); } }; struct Expr_GetConst_c : public ISphExpr { float m_fValue; explicit Expr_GetConst_c ( float fValue ) : m_fValue ( fValue ) {} virtual float Eval ( const CSphMatch & ) const { return m_fValue; } }; struct Expr_GetIntConst_c : public ISphExpr { int m_iValue; explicit Expr_GetIntConst_c ( int iValue ) : m_iValue ( iValue ) {} virtual float Eval ( const CSphMatch & ) const { return (float) m_iValue; } // no assert() here cause generic float Eval() needs to work even on int-evaluator tree virtual int IntEval ( const CSphMatch & ) const { return m_iValue; } virtual int64_t Int64Eval ( const CSphMatch & ) const { return m_iValue; } }; struct Expr_GetInt64Const_c : public ISphExpr { int64_t m_iValue; explicit Expr_GetInt64Const_c ( int64_t iValue ) : m_iValue ( iValue ) {} virtual float Eval ( const CSphMatch & ) const { return (float) m_iValue; } // no assert() here cause generic float Eval() needs to work even on int-evaluator tree virtual int IntEval ( const CSphMatch & ) const { assert ( 0 ); return 
(int)m_iValue; } virtual int64_t Int64Eval ( const CSphMatch & ) const { return m_iValue; } }; struct Expr_GetStrConst_c : public ISphExpr { CSphString m_sVal; int m_iLen; explicit Expr_GetStrConst_c ( const char * sVal, int iLen ) { if ( iLen>0 ) SqlUnescape ( m_sVal, sVal, iLen ); m_iLen = m_sVal.Length(); } virtual int StringEval ( const CSphMatch &, const BYTE ** ppStr ) const { *ppStr = (const BYTE*) m_sVal.cstr(); return m_iLen; } virtual float Eval ( const CSphMatch & ) const { assert ( 0 ); return 0; } virtual int IntEval ( const CSphMatch & ) const { assert ( 0 ); return 0; } virtual int64_t Int64Eval ( const CSphMatch & ) const { assert ( 0 ); return 0; } }; struct Expr_GetId_c : public ISphExpr { virtual float Eval ( const CSphMatch & tMatch ) const { return (float)tMatch.m_iDocID; } virtual int IntEval ( const CSphMatch & tMatch ) const { return (int)tMatch.m_iDocID; } virtual int64_t Int64Eval ( const CSphMatch & tMatch ) const { return (int64_t)tMatch.m_iDocID; } }; struct Expr_GetWeight_c : public ISphExpr { virtual float Eval ( const CSphMatch & tMatch ) const { return (float)tMatch.m_iWeight; } virtual int IntEval ( const CSphMatch & tMatch ) const { return (int)tMatch.m_iWeight; } virtual int64_t Int64Eval ( const CSphMatch & tMatch ) const { return (int64_t)tMatch.m_iWeight; } }; ////////////////////////////////////////////////////////////////////////// struct Expr_Arglist_c : public ISphExpr { CSphVector m_dArgs; Expr_Arglist_c ( ISphExpr * pLeft, ISphExpr * pRight ) { AddArgs ( pLeft ); AddArgs ( pRight ); } ~Expr_Arglist_c () { ARRAY_FOREACH ( i, m_dArgs ) SafeRelease ( m_dArgs[i] ); } void AddArgs ( ISphExpr * pExpr ) { // not an arglist? just add it if ( !pExpr->IsArglist() ) { m_dArgs.Add ( pExpr ); return; } // arglist? take ownership of its args, and dismiss it Expr_Arglist_c * pArgs = (Expr_Arglist_c *) pExpr; ARRAY_FOREACH ( i, pArgs->m_dArgs ) { m_dArgs.Add ( pArgs->m_dArgs[i] ); pArgs->m_dArgs[i] = NULL; } SafeRelease ( pExpr ); } virtual bool IsArglist () const { return true; } virtual float Eval ( const CSphMatch & ) const { assert ( 0 && "internal error: Eval() must not be explicitly called on arglist" ); return 0.0f; } virtual void GetDependencyColumns ( CSphVector & dColumns ) const { ARRAY_FOREACH ( i, m_dArgs ) m_dArgs[i]->GetDependencyColumns ( dColumns ); } }; ////////////////////////////////////////////////////////////////////////// struct Expr_Unary_c : public ISphExpr { ISphExpr * m_pFirst; ~Expr_Unary_c() { SafeRelease ( m_pFirst ); } virtual void SetMVAPool ( const DWORD * pMvaPool ) { m_pFirst->SetMVAPool ( pMvaPool ); } virtual void SetStringPool ( const BYTE * pStrings ) { m_pFirst->SetStringPool ( pStrings ); } virtual void GetDependencyColumns ( CSphVector & dColumns ) const { m_pFirst->GetDependencyColumns ( dColumns ); } }; struct Expr_Crc32_c : public Expr_Unary_c { explicit Expr_Crc32_c ( ISphExpr * pFirst ) { m_pFirst = pFirst; } virtual float Eval ( const CSphMatch & tMatch ) const { return (float)IntEval ( tMatch ); } virtual int IntEval ( const CSphMatch & tMatch ) const { const BYTE * pStr; return sphCRC32 ( pStr, m_pFirst->StringEval ( tMatch, &pStr ) ); } virtual int64_t Int64Eval ( const CSphMatch & tMatch ) const { return IntEval ( tMatch ); } }; static inline int Fibonacci ( int i ) { if ( i<0 ) return 0; int f0 = 0; int f1 = 1; int j = 0; for ( j=0; j+1IntEval ( tMatch ) ); } virtual int64_t Int64Eval ( const CSphMatch & tMatch ) const { return IntEval ( tMatch ); } }; 
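//
// Illustrative sketch (not part of the original source): the DECLARE_* macros
// below generate evaluator nodes that follow the same shape as the hand-written
// classes above, i.e. Eval/IntEval/Int64Eval over a CSphMatch. A hypothetical
// unary "double the argument" node would look roughly like this:
//
//   struct Expr_Twice_c : public Expr_Unary_c
//   {
//       explicit Expr_Twice_c ( ISphExpr * pFirst ) { m_pFirst = pFirst; }
//       virtual float   Eval      ( const CSphMatch & tMatch ) const { return 2.0f * m_pFirst->Eval ( tMatch ); }
//       virtual int     IntEval   ( const CSphMatch & tMatch ) const { return 2 * m_pFirst->IntEval ( tMatch ); }
//       virtual int64_t Int64Eval ( const CSphMatch & tMatch ) const { return 2 * m_pFirst->Int64Eval ( tMatch ); }
//   };
//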
////////////////////////////////////////////////////////////////////////// #define FIRST m_pFirst->Eval(tMatch) #define SECOND m_pSecond->Eval(tMatch) #define THIRD m_pThird->Eval(tMatch) #define INTFIRST m_pFirst->IntEval(tMatch) #define INTSECOND m_pSecond->IntEval(tMatch) #define INTTHIRD m_pThird->IntEval(tMatch) #define INT64FIRST m_pFirst->Int64Eval(tMatch) #define INT64SECOND m_pSecond->Int64Eval(tMatch) #define INT64THIRD m_pThird->Int64Eval(tMatch) #define DECLARE_UNARY_TRAITS(_classname,_expr) \ struct _classname : public ISphExpr \ { \ ISphExpr * m_pFirst; \ explicit _classname ( ISphExpr * pFirst ) : m_pFirst ( pFirst ) {}; \ ~_classname () { SafeRelease ( m_pFirst ); } \ virtual void SetMVAPool ( const DWORD * pMvaPool ) { m_pFirst->SetMVAPool ( pMvaPool ); } \ virtual void SetStringPool ( const BYTE * pStrings ) { m_pFirst->SetStringPool ( pStrings ); } \ virtual float Eval ( const CSphMatch & tMatch ) const { return _expr; } \ virtual void GetDependencyColumns ( CSphVector & dColumns ) const { m_pFirst->GetDependencyColumns ( dColumns ); } \ #define DECLARE_UNARY_FLT(_classname,_expr) \ DECLARE_UNARY_TRAITS ( _classname, _expr ) \ }; #define DECLARE_UNARY_INT(_classname,_expr,_expr2,_expr3) \ DECLARE_UNARY_TRAITS ( _classname, _expr ) \ virtual int IntEval ( const CSphMatch & tMatch ) const { return _expr2; } \ virtual int64_t Int64Eval ( const CSphMatch & tMatch ) const { return _expr3; } \ }; #define IABS(_arg) ( (_arg)>0 ? (_arg) : (-_arg) ) DECLARE_UNARY_INT ( Expr_Neg_c, -FIRST, -INTFIRST, -INT64FIRST ) DECLARE_UNARY_INT ( Expr_Abs_c, fabs(FIRST), IABS(INTFIRST), IABS(INT64FIRST) ) DECLARE_UNARY_FLT ( Expr_Ceil_c, float(ceil(FIRST)) ) DECLARE_UNARY_FLT ( Expr_Floor_c, float(floor(FIRST)) ) DECLARE_UNARY_FLT ( Expr_Sin_c, float(sin(FIRST)) ) DECLARE_UNARY_FLT ( Expr_Cos_c, float(cos(FIRST)) ) DECLARE_UNARY_FLT ( Expr_Ln_c, float(log(FIRST)) ) DECLARE_UNARY_FLT ( Expr_Log2_c, float(log(FIRST)*M_LOG2E) ) DECLARE_UNARY_FLT ( Expr_Log10_c, float(log(FIRST)*M_LOG10E) ) DECLARE_UNARY_FLT ( Expr_Exp_c, float(exp(FIRST)) ) DECLARE_UNARY_FLT ( Expr_Sqrt_c, float(sqrt(FIRST)) ) DECLARE_UNARY_INT ( Expr_NotInt_c, (float)(INTFIRST?0:1), INTFIRST?0:1, INTFIRST?0:1 ); DECLARE_UNARY_INT ( Expr_NotInt64_c, (float)(INT64FIRST?0:1), INT64FIRST?0:1, INT64FIRST?0:1 ); DECLARE_UNARY_INT ( Expr_Sint_c, (float)(INTFIRST), INTFIRST, INTFIRST ) ////////////////////////////////////////////////////////////////////////// #define DECLARE_BINARY_TRAITS(_classname) \ struct _classname : public ISphExpr \ { \ ISphExpr * m_pFirst; \ ISphExpr * m_pSecond; \ _classname ( ISphExpr * pFirst, ISphExpr * pSecond ) : m_pFirst ( pFirst ), m_pSecond ( pSecond ) {} \ ~_classname () { SafeRelease ( m_pFirst ); SafeRelease ( m_pSecond ); } \ virtual void SetMVAPool ( const DWORD * pMvaPool ) { m_pFirst->SetMVAPool ( pMvaPool ); m_pSecond->SetMVAPool ( pMvaPool ); } \ virtual void SetStringPool ( const BYTE * pStrings ) { m_pFirst->SetStringPool ( pStrings ); m_pSecond->SetStringPool ( pStrings ); } \ virtual void GetDependencyColumns ( CSphVector & dColumns ) const \ { \ m_pFirst->GetDependencyColumns ( dColumns ); \ m_pSecond->GetDependencyColumns ( dColumns ); \ } \ #define DECLARE_END() }; #define DECLARE_BINARY_FLT(_classname,_expr) \ DECLARE_BINARY_TRAITS ( _classname ) \ virtual float Eval ( const CSphMatch & tMatch ) const { return _expr; } \ }; #define DECLARE_BINARY_INT(_classname,_expr,_expr2,_expr3) \ DECLARE_BINARY_TRAITS ( _classname ) \ virtual float Eval ( const CSphMatch & tMatch ) const { return 
_expr; } \ virtual int IntEval ( const CSphMatch & tMatch ) const { return _expr2; } \ virtual int64_t Int64Eval ( const CSphMatch & tMatch ) const { return _expr3; } \ }; #define DECLARE_BINARY_POLY(_classname,_expr,_expr2,_expr3) \ DECLARE_BINARY_INT ( _classname##Float_c, _expr, (int)Eval(tMatch), (int64_t)Eval(tMatch ) ) \ DECLARE_BINARY_INT ( _classname##Int_c, (float)IntEval(tMatch), _expr2, (int64_t)IntEval(tMatch) ) \ DECLARE_BINARY_INT ( _classname##Int64_c, (float)Int64Eval(tMatch), (int)Int64Eval(tMatch), _expr3 ) #define IFFLT(_expr) ( (_expr) ? 1.0f : 0.0f ) #define IFINT(_expr) ( (_expr) ? 1 : 0 ) DECLARE_BINARY_INT ( Expr_Add_c, FIRST + SECOND, INTFIRST + INTSECOND, INT64FIRST + INT64SECOND ) DECLARE_BINARY_INT ( Expr_Sub_c, FIRST - SECOND, INTFIRST - INTSECOND, INT64FIRST - INT64SECOND ) DECLARE_BINARY_INT ( Expr_Mul_c, FIRST * SECOND, INTFIRST * INTSECOND, INT64FIRST * INT64SECOND ) DECLARE_BINARY_FLT ( Expr_Div_c, FIRST / SECOND ) DECLARE_BINARY_INT ( Expr_BitAnd_c, (float)(int(FIRST)&int(SECOND)), INTFIRST & INTSECOND, INT64FIRST & INT64SECOND ) DECLARE_BINARY_INT ( Expr_BitOr_c, (float)(int(FIRST)|int(SECOND)), INTFIRST | INTSECOND, INT64FIRST | INT64SECOND ) DECLARE_BINARY_INT ( Expr_Mod_c, (float)(int(FIRST)%int(SECOND)), INTFIRST % INTSECOND, INT64FIRST % INT64SECOND ) DECLARE_BINARY_TRAITS ( Expr_Idiv_c ) virtual float Eval ( const CSphMatch & tMatch ) const { int iSecond = int(SECOND); return iSecond ? float(int(FIRST)/iSecond) : 0.0f; } virtual int IntEval ( const CSphMatch & tMatch ) const { int iSecond = INTSECOND; return iSecond ? ( INTFIRST / iSecond ) : 0; } virtual int64_t Int64Eval ( const CSphMatch & tMatch ) const { int64_t iSecond = INT64SECOND; return iSecond ? ( INT64FIRST / iSecond ) : 0; } DECLARE_END() DECLARE_BINARY_POLY ( Expr_Lt, IFFLT ( FIRSTSECOND ), IFINT ( INTFIRST>INTSECOND ), IFINT ( INT64FIRST>INT64SECOND ) ) DECLARE_BINARY_POLY ( Expr_Lte, IFFLT ( FIRST<=SECOND ), IFINT ( INTFIRST<=INTSECOND ), IFINT ( INT64FIRST<=INT64SECOND ) ) DECLARE_BINARY_POLY ( Expr_Gte, IFFLT ( FIRST>=SECOND ), IFINT ( INTFIRST>=INTSECOND ), IFINT ( INT64FIRST>=INT64SECOND ) ) DECLARE_BINARY_POLY ( Expr_Eq, IFFLT ( fabs ( FIRST-SECOND )<=1e-6 ), IFINT ( INTFIRST==INTSECOND ), IFINT ( INT64FIRST==INT64SECOND ) ) DECLARE_BINARY_POLY ( Expr_Ne, IFFLT ( fabs ( FIRST-SECOND )>1e-6 ), IFINT ( INTFIRST!=INTSECOND ), IFINT ( INT64FIRST!=INT64SECOND ) ) DECLARE_BINARY_INT ( Expr_Min_c, Min ( FIRST, SECOND ), Min ( INTFIRST, INTSECOND ), Min ( INT64FIRST, INT64SECOND ) ) DECLARE_BINARY_INT ( Expr_Max_c, Max ( FIRST, SECOND ), Max ( INTFIRST, INTSECOND ), Max ( INT64FIRST, INT64SECOND ) ) DECLARE_BINARY_FLT ( Expr_Pow_c, float ( pow ( FIRST, SECOND ) ) ) DECLARE_BINARY_POLY ( Expr_And, FIRST!=0.0f && SECOND!=0.0f, IFINT ( INTFIRST && INTSECOND ), IFINT ( INT64FIRST && INT64SECOND ) ) DECLARE_BINARY_POLY ( Expr_Or, FIRST!=0.0f || SECOND!=0.0f, IFINT ( INTFIRST || INTSECOND ), IFINT ( INT64FIRST || INT64SECOND ) ) ////////////////////////////////////////////////////////////////////////// #define DECLARE_TERNARY(_classname,_expr,_expr2,_expr3) \ struct _classname : public ISphExpr \ { \ ISphExpr * m_pFirst; \ ISphExpr * m_pSecond; \ ISphExpr * m_pThird; \ _classname ( ISphExpr * pFirst, ISphExpr * pSecond, ISphExpr * pThird ) : m_pFirst ( pFirst ), m_pSecond ( pSecond ), m_pThird ( pThird ) {} \ ~_classname () { SafeRelease ( m_pFirst ); SafeRelease ( m_pSecond ); SafeRelease ( m_pThird ); } \ virtual void SetMVAPool ( const DWORD * pMvaPool ) { m_pFirst->SetMVAPool ( pMvaPool 
); m_pSecond->SetMVAPool ( pMvaPool ); m_pThird->SetMVAPool ( pMvaPool ); } \ virtual void SetStringPool ( const BYTE * pStrings ) { m_pFirst->SetStringPool ( pStrings ); m_pSecond->SetStringPool ( pStrings ); m_pThird->SetStringPool ( pStrings ); } \ virtual float Eval ( const CSphMatch & tMatch ) const { return _expr; } \ virtual int IntEval ( const CSphMatch & tMatch ) const { return _expr2; } \ virtual int64_t Int64Eval ( const CSphMatch & tMatch ) const { return _expr3; } \ virtual void GetDependencyColumns ( CSphVector & dColumns ) const \ { \ m_pFirst->GetDependencyColumns ( dColumns ); \ m_pSecond->GetDependencyColumns ( dColumns ); \ m_pThird->GetDependencyColumns ( dColumns ); \ } \ }; DECLARE_TERNARY ( Expr_If_c, ( FIRST!=0.0f ) ? SECOND : THIRD, INTFIRST ? INTSECOND : INTTHIRD, INT64FIRST ? INT64SECOND : INT64THIRD ) DECLARE_TERNARY ( Expr_Madd_c, FIRST*SECOND+THIRD, INTFIRST*INTSECOND + INTTHIRD, INT64FIRST*INT64SECOND + INT64THIRD ) DECLARE_TERNARY ( Expr_Mul3_c, FIRST*SECOND*THIRD, INTFIRST*INTSECOND*INTTHIRD, INT64FIRST*INT64SECOND*INT64THIRD ) ////////////////////////////////////////////////////////////////////////// #if USE_WINDOWS void localtime_r ( const time_t * clock, struct tm * res ) { *res = *localtime ( clock ); // FIXME?! } #endif #define DECLARE_TIMESTAMP(_classname,_expr) \ DECLARE_UNARY_TRAITS ( _classname, (float)IntEval(tMatch) ) \ virtual int64_t Int64Eval ( const CSphMatch & tMatch ) const { return IntEval(tMatch); } \ virtual int IntEval ( const CSphMatch & tMatch ) const \ { \ time_t ts = (time_t)FIRST; \ struct tm s; \ localtime_r ( &ts, &s ); \ return _expr; \ } \ }; DECLARE_TIMESTAMP ( Expr_Day_c, s.tm_mday ); DECLARE_TIMESTAMP ( Expr_Month_c, s.tm_mon+1 ); DECLARE_TIMESTAMP ( Expr_Year_c, s.tm_year+1900 ); DECLARE_TIMESTAMP ( Expr_YearMonth_c, (s.tm_year+1900)*100+s.tm_mon+1 ); DECLARE_TIMESTAMP ( Expr_YearMonthDay_c, (s.tm_year+1900)*10000+(s.tm_mon+1)*100+s.tm_mday ); ////////////////////////////////////////////////////////////////////////// // PARSER INTERNALS ////////////////////////////////////////////////////////////////////////// #include "yysphinxexpr.h" /// known functions enum Func_e { FUNC_NOW, FUNC_ABS, FUNC_CEIL, FUNC_FLOOR, FUNC_SIN, FUNC_COS, FUNC_LN, FUNC_LOG2, FUNC_LOG10, FUNC_EXP, FUNC_SQRT, FUNC_BIGINT, FUNC_SINT, FUNC_CRC32, FUNC_FIBONACCI, FUNC_DAY, FUNC_MONTH, FUNC_YEAR, FUNC_YEARMONTH, FUNC_YEARMONTHDAY, FUNC_MIN, FUNC_MAX, FUNC_POW, FUNC_IDIV, FUNC_IF, FUNC_MADD, FUNC_MUL3, FUNC_INTERVAL, FUNC_IN, FUNC_BITDOT, FUNC_GEODIST }; struct FuncDesc_t { const char * m_sName; int m_iArgs; Func_e m_eFunc; ESphAttr m_eRet; }; static FuncDesc_t g_dFuncs[] = { { "now", 0, FUNC_NOW, SPH_ATTR_INTEGER }, { "abs", 1, FUNC_ABS, SPH_ATTR_NONE }, { "ceil", 1, FUNC_CEIL, SPH_ATTR_FLOAT }, { "floor", 1, FUNC_FLOOR, SPH_ATTR_FLOAT }, { "sin", 1, FUNC_SIN, SPH_ATTR_FLOAT }, { "cos", 1, FUNC_COS, SPH_ATTR_FLOAT }, { "ln", 1, FUNC_LN, SPH_ATTR_FLOAT }, { "log2", 1, FUNC_LOG2, SPH_ATTR_FLOAT }, { "log10", 1, FUNC_LOG10, SPH_ATTR_FLOAT }, { "exp", 1, FUNC_EXP, SPH_ATTR_FLOAT }, { "sqrt", 1, FUNC_SQRT, SPH_ATTR_FLOAT }, { "bigint", 1, FUNC_BIGINT, SPH_ATTR_BIGINT }, // type-enforcer special as-if-function { "sint", 1, FUNC_SINT, SPH_ATTR_BIGINT }, // type-enforcer special as-if-function { "crc32", 1, FUNC_CRC32, SPH_ATTR_INTEGER }, { "fibonacci", 1, FUNC_FIBONACCI, SPH_ATTR_INTEGER }, { "day", 1, FUNC_DAY, SPH_ATTR_INTEGER }, { "month", 1, FUNC_MONTH, SPH_ATTR_INTEGER }, { "year", 1, FUNC_YEAR, SPH_ATTR_INTEGER }, { "yearmonth", 1, FUNC_YEARMONTH, 
SPH_ATTR_INTEGER }, { "yearmonthday", 1, FUNC_YEARMONTHDAY, SPH_ATTR_INTEGER }, { "min", 2, FUNC_MIN, SPH_ATTR_NONE }, { "max", 2, FUNC_MAX, SPH_ATTR_NONE }, { "pow", 2, FUNC_POW, SPH_ATTR_FLOAT }, { "idiv", 2, FUNC_IDIV, SPH_ATTR_NONE }, { "if", 3, FUNC_IF, SPH_ATTR_NONE }, { "madd", 3, FUNC_MADD, SPH_ATTR_NONE }, { "mul3", 3, FUNC_MUL3, SPH_ATTR_NONE }, { "interval", -2, FUNC_INTERVAL, SPH_ATTR_INTEGER }, { "in", -1, FUNC_IN, SPH_ATTR_INTEGER }, { "bitdot", -1, FUNC_BITDOT, SPH_ATTR_NONE }, { "geodist", 4, FUNC_GEODIST, SPH_ATTR_FLOAT } }; ////////////////////////////////////////////////////////////////////////// /// check for type based on int value static inline ESphAttr GetIntType ( int64_t iValue ) { return ( iValue>=(int64_t)INT_MIN && iValue<=(int64_t)INT_MAX ) ? SPH_ATTR_INTEGER : SPH_ATTR_BIGINT; } /// list of constants class ConstList_c { public: CSphVector m_dInts; ///< dword/int64 storage CSphVector m_dFloats; ///< float storage ESphAttr m_eRetType; ///< SPH_ATTR_INTEGER, SPH_ATTR_BIGINT, or SPH_ATTR_FLOAT public: ConstList_c () : m_eRetType ( SPH_ATTR_INTEGER ) {} void Add ( int64_t iValue ) { if ( m_eRetType!=SPH_ATTR_FLOAT ) { m_eRetType = GetIntType ( iValue ); m_dInts.Add ( iValue ); } else { m_dFloats.Add ( (float)iValue ); } } void Add ( float fValue ) { if ( m_eRetType!=SPH_ATTR_FLOAT ) { assert ( m_dFloats.GetLength()==0 ); ARRAY_FOREACH ( i, m_dInts ) m_dFloats.Add ( (float)m_dInts[i] ); m_dInts.Reset (); m_eRetType = SPH_ATTR_FLOAT; } m_dFloats.Add ( fValue ); } }; /// expression tree node struct ExprNode_t { int m_iToken; ///< token type, including operators ESphAttr m_eRetType; ///< result type ESphAttr m_eArgType; ///< args type CSphAttrLocator m_tLocator; ///< attribute locator, for TOK_ATTR type int m_iLocator; ///< index of attribute locator in schema union { int64_t m_iConst; ///< constant value, for TOK_CONST_INT type float m_fConst; ///< constant value, for TOK_CONST_FLOAT type int m_iFunc; ///< built-in function id, for TOK_FUNC type int m_iArgs; ///< args count, for arglist (token==',') type ConstList_c * m_pConsts; ///< constants list, for TOK_CONST_LIST type }; int m_iLeft; int m_iRight; ExprNode_t () : m_iToken ( 0 ), m_eRetType ( SPH_ATTR_NONE ), m_eArgType ( SPH_ATTR_NONE ), m_iLocator ( -1 ), m_iLeft ( -1 ), m_iRight ( -1 ) {} float FloatVal() { assert ( m_iToken==TOK_CONST_INT || m_iToken==TOK_CONST_FLOAT ); return ( m_iToken==TOK_CONST_INT ) ? 
(float)m_iConst : m_fConst; } }; /// expression parser class ExprParser_t { friend int yylex ( YYSTYPE * lvalp, ExprParser_t * pParser ); friend int yyparse ( ExprParser_t * pParser ); friend void yyerror ( ExprParser_t * pParser, const char * sMessage ); public: ExprParser_t ( CSphSchema * pExtra, ISphExprHook * pHook ) : m_pHook ( pHook ) , m_pExtra ( pExtra ) {} ~ExprParser_t (); ISphExpr * Parse ( const char * sExpr, const CSphSchema & tSchema, ESphAttr * pAttrType, bool * pUsesWeight, CSphString & sError ); protected: int m_iParsed; ///< filled by yyparse() at the very end CSphString m_sLexerError; CSphString m_sParserError; CSphString m_sCreateError; ISphExprHook * m_pHook; protected: ESphAttr GetWidestRet ( int iLeft, int iRight ); int AddNodeInt ( int64_t iValue ); int AddNodeFloat ( float fValue ); int AddNodeString ( int64_t iValue ); int AddNodeAttr ( int iTokenType, uint64_t uAttrLocator ); int AddNodeID (); int AddNodeWeight (); int AddNodeOp ( int iOp, int iLeft, int iRight ); int AddNodeFunc ( int iFunc, int iLeft, int iRight=-1 ); int AddNodeUdf ( int iCall, int iArg ); int AddNodeConstlist ( int64_t iValue ); int AddNodeConstlist ( float iValue ); void AppendToConstlist ( int iNode, int64_t iValue ); void AppendToConstlist ( int iNode, float iValue ); int AddNodeUservar ( int iUservar ); int AddNodeHookIdent ( int iID ); int AddNodeHookFunc ( int iID, int iLeft ); private: const char * m_sExpr; const char * m_pCur; const char * m_pLastTokenStart; const CSphSchema * m_pSchema; CSphVector m_dNodes; CSphVector m_dUservars; CSphVector m_dUdfCalls; CSphSchema * m_pExtra; int m_iConstNow; private: int GetToken ( YYSTYPE * lvalp ); void GatherArgTypes ( int iNode, CSphVector & dTypes ); void GatherArgNodes ( int iNode, CSphVector & dNodes ); void GatherArgRetTypes ( int iNode, CSphVector & dTypes ); bool CheckForConstSet ( int iArgsNode, int iSkip ); int ParseAttr ( int iAttr, const char* sTok, YYSTYPE * lvalp ); template < typename T > void WalkTree ( int iRoot, T & FUNCTOR ); void Optimize ( int iNode ); void Dump ( int iNode ); ISphExpr * CreateTree ( int iNode ); ISphExpr * CreateIntervalNode ( int iArgsNode, CSphVector & dArgs ); ISphExpr * CreateInNode ( int iNode ); ISphExpr * CreateGeodistNode ( int iArgs ); ISphExpr * CreateBitdotNode ( int iArgsNode, CSphVector & dArgs ); ISphExpr * CreateUdfNode ( int iCall, ISphExpr * pLeft ); }; ////////////////////////////////////////////////////////////////////////// /// parse that numeric constant static int ParseNumeric ( YYSTYPE * lvalp, const char ** ppStr ) { assert ( lvalp && ppStr && *ppStr ); // try float route char * pEnd = NULL; float fRes = (float) strtod ( *ppStr, &pEnd ); // try int route int64_t iRes = 0; bool bInt = true; for ( const char * p=(*ppStr); piConst = iRes; return TOK_CONST_INT; } else { lvalp->fConst = fRes; return TOK_CONST_FLOAT; } } static uint64_t sphPackAttrLocator ( const CSphAttrLocator & tLoc, int iLocator ) { assert ( iLocator>=0 && iLocator<=0xff ); uint64_t uIndex = 0; uIndex = ( tLoc.m_iBitOffset<<16 ) + tLoc.m_iBitCount + ( (uint64_t)iLocator<<32 ); if ( tLoc.m_bDynamic ) uIndex |= ( U64C(1)<<63 ); return uIndex; } static void sphUnpackAttrLocator ( uint64_t uIndex, ExprNode_t * pNode ) { assert ( pNode ); pNode->m_tLocator.m_iBitOffset = (int)( ( uIndex>>16 ) & 0xffff ); pNode->m_tLocator.m_iBitCount = (int)( uIndex & 0xffff ); pNode->m_tLocator.m_bDynamic = ( ( uIndex & ( U64C(1)<<63 ) )!=0 ); pNode->m_iLocator = (int)( ( uIndex>>32 ) & 0xff ); } int ExprParser_t::ParseAttr ( int iAttr, 
const char* sTok, YYSTYPE * lvalp ) { // check attribute type and width const CSphColumnInfo & tCol = m_pSchema->GetAttr ( iAttr ); int iRes = -1; switch ( tCol.m_eAttrType ) { case SPH_ATTR_FLOAT: iRes = TOK_ATTR_FLOAT; break; case SPH_ATTR_UINT32SET: iRes = TOK_ATTR_MVA32; break; case SPH_ATTR_UINT64SET: iRes = TOK_ATTR_MVA64; break; case SPH_ATTR_STRING: iRes = TOK_ATTR_STRING; break; case SPH_ATTR_INTEGER: case SPH_ATTR_TIMESTAMP: case SPH_ATTR_BOOL: case SPH_ATTR_BIGINT: case SPH_ATTR_WORDCOUNT: iRes = tCol.m_tLocator.IsBitfield() ? TOK_ATTR_BITS : TOK_ATTR_INT; break; default: m_sLexerError.SetSprintf ( "attribute '%s' is of unsupported type (type=%d)", sTok, tCol.m_eAttrType ); return -1; } if ( m_pExtra ) m_pExtra->AddAttr ( tCol, true ); lvalp->iAttrLocator = sphPackAttrLocator ( tCol.m_tLocator, iAttr ); return iRes; } /// a lexer of my own /// returns token id and fills lvalp on success /// returns -1 and fills sError on failure int ExprParser_t::GetToken ( YYSTYPE * lvalp ) { // skip whitespace, check eof while ( isspace ( *m_pCur ) ) m_pCur++; m_pLastTokenStart = m_pCur; if ( !*m_pCur ) return 0; // check for constant if ( isdigit ( *m_pCur ) ) return ParseNumeric ( lvalp, &m_pCur ); // check for field, function, or magic name if ( sphIsAttr ( m_pCur[0] ) || ( m_pCur[0]=='@' && sphIsAttr ( m_pCur[1] ) && !isdigit ( m_pCur[1] ) ) ) { // get token const char * pStart = m_pCur++; while ( sphIsAttr ( *m_pCur ) ) m_pCur++; CSphString sTok; sTok.SetBinary ( pStart, m_pCur-pStart ); sTok.ToLower (); // check for magic name if ( sTok=="@id" ) return TOK_ATID; if ( sTok=="@weight" ) return TOK_ATWEIGHT; if ( sTok=="id" ) return TOK_ID; if ( sTok=="weight" ) return TOK_WEIGHT; if ( sTok=="distinct" ) return TOK_DISTINCT; if ( sTok=="@geodist" ) { int iGeodist = m_pSchema->GetAttrIndex("@geodist"); if ( iGeodist==-1 ) { m_sLexerError = "geoanchor is not set, @geodist expression unavailable"; return -1; } const CSphAttrLocator & tLoc = m_pSchema->GetAttr ( iGeodist ).m_tLocator; lvalp->iAttrLocator = sphPackAttrLocator ( tLoc, iGeodist ); return TOK_ATTR_FLOAT; } // check for uservar if ( pStart[0]=='@' ) { lvalp->iNode = m_dUservars.GetLength(); m_dUservars.Add ( sTok ); return TOK_USERVAR; } // check for keyword if ( sTok=="and" ) { return TOK_AND; } if ( sTok=="or" ) { return TOK_OR; } if ( sTok=="not" ) { return TOK_NOT; } if ( sTok=="div" ) { return TOK_DIV; } if ( sTok=="mod" ) { return TOK_MOD; } if ( sTok=="count" ) { int iAttr = m_pSchema->GetAttrIndex ( "count" ); if ( iAttr>=0 ) ParseAttr ( iAttr, sTok.cstr(), lvalp ); return TOK_COUNT; } // check for attribute int iAttr = m_pSchema->GetAttrIndex ( sTok.cstr() ); if ( iAttr>=0 ) return ParseAttr ( iAttr, sTok.cstr(), lvalp ); // check for function sTok.ToLower(); for ( int i=0; iiFunc = i; return g_dFuncs[i].m_eFunc==FUNC_IN ? 
TOK_FUNC_IN : TOK_FUNC; } // ask hook if ( m_pHook ) { int iID = m_pHook->IsKnownIdent ( sTok.cstr() ); if ( iID>=0 ) { lvalp->iNode = iID; return TOK_HOOK_IDENT; } iID = m_pHook->IsKnownFunc ( sTok.cstr() ); if ( iID>=0 ) { lvalp->iNode = iID; return TOK_HOOK_FUNC; } } // check for UDF if ( g_bUdfEnabled ) { g_tUdfMutex.Lock(); UdfFunc_t * pUdf = g_hUdfFuncs ( sTok ); if ( pUdf ) { if ( pUdf->m_bToDrop ) pUdf = NULL; // DROP in progress, can not use else pUdf->m_iUserCount++; // protection against concurrent DROP (decrements in ~UdfCall_t()) g_tUdfMutex.Unlock(); lvalp->iNode = m_dUdfCalls.GetLength(); m_dUdfCalls.Add ( new UdfCall_t() ); m_dUdfCalls.Last()->m_pUdf = pUdf; return TOK_UDF; } g_tUdfMutex.Unlock(); } m_sLexerError.SetSprintf ( "unknown identifier '%s' (not an attribute, not a function)", sTok.cstr() ); return -1; } // check for known operators, then switch ( *m_pCur ) { case '+': case '-': case '*': case '/': case '(': case ')': case ',': case '&': case '|': case '%': return *m_pCur++; case '<': m_pCur++; if ( *m_pCur=='>' ) { m_pCur++; return TOK_NE; } if ( *m_pCur=='=' ) { m_pCur++; return TOK_LTE; } return '<'; case '>': m_pCur++; if ( *m_pCur=='=' ) { m_pCur++; return TOK_GTE; } return '>'; case '=': m_pCur++; if ( *m_pCur=='=' ) m_pCur++; return TOK_EQ; // special case for float values without leading zero case '.': { char * pEnd = NULL; lvalp->fConst = (float) strtod ( m_pCur, &pEnd ); if ( pEnd ) { m_pCur = pEnd; return TOK_CONST_FLOAT; } break; } case '\'': case '"': { const char cEnd = *m_pCur; for ( const char * s = m_pCur+1; *s; s++ ) { if ( *s==cEnd ) { int iBeg = (int)( m_pCur-m_sExpr ); int iLen = (int)( s-m_sExpr ) - iBeg + 1; lvalp->iConst = ( int64_t(iBeg)<<32 ) + iLen; m_pCur = s+1; return TOK_CONST_STRING; } else if ( *s=='\\' ) { s++; if ( !*s ) break; } } m_sLexerError.SetSprintf ( "unterminated string constant near '%s'", m_pCur ); return -1; } } m_sLexerError.SetSprintf ( "unknown operator '%c' near '%s'", *m_pCur, m_pCur ); return -1; } /// is add/sub? static inline bool IsAddSub ( const ExprNode_t * pNode ) { return pNode->m_iToken=='+' || pNode->m_iToken=='-'; } /// is arithmetic? static inline bool IsAri ( const ExprNode_t * pNode ) { int iTok = pNode->m_iToken; return iTok=='+' || iTok=='-' || iTok=='*' || iTok=='/'; } /// is constant? static inline bool IsConst ( const ExprNode_t * pNode ) { return pNode->m_iToken==TOK_CONST_INT || pNode->m_iToken==TOK_CONST_FLOAT; } /// float value of a constant static inline float FloatVal ( const ExprNode_t * pNode ) { assert ( IsConst(pNode) ); return pNode->m_iToken==TOK_CONST_INT ? (float)pNode->m_iConst : pNode->m_fConst; } /// optimize subtree void ExprParser_t::Optimize ( int iNode ) { if ( iNode<0 ) return; Optimize ( m_dNodes[iNode].m_iLeft ); Optimize ( m_dNodes[iNode].m_iRight ); ExprNode_t * pRoot = &m_dNodes[iNode]; ExprNode_t * pLeft = ( pRoot->m_iLeft>=0 ) ? &m_dNodes[pRoot->m_iLeft] : NULL; ExprNode_t * pRight = ( pRoot->m_iRight>=0 ) ? 
&m_dNodes[pRoot->m_iRight] : NULL; // arithmetic expression with constants if ( IsAri(pRoot) ) { // optimize fully-constant expressions if ( IsConst(pLeft) && IsConst(pRight) ) { if ( pLeft->m_iToken==TOK_CONST_INT && pRight->m_iToken==TOK_CONST_INT && pRoot->m_iToken!='/' ) { switch ( pRoot->m_iToken ) { case '+': pRoot->m_iConst = pLeft->m_iConst + pRight->m_iConst; break; case '-': pRoot->m_iConst = pLeft->m_iConst - pRight->m_iConst; break; case '*': pRoot->m_iConst = pLeft->m_iConst * pRight->m_iConst; break; default: assert ( 0 && "internal error: unhandled arithmetic token during const-int optimization" ); } pRoot->m_iToken = TOK_CONST_INT; } else { float fLeft = FloatVal(pLeft); float fRight = FloatVal(pRight); switch ( pRoot->m_iToken ) { case '+': pRoot->m_fConst = fLeft + fRight; break; case '-': pRoot->m_fConst = fLeft - fRight; break; case '*': pRoot->m_fConst = fLeft * fRight; break; case '/': pRoot->m_fConst = fLeft / fRight; break; default: assert ( 0 && "internal error: unhandled arithmetic token during const-float optimization" ); } pRoot->m_iToken = TOK_CONST_FLOAT; } return; } // canonize (expr op const), move const to the left if ( IsConst(pRight) ) { assert ( !IsConst(pLeft) ); Swap ( pRoot->m_iLeft, pRoot->m_iRight ); Swap ( pLeft, pRight ); // fixup (expr-const) to ((-const)+expr) if ( pRoot->m_iToken=='-' ) { pRoot->m_iToken = '+'; if ( pLeft->m_iToken==TOK_CONST_INT ) pLeft->m_iConst *= -1; else pLeft->m_fConst *= -1; } // fixup (expr/const) to ((1/const)*expr) if ( pRoot->m_iToken=='/' ) { pRoot->m_iToken = '*'; pLeft->m_fConst = 1.0f / FloatVal(pLeft); pLeft->m_iToken = TOK_CONST_FLOAT; } } // optimize compatible operations with constants if ( IsConst(pLeft) && IsAri(pRight) && IsAddSub(pRoot)==IsAddSub(pRight) && IsConst ( &m_dNodes[pRight->m_iLeft] ) ) { ExprNode_t * pConst = &m_dNodes[pRight->m_iLeft]; ExprNode_t * pExpr = &m_dNodes[pRight->m_iRight]; assert ( !IsConst(pExpr) ); // must had been optimized // optimize (left op (const op2 expr)) to ((left op const) op*op2 expr) if ( IsAddSub(pRoot) ) { // fold consts int iSign = ( ( pRoot->m_iToken=='+' ) ? 1 : -1 ); if ( pLeft->m_iToken==TOK_CONST_INT && pConst->m_iToken==TOK_CONST_INT ) { pLeft->m_iConst += iSign*pConst->m_iConst; } else { pLeft->m_fConst = FloatVal(pLeft) + iSign*FloatVal(pConst); pLeft->m_iToken = TOK_CONST_FLOAT; } // fold ops pRoot->m_iToken = ( pRoot->m_iToken==pRight->m_iToken ) ? '+' : '-'; } else { // fols consts if ( pRoot->m_iToken=='*' && pLeft->m_iToken==TOK_CONST_INT && pConst->m_iToken==TOK_CONST_INT ) { pLeft->m_iConst *= pConst->m_iConst; } else { if ( pRoot->m_iToken=='*' ) pLeft->m_fConst = FloatVal(pLeft) * FloatVal(pConst); else pLeft->m_fConst = FloatVal(pLeft) / FloatVal(pConst); pLeft->m_iToken = TOK_CONST_FLOAT; } // fold ops pRoot->m_iToken = ( pRoot->m_iToken==pRight->m_iToken ) ? '*' : '/'; } // promote expr arg pRoot->m_iRight = pRight->m_iRight; pRight = pExpr; } // promote children constants if ( IsAri(pLeft) && IsAddSub(pLeft)==IsAddSub(pRoot) && IsConst ( &m_dNodes[pLeft->m_iLeft] ) ) { // ((const op lr) op2 right) gets replaced with (const op (lr op2/op right)) // constant gets promoted one level up int iConst = pLeft->m_iLeft; pLeft->m_iLeft = pLeft->m_iRight; pLeft->m_iRight = pRoot->m_iRight; // (c op lr) -> (lr ... 
r) switch ( pLeft->m_iToken ) { case '+': case '*': // (c + lr) op r -> c + (lr op r) // (c * lr) op r -> c * (lr op r) Swap ( pLeft->m_iToken, pRoot->m_iToken ); break; case '-': // (c - lr) + r -> c - (lr - r) // (c - lr) - r -> c - (lr + r) pLeft->m_iToken = ( pRoot->m_iToken=='+' ? '-' : '+' ); pRoot->m_iToken = '-'; break; case '/': // (c / lr) * r -> c * (r / lr) // (c / lr) / r -> c / (r * lr) Swap ( pLeft->m_iLeft, pLeft->m_iRight ); pLeft->m_iToken = ( pRoot->m_iToken=='*' ) ? '/' : '*'; break; default: assert ( 0 && "internal error: unhandled op in left-const promotion" ); } pRoot->m_iRight = pRoot->m_iLeft; pRoot->m_iLeft = iConst; pLeft = &m_dNodes[pRoot->m_iLeft]; pRight = &m_dNodes[pRoot->m_iRight]; } } // madd, mul3 // FIXME! separate pass for these? otherwise (2+(a*b))+3 won't get const folding if ( ( pRoot->m_iToken=='+' || pRoot->m_iToken=='*' ) && ( pLeft->m_iToken=='*' || pRight->m_iToken=='*' ) ) { if ( pLeft->m_iToken!='*' ) { Swap ( pRoot->m_iLeft, pRoot->m_iRight ); Swap ( pLeft, pRight ); } pLeft->m_iToken = ','; int iLeft = pRoot->m_iLeft; int iRight = pRoot->m_iRight; pRoot->m_iFunc = ( pRoot->m_iToken=='+' ) ? FUNC_MADD : FUNC_MUL3; pRoot->m_iToken = TOK_FUNC; pRoot->m_iLeft = m_dNodes.GetLength(); pRoot->m_iRight = -1; assert ( g_dFuncs[pRoot->m_iFunc].m_eFunc==pRoot->m_iFunc ); ExprNode_t & tArgs = m_dNodes.Add(); // invalidates all pointers! tArgs.m_iToken = ','; tArgs.m_iLeft = iLeft; tArgs.m_iRight = iRight; return; } // division by a constant (replace with multiplication by inverse) if ( pRoot->m_iToken=='/' && pRight->m_iToken==TOK_CONST_FLOAT ) { pRight->m_fConst = 1.0f / pRight->m_fConst; pRoot->m_iToken = '*'; return; } // unary function from a constant if ( pRoot->m_iToken==TOK_FUNC && g_dFuncs[pRoot->m_iFunc].m_iArgs==1 && IsConst(pLeft) ) { float fArg = pLeft->m_iToken==TOK_CONST_FLOAT ? 
pLeft->m_fConst : float(pLeft->m_iConst); switch ( g_dFuncs[pRoot->m_iFunc].m_eFunc ) { case FUNC_ABS: pRoot->m_iToken = TOK_CONST_FLOAT; pRoot->m_fConst = fabs(fArg); break; case FUNC_CEIL: pRoot->m_iToken = TOK_CONST_FLOAT; pRoot->m_fConst = float(ceil(fArg)); break; case FUNC_FLOOR: pRoot->m_iToken = TOK_CONST_FLOAT; pRoot->m_fConst = float(floor(fArg)); break; case FUNC_SIN: pRoot->m_iToken = TOK_CONST_FLOAT; pRoot->m_fConst = float(sin(fArg)); break; case FUNC_COS: pRoot->m_iToken = TOK_CONST_FLOAT; pRoot->m_fConst = float(cos(fArg)); break; case FUNC_LN: pRoot->m_iToken = TOK_CONST_FLOAT; pRoot->m_fConst = float(log(fArg)); break; case FUNC_LOG2: pRoot->m_iToken = TOK_CONST_FLOAT; pRoot->m_fConst = float(log(fArg)*M_LOG2E); break; case FUNC_LOG10: pRoot->m_iToken = TOK_CONST_FLOAT; pRoot->m_fConst = float(log(fArg)*M_LOG10E); break; case FUNC_EXP: pRoot->m_iToken = TOK_CONST_FLOAT; pRoot->m_fConst = float(exp(fArg)); break; case FUNC_SQRT: pRoot->m_iToken = TOK_CONST_FLOAT; pRoot->m_fConst = float(sqrt(fArg)); break; default: break; } return; } // constant function (such as NOW()) if ( pRoot->m_iToken==TOK_FUNC && pRoot->m_iFunc==FUNC_NOW ) { pRoot->m_iToken = TOK_CONST_INT; pRoot->m_iConst = m_iConstNow; return; } // SINT(int-attr) if ( pRoot->m_iToken==TOK_FUNC && pRoot->m_iFunc==FUNC_SINT && ( pLeft->m_iToken==TOK_ATTR_INT || pLeft->m_iToken==TOK_ATTR_BITS ) ) { pRoot->m_iToken = TOK_ATTR_SINT; pRoot->m_tLocator = pLeft->m_tLocator; } } // debug dump void ExprParser_t::Dump ( int iNode ) { if ( iNode<0 ) return; ExprNode_t & tNode = m_dNodes[iNode]; switch ( tNode.m_iToken ) { case TOK_CONST_INT: printf ( INT64_FMT, tNode.m_iConst ); break; case TOK_CONST_FLOAT: printf ( "%f", tNode.m_fConst ); break; case TOK_ATTR_INT: case TOK_ATTR_SINT: printf ( "row[%d]", tNode.m_tLocator.m_iBitOffset/32 ); break; default: printf ( "(" ); Dump ( tNode.m_iLeft ); printf ( ( tNode.m_iToken<256 ) ? " %c " : " op-%d ", tNode.m_iToken ); Dump ( tNode.m_iRight ); printf ( ")" ); break; } } /// fold arglist into array static void FoldArglist ( ISphExpr * pLeft, CSphVector & dArgs ) { if ( !pLeft || !pLeft->IsArglist() ) { dArgs.Add ( pLeft ); return; } Expr_Arglist_c * pArgs = dynamic_cast ( pLeft ); assert ( pLeft ); Swap ( dArgs, pArgs->m_dArgs ); SafeRelease ( pLeft ); } typedef sphinx_int64_t ( *UdfInt_fn ) ( SPH_UDF_INIT *, SPH_UDF_ARGS *, char * ); typedef double ( *UdfDouble_fn ) ( SPH_UDF_INIT *, SPH_UDF_ARGS *, char * ); class Expr_Udf_c : public ISphExpr { public: CSphVector m_dArgs; protected: UdfCall_t * m_pCall; mutable CSphVector m_dArgvals; mutable char m_bError; public: explicit Expr_Udf_c ( UdfCall_t * pCall ) : m_pCall ( pCall ) , m_bError ( 0 ) { SPH_UDF_ARGS & tArgs = m_pCall->m_tArgs; assert ( tArgs.arg_values==NULL ); tArgs.arg_values = new char * [ tArgs.arg_count ]; tArgs.str_lengths = new int [ tArgs.arg_count ]; m_dArgvals.Resize ( tArgs.arg_count ); ARRAY_FOREACH ( i, m_dArgvals ) tArgs.arg_values[i] = (char*) &m_dArgvals[i]; } ~Expr_Udf_c () { if ( m_pCall->m_pUdf->m_fnDeinit ) m_pCall->m_pUdf->m_fnDeinit ( &m_pCall->m_tInit ); SafeDeleteArray ( m_pCall->m_tArgs.arg_names ); SafeDeleteArray ( m_pCall->m_tArgs.arg_types ); SafeDeleteArray ( m_pCall->m_tArgs.arg_values ); SafeDeleteArray ( m_pCall->m_tArgs.str_lengths ); SafeDelete ( m_pCall ); ARRAY_FOREACH ( i, m_dArgs ) SafeRelease ( m_dArgs[i] ); } void FillArgs ( const CSphMatch & tMatch ) const { // FIXME? a cleaner way to reinterpret? 
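// Illustrative sketch (not part of the original source): FillArgs() packs one
// value per argument into m_tArgs before each per-row UDF call. A hypothetical
// integer UDF on the receiving side would match the UdfInt_fn typedef above;
// the name and body below are assumptions, only the signature is taken from
// this file:
//
//   sphinx_int64_t myudf ( SPH_UDF_INIT * init, SPH_UDF_ARGS * args, char * error )
//   {
//       // read the first argument, assuming it was declared SPH_UDF_TYPE_UINT32
//       return (sphinx_int64_t) *(DWORD*)args->arg_values[0];
//   }
//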
SPH_UDF_ARGS & tArgs = m_pCall->m_tArgs; ARRAY_FOREACH ( i, m_dArgs ) { switch ( tArgs.arg_types[i] ) { case SPH_UDF_TYPE_UINT32: *(DWORD*)&m_dArgvals[i] = m_dArgs[i]->IntEval ( tMatch ); break; case SPH_UDF_TYPE_INT64: m_dArgvals[i] = m_dArgs[i]->Int64Eval ( tMatch ); break; case SPH_UDF_TYPE_FLOAT: *(float*)&m_dArgvals[i] = m_dArgs[i]->Eval ( tMatch ); break; case SPH_UDF_TYPE_STRING: tArgs.str_lengths[i] = m_dArgs[i]->StringEval ( tMatch, (const BYTE**)&tArgs.arg_values[i] ); break; case SPH_UDF_TYPE_UINT32SET: tArgs.arg_values[i] = (char*) m_dArgs[i]->MvaEval ( tMatch ); break; case SPH_UDF_TYPE_UINT64SET: tArgs.arg_values[i] = (char*) m_dArgs[i]->MvaEval ( tMatch ); break; default: assert ( 0 ); m_dArgvals[i] = 0; break; } } } virtual void SetMVAPool ( const DWORD * pPool ) { ARRAY_FOREACH ( i, m_dArgs ) m_dArgs[i]->SetMVAPool ( pPool ); } virtual void SetStringPool ( const BYTE * pPool ) { ARRAY_FOREACH ( i, m_dArgs ) m_dArgs[i]->SetStringPool ( pPool ); } virtual void GetDependencyColumns ( CSphVector & dDeps ) const { ARRAY_FOREACH ( i, m_dArgs ) m_dArgs[i]->GetDependencyColumns ( dDeps ); } }; class Expr_UdfInt_c : public Expr_Udf_c { public: explicit Expr_UdfInt_c ( UdfCall_t * pCall ) : Expr_Udf_c ( pCall ) { assert ( pCall->m_pUdf->m_eRetType==SPH_ATTR_INTEGER || pCall->m_pUdf->m_eRetType==SPH_ATTR_BIGINT ); } virtual int64_t Int64Eval ( const CSphMatch & tMatch ) const { if ( m_bError ) return 0; FillArgs ( tMatch ); UdfInt_fn pFn = (UdfInt_fn) m_pCall->m_pUdf->m_fnFunc; return (int) pFn ( &m_pCall->m_tInit, &m_pCall->m_tArgs, &m_bError ); } virtual int IntEval ( const CSphMatch & tMatch ) const { return (int) Int64Eval ( tMatch ); } virtual float Eval ( const CSphMatch & tMatch ) const { return (float) Int64Eval ( tMatch ); } }; class Expr_UdfFloat_c : public Expr_Udf_c { public: explicit Expr_UdfFloat_c ( UdfCall_t * pCall ) : Expr_Udf_c ( pCall ) { assert ( pCall->m_pUdf->m_eRetType==SPH_ATTR_FLOAT ); } virtual float Eval ( const CSphMatch & tMatch ) const { if ( m_bError ) return 0; FillArgs ( tMatch ); UdfDouble_fn pFn = (UdfDouble_fn) m_pCall->m_pUdf->m_fnFunc; return (float) pFn ( &m_pCall->m_tInit, &m_pCall->m_tArgs, &m_bError ); } virtual int IntEval ( const CSphMatch & tMatch ) const { return (int) Eval ( tMatch ); } virtual int64_t Int64Eval ( const CSphMatch & tMatch ) const { return (int64_t) Eval ( tMatch ); } }; ISphExpr * ExprParser_t::CreateUdfNode ( int iCall, ISphExpr * pLeft ) { Expr_Udf_c * pRes = NULL; switch ( m_dUdfCalls[iCall]->m_pUdf->m_eRetType ) { case SPH_ATTR_INTEGER: case SPH_ATTR_BIGINT: pRes = new Expr_UdfInt_c ( m_dUdfCalls[iCall] ); break; case SPH_ATTR_FLOAT: pRes = new Expr_UdfFloat_c ( m_dUdfCalls[iCall] ); break; default: m_sParserError.SetSprintf ( "internal error: unhandled type %d in CreateUdfNode()", m_dUdfCalls[iCall]->m_pUdf->m_eRetType ); break; } if ( pRes ) { if ( pLeft ) FoldArglist ( pLeft, pRes->m_dArgs ); m_dUdfCalls[iCall] = NULL; // evaluator owns it now } return pRes; } /// fold nodes subtree into opcodes ISphExpr * ExprParser_t::CreateTree ( int iNode ) { if ( iNode<0 ) return NULL; const ExprNode_t & tNode = m_dNodes[iNode]; // avoid spawning argument node in some cases bool bSkipLeft = false; bool bSkipRight = false; if ( tNode.m_iToken==TOK_FUNC ) { Func_e eFunc = g_dFuncs[tNode.m_iFunc].m_eFunc; if ( eFunc==FUNC_GEODIST || eFunc==FUNC_IN ) bSkipLeft = true; if ( eFunc==FUNC_IN ) bSkipRight = true; } ISphExpr * pLeft = bSkipLeft ? NULL : CreateTree ( tNode.m_iLeft ); ISphExpr * pRight = bSkipRight ? 
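// Expr_UdfInt_c above drives a dynamically loaded UDF through three entry points:
// "<name>", "<name>_init" and "<name>_deinit". A hedged sketch of what such a UDF
// might look like on the library side; the exact ABI is defined by sphinxudf.h, and
// the names and behaviour here are illustrative only (it assumes the first argument
// was marshalled as SPH_UDF_TYPE_INT64):
#if 0
#include "sphinxudf.h"

int myudf_init ( SPH_UDF_INIT * pInit, SPH_UDF_ARGS * pArgs, char * sErrorMsg )
{
	return 0; // zero means "ok"; non-zero aborts expression parsing with sErrorMsg
}

sphinx_int64_t myudf ( SPH_UDF_INIT * pInit, SPH_UDF_ARGS * pArgs, char * pErrorFlag )
{
	if ( pArgs->arg_count<1 )
	{
		*pErrorFlag = 1; // once set, Expr_UdfInt_c::Int64Eval() keeps returning 0
		return 0;
	}
	return *(long long *) pArgs->arg_values[0]; // INT64 args are stored in-place
}

void myudf_deinit ( SPH_UDF_INIT * pInit )
{
	// release anything allocated in myudf_init()
}
#endif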
NULL : CreateTree ( tNode.m_iRight ); #define LOC_SPAWN_POLY(_classname) \ if ( tNode.m_eArgType==SPH_ATTR_INTEGER ) return new _classname##Int_c ( pLeft, pRight ); \ else if ( tNode.m_eArgType==SPH_ATTR_BIGINT ) return new _classname##Int64_c ( pLeft, pRight ); \ else return new _classname##Float_c ( pLeft, pRight ); switch ( tNode.m_iToken ) { case TOK_ATTR_INT: return new Expr_GetInt_c ( tNode.m_tLocator, tNode.m_iLocator ); case TOK_ATTR_BITS: return new Expr_GetBits_c ( tNode.m_tLocator, tNode.m_iLocator ); case TOK_ATTR_FLOAT: return new Expr_GetFloat_c ( tNode.m_tLocator, tNode.m_iLocator ); case TOK_ATTR_SINT: return new Expr_GetSint_c ( tNode.m_tLocator, tNode.m_iLocator ); case TOK_ATTR_STRING: return new Expr_GetString_c ( tNode.m_tLocator, tNode.m_iLocator ); case TOK_ATTR_MVA64: case TOK_ATTR_MVA32: return new Expr_GetMva_c ( tNode.m_tLocator, tNode.m_iLocator ); case TOK_CONST_FLOAT: return new Expr_GetConst_c ( tNode.m_fConst ); case TOK_CONST_INT: if ( tNode.m_eRetType==SPH_ATTR_INTEGER ) return new Expr_GetIntConst_c ( (int)tNode.m_iConst ); else if ( tNode.m_eRetType==SPH_ATTR_BIGINT ) return new Expr_GetInt64Const_c ( tNode.m_iConst ); else return new Expr_GetConst_c ( float(tNode.m_iConst) ); break; case TOK_CONST_STRING: return new Expr_GetStrConst_c ( m_sExpr+(int)( tNode.m_iConst>>32 ), (int)( tNode.m_iConst & 0xffffffffUL ) ); case TOK_ID: return new Expr_GetId_c (); case TOK_WEIGHT: return new Expr_GetWeight_c (); case '+': return new Expr_Add_c ( pLeft, pRight ); break; case '-': return new Expr_Sub_c ( pLeft, pRight ); break; case '*': return new Expr_Mul_c ( pLeft, pRight ); break; case '/': return new Expr_Div_c ( pLeft, pRight ); break; case '&': return new Expr_BitAnd_c ( pLeft, pRight ); break; case '|': return new Expr_BitOr_c ( pLeft, pRight ); break; case '%': return new Expr_Mod_c ( pLeft, pRight ); break; case '<': LOC_SPAWN_POLY ( Expr_Lt ); break; case '>': LOC_SPAWN_POLY ( Expr_Gt ); break; case TOK_LTE: LOC_SPAWN_POLY ( Expr_Lte ); break; case TOK_GTE: LOC_SPAWN_POLY ( Expr_Gte ); break; case TOK_EQ: LOC_SPAWN_POLY ( Expr_Eq ); break; case TOK_NE: LOC_SPAWN_POLY ( Expr_Ne ); break; case TOK_AND: LOC_SPAWN_POLY ( Expr_And ); break; case TOK_OR: LOC_SPAWN_POLY ( Expr_Or ); break; case TOK_NOT: if ( tNode.m_eArgType==SPH_ATTR_BIGINT ) return new Expr_NotInt64_c ( pLeft ); else return new Expr_NotInt_c ( pLeft ); break; case ',': return new Expr_Arglist_c ( pLeft, pRight ); break; case TOK_NEG: assert ( pRight==NULL ); return new Expr_Neg_c ( pLeft ); break; case TOK_FUNC: { // fold arglist to array Func_e eFunc = g_dFuncs[tNode.m_iFunc].m_eFunc; CSphVector dArgs; if ( !bSkipLeft ) FoldArglist ( pLeft, dArgs ); // spawn proper function assert ( tNode.m_iFunc>=0 && tNode.m_iFunc=0 && g_dFuncs[tNode.m_iFunc].m_iArgs==dArgs.GetLength() ) || // arg count matches, ( g_dFuncs[tNode.m_iFunc].m_iArgs<0 && -g_dFuncs[tNode.m_iFunc].m_iArgs<=dArgs.GetLength() ) ); // or min vararg count reached switch ( eFunc ) { case FUNC_NOW: assert ( 0 ); break; // prevent gcc bitching case FUNC_ABS: return new Expr_Abs_c ( dArgs[0] ); case FUNC_CEIL: return new Expr_Ceil_c ( dArgs[0] ); case FUNC_FLOOR: return new Expr_Floor_c ( dArgs[0] ); case FUNC_SIN: return new Expr_Sin_c ( dArgs[0] ); case FUNC_COS: return new Expr_Cos_c ( dArgs[0] ); case FUNC_LN: return new Expr_Ln_c ( dArgs[0] ); case FUNC_LOG2: return new Expr_Log2_c ( dArgs[0] ); case FUNC_LOG10: return new Expr_Log10_c ( dArgs[0] ); case FUNC_EXP: return new Expr_Exp_c ( dArgs[0] ); case FUNC_SQRT: return new 
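// TOK_CONST_STRING above keeps the literal as (offset, length) into m_sExpr, packed
// into a single 64-bit constant: offset in the high dword, length in the low dword.
// A standalone sketch of that packing scheme (helper names are illustrative):
#if 0
#include <stdint.h>

static inline uint64_t PackStringConst ( uint32_t uOffset, uint32_t uLength )
{
	return ( (uint64_t)uOffset<<32 ) | uLength;
}

static inline void UnpackStringConst ( uint64_t uPacked, uint32_t & uOffset, uint32_t & uLength )
{
	uOffset = (uint32_t)( uPacked>>32 );
	uLength = (uint32_t)( uPacked & 0xffffffffULL );
}
#endif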
Expr_Sqrt_c ( dArgs[0] ); case FUNC_BIGINT: return dArgs[0]; case FUNC_SINT: return new Expr_Sint_c ( dArgs[0] ); case FUNC_CRC32: return new Expr_Crc32_c ( dArgs[0] ); case FUNC_FIBONACCI:return new Expr_Fibonacci_c ( dArgs[0] ); case FUNC_DAY: return new Expr_Day_c ( dArgs[0] ); case FUNC_MONTH: return new Expr_Month_c ( dArgs[0] ); case FUNC_YEAR: return new Expr_Year_c ( dArgs[0] ); case FUNC_YEARMONTH: return new Expr_YearMonth_c ( dArgs[0] ); case FUNC_YEARMONTHDAY: return new Expr_YearMonthDay_c ( dArgs[0] ); case FUNC_MIN: return new Expr_Min_c ( dArgs[0], dArgs[1] ); case FUNC_MAX: return new Expr_Max_c ( dArgs[0], dArgs[1] ); case FUNC_POW: return new Expr_Pow_c ( dArgs[0], dArgs[1] ); case FUNC_IDIV: return new Expr_Idiv_c ( dArgs[0], dArgs[1] ); case FUNC_IF: return new Expr_If_c ( dArgs[0], dArgs[1], dArgs[2] ); case FUNC_MADD: return new Expr_Madd_c ( dArgs[0], dArgs[1], dArgs[2] ); case FUNC_MUL3: return new Expr_Mul3_c ( dArgs[0], dArgs[1], dArgs[2] ); case FUNC_INTERVAL: return CreateIntervalNode ( tNode.m_iLeft, dArgs ); case FUNC_IN: return CreateInNode ( iNode ); case FUNC_BITDOT: return CreateBitdotNode ( tNode.m_iLeft, dArgs ); case FUNC_GEODIST: return CreateGeodistNode ( tNode.m_iLeft ); } assert ( 0 && "unhandled function id" ); break; } case TOK_UDF: return CreateUdfNode ( tNode.m_iFunc, pLeft ); break; case TOK_HOOK_IDENT: return m_pHook->CreateNode ( tNode.m_iFunc, NULL ); break; case TOK_HOOK_FUNC: return m_pHook->CreateNode ( tNode.m_iFunc, pLeft ); break; default: assert ( 0 && "unhandled token type" ); break; } #undef LOC_SPAWN_POLY // fire exit SafeRelease ( pLeft ); SafeRelease ( pRight ); return NULL; } ////////////////////////////////////////////////////////////////////////// /// arg-vs-set function (currently, IN or INTERVAL) evaluator traits template < typename T > class Expr_ArgVsSet_c : public ISphExpr { protected: ISphExpr * m_pArg; public: explicit Expr_ArgVsSet_c ( ISphExpr * pArg ) : m_pArg ( pArg ) {} ~Expr_ArgVsSet_c () { SafeRelease ( m_pArg ); } virtual int IntEval ( const CSphMatch & tMatch ) const = 0; virtual float Eval ( const CSphMatch & tMatch ) const { return (float) IntEval ( tMatch ); } virtual int64_t Int64Eval ( const CSphMatch & tMatch ) const { return IntEval ( tMatch ); } virtual void GetDependencyColumns ( CSphVector & dColumns ) const { assert ( m_pArg ); m_pArg->GetDependencyColumns ( dColumns ); } protected: T ExprEval ( ISphExpr * pArg, const CSphMatch & tMatch ) const; }; template<> int Expr_ArgVsSet_c::ExprEval ( ISphExpr * pArg, const CSphMatch & tMatch ) const { return pArg->IntEval ( tMatch ); } template<> DWORD Expr_ArgVsSet_c::ExprEval ( ISphExpr * pArg, const CSphMatch & tMatch ) const { return (DWORD)pArg->IntEval ( tMatch ); } template<> float Expr_ArgVsSet_c::ExprEval ( ISphExpr * pArg, const CSphMatch & tMatch ) const { return pArg->Eval ( tMatch ); } template<> int64_t Expr_ArgVsSet_c::ExprEval ( ISphExpr * pArg, const CSphMatch & tMatch ) const { return pArg->Int64Eval ( tMatch ); } /// arg-vs-constant-set template < typename T > class Expr_ArgVsConstSet_c : public Expr_ArgVsSet_c { protected: CSphVector m_dValues; public: /// take ownership of arg, pre-evaluate and dismiss turn points Expr_ArgVsConstSet_c ( ISphExpr * pArg, CSphVector & dArgs, int iSkip ) : Expr_ArgVsSet_c ( pArg ) { CSphMatch tDummy; for ( int i=iSkip; i::ExprEval ( dArgs[i], tDummy ) ); SafeRelease ( dArgs[i] ); } } /// take ownership of arg, and copy that constlist Expr_ArgVsConstSet_c ( ISphExpr * pArg, ConstList_c * pConsts ) : 
Expr_ArgVsSet_c ( pArg ) { if ( !pConsts ) return; // can happen on uservar path if ( pConsts->m_eRetType==SPH_ATTR_FLOAT ) { m_dValues.Reserve ( pConsts->m_dFloats.GetLength() ); ARRAY_FOREACH ( i, pConsts->m_dFloats ) m_dValues.Add ( (T)pConsts->m_dFloats[i] ); } else { m_dValues.Reserve ( pConsts->m_dInts.GetLength() ); ARRAY_FOREACH ( i, pConsts->m_dInts ) m_dValues.Add ( (T)pConsts->m_dInts[i] ); } } }; ////////////////////////////////////////////////////////////////////////// /// INTERVAL() evaluator for constant turn point values case template < typename T > class Expr_IntervalConst_c : public Expr_ArgVsConstSet_c { public: /// take ownership of arg, pre-evaluate and dismiss turn points explicit Expr_IntervalConst_c ( CSphVector & dArgs ) : Expr_ArgVsConstSet_c ( dArgs[0], dArgs, 1 ) {} /// evaluate arg, return interval id virtual int IntEval ( const CSphMatch & tMatch ) const { T val = ExprEval ( this->m_pArg, tMatch ); // 'this' fixes gcc braindamage ARRAY_FOREACH ( i, this->m_dValues ) // FIXME! OPTIMIZE! perform binary search here if ( valm_dValues[i] ) return i; return this->m_dValues.GetLength(); } virtual void SetMVAPool ( const DWORD * pMvaPool ) { this->m_pArg->SetMVAPool ( pMvaPool ); } virtual void SetStringPool ( const BYTE * pStrings ) { this->m_pArg->SetStringPool ( pStrings ); } }; /// generic INTERVAL() evaluator template < typename T > class Expr_Interval_c : public Expr_ArgVsSet_c { protected: CSphVector m_dTurnPoints; public: /// take ownership of arg and turn points explicit Expr_Interval_c ( const CSphVector & dArgs ) : Expr_ArgVsSet_c ( dArgs[0] ) { for ( int i=1; im_pArg, tMatch ); // 'this' fixes gcc braindamage ARRAY_FOREACH ( i, m_dTurnPoints ) if ( val < Expr_ArgVsSet_c::ExprEval ( m_dTurnPoints[i], tMatch ) ) return i; return m_dTurnPoints.GetLength(); } virtual void SetMVAPool ( const DWORD * pMvaPool ) { this->m_pArg->SetMVAPool ( pMvaPool ); ARRAY_FOREACH ( i, m_dTurnPoints ) m_dTurnPoints[i]->SetMVAPool ( pMvaPool ); } virtual void SetStringPool ( const BYTE * pStrings ) { this->m_pArg->SetStringPool ( pStrings ); ARRAY_FOREACH ( i, m_dTurnPoints ) m_dTurnPoints[i]->SetStringPool ( pStrings ); } virtual void GetDependencyColumns ( CSphVector & dColumns ) const { Expr_ArgVsSet_c::GetDependencyColumns ( dColumns ); ARRAY_FOREACH ( i, m_dTurnPoints ) m_dTurnPoints[i]->GetDependencyColumns ( dColumns ); } }; ////////////////////////////////////////////////////////////////////////// /// IN() evaluator, arbitrary scalar expression vs. constant values template < typename T > class Expr_In_c : public Expr_ArgVsConstSet_c { public: /// pre-sort values for binary search Expr_In_c ( ISphExpr * pArg, ConstList_c * pConsts ) : Expr_ArgVsConstSet_c ( pArg, pConsts ) { this->m_dValues.Sort(); } /// evaluate arg, check if the value is within set virtual int IntEval ( const CSphMatch & tMatch ) const { T val = ExprEval ( this->m_pArg, tMatch ); // 'this' fixes gcc braindamage return this->m_dValues.BinarySearch ( val )!=NULL; } virtual void SetMVAPool ( const DWORD * pMvaPool ) { this->m_pArg->SetMVAPool ( pMvaPool ); } virtual void SetStringPool ( const BYTE * pStrings ) { this->m_pArg->SetStringPool ( pStrings ); } }; /// IN() evaluator, arbitrary scalar expression vs. 
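// INTERVAL(expr, p1, .., pN) above returns how many turn points do not exceed the
// argument (0..N); the constant-set version scans linearly (note the FIXME about
// binary search). With ascending turn points, which INTERVAL() expects anyway, the
// same answer is a single upper_bound. A standalone sketch (illustrative helper):
#if 0
#include <algorithm>
#include <vector>

static int IntervalOf ( int iVal, const std::vector<int> & dAscendingPoints )
{
	return (int)( std::upper_bound ( dAscendingPoints.begin(), dAscendingPoints.end(), iVal )
		- dAscendingPoints.begin() );
}
#endif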
uservar /// (for the sake of evaluator, uservar is a pre-sorted, refcounted external vector) class Expr_InUservar_c : public Expr_ArgVsSet_c { protected: UservarIntSet_c * m_pConsts; public: /// just get hold of args explicit Expr_InUservar_c ( ISphExpr * pArg, UservarIntSet_c * pConsts ) : Expr_ArgVsSet_c ( pArg ) , m_pConsts ( pConsts ) // no addref, hook should have addref'd (otherwise there'd be a race) {} /// release the uservar value ~Expr_InUservar_c() { SafeRelease ( m_pConsts ); } /// evaluate arg, check if the value is within set virtual int IntEval ( const CSphMatch & tMatch ) const { int64_t iVal = ExprEval ( this->m_pArg, tMatch ); // 'this' fixes gcc braindamage return m_pConsts->BinarySearch ( iVal )!=NULL; } virtual void SetMVAPool ( const DWORD * pMvaPool ) { this->m_pArg->SetMVAPool ( pMvaPool ); } virtual void SetStringPool ( const BYTE * pStrings ) { this->m_pArg->SetStringPool ( pStrings ); } }; /// IN() evaluator, MVA attribute vs. constant values template < bool MVA64 > class Expr_MVAIn_c : public Expr_ArgVsConstSet_c { public: /// pre-sort values for binary search Expr_MVAIn_c ( const CSphAttrLocator & tLoc, int iLocator, ConstList_c * pConsts, UservarIntSet_c * pUservar ) : Expr_ArgVsConstSet_c ( NULL, pConsts ) , m_tLocator ( tLoc ) , m_iLocator ( iLocator ) , m_pMvaPool ( NULL ) , m_pUservar ( pUservar ) { assert ( tLoc.m_iBitOffset>=0 && tLoc.m_iBitCount>0 ); assert ( !pConsts || !pUservar ); // either constlist or uservar, not both this->m_dValues.Sort(); } ~Expr_MVAIn_c() { SafeRelease ( m_pUservar ); } int MvaEval ( const DWORD * pMva ) const; /// evaluate arg, check if any values are within set virtual int IntEval ( const CSphMatch & tMatch ) const { const DWORD * pMva = tMatch.GetAttrMVA ( m_tLocator, m_pMvaPool ); if ( !pMva ) return 0; return MvaEval ( pMva ); } virtual void SetMVAPool ( const DWORD * pMvaPool ) { m_pMvaPool = pMvaPool; // finally, some real setup work!!! } virtual void GetDependencyColumns ( CSphVector & dColumns ) const { dColumns.Add ( m_iLocator ); } protected: CSphAttrLocator m_tLocator; int m_iLocator; const DWORD * m_pMvaPool; UservarIntSet_c * m_pUservar; }; template<> int Expr_MVAIn_c::MvaEval ( const DWORD * pMva ) const { // OPTIMIZE! FIXME! factor out a common function with Filter_MVAValues::Eval() DWORD uLen = *pMva++; const DWORD * pMvaMax = pMva+uLen; const uint64_t * pFilter = m_pUservar ? (uint64_t*)m_pUservar->Begin() : m_dValues.Begin(); const uint64_t * pFilterMax = pFilter + ( m_pUservar ? m_pUservar->GetLength() : m_dValues.GetLength() ); const DWORD * L = pMva; const DWORD * R = pMvaMax - 1; for ( ; pFilter < pFilterMax; pFilter++ ) { while ( L<=R ) { const DWORD * m = L + (R - L) / 2; if ( *pFilter > *m ) L = m + 1; else if ( *pFilter < *m ) R = m - 1; else return 1; } R = pMvaMax - 1; } return 0; } template<> int Expr_MVAIn_c::MvaEval ( const DWORD * pMva ) const { // OPTIMIZE! FIXME! factor out a common function with Filter_MVAValues::Eval() DWORD uLen = *pMva++; assert ( ( uLen%2 )==0 ); const DWORD * pMvaMax = pMva+uLen; const uint64_t * pFilter = m_pUservar ? (uint64_t*)m_pUservar->Begin() : m_dValues.Begin(); const uint64_t * pFilterMax = pFilter + ( m_pUservar ? 
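// Expr_MVAIn_c::MvaEval() above answers "does this document's (sorted) MVA list
// share at least one value with the (sorted) filter set", binary-searching the MVA
// range once per filter value. A minimal standalone sketch of the same membership test:
#if 0
#include <algorithm>

static bool SortedSetsIntersect ( const unsigned int * pMva, int iMvaCount,
	const unsigned long long * pFilter, int iFilterCount )
{
	for ( int i=0; i<iFilterCount; i++ )
		if ( std::binary_search ( pMva, pMva+iMvaCount, pFilter[i] ) )
			return true;
	return false;
}
#endif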
m_pUservar->GetLength() : m_dValues.GetLength() ); const uint64_t * L = (const uint64_t *)pMva; const uint64_t * R = (const uint64_t *)( pMvaMax - 2 ); for ( ; pFilter < pFilterMax; pFilter++ ) { while ( L<=R ) { const uint64_t * pVal = L + (R - L) / 2; uint64_t uMva = MVA_UPSIZE ( (const DWORD *)pVal ); if ( *pFilter > uMva ) L = pVal + 1; else if ( *pFilter < uMva ) R = pVal - 1; else return 1; } R = (const uint64_t *) ( pMvaMax - 2 ); } return 0; } ////////////////////////////////////////////////////////////////////////// /// generic BITDOT() evaluator template < typename T > class Expr_Bitdot_c : public Expr_ArgVsSet_c { protected: CSphVector m_dBitWeights; public: /// take ownership of arg and turn points explicit Expr_Bitdot_c ( const CSphVector & dArgs ) : Expr_ArgVsSet_c ( dArgs[0] ) { for ( int i=1; im_pArg->Int64Eval ( tMatch ); // 'this' fixes gcc braindamage T tRes = 0; int iBit = 0; while ( uArg && iBit::ExprEval ( m_dBitWeights[iBit], tMatch ); uArg >>= 1; iBit++; } return tRes; } public: virtual float Eval ( const CSphMatch & tMatch ) const { return (float) DoEval ( tMatch ); } virtual int IntEval ( const CSphMatch & tMatch ) const { return (int) DoEval ( tMatch ); } virtual int64_t Int64Eval ( const CSphMatch & tMatch ) const { return (int64_t) DoEval ( tMatch ); } virtual void SetMVAPool ( const DWORD * pMvaPool ) { this->m_pArg->SetMVAPool ( pMvaPool ); ARRAY_FOREACH ( i, m_dBitWeights ) m_dBitWeights[i]->SetMVAPool ( pMvaPool ); } virtual void SetStringPool ( const BYTE * pStrings ) { this->m_pArg->SetStringPool ( pStrings ); ARRAY_FOREACH ( i, m_dBitWeights ) m_dBitWeights[i]->SetStringPool ( pStrings ); } virtual void GetDependencyColumns ( CSphVector & dColumns ) const { Expr_ArgVsSet_c::GetDependencyColumns ( dColumns ); ARRAY_FOREACH ( i, m_dBitWeights ) m_dBitWeights[i]->GetDependencyColumns ( dColumns ); } }; ////////////////////////////////////////////////////////////////////////// static inline double sphSqr ( double v ) { return v * v; } static inline float CalcGeodist ( float fPointLat, float fPointLon, float fAnchorLat, float fAnchorLon ) { const double R = 6384000; double dlat = fPointLat - fAnchorLat; double dlon = fPointLon - fAnchorLon; double a = sphSqr ( sin ( dlat/2 ) ) + cos ( fPointLat ) * cos ( fAnchorLat ) * sphSqr ( sin ( dlon/2 ) ); double c = 2*asin ( Min ( 1, sqrt(a) ) ); return (float)(R*c); } /// geodist() - attr point, constant anchor class Expr_GeodistAttrConst_c: public ISphExpr { public: Expr_GeodistAttrConst_c ( CSphAttrLocator tLat, CSphAttrLocator tLon, float fAnchorLat, float fAnchorLon, int iLat, int iLon ) : m_tLat ( tLat ) , m_tLon ( tLon ) , m_fAnchorLat ( fAnchorLat ) , m_fAnchorLon ( fAnchorLon ) , m_iLat ( iLat ) , m_iLon ( iLon ) {} virtual float Eval ( const CSphMatch & tMatch ) const { return CalcGeodist ( tMatch.GetAttrFloat ( m_tLat ), tMatch.GetAttrFloat ( m_tLon ), m_fAnchorLat, m_fAnchorLon ); } virtual void GetDependencyColumns ( CSphVector & dColumns ) const { dColumns.Add ( m_iLat ); dColumns.Add ( m_iLon ); } private: CSphAttrLocator m_tLat; CSphAttrLocator m_tLon; float m_fAnchorLat; float m_fAnchorLon; int m_iLat; int m_iLon; }; /// geodist() - expr point, constant anchor class Expr_GeodistConst_c: public ISphExpr { public: Expr_GeodistConst_c ( ISphExpr * pLat, ISphExpr * pLon, float fAnchorLat, float fAnchorLon ) : m_pLat ( pLat ) , m_pLon ( pLon ) , m_fAnchorLat ( fAnchorLat ) , m_fAnchorLon ( fAnchorLon ) {} ~Expr_GeodistConst_c () { SafeRelease ( m_pLon ); SafeRelease ( m_pLat ); } virtual float Eval ( const 
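// CalcGeodist() above is the haversine great-circle distance, with both points given
// in radians and an earth radius of 6384000 m:
//   a = sin^2(dlat/2) + cos(lat1)*cos(lat2)*sin^2(dlon/2),  d = 2*R*asin(min(1, sqrt(a)))
// A standalone double-precision restatement of the same formula (illustrative helper):
#if 0
#include <cmath>

static double Haversine ( double fLat1, double fLon1, double fLat2, double fLon2 )
{
	const double R = 6384000.0; // same radius constant as CalcGeodist()
	double dLat = fLat1 - fLat2;
	double dLon = fLon1 - fLon2;
	double a = sin ( dLat/2 )*sin ( dLat/2 ) + cos ( fLat1 )*cos ( fLat2 )*sin ( dLon/2 )*sin ( dLon/2 );
	double s = sqrt ( a );
	return 2.0*R*asin ( s<1.0 ? s : 1.0 );
}
#endif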
CSphMatch & tMatch ) const { return CalcGeodist ( m_pLat->Eval(tMatch), m_pLon->Eval(tMatch), m_fAnchorLat, m_fAnchorLon ); } virtual void GetDependencyColumns ( CSphVector & dColumns ) const { m_pLat->GetDependencyColumns ( dColumns ); m_pLon->GetDependencyColumns ( dColumns ); } private: ISphExpr * m_pLat; ISphExpr * m_pLon; float m_fAnchorLat; float m_fAnchorLon; }; /// geodist() - expr point, expr anchor class Expr_Geodist_c: public ISphExpr { public: Expr_Geodist_c ( ISphExpr * pLat, ISphExpr * pLon, ISphExpr * pAnchorLat, ISphExpr * pAnchorLon ) : m_pLat ( pLat ) , m_pLon ( pLon ) , m_pAnchorLat ( pAnchorLat ) , m_pAnchorLon ( pAnchorLon ) {} ~Expr_Geodist_c () { SafeRelease ( m_pAnchorLon ); SafeRelease ( m_pAnchorLat ); SafeRelease ( m_pLon ); SafeRelease ( m_pLat ); } virtual float Eval ( const CSphMatch & tMatch ) const { return CalcGeodist ( m_pLat->Eval(tMatch), m_pLon->Eval(tMatch), m_pAnchorLat->Eval(tMatch), m_pAnchorLon->Eval(tMatch) ); } virtual void GetDependencyColumns ( CSphVector & dColumns ) const { m_pLat->GetDependencyColumns ( dColumns ); m_pLon->GetDependencyColumns ( dColumns ); m_pAnchorLat->GetDependencyColumns ( dColumns ); m_pAnchorLon->GetDependencyColumns ( dColumns ); } private: ISphExpr * m_pLat; ISphExpr * m_pLon; ISphExpr * m_pAnchorLat; ISphExpr * m_pAnchorLon; }; ////////////////////////////////////////////////////////////////////////// void ExprParser_t::GatherArgTypes ( int iNode, CSphVector & dTypes ) { if ( iNode<0 ) return; const ExprNode_t & tNode = m_dNodes[iNode]; if ( tNode.m_iToken==',' ) { GatherArgTypes ( tNode.m_iLeft, dTypes ); GatherArgTypes ( tNode.m_iRight, dTypes ); } else { dTypes.Add ( tNode.m_iToken ); } } void ExprParser_t::GatherArgNodes ( int iNode, CSphVector & dNodes ) { if ( iNode<0 ) return; const ExprNode_t & tNode = m_dNodes[iNode]; if ( tNode.m_iToken==',' ) { GatherArgNodes ( tNode.m_iLeft, dNodes ); GatherArgNodes ( tNode.m_iRight, dNodes ); } else dNodes.Add ( iNode ); } void ExprParser_t::GatherArgRetTypes ( int iNode, CSphVector & dTypes ) { if ( iNode<0 ) return; const ExprNode_t & tNode = m_dNodes[iNode]; if ( tNode.m_iToken==',' ) { GatherArgRetTypes ( tNode.m_iLeft, dTypes ); GatherArgRetTypes ( tNode.m_iRight, dTypes ); } else { dTypes.Add ( tNode.m_eRetType ); } } bool ExprParser_t::CheckForConstSet ( int iArgsNode, int iSkip ) { CSphVector dTypes; GatherArgTypes ( iArgsNode, dTypes ); for ( int i=iSkip; i void ExprParser_t::WalkTree ( int iRoot, T & FUNCTOR ) { if ( iRoot>=0 ) { const ExprNode_t & tNode = m_dNodes[iRoot]; FUNCTOR.Enter ( tNode ); WalkTree ( tNode.m_iLeft, FUNCTOR ); WalkTree ( tNode.m_iRight, FUNCTOR ); FUNCTOR.Exit ( tNode ); } } ISphExpr * ExprParser_t::CreateIntervalNode ( int iArgsNode, CSphVector & dArgs ) { assert ( dArgs.GetLength()>=2 ); bool bConst = CheckForConstSet ( iArgsNode, 1 ); ESphAttr eAttrType = m_dNodes[iArgsNode].m_eArgType; if ( bConst ) { switch ( eAttrType ) { case SPH_ATTR_INTEGER: return new Expr_IntervalConst_c ( dArgs ); break; case SPH_ATTR_BIGINT: return new Expr_IntervalConst_c ( dArgs ); break; default: return new Expr_IntervalConst_c ( dArgs ); break; } } else { switch ( eAttrType ) { case SPH_ATTR_INTEGER: return new Expr_Interval_c ( dArgs ); break; case SPH_ATTR_BIGINT: return new Expr_Interval_c ( dArgs ); break; default: return new Expr_Interval_c ( dArgs ); break; } } } ISphExpr * ExprParser_t::CreateInNode ( int iNode ) { const ExprNode_t & tLeft = m_dNodes[m_dNodes[iNode].m_iLeft]; const ExprNode_t & tRight = m_dNodes[m_dNodes[iNode].m_iRight]; switch 
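// WalkTree() above visits the node tree depth-first, calling FUNCTOR.Enter() on the
// way down and FUNCTOR.Exit() on the way up; WeightCheck_fn and HookCheck_fn further
// below are written against exactly that shape. A standalone sketch of the visitor
// over a simplified node type (names are illustrative):
#if 0
struct NodeSketch_t
{
	int m_iLeft;
	int m_iRight;
};

template < typename T >
static void WalkSketch ( const NodeSketch_t * pNodes, int iRoot, T & tFunctor )
{
	if ( iRoot<0 )
		return;
	tFunctor.Enter ( pNodes[iRoot] );
	WalkSketch ( pNodes, pNodes[iRoot].m_iLeft, tFunctor );
	WalkSketch ( pNodes, pNodes[iRoot].m_iRight, tFunctor );
	tFunctor.Exit ( pNodes[iRoot] );
}
#endif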
( tRight.m_iToken ) { // create IN(arg,constlist) case TOK_CONST_LIST: switch ( tLeft.m_iToken ) { case TOK_ATTR_MVA32: return new Expr_MVAIn_c ( tLeft.m_tLocator, tLeft.m_iLocator, tRight.m_pConsts, NULL ); case TOK_ATTR_MVA64: return new Expr_MVAIn_c ( tLeft.m_tLocator, tLeft.m_iLocator, tRight.m_pConsts, NULL ); default: { ISphExpr * pArg = CreateTree ( m_dNodes[iNode].m_iLeft ); switch ( tRight.m_pConsts->m_eRetType ) { case SPH_ATTR_INTEGER: return new Expr_In_c ( pArg, tRight.m_pConsts ); break; case SPH_ATTR_BIGINT: return new Expr_In_c ( pArg, tRight.m_pConsts ); break; default: return new Expr_In_c ( pArg, tRight.m_pConsts ); break; } } } break; // create IN(arg,uservar) case TOK_USERVAR: { if ( !g_pUservarsHook ) { m_sCreateError.SetSprintf ( "internal error: no uservars hook" ); return NULL; } UservarIntSet_c * pUservar = g_pUservarsHook ( m_dUservars[(int)tRight.m_iConst] ); if ( !pUservar ) { m_sCreateError.SetSprintf ( "undefined user variable '%s'", m_dUservars[(int)tRight.m_iConst].cstr() ); return NULL; } switch ( tLeft.m_iToken ) { case TOK_ATTR_MVA32: return new Expr_MVAIn_c ( tLeft.m_tLocator, tLeft.m_iLocator, NULL, pUservar ); case TOK_ATTR_MVA64: return new Expr_MVAIn_c ( tLeft.m_tLocator, tLeft.m_iLocator, NULL, pUservar ); default: return new Expr_InUservar_c ( CreateTree ( m_dNodes[iNode].m_iLeft ), pUservar ); } break; } // oops, unhandled case default: m_sCreateError = "IN() arguments must be constants (except the 1st one)"; return NULL; } } ISphExpr * ExprParser_t::CreateGeodistNode ( int iArgs ) { CSphVector dArgs; GatherArgNodes ( iArgs, dArgs ); assert ( dArgs.GetLength()==4 ); bool bConst1 = ( IsConst ( &m_dNodes[dArgs[0]] ) && IsConst ( &m_dNodes[dArgs[1]] ) ); bool bConst2 = ( IsConst ( &m_dNodes[dArgs[2]] ) && IsConst ( &m_dNodes[dArgs[3]] ) ); if ( bConst1 && bConst2 ) { return new Expr_GetConst_c ( CalcGeodist ( m_dNodes[dArgs[0]].FloatVal(), m_dNodes[dArgs[1]].FloatVal(), m_dNodes[dArgs[2]].FloatVal(), m_dNodes[dArgs[3]].FloatVal() ) ); } if ( bConst1 ) { Swap ( dArgs[0], dArgs[2] ); Swap ( dArgs[1], dArgs[3] ); Swap ( bConst1, bConst2 ); } if ( bConst2 ) { // constant anchor if ( m_dNodes[dArgs[0]].m_iToken==TOK_ATTR_FLOAT && m_dNodes[dArgs[1]].m_iToken==TOK_ATTR_FLOAT ) { // attr point return new Expr_GeodistAttrConst_c ( m_dNodes[dArgs[0]].m_tLocator, m_dNodes[dArgs[1]].m_tLocator, m_dNodes[dArgs[2]].FloatVal(), m_dNodes[dArgs[3]].FloatVal(), m_dNodes[dArgs[0]].m_iLocator, m_dNodes[dArgs[1]].m_iLocator ); } else { // expr point return new Expr_GeodistConst_c ( CreateTree ( dArgs[0] ), CreateTree ( dArgs[1] ), m_dNodes[dArgs[2]].FloatVal(), m_dNodes[dArgs[3]].FloatVal() ); } } // four expressions CSphVector dExpr; FoldArglist ( CreateTree ( iArgs ), dExpr ); assert ( dExpr.GetLength()==4 ); return new Expr_Geodist_c ( dExpr[0], dExpr[1], dExpr[2], dExpr[3] ); } ISphExpr * ExprParser_t::CreateBitdotNode ( int iArgsNode, CSphVector & dArgs ) { assert ( dArgs.GetLength()>=1 ); ESphAttr eAttrType = m_dNodes[iArgsNode].m_eRetType; switch ( eAttrType ) { case SPH_ATTR_INTEGER: return new Expr_Bitdot_c ( dArgs ); break; case SPH_ATTR_BIGINT: return new Expr_Bitdot_c ( dArgs ); break; default: return new Expr_Bitdot_c ( dArgs ); break; } } ////////////////////////////////////////////////////////////////////////// int yylex ( YYSTYPE * lvalp, ExprParser_t * pParser ) { return pParser->GetToken ( lvalp ); } void yyerror ( ExprParser_t * pParser, const char * sMessage ) { pParser->m_sParserError.SetSprintf ( "Sphinx expr: %s near '%s'", sMessage, 
pParser->m_pLastTokenStart ); } #if USE_WINDOWS #pragma warning(push,1) #endif #include "yysphinxexpr.c" #if USE_WINDOWS #pragma warning(pop) #endif ////////////////////////////////////////////////////////////////////////// ExprParser_t::~ExprParser_t () { // i kinda own those constlists ARRAY_FOREACH ( i, m_dNodes ) if ( m_dNodes[i].m_iToken==TOK_CONST_LIST ) SafeDelete ( m_dNodes[i].m_pConsts ); // free any UDF calls that weren't taken over ARRAY_FOREACH ( i, m_dUdfCalls ) SafeDelete ( m_dUdfCalls[i] ); } ESphAttr ExprParser_t::GetWidestRet ( int iLeft, int iRight ) { ESphAttr uLeftType = ( iLeft<0 ) ? SPH_ATTR_INTEGER : m_dNodes[iLeft].m_eRetType; ESphAttr uRightType = ( iRight<0 ) ? SPH_ATTR_INTEGER : m_dNodes[iRight].m_eRetType; ESphAttr uRes = SPH_ATTR_FLOAT; // default is float if ( ( uLeftType==SPH_ATTR_INTEGER || uLeftType==SPH_ATTR_BIGINT ) && ( uRightType==SPH_ATTR_INTEGER || uRightType==SPH_ATTR_BIGINT ) ) { // both types are integer (int32 or int64), compute in integers uRes = ( uLeftType==SPH_ATTR_INTEGER && uRightType==SPH_ATTR_INTEGER ) ? SPH_ATTR_INTEGER : SPH_ATTR_BIGINT; } return uRes; } int ExprParser_t::AddNodeInt ( int64_t iValue ) { ExprNode_t & tNode = m_dNodes.Add (); tNode.m_iToken = TOK_CONST_INT; tNode.m_eRetType = GetIntType ( iValue ); tNode.m_iConst = iValue; return m_dNodes.GetLength()-1; } int ExprParser_t::AddNodeFloat ( float fValue ) { ExprNode_t & tNode = m_dNodes.Add (); tNode.m_iToken = TOK_CONST_FLOAT; tNode.m_eRetType = SPH_ATTR_FLOAT; tNode.m_fConst = fValue; return m_dNodes.GetLength()-1; } int ExprParser_t::AddNodeString ( int64_t iValue ) { ExprNode_t & tNode = m_dNodes.Add (); tNode.m_iToken = TOK_CONST_STRING; tNode.m_eRetType = SPH_ATTR_STRING; tNode.m_iConst = iValue; return m_dNodes.GetLength()-1; } int ExprParser_t::AddNodeAttr ( int iTokenType, uint64_t uAttrLocator ) { assert ( iTokenType==TOK_ATTR_INT || iTokenType==TOK_ATTR_BITS || iTokenType==TOK_ATTR_FLOAT || iTokenType==TOK_ATTR_MVA32 || iTokenType==TOK_ATTR_MVA64 || iTokenType==TOK_ATTR_STRING ); ExprNode_t & tNode = m_dNodes.Add (); tNode.m_iToken = iTokenType; sphUnpackAttrLocator ( uAttrLocator, &tNode ); if ( iTokenType==TOK_ATTR_FLOAT ) tNode.m_eRetType = SPH_ATTR_FLOAT; else if ( iTokenType==TOK_ATTR_MVA32 ) tNode.m_eRetType = SPH_ATTR_UINT32SET; else if ( iTokenType==TOK_ATTR_MVA64 ) tNode.m_eRetType = SPH_ATTR_UINT64SET; else if ( iTokenType==TOK_ATTR_STRING ) tNode.m_eRetType = SPH_ATTR_STRING; else if ( tNode.m_tLocator.m_iBitCount>32 ) tNode.m_eRetType = SPH_ATTR_BIGINT; else tNode.m_eRetType = SPH_ATTR_INTEGER; return m_dNodes.GetLength()-1; } int ExprParser_t::AddNodeID () { ExprNode_t & tNode = m_dNodes.Add (); tNode.m_iToken = TOK_ID; tNode.m_eRetType = USE_64BIT ? 
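// GetWidestRet() above is the numeric promotion rule for binary nodes: two int32
// arguments stay int32, any int64 argument widens the result to int64, and anything
// else falls back to float. A standalone sketch of the same table (illustrative
// three-valued enum standing in for ESphAttr):
#if 0
enum TypeSketch_e { T_INT32, T_INT64, T_FLOAT };

static TypeSketch_e WidestOf ( TypeSketch_e eA, TypeSketch_e eB )
{
	if ( eA!=T_INT32 && eA!=T_INT64 )
		return T_FLOAT; // any non-integer argument forces float evaluation
	if ( eB!=T_INT32 && eB!=T_INT64 )
		return T_FLOAT;
	return ( eA==T_INT32 && eB==T_INT32 ) ? T_INT32 : T_INT64;
}
#endif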
SPH_ATTR_BIGINT : SPH_ATTR_INTEGER; return m_dNodes.GetLength()-1; } int ExprParser_t::AddNodeWeight () { ExprNode_t & tNode = m_dNodes.Add (); tNode.m_iToken = TOK_WEIGHT; tNode.m_eRetType = SPH_ATTR_INTEGER; return m_dNodes.GetLength()-1; } int ExprParser_t::AddNodeOp ( int iOp, int iLeft, int iRight ) { ExprNode_t & tNode = m_dNodes.Add (); tNode.m_iToken = iOp; // deduce type tNode.m_eRetType = SPH_ATTR_FLOAT; // default to float if ( iOp==TOK_NEG ) { // NEG just inherits the type tNode.m_eArgType = m_dNodes[iLeft].m_eRetType; tNode.m_eRetType = tNode.m_eArgType; } else if ( iOp==TOK_NOT ) { // NOT result is integer, and its argument must be integer tNode.m_eArgType = m_dNodes[iLeft].m_eRetType; tNode.m_eRetType = SPH_ATTR_INTEGER; if (!( tNode.m_eArgType==SPH_ATTR_INTEGER || tNode.m_eArgType==SPH_ATTR_BIGINT )) { m_sParserError.SetSprintf ( "NOT argument must be integer" ); return -1; } } else if ( iOp==TOK_LTE || iOp==TOK_GTE || iOp==TOK_EQ || iOp==TOK_NE || iOp=='<' || iOp=='>' || iOp==TOK_AND || iOp==TOK_OR || iOp=='+' || iOp=='-' || iOp=='*' || iOp==',' || iOp=='&' || iOp=='|' || iOp=='%' ) { tNode.m_eArgType = GetWidestRet ( iLeft, iRight ); // arithmetical operations return arg type, logical return int tNode.m_eRetType = ( iOp=='+' || iOp=='-' || iOp=='*' || iOp==',' || iOp=='&' || iOp=='|' || iOp=='%' ) ? tNode.m_eArgType : SPH_ATTR_INTEGER; // both logical and bitwise AND/OR can only be over ints if ( ( iOp==TOK_AND || iOp==TOK_OR || iOp=='&' || iOp=='|' ) && !( tNode.m_eArgType==SPH_ATTR_INTEGER || tNode.m_eArgType==SPH_ATTR_BIGINT )) { m_sParserError.SetSprintf ( "%s arguments must be integer", ( iOp==TOK_AND || iOp=='&' ) ? "AND" : "OR" ); return -1; } // MOD can only be over ints if ( iOp=='%' && !( tNode.m_eArgType==SPH_ATTR_INTEGER || tNode.m_eArgType==SPH_ATTR_BIGINT )) { m_sParserError.SetSprintf ( "MOD arguments must be integer" ); return -1; } } else { // check for unknown op assert ( iOp=='/' && "unknown op in AddNodeOp() type deducer" ); } tNode.m_iArgs = 0; if ( iOp==',' ) { if ( iLeft>=0 ) tNode.m_iArgs += ( m_dNodes[iLeft].m_iToken==',' ) ? m_dNodes[iLeft].m_iArgs : 1; if ( iRight>=0 ) tNode.m_iArgs += ( m_dNodes[iRight].m_iToken==',' ) ? m_dNodes[iRight].m_iArgs : 1; } tNode.m_iLeft = iLeft; tNode.m_iRight = iRight; return m_dNodes.GetLength()-1; } int ExprParser_t::AddNodeFunc ( int iFunc, int iLeft, int iRight ) { // regular case, iLeft is entire arglist, iRight is -1 // special case for IN(), iLeft is arg, iRight is constlist assert ( iFunc>=0 && iFunc=0 ) iArgc = ( m_dNodes[iLeft].m_iToken==',' ) ? 
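// AddNodeFunc() here encodes "exact vs. minimum" argument counts in a single
// integer: a non-negative expected count must match exactly, while a negative value
// -N means "at least N arguments" (varargs). A standalone sketch of the check
// performed just below (illustrative helper):
#if 0
static bool ArgcMatches ( int iExpected, int iGot )
{
	if ( iExpected<0 )
		return iGot>=-iExpected; // vararg function: at least -iExpected arguments
	return iGot==iExpected; // fixed-arity function: exact match required
}
#endif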
m_dNodes[iLeft].m_iArgs : 1; if ( iExpectedArgc<0 ) { if ( iArgc<-iExpectedArgc ) { m_sParserError.SetSprintf ( "%s() called with %d args, at least %d args expected", g_dFuncs[iFunc].m_sName, iArgc, -iExpectedArgc ); return -1; } } else if ( iArgc!=iExpectedArgc ) { m_sParserError.SetSprintf ( "%s() called with %d args, %d args expected", g_dFuncs[iFunc].m_sName, iArgc, iExpectedArgc ); return -1; } } // check arg types // // check for string args // most builtin functions take numeric args only bool bGotString = false, bGotMva = false; if ( iRight<0 ) { CSphVector dRetTypes; GatherArgRetTypes ( iLeft, dRetTypes ); ARRAY_FOREACH ( i, dRetTypes ) { bGotString |= ( dRetTypes[i]==SPH_ATTR_STRING ); bGotMva |= ( dRetTypes[i]==SPH_ATTR_UINT32SET || dRetTypes[i]==SPH_ATTR_UINT32SET ); } } if ( bGotString && eFunc!=FUNC_CRC32 ) { m_sParserError.SetSprintf ( "%s() arguments can not be string", g_dFuncs[iFunc].m_sName ); return -1; } if ( bGotMva && eFunc!=FUNC_IN ) { m_sParserError.SetSprintf ( "%s() arguments can not be MVA", g_dFuncs[iFunc].m_sName ); return -1; } // check that first BITDOT arg is integer or bigint if ( eFunc==FUNC_BITDOT ) { int iLeftmost = iLeft; while ( m_dNodes[iLeftmost].m_iToken==',' ) iLeftmost = m_dNodes[iLeftmost].m_iLeft; ESphAttr eArg = m_dNodes[iLeftmost].m_eRetType; if ( eArg!=SPH_ATTR_INTEGER && eArg!=SPH_ATTR_BIGINT ) { m_sParserError.SetSprintf ( "first BITDOT() argument must be integer" ); return -1; } } // check that first SINT or timestamp family arg is integer if ( eFunc==FUNC_SINT || eFunc==FUNC_DAY || eFunc==FUNC_MONTH || eFunc==FUNC_YEAR || eFunc==FUNC_YEARMONTH || eFunc==FUNC_YEARMONTHDAY || eFunc==FUNC_FIBONACCI ) { assert ( iLeft>=0 ); if ( m_dNodes[iLeft].m_eRetType!=SPH_ATTR_INTEGER ) { m_sParserError.SetSprintf ( "%s() argument must be integer", g_dFuncs[iFunc].m_sName ); return -1; } } // do add ExprNode_t & tNode = m_dNodes.Add (); tNode.m_iToken = TOK_FUNC; tNode.m_iFunc = iFunc; tNode.m_iLeft = iLeft; tNode.m_iRight = iRight; tNode.m_eArgType = ( iLeft>=0 ) ? m_dNodes[iLeft].m_eRetType : SPH_ATTR_INTEGER; tNode.m_eRetType = g_dFuncs[iFunc].m_eRet; // fixup return type in a few special cases if ( eFunc==FUNC_MIN || eFunc==FUNC_MAX || eFunc==FUNC_MADD || eFunc==FUNC_MUL3 || eFunc==FUNC_ABS || eFunc==FUNC_IDIV ) tNode.m_eRetType = tNode.m_eArgType; if ( eFunc==FUNC_BIGINT && tNode.m_eRetType==SPH_ATTR_FLOAT ) tNode.m_eRetType = SPH_ATTR_FLOAT; // enforce if we can; FIXME! 
silently ignores BIGINT() on floats; should warn or raise an error if ( eFunc==FUNC_IF || eFunc==FUNC_BITDOT ) tNode.m_eRetType = GetWidestRet ( iLeft, iRight ); // all ok assert ( tNode.m_eRetType!=SPH_ATTR_NONE ); return m_dNodes.GetLength()-1; } int ExprParser_t::AddNodeUdf ( int iCall, int iArg ) { UdfCall_t * pCall = m_dUdfCalls[iCall]; SPH_UDF_INIT & tInit = pCall->m_tInit; SPH_UDF_ARGS & tArgs = pCall->m_tArgs; // initialize UDF right here, at AST creation stage // just because it's easy to gather arg types here if ( iArg>=0 ) { // gather arg types CSphVector dArgTypes; int iCur = iArg; while ( iCur>=0 ) { if ( m_dNodes[iCur].m_iToken!=',' ) { dArgTypes.Add ( m_dNodes[iCur].m_eRetType ); break; } int iRight = m_dNodes[iCur].m_iRight; if ( iRight>=0 ) { assert ( m_dNodes[iRight].m_iToken!=',' ); dArgTypes.Add ( m_dNodes[iRight].m_eRetType ); } iCur = m_dNodes[iCur].m_iLeft; } assert ( dArgTypes.GetLength() ); tArgs.arg_count = dArgTypes.GetLength(); tArgs.arg_types = new sphinx_udf_argtype [ tArgs.arg_count ]; // we gathered internal type ids in right-to-left order // reverse and remap // FIXME! eliminate remap, maybe? ARRAY_FOREACH ( i, dArgTypes ) { sphinx_udf_argtype & eRes = tArgs.arg_types [ tArgs.arg_count-1-i ]; switch ( dArgTypes[i] ) { case SPH_ATTR_INTEGER: case SPH_ATTR_TIMESTAMP: case SPH_ATTR_ORDINAL: case SPH_ATTR_BOOL: case SPH_ATTR_WORDCOUNT: eRes = SPH_UDF_TYPE_UINT32; break; case SPH_ATTR_FLOAT: eRes = SPH_UDF_TYPE_FLOAT; break; case SPH_ATTR_BIGINT: eRes = SPH_UDF_TYPE_INT64; break; case SPH_ATTR_STRING: eRes = SPH_UDF_TYPE_STRING; break; case SPH_ATTR_UINT32SET: eRes = SPH_UDF_TYPE_UINT32SET; break; case SPH_ATTR_UINT64SET: eRes = SPH_UDF_TYPE_UINT64SET; break; default: m_sParserError.SetSprintf ( "internal error: unmapped UDF argument type (arg=%d, type=%d)", i, dArgTypes[i] ); return -1; } } } // init if ( pCall->m_pUdf->m_fnInit ) { char sError [ SPH_UDF_ERROR_LEN ]; if ( pCall->m_pUdf->m_fnInit ( &tInit, &tArgs, sError ) ) { m_sParserError = sError; return -1; } } // do add ExprNode_t & tNode = m_dNodes.Add (); tNode.m_iToken = TOK_UDF; tNode.m_iFunc = iCall; tNode.m_iLeft = iArg; tNode.m_iRight = -1; // deduce type tNode.m_eArgType = ( iArg>=0 ) ? 
m_dNodes[iArg].m_eRetType : SPH_ATTR_INTEGER; tNode.m_eRetType = pCall->m_pUdf->m_eRetType; return m_dNodes.GetLength()-1; } int ExprParser_t::AddNodeConstlist ( int64_t iValue ) { ExprNode_t & tNode = m_dNodes.Add(); tNode.m_iToken = TOK_CONST_LIST; tNode.m_pConsts = new ConstList_c(); tNode.m_pConsts->Add ( iValue ); return m_dNodes.GetLength()-1; } int ExprParser_t::AddNodeConstlist ( float iValue ) { ExprNode_t & tNode = m_dNodes.Add(); tNode.m_iToken = TOK_CONST_LIST; tNode.m_pConsts = new ConstList_c(); tNode.m_pConsts->Add ( iValue ); return m_dNodes.GetLength()-1; } void ExprParser_t::AppendToConstlist ( int iNode, int64_t iValue ) { m_dNodes[iNode].m_pConsts->Add ( iValue ); } void ExprParser_t::AppendToConstlist ( int iNode, float iValue ) { m_dNodes[iNode].m_pConsts->Add ( iValue ); } int ExprParser_t::AddNodeUservar ( int iUservar ) { ExprNode_t & tNode = m_dNodes.Add(); tNode.m_iToken = TOK_USERVAR; tNode.m_iConst = iUservar; return m_dNodes.GetLength()-1; } int ExprParser_t::AddNodeHookIdent ( int iID ) { ExprNode_t & tNode = m_dNodes.Add(); tNode.m_iToken = TOK_HOOK_IDENT; tNode.m_iFunc = iID; tNode.m_eRetType = m_pHook->GetIdentType ( iID ); return m_dNodes.GetLength()-1; } int ExprParser_t::AddNodeHookFunc ( int iID, int iLeft ) { CSphVector dArgTypes; GatherArgRetTypes ( iLeft, dArgTypes ); ESphAttr eRet = m_pHook->GetReturnType ( iID, dArgTypes, CheckForConstSet ( iLeft, 0 ), m_sParserError ); if ( eRet==SPH_ATTR_NONE ) return -1; ExprNode_t & tNode = m_dNodes.Add(); tNode.m_iToken = TOK_HOOK_FUNC; tNode.m_iFunc = iID; tNode.m_iLeft = iLeft; tNode.m_iRight = -1; // deduce type tNode.m_eArgType = ( iLeft>=0 ) ? m_dNodes[iLeft].m_eRetType : SPH_ATTR_INTEGER; tNode.m_eRetType = eRet; return m_dNodes.GetLength()-1; } struct WeightCheck_fn { bool * m_pRes; explicit WeightCheck_fn ( bool * pRes ) : m_pRes ( pRes ) { assert ( m_pRes ); *m_pRes = false; } void Enter ( const ExprNode_t & tNode ) { if ( tNode.m_iToken==TOK_WEIGHT ) *m_pRes = true; } void Exit ( const ExprNode_t & ) {} }; struct HookCheck_fn { ISphExprHook * m_pHook; explicit HookCheck_fn ( ISphExprHook * pHook ) : m_pHook ( pHook ) {} void Enter ( const ExprNode_t & tNode ) { if ( tNode.m_iToken==TOK_HOOK_IDENT || tNode.m_iToken==TOK_HOOK_FUNC ) m_pHook->CheckEnter ( tNode.m_iFunc ); } void Exit ( const ExprNode_t & tNode ) { if ( tNode.m_iToken==TOK_HOOK_IDENT || tNode.m_iToken==TOK_HOOK_FUNC ) m_pHook->CheckExit ( tNode.m_iFunc ); } }; ISphExpr * ExprParser_t::Parse ( const char * sExpr, const CSphSchema & tSchema, ESphAttr * pAttrType, bool * pUsesWeight, CSphString & sError ) { m_sLexerError = ""; m_sParserError = ""; m_sCreateError = ""; // setup lexer m_sExpr = sExpr; m_pCur = sExpr; m_pSchema = &tSchema; // setup constant functions m_iConstNow = (int) time ( NULL ); // build tree m_iParsed = -1; yyparse ( this ); // handle errors if ( m_iParsed<0 || !m_sLexerError.IsEmpty() || !m_sParserError.IsEmpty() ) { sError = !m_sLexerError.IsEmpty() ? 
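// WeightCheck_fn above is a WalkTree() functor that raises a flag when it meets a
// WEIGHT() node; Parse() uses it to tell the caller whether the expression depends
// on the match weight. A standalone sketch of the same "did we see token X" visitor
// over a simplified node type (names are illustrative):
#if 0
struct HasTokenSketch_fn
{
	int m_iWanted;
	bool m_bFound;

	explicit HasTokenSketch_fn ( int iWanted )
		: m_iWanted ( iWanted )
		, m_bFound ( false )
	{}

	template < typename NODE > void Enter ( const NODE & tNode ) { m_bFound |= ( tNode.m_iToken==m_iWanted ); }
	template < typename NODE > void Exit ( const NODE & ) {}
};
#endif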
m_sLexerError : m_sParserError; if ( sError.IsEmpty() ) sError = "general parsing error"; return NULL; } // deduce return type ESphAttr eAttrType = m_dNodes[m_iParsed].m_eRetType; assert ( eAttrType==SPH_ATTR_INTEGER || eAttrType==SPH_ATTR_BIGINT || eAttrType==SPH_ATTR_FLOAT ); // perform optimizations Optimize ( m_iParsed ); #if 0 Dump ( m_iParsed ); #endif // check expression stack if ( m_dNodes.GetLength()>100 ) { CSphVector dNodes; dNodes.Reserve ( m_dNodes.GetLength()/2 ); int iMaxHeight = 1; int iHeight = 1; dNodes.Add ( m_iParsed ); while ( dNodes.GetLength() ) { const ExprNode_t & tExpr = m_dNodes[dNodes.Pop()]; iHeight += ( tExpr.m_iLeft>=0 || tExpr.m_iRight>=0 ? 1 : -1 ); iMaxHeight = Max ( iMaxHeight, iHeight ); if ( tExpr.m_iRight>=0 ) dNodes.Add ( tExpr.m_iRight ); if ( tExpr.m_iLeft>=0 ) dNodes.Add ( tExpr.m_iLeft ); } #define SPH_EXPRNODE_STACK_SIZE 110 int64_t iExprStack = sphGetStackUsed() + iMaxHeight*SPH_EXPRNODE_STACK_SIZE; if ( sphMyStackSize()<=iExprStack ) { sError.SetSprintf ( "query too complex, not enough stack (thread_stack_size=%dK or higher required)", (int)( ( iExprStack + 1024 - ( iExprStack%1024 ) ) / 1024 ) ); return NULL; } } // create evaluator ISphExpr * pRes = CreateTree ( m_iParsed ); if ( !m_sCreateError.IsEmpty() ) { sError = m_sCreateError; SafeRelease ( pRes ); } else if ( !pRes ) { sError.SetSprintf ( "empty expression" ); } if ( pAttrType ) *pAttrType = eAttrType; if ( pUsesWeight ) { WeightCheck_fn tFunctor ( pUsesWeight ); WalkTree ( m_iParsed, tFunctor ); } if ( m_pHook ) { HookCheck_fn tFunctor ( m_pHook ); WalkTree ( m_iParsed, tFunctor ); } return pRes; } ////////////////////////////////////////////////////////////////////////// // UDF MANAGER ////////////////////////////////////////////////////////////////////////// #if USE_WINDOWS #define HAVE_DLOPEN 1 #define RTLD_LAZY 0 #define RTLD_LOCAL 0 void * dlsym ( void * lib, const char * name ) { return GetProcAddress ( (HMODULE)lib, name ); } void * dlopen ( const char * libname, int ) { return LoadLibraryEx ( libname, NULL, 0 ); } int dlclose ( void * lib ) { return FreeLibrary ( (HMODULE)lib ) ? 
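// The expression-stack check in Parse() above estimates the tree height and
// multiplies it by a per-node frame estimate (SPH_EXPRNODE_STACK_SIZE) to make sure
// the recursive CreateTree() and evaluation will fit into the thread stack. It walks
// the tree with an explicit stack, since deep recursion is exactly what it guards
// against; the straightforward recursive height computation it approximates is:
#if 0
struct HeightNode_t
{
	int m_iLeft;
	int m_iRight;
};

static int TreeHeight ( const HeightNode_t * pNodes, int iRoot )
{
	if ( iRoot<0 )
		return 0;
	int iLeft = TreeHeight ( pNodes, pNodes[iRoot].m_iLeft );
	int iRight = TreeHeight ( pNodes, pNodes[iRoot].m_iRight );
	return 1 + ( iLeft>iRight ? iLeft : iRight );
}
#endif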
0 : GetLastError(); } const char * dlerror() { static char sError[256]; DWORD uError = GetLastError(); FormatMessage ( FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_IGNORE_INSERTS, NULL, uError, LANG_SYSTEM_DEFAULT, (LPTSTR)sError, sizeof(sError), NULL ); return sError; } #endif // USE_WINDOWS #if !HAVE_DLOPEN void sphUDFInit ( const char * ) { return; } bool sphUDFCreate ( const char *, const char *, ESphAttr, CSphString & sError ) { sError = "no dlopen(); UDF support disabled"; return false; } bool sphUDFDrop ( const char *, CSphString & sError ) { sError = "no dlopen(); UDF support disabled"; return false; } #else void sphUDFInit ( const char * sUdfDir ) { if ( !sUdfDir || !*sUdfDir ) return; g_sUdfDir = sUdfDir; g_bUdfEnabled = true; } bool sphUDFCreate ( const char * szLib, const char * szFunc, ESphAttr eRetType, CSphString & sError ) { if ( !g_bUdfEnabled ) { sError = "UDF support disabled (requires workers=threads; and a valid plugin_dir)"; return false; } // validate library name for ( const char * p = szLib; *p; p++ ) if ( *p=='/' || *p=='\\' ) { sError = "restricted character (path delimiter) in a library file name"; return false; } // from here, we need a lock (we intend to update UDF hash) g_tUdfMutex.Lock(); // validate function name CSphString sFunc ( szFunc ); sFunc.ToLower(); if ( g_hUdfFuncs ( sFunc ) ) { sError.SetSprintf ( "UDF '%s' already exists", sFunc.cstr() ); g_tUdfMutex.Unlock(); return false; } // lookup or load library CSphString sLib; sLib.SetSprintf ( "%s/%s", g_sUdfDir.cstr(), szLib ); UdfFunc_t tFunc; tFunc.m_eRetType = eRetType; tFunc.m_iUserCount = 0; tFunc.m_bToDrop = false; bool bLoaded = false; void * pHandle = NULL; tFunc.m_pLib = g_hUdfLibs ( sLib ); if ( !tFunc.m_pLib ) { bLoaded = true; pHandle = dlopen ( sLib.cstr(), RTLD_LAZY | RTLD_LOCAL ); if ( !pHandle ) { const char * sDlerror = dlerror(); sError.SetSprintf ( "dlopen() failed: %s", sDlerror ? sDlerror : "(null)" ); g_tUdfMutex.Unlock(); return false; } sphLogDebug ( "dlopen(%s)=%p", sLib.cstr(), pHandle ); } else { pHandle = tFunc.m_pLib->m_pHandle; } assert ( pHandle ); // lookup and check function symbols CSphString sName; tFunc.m_fnFunc = dlsym ( pHandle, sFunc.cstr() ); tFunc.m_fnInit = (UdfInit_fn) dlsym ( pHandle, sName.SetSprintf ( "%s_init", sFunc.cstr() ).cstr() ); tFunc.m_fnDeinit = (UdfDeinit_fn) dlsym ( pHandle, sName.SetSprintf ( "%s_deinit", sFunc.cstr() ).cstr() ); if ( !tFunc.m_fnFunc || !tFunc.m_fnInit ) { sError.SetSprintf ( "symbol '%s%s' not found in '%s'", sFunc.cstr(), tFunc.m_fnFunc ? 
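// sphUDFCreate() above loads the library once, then resolves three symbols per UDF:
// "<name>", "<name>_init" and "<name>_deinit" (the first two are mandatory, deinit
// is optional). A minimal standalone sketch of that dlopen()/dlsym() pattern on
// POSIX (illustrative helper; error handling trimmed):
#if 0
#include <dlfcn.h>
#include <stdio.h>

static void * LoadUdfSymbol ( const char * sLibPath, const char * sSymbol )
{
	void * pHandle = dlopen ( sLibPath, RTLD_LAZY | RTLD_LOCAL );
	if ( !pHandle )
	{
		fprintf ( stderr, "dlopen() failed: %s\n", dlerror() );
		return NULL;
	}
	return dlsym ( pHandle, sSymbol ); // NULL if the symbol is missing from the library
}
#endif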
"_init" : "", szLib ); if ( bLoaded ) dlclose ( pHandle ); g_tUdfMutex.Unlock(); return false; } // add library if ( bLoaded ) { UdfLib_t tLib; tLib.m_iFuncs = 1; tLib.m_pHandle = pHandle; Verify ( g_hUdfLibs.Add ( tLib, sLib ) ); tFunc.m_pLib = g_hUdfLibs ( sLib ); } else { tFunc.m_pLib->m_iFuncs++; } tFunc.m_pLibName = g_hUdfLibs.GetKeyPtr ( sLib ); assert ( tFunc.m_pLib ); // add function Verify ( g_hUdfFuncs.Add ( tFunc, sFunc ) ); // all ok g_tUdfMutex.Unlock(); return true; } bool sphUDFDrop ( const char * szFunc, CSphString & sError ) { CSphString sFunc ( szFunc ); sFunc.ToLower(); g_tUdfMutex.Lock(); UdfFunc_t * pFunc = g_hUdfFuncs ( sFunc ); if ( !pFunc || pFunc->m_bToDrop ) // handle concurrent drop in progress as "not exists" { sError.SetSprintf ( "UDF '%s' does not exist", sFunc.cstr() ); g_tUdfMutex.Unlock(); return false; } const int UDF_DROP_TIMEOUT_SEC = 30; // in seconds int64_t tmEnd = sphMicroTimer() + UDF_DROP_TIMEOUT_SEC*1000000; // mark function for deletion, to prevent new users pFunc->m_bToDrop = true; if ( pFunc->m_iUserCount ) for ( ;; ) { // release lock and wait // so that concurrent users could complete and release the function g_tUdfMutex.Unlock(); sphSleepMsec ( 50 ); // re-acquire lock g_tUdfMutex.Lock(); // everyone out? proceed with dropping assert ( pFunc->m_iUserCount>=0 ); if ( pFunc->m_iUserCount<=0 ) break; // timed out? clear deletion flag, and bail if ( sphMicroTimer() > tmEnd ) { pFunc->m_bToDrop = false; g_tUdfMutex.Unlock(); sError.SetSprintf ( "DROP timed out in (still got %d users after waiting for %d seconds); please retry", pFunc->m_iUserCount, UDF_DROP_TIMEOUT_SEC ); return false; } } UdfLib_t * pLib = pFunc->m_pLib; const CSphString * pLibName = pFunc->m_pLibName; Verify ( g_hUdfFuncs.Delete ( sFunc ) ); if ( --pLib->m_iFuncs<=0 ) { // FIXME! running queries might be using this function int iRes = dlclose ( pLib->m_pHandle ); sphLogDebug ( "dlclose(%s)=%d", pLibName->cstr(), iRes ); Verify ( g_hUdfLibs.Delete ( *pLibName ) ); } g_tUdfMutex.Unlock(); return true; } #endif // HAVE_DLOPEN ////////////////////////////////////////////////////////////////////////// // PUBLIC STUFF ////////////////////////////////////////////////////////////////////////// /// parser entry point ISphExpr * sphExprParse ( const char * sExpr, const CSphSchema & tSchema, ESphAttr * pAttrType, bool * pUsesWeight, CSphString & sError, CSphSchema * pExtra, ISphExprHook * pHook ) { // parse into opcodes ExprParser_t tParser ( pExtra, pHook ); return tParser.Parse ( sExpr, tSchema, pAttrType, pUsesWeight, sError ); } // // $Id: sphinxexpr.cpp 3129 2012-03-01 07:18:52Z tomat $ // sphinx-2.0.4-release/src/sphinxsearch.h0000644000176700017710000001231311711621267017406 0ustar deogardeogar// // $Id: sphinxsearch.h 3087 2012-01-30 23:07:35Z shodan $ // // // Copyright (c) 2001-2012, Andrew Aksyonoff // Copyright (c) 2008-2012, Sphinx Technologies Inc // All rights reserved // // This program is free software; you can redistribute it and/or modify // it under the terms of the GNU General Public License. 
You should have // received a copy of the GPL license along with this program; if you // did not, you can find it at http://www.gnu.org/ // #ifndef _sphinxsearch_ #define _sphinxsearch_ #include "sphinx.h" #include "sphinxquery.h" ////////////////////////////////////////////////////////////////////////// // PACKED HIT MACROS ////////////////////////////////////////////////////////////////////////// ////////////////////////////////////////////////////////////////////////// /// term modifiers enum TermPosFilter_e { TERM_POS_FIELD_LIMIT = 1, TERM_POS_FIELD_START, TERM_POS_FIELD_END, TERM_POS_FIELD_STARTEND, TERM_POS_ZONES }; /// term, searcher view class ISphQword { public: // setup by query parser CSphString m_sWord; ///< my copy of word CSphString m_sDictWord; ///< word after being processed by dict (eg. stemmed) SphWordID_t m_iWordID; ///< word ID, from dictionary int m_iTermPos; int m_iAtomPos; ///< word position, from query bool m_bExpanded; ///< added by prefix expansion bool m_bExcluded; ///< excluded by the query (rval to operator NOT) // setup by QwordSetup() int m_iDocs; ///< document count, from wordlist int m_iHits; ///< hit count, from wordlist bool m_bHasHitlist; ///< hitlist presence flag // iterator state CSphSmallBitvec m_dQwordFields; ///< current match fields DWORD m_uMatchHits; ///< current match hits count SphOffset_t m_iHitlistPos; ///< current position in hitlist, from doclist protected: bool m_bAllFieldsKnown; ///< whether the all match fields is known, or only low 32. public: ISphQword () : m_iWordID ( 0 ) , m_iTermPos ( 0 ) , m_iAtomPos ( 0 ) , m_bExpanded ( false ) , m_bExcluded ( false ) , m_iDocs ( 0 ) , m_iHits ( 0 ) , m_bHasHitlist ( true ) , m_uMatchHits ( 0 ) , m_iHitlistPos ( 0 ) , m_bAllFieldsKnown ( false ) { m_dQwordFields.Unset(); } virtual ~ISphQword () {} virtual const CSphMatch & GetNextDoc ( DWORD * pInlineDocinfo ) = 0; virtual void SeekHitlist ( SphOffset_t uOff ) = 0; virtual Hitpos_t GetNextHit () = 0; virtual void CollectHitMask (); virtual void Reset () { m_iDocs = 0; m_iHits = 0; m_dQwordFields.Unset(); m_bAllFieldsKnown = false; m_uMatchHits = 0; m_iHitlistPos = 0; } }; /// term setup, searcher view class CSphQueryNodeCache; class ISphZoneCheck; class ISphQwordSetup : ISphNoncopyable { public: CSphDict * m_pDict; const CSphIndex * m_pIndex; ESphDocinfo m_eDocinfo; CSphMatch m_tMin; int m_iInlineRowitems; ///< inline rowitems count int m_iDynamicRowitems; ///< dynamic rowitems counts (including (!) 
inline) int64_t m_iMaxTimer; CSphString * m_pWarning; CSphQueryContext * m_pCtx; CSphQueryNodeCache * m_pNodeCache; mutable ISphZoneCheck * m_pZoneChecker; ISphQwordSetup () : m_pDict ( NULL ) , m_pIndex ( NULL ) , m_eDocinfo ( SPH_DOCINFO_NONE ) , m_iInlineRowitems ( 0 ) , m_iDynamicRowitems ( 0 ) , m_iMaxTimer ( 0 ) , m_pWarning ( NULL ) , m_pCtx ( NULL ) , m_pNodeCache ( NULL ) , m_pZoneChecker ( NULL ) {} virtual ~ISphQwordSetup () {} virtual ISphQword * QwordSpawn ( const XQKeyword_t & tWord ) const = 0; virtual bool QwordSetup ( ISphQword * pQword ) const = 0; }; ////////////////////////////////////////////////////////////////////////// /// generic ranker interface class ISphRanker { public: virtual ~ISphRanker () {} virtual CSphMatch * GetMatchesBuffer() = 0; virtual int GetMatches () = 0; virtual void Reset ( const ISphQwordSetup & tSetup ) = 0; }; /// factory ISphRanker * sphCreateRanker ( const XQQuery_t & tXQ, const CSphQuery * pQuery, CSphQueryResult * pResult, const ISphQwordSetup & tTermSetup, const CSphQueryContext & tCtx ); ////////////////////////////////////////////////////////////////////////// /// hit mark, used for snippets generation struct SphHitMark_t { DWORD m_uPosition; DWORD m_uSpan; bool operator == ( const SphHitMark_t & rhs ) const { return m_uPosition==rhs.m_uPosition && m_uSpan==rhs.m_uSpan; } }; /// hit marker, used for snippets generation class CSphHitMarker { public: class ExtNode_i * m_pRoot; public: CSphHitMarker() : m_pRoot ( NULL ) {} ~CSphHitMarker(); void Mark ( CSphVector & ); static CSphHitMarker * Create ( const XQNode_t * pRoot, const ISphQwordSetup & tSetup ); }; ////////////////////////////////////////////////////////////////////////// /// intra-batch node cache class CSphQueryNodeCache { friend class NodeCacheContainer_t; protected: class NodeCacheContainer_t * m_pPool; int m_iMaxCachedDocs; int m_iMaxCachedHits; public: CSphQueryNodeCache ( int iCells, int MaxCachedDocs, int MaxCachedHits ); ~CSphQueryNodeCache (); ExtNode_i * CreateProxy ( ExtNode_i * pChild, const XQNode_t * pRawChild, const ISphQwordSetup & tSetup ); }; #endif // _sphinxsearch_ // // $Id: sphinxsearch.h 3087 2012-01-30 23:07:35Z shodan $ // sphinx-2.0.4-release/src/sphinxint.h0000644000176700017710000011567211721250154016741 0ustar deogardeogar// // $Id: sphinxint.h 3117 2012-02-22 20:30:36Z tomat $ // // // Copyright (c) 2001-2012, Andrew Aksyonoff // Copyright (c) 2008-2012, Sphinx Technologies Inc // All rights reserved // // This program is free software; you can redistribute it and/or modify // it under the terms of the GNU General Public License. 
You should have // received a copy of the GPL license along with this program; if you // did not, you can find it at http://www.gnu.org/ // #ifndef _sphinxint_ #define _sphinxint_ #include "sphinx.h" #include "sphinxfilter.h" #include "sphinxrt.h" #include #include #include ////////////////////////////////////////////////////////////////////////// // INTERNAL CONSTANTS ////////////////////////////////////////////////////////////////////////// #ifdef O_BINARY #define SPH_O_BINARY O_BINARY #else #define SPH_O_BINARY 0 #endif #define SPH_O_READ ( O_RDONLY | SPH_O_BINARY ) #define SPH_O_NEW ( O_CREAT | O_RDWR | O_TRUNC | SPH_O_BINARY ) #define MVA_DOWNSIZE DWORD // MVA32 offset type #define MVA_OFFSET_MASK 0x7fffffffUL // MVA offset mask #define MVA_ARENA_FLAG 0x80000000UL // MVA global-arena flag ////////////////////////////////////////////////////////////////////////// const DWORD INDEX_MAGIC_HEADER = 0x58485053; ///< my magic 'SPHX' header const DWORD INDEX_FORMAT_VERSION = 26; ///< my format version const char MAGIC_SYNONYM_WHITESPACE = 1; // used internally in tokenizer only const char MAGIC_CODE_SENTENCE = 2; // emitted from tokenizer on sentence boundary const char MAGIC_CODE_PARAGRAPH = 3; // emitted from stripper (and passed via tokenizer) on paragraph boundary const char MAGIC_CODE_ZONE = 4; // emitted from stripper (and passed via tokenizer) on zone boundary; followed by zero-terminated zone name const char MAGIC_WORD_HEAD = 1; // prepended to keyword by source, stored in (crc) dictionary const char MAGIC_WORD_TAIL = 1; // appended to keyword by source, stored in (crc) dictionary const char MAGIC_WORD_HEAD_NONSTEMMED = 2; // prepended to keyword by source, stored in dictionary extern const char * MAGIC_WORD_SENTENCE; extern const char * MAGIC_WORD_PARAGRAPH; ////////////////////////////////////////////////////////////////////////// // INTERNAL GLOBALS ////////////////////////////////////////////////////////////////////////// /// binlog, defind in sphinxrt.cpp extern class ISphBinlog * g_pBinlog; ////////////////////////////////////////////////////////////////////////// // INTERNAL HELPER FUNCTIONS, CLASSES, ETC ////////////////////////////////////////////////////////////////////////// /// file writer with write buffering and int encoder class CSphWriter : ISphNoncopyable { public: CSphWriter (); virtual ~CSphWriter (); void SetBufferSize ( int iBufferSize ); ///< tune write cache size; must be called before OpenFile() or SetFile() bool OpenFile ( const CSphString & sName, CSphString & sErrorBuffer ); void SetFile ( int iFD, SphOffset_t * pSharedOffset ); void CloseFile ( bool bTruncate = false ); ///< note: calls Flush(), ie. IsError() might get true after this call void UnlinkFile (); /// some shit happened (outside) and the file is no more actual. 
void PutByte ( int uValue ); void PutBytes ( const void * pData, int iSize ); void PutDword ( DWORD uValue ) { PutBytes ( &uValue, sizeof(DWORD) ); } void PutOffset ( SphOffset_t uValue ) { PutBytes ( &uValue, sizeof(SphOffset_t) ); } void PutString ( const char * szString ); void PutString ( const CSphString & sString ); void SeekTo ( SphOffset_t pos ); ///< seeking inside the buffer will truncate it #if USE_64BIT void PutDocid ( SphDocID_t uValue ) { PutOffset ( uValue ); } #else void PutDocid ( SphDocID_t uValue ) { PutDword ( uValue ); } #endif void ZipInt ( DWORD uValue ); void ZipOffset ( SphOffset_t uValue ); void ZipOffsets ( CSphVector * pData ); bool IsError () const { return m_bError; } SphOffset_t GetPos () const { return m_iPos; } protected: CSphString m_sName; SphOffset_t m_iPos; SphOffset_t m_iWritten; int m_iFD; int m_iPoolUsed; BYTE * m_pBuffer; BYTE * m_pPool; bool m_bOwnFile; SphOffset_t * m_pSharedOffset; int m_iBufferSize; bool m_bError; CSphString * m_pError; virtual void Flush (); }; /// file which closes automatically when going out of scope class CSphAutofile : ISphNoncopyable { protected: int m_iFD; ///< my file descriptior CSphString m_sFilename; ///< my file name bool m_bTemporary; ///< whether to unlink this file on Close() bool m_bWouldTemporary; ///< backup of the m_bTemporary CSphIndex::ProgressCallback_t * m_pProgress; ///< for displaying progress CSphIndexProgress * m_pStat; public: CSphAutofile (); CSphAutofile ( const CSphString & sName, int iMode, CSphString & sError, bool bTemp=false ); ~CSphAutofile (); int Open ( const CSphString & sName, int iMode, CSphString & sError, bool bTemp=false ); void Close (); void SetTemporary(); ///< would be set if a shit happened and the file is not actual. public: int GetFD () const { return m_iFD; } const char * GetFilename () const; SphOffset_t GetSize ( SphOffset_t iMinSize, bool bCheckSizeT, CSphString & sError ); SphOffset_t GetSize (); bool Read ( void * pBuf, size_t uCount, CSphString & sError ); void SetProgressCallback ( CSphIndex::ProgressCallback_t * pfnProgress, CSphIndexProgress * pStat ); }; /// file reader with read buffering and int decoder class CSphReader { public: CSphReader ( BYTE * pBuf=NULL, int iSize=0 ); virtual ~CSphReader (); void SetBuffers ( int iReadBuffer, int iReadUnhinted ); void SetFile ( int iFD, const char * sFilename ); void SetFile ( const CSphAutofile & tFile ); void Reset (); void SeekTo ( SphOffset_t iPos, int iSizeHint ); void SkipBytes ( int iCount ); SphOffset_t GetPos () const { return m_iPos+m_iBuffPos; } void GetBytes ( void * pData, int iSize ); int GetBytesZerocopy ( const BYTE ** ppData, int iMax ); ///< zerocopy method; returns actual length present in buffer (upto iMax) int GetByte (); DWORD GetDword (); SphOffset_t GetOffset (); CSphString GetString (); int GetLine ( char * sBuffer, int iMaxLen ); DWORD UnzipInt (); SphOffset_t UnzipOffset (); SphOffset_t Tell () const { return m_iPos + m_iBuffPos; } bool GetErrorFlag () const { return m_bError; } const CSphString & GetErrorMessage () const { return m_sError; } const CSphString & GetFilename() const { return m_sFilename; } #if USE_64BIT SphDocID_t GetDocid () { return GetOffset(); } SphDocID_t UnzipDocid () { return UnzipOffset(); } SphWordID_t UnzipWordid () { return UnzipOffset(); } #else SphDocID_t GetDocid () { return GetDword(); } SphDocID_t UnzipDocid () { return UnzipInt(); } SphWordID_t UnzipWordid () { return UnzipInt(); } #endif const CSphReader & operator = ( const CSphReader & rhs ); protected: int m_iFD; 
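// CSphWriter::ZipInt()/ZipOffset() and CSphReader::UnzipInt()/UnzipOffset() above
// form a variable-length integer codec used throughout the index files. A standalone
// sketch of a typical 7-bits-per-byte scheme with a continuation flag in the top
// bit; the exact bit layout Sphinx writes may differ, this only illustrates the idea:
#if 0
#include <vector>

static void VarintWrite ( std::vector<unsigned char> & dOut, unsigned int uValue )
{
	while ( uValue>=0x80 )
	{
		dOut.push_back ( (unsigned char)( ( uValue & 0x7f ) | 0x80 ) ); // high bit set: more bytes follow
		uValue >>= 7;
	}
	dOut.push_back ( (unsigned char)uValue ); // final byte has the high bit clear
}
#endif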
SphOffset_t m_iPos; int m_iBuffPos; int m_iBuffUsed; BYTE * m_pBuff; int m_iSizeHint; ///< how much do we expect to read int m_iBufSize; bool m_bBufOwned; int m_iReadUnhinted; bool m_bError; CSphString m_sError; CSphString m_sFilename; private: void UpdateCache (); }; /// scoped reader class CSphAutoreader : public CSphReader { public: CSphAutoreader ( BYTE * pBuf=NULL, int iSize=0 ) : CSphReader ( pBuf, iSize ) {} ~CSphAutoreader (); bool Open ( const CSphString & sFilename, CSphString & sError ); void Close (); SphOffset_t GetFilesize (); public: // added for DebugCheck() int GetFD () { return m_iFD; } }; ////////////////////////////////////////////////////////////////////////// /// per-query search context /// everything that index needs to compute/create to process the query class CSphQueryContext { public: // searching-only, per-query int m_iWeights; ///< search query field weights count int m_dWeights [ SPH_MAX_FIELDS ]; ///< search query field weights bool m_bLookupFilter; ///< row data lookup required at filtering stage bool m_bLookupSort; ///< row data lookup required at sorting stage ISphFilter * m_pFilter; ISphFilter * m_pWeightFilter; struct CalcItem_t { CSphAttrLocator m_tLoc; ///< result locator ESphAttr m_eType; ///< result type ISphExpr * m_pExpr; ///< evaluator (non-owned) }; CSphVector m_dCalcFilter; ///< items to compute for filtering CSphVector m_dCalcSort; ///< items to compute for sorting/grouping CSphVector m_dCalcFinal; ///< items to compute when finalizing result set const CSphVector * m_pOverrides; ///< overridden attribute values CSphVector m_dOverrideIn; CSphVector m_dOverrideOut; void * m_pIndexData; ///< backend specific data public: CSphQueryContext (); ~CSphQueryContext (); void BindWeights ( const CSphQuery * pQuery, const CSphSchema & tSchema, int iIndexWeight ); bool SetupCalc ( CSphQueryResult * pResult, const CSphSchema & tInSchema, const CSphSchema & tSchema, const DWORD * pMvaPool ); bool CreateFilters ( bool bFullscan, const CSphVector * pdFilters, const CSphSchema & tSchema, const DWORD * pMvaPool, CSphString & sError ); bool SetupOverrides ( const CSphQuery * pQuery, CSphQueryResult * pResult, const CSphSchema & tIndexSchema ); void CalcFilter ( CSphMatch & tMatch ) const; void CalcSort ( CSphMatch & tMatch ) const; void CalcFinal ( CSphMatch & tMatch ) const; // rt index bind pools at segment searching, not at time it setups context void SetStringPool ( const BYTE * pStrings ); void SetMVAPool ( const DWORD * pMva ); }; ////////////////////////////////////////////////////////////////////////// // MEMORY TRACKER ////////////////////////////////////////////////////////////////////////// namespace Memory { enum Category_e { SPH_MEM_CORE, SPH_MEM_IDX_DISK, SPH_MEM_IDX_RT, SPH_MEM_IDX_RT_ACCUM, SPH_MEM_MMAPED, SPH_MEM_BINLOG, SPH_MEM_HANDLE_NONSQL, SPH_MEM_HANDLE_SQL, SPH_MEM_SEARCH_NONSQL, SPH_MEM_QUERY_NONSQL, SPH_MEM_INSERT_SQL, SPH_MEM_SELECT_SQL, SPH_MEM_DELETE_SQL, SPH_MEM_COMMIT_SET_SQL, SPH_MEM_COMMIT_BEGIN_SQL, SPH_MEM_COMMIT_SQL, SPH_MEM_IDX_DISK_MULTY_QUERY, SPH_MEM_IDX_DISK_MULTY_QUERY_EX, SPH_MEM_IDX_RT_MULTY_QUERY, SPH_MEM_IDX_RT_RES_MATCHES, SPH_MEM_IDX_RT_RES_STRINGS, SPH_MEM_TOTAL }; } #if SPH_ALLOCS_PROFILER void sphMemStatPush ( Memory::Category_e eCategory ); void sphMemStatPop ( Memory::Category_e eCategory ); // memory tracker struct MemTracker_c : ISphNoncopyable { const Memory::Category_e m_eCategory; ///< category /// ctor explicit MemTracker_c ( Memory::Category_e eCategory ) : m_eCategory ( eCategory ) { sphMemStatPush ( 
m_eCategory ); } /// dtor ~MemTracker_c () { sphMemStatPop ( m_eCategory ); } }; #define MEMORY(name) MemTracker_c tracker_##__LINE__##name(Memory::name); #else // SPH_ALLOCS_PROFILER 0 #define MEMORY(name) #endif // if SPH_ALLOCS_PROFILER ////////////////////////////////////////////////////////////////////////// // BLOCK-LEVEL ATTRIBUTE INDEX BUILDER ////////////////////////////////////////////////////////////////////////// #define DOCINFO_INDEX_FREQ 128 // FIXME? make this configurable inline uint64_t MVA_UPSIZE ( const DWORD * pMva ) { uint64_t uMva = (uint64_t)pMva[0] | ( ( (uint64_t)pMva[1] )<<32 ); return uMva; } struct CSphDocMVA { SphDocID_t m_iDocID; CSphVector < CSphVector > m_dMVA; CSphVector < DWORD > m_dOffsets; explicit CSphDocMVA ( int iSize ) : m_iDocID ( 0 ) { m_dMVA.Resize ( iSize ); m_dOffsets.Resize ( iSize ); } void Read ( CSphReader & tReader ); void Write ( CSphWriter & tWriter ); }; /// attr min-max builder template < typename DOCID = SphDocID_t > class AttrIndexBuilder_t : ISphNoncopyable { private: CSphVector m_dIntAttrs; CSphVector m_dFloatAttrs; CSphVector m_dMvaAttrs; CSphVector m_dIntMin; CSphVector m_dIntMax; CSphVector m_dIntIndexMin; CSphVector m_dIntIndexMax; CSphVector m_dFloatMin; CSphVector m_dFloatMax; CSphVector m_dFloatIndexMin; CSphVector m_dFloatIndexMax; CSphVector m_dMvaMin; CSphVector m_dMvaMax; CSphVector m_dMvaIndexMin; CSphVector m_dMvaIndexMax; DWORD m_uStride; // size of attribute's chunk (in DWORDs) DWORD m_uElements; // counts total number of collected min/max pairs int m_iLoop; // loop inside one set DWORD * m_pOutBuffer; // storage for collected min/max DWORD * m_pOutMax; // storage max for bound checking DOCID m_uStart; // first and last docids of current chunk DOCID m_uLast; DOCID m_uIndexStart; // first and last docids of whole index DOCID m_uIndexLast; int m_iMva64; private: void ResetLocal(); void FlushComputed ( bool bUseAttrs, bool bUseMvas ); void UpdateMinMaxDocids ( DOCID uDocID ); void CollectRowMVA ( int iAttr, DWORD uCount, const DWORD * pMva ); public: explicit AttrIndexBuilder_t ( const CSphSchema & tSchema ); void Prepare ( DWORD * pOutBuffer, DWORD * pOutMax ); void CollectWithoutMvas ( const DWORD * pCur, bool bUseMvas ); bool Collect ( const DWORD * pCur, const DWORD * pMvas, int64_t iMvasCount, CSphString & sError, bool bHasMvaID ); void Collect ( const DWORD * pCur, const struct CSphDocMVA & dMvas ); void CollectMVA ( DOCID uDocID, const CSphVector< CSphVector > & dCurInfo ); void FinishCollect ( bool bMvaOnly = false ); /// actually used part of output buffer, only used with index merge /// (we reserve space for rows from both indexes, but might kill some rows) inline DWORD GetActualSize() const { return 2 * m_uElements * m_uStride; } /// how many DWORDs will we need for block index inline DWORD GetExpectedSize ( DWORD uMaxDocs ) const { DWORD uDocinfoIndex = ( uMaxDocs + DOCINFO_INDEX_FREQ - 1 ) / DOCINFO_INDEX_FREQ; return 2 * ( 1 + uDocinfoIndex ) * m_uStride; } }; typedef AttrIndexBuilder_t<> AttrIndexBuilder_c; // dirty hack for some build systems which not has LLONG_MAX #ifndef LLONG_MAX #define LLONG_MAX (((unsigned long long)(-1))>>1) #endif template < typename DOCID > void AttrIndexBuilder_t::ResetLocal() { ARRAY_FOREACH ( i, m_dIntMin ) { m_dIntMin[i] = LLONG_MAX; m_dIntMax[i] = 0; } ARRAY_FOREACH ( i, m_dFloatMin ) { m_dFloatMin[i] = FLT_MAX; m_dFloatMax[i] = -FLT_MAX; } ARRAY_FOREACH ( i, m_dMvaMin ) { m_dMvaMin[i] = LLONG_MAX; m_dMvaMax[i] = 0; } m_uStart = m_uLast = 0; m_iLoop = 0; } template < 
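// ---------------------------------------------------------------------------
// Illustrative aside (not part of the original source): AttrIndexBuilder_t
// groups rows into blocks of DOCINFO_INDEX_FREQ (128) documents and stores one
// (min,max) attribute row pair per block plus one index-wide pair; that is
// where the 2*(1+uDocinfoIndex)*m_uStride in GetExpectedSize() comes from.
// Worked example, assuming a stride of 4 DWORDs:
//   uMaxDocs = 1000  ->  uDocinfoIndex = (1000+127)/128 = 8 blocks
//   GetExpectedSize = 2 * (1+8) * 4 = 72 DWORDs
// A consumer of such a block index can skip whole blocks whose [min,max] range
// cannot satisfy a filter; a minimal sketch under those assumptions:
#if 0
static bool SketchBlockCanMatch ( SphAttr_t iBlockMin, SphAttr_t iBlockMax,
	SphAttr_t iFilterMin, SphAttr_t iFilterMax )
{
	// the block may contain matching rows only if the two ranges overlap
	return !( iBlockMax<iFilterMin || iBlockMin>iFilterMax );
}
#endif
// ---------------------------------------------------------------------------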
typename DOCID > void AttrIndexBuilder_t::FlushComputed ( bool bUseAttrs, bool bUseMvas ) { assert ( m_pOutBuffer ); DWORD * pMinEntry = m_pOutBuffer + 2 * m_uElements * m_uStride; DWORD * pMinAttrs = DOCINFO2ATTRS ( pMinEntry ); DWORD * pMaxEntry = pMinEntry + m_uStride; DWORD * pMaxAttrs = pMinAttrs + m_uStride; assert ( pMaxEntry+m_uStride<=m_pOutMax ); assert ( pMaxAttrs+m_uStride-DOCINFO_IDSIZE<=m_pOutMax ); m_uIndexLast = m_uLast; DOCINFOSETID ( pMinEntry, m_uStart ); DOCINFOSETID ( pMaxEntry, m_uLast ); if ( bUseAttrs ) { ARRAY_FOREACH ( i, m_dIntAttrs ) { m_dIntIndexMin[i] = Min ( m_dIntIndexMin[i], m_dIntMin[i] ); m_dIntIndexMax[i] = Max ( m_dIntIndexMax[i], m_dIntMax[i] ); sphSetRowAttr ( pMinAttrs, m_dIntAttrs[i], m_dIntMin[i] ); sphSetRowAttr ( pMaxAttrs, m_dIntAttrs[i], m_dIntMax[i] ); } ARRAY_FOREACH ( i, m_dFloatAttrs ) { m_dFloatIndexMin[i] = Min ( m_dFloatIndexMin[i], m_dFloatMin[i] ); m_dFloatIndexMax[i] = Max ( m_dFloatIndexMax[i], m_dFloatMax[i] ); sphSetRowAttr ( pMinAttrs, m_dFloatAttrs[i], sphF2DW ( m_dFloatMin[i] ) ); sphSetRowAttr ( pMaxAttrs, m_dFloatAttrs[i], sphF2DW ( m_dFloatMax[i] ) ); } } if ( bUseMvas ) ARRAY_FOREACH ( i, m_dMvaAttrs ) { m_dMvaIndexMin[i] = Min ( m_dMvaIndexMin[i], m_dMvaMin[i] ); m_dMvaIndexMax[i] = Max ( m_dMvaIndexMax[i], m_dMvaMax[i] ); sphSetRowAttr ( pMinAttrs, m_dMvaAttrs[i], m_dMvaMin[i] ); sphSetRowAttr ( pMaxAttrs, m_dMvaAttrs[i], m_dMvaMax[i] ); } m_uElements++; ResetLocal(); } template < typename DOCID > void AttrIndexBuilder_t::UpdateMinMaxDocids ( DOCID uDocID ) { if ( !m_uStart ) m_uStart = uDocID; if ( !m_uIndexStart ) m_uIndexStart = uDocID; m_uLast = uDocID; } template < typename DOCID > AttrIndexBuilder_t::AttrIndexBuilder_t ( const CSphSchema & tSchema ) : m_uStride ( DWSIZEOF(DOCID) + tSchema.GetRowSize() ) , m_uElements ( 0 ) , m_iLoop ( 0 ) , m_pOutBuffer ( NULL ) , m_pOutMax ( NULL ) , m_uStart ( 0 ) , m_uLast ( 0 ) , m_uIndexStart ( 0 ) , m_uIndexLast ( 0 ) { for ( int i=0; i void AttrIndexBuilder_t::Prepare ( DWORD * pOutBuffer, DWORD * pOutMax ) { m_pOutBuffer = pOutBuffer; m_pOutMax = pOutMax; m_uElements = 0; m_uIndexStart = m_uIndexLast = 0; ARRAY_FOREACH ( i, m_dIntIndexMin ) { m_dIntIndexMin[i] = LLONG_MAX; m_dIntIndexMax[i] = 0; } ARRAY_FOREACH ( i, m_dFloatIndexMin ) { m_dFloatIndexMin[i] = FLT_MAX; m_dFloatIndexMax[i] = -FLT_MAX; } ARRAY_FOREACH ( i, m_dMvaIndexMin ) { m_dMvaIndexMin[i] = LLONG_MAX; m_dMvaIndexMax[i] = 0; } ResetLocal(); } template < typename DOCID > void AttrIndexBuilder_t::CollectWithoutMvas ( const DWORD * pCur, bool bUseMvas ) { // check if it is time to flush already collected values if ( m_iLoop>=DOCINFO_INDEX_FREQ ) FlushComputed ( true, bUseMvas ); const DWORD * pRow = DOCINFO2ATTRS_T(pCur); UpdateMinMaxDocids ( DOCINFO2ID_T(pCur) ); m_iLoop++; // ints ARRAY_FOREACH ( i, m_dIntAttrs ) { SphAttr_t uVal = sphGetRowAttr ( pRow, m_dIntAttrs[i] ); m_dIntMin[i] = Min ( m_dIntMin[i], uVal ); m_dIntMax[i] = Max ( m_dIntMax[i], uVal ); } // floats ARRAY_FOREACH ( i, m_dFloatAttrs ) { float fVal = sphDW2F ( (DWORD)sphGetRowAttr ( pRow, m_dFloatAttrs[i] ) ); m_dFloatMin[i] = Min ( m_dFloatMin[i], fVal ); m_dFloatMax[i] = Max ( m_dFloatMax[i], fVal ); } } template < typename DOCID > void AttrIndexBuilder_t::CollectRowMVA ( int iAttr, DWORD uCount, const DWORD * pMva ) { if ( iAttr>=m_iMva64 ) { assert ( ( uCount%2 )==0 ); for ( ; uCount>0; uCount-=2, pMva+=2 ) { uint64_t uVal = MVA_UPSIZE ( pMva ); m_dMvaMin[iAttr] = Min ( m_dMvaMin[iAttr], uVal ); m_dMvaMax[iAttr] = Max ( m_dMvaMax[iAttr], 
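// ---------------------------------------------------------------------------
// Illustrative aside (not part of the original source): FlushComputed() above
// appends one (min,max) entry pair per block; entry i occupies two rows of
// m_uStride DWORDs starting at offset 2*i*m_uStride, each row carrying a docid
// header followed by the attribute data. A sketch of locating block i in the
// finished buffer (helper names are assumptions):
#if 0
static const DWORD * SketchBlockMinRow ( const DWORD * pMinMaxBuf, DWORD uStride, DWORD uBlock )
{
	return pMinMaxBuf + 2*uBlock*uStride;				// min row of block uBlock
}

static const DWORD * SketchBlockMaxRow ( const DWORD * pMinMaxBuf, DWORD uStride, DWORD uBlock )
{
	return pMinMaxBuf + 2*uBlock*uStride + uStride;		// max row immediately follows the min row
}
#endif
// ---------------------------------------------------------------------------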
uVal ); } } else { for ( ; uCount>0; uCount--, pMva++ ) { DWORD uVal = *pMva; m_dMvaMin[iAttr] = Min ( m_dMvaMin[iAttr], uVal ); m_dMvaMax[iAttr] = Max ( m_dMvaMax[iAttr], uVal ); } } } template < typename DOCID > bool AttrIndexBuilder_t::Collect ( const DWORD * pCur, const DWORD * pMvas, int64_t iMvasCount, CSphString & sError, bool bHasMvaID ) { CollectWithoutMvas ( pCur, true ); const DWORD * pRow = DOCINFO2ATTRS_T(pCur); SphDocID_t uDocID = DOCINFO2ID_T(pCur); // MVAs ARRAY_FOREACH ( i, m_dMvaAttrs ) { SphAttr_t uOff = sphGetRowAttr ( pRow, m_dMvaAttrs[i] ); if ( !uOff ) continue; // sanity checks if ( uOff>=iMvasCount ) { sError.SetSprintf ( "broken index: mva offset out of bounds, id=" DOCID_FMT, (SphDocID_t)uDocID ); return false; } const DWORD * pMva = pMvas + uOff; // don't care about updates at this point if ( bHasMvaID && i==0 && DOCINFO2ID_T ( pMva-DWSIZEOF(DOCID) )!=uDocID ) { sError.SetSprintf ( "broken index: mva docid verification failed, id=" DOCID_FMT, (SphDocID_t)uDocID ); return false; } DWORD uCount = *pMva++; if ( ( uOff+uCount>=iMvasCount ) || ( i>=m_iMva64 && ( uCount%2 )!=0 ) ) { sError.SetSprintf ( "broken index: mva list out of bounds, id=" DOCID_FMT, (SphDocID_t)uDocID ); return false; } // walk and calc CollectRowMVA ( i, uCount, pMva ); } return true; } template < typename DOCID > void AttrIndexBuilder_t::Collect ( const DWORD * pCur, const CSphDocMVA & dMvas ) { CollectWithoutMvas ( pCur, true ); ARRAY_FOREACH ( i, m_dMvaAttrs ) { CollectRowMVA ( i, dMvas.m_dMVA[i].GetLength(), dMvas.m_dMVA[i].Begin() ); } } template < typename DOCID > void AttrIndexBuilder_t::CollectMVA ( DOCID uDocID, const CSphVector< CSphVector > & dCurInfo ) { // check if it is time to flush already collected values if ( m_iLoop>=DOCINFO_INDEX_FREQ ) FlushComputed ( false, true ); UpdateMinMaxDocids ( uDocID ); m_iLoop++; ARRAY_FOREACH ( i, dCurInfo ) { CollectRowMVA ( i, dCurInfo[i].GetLength(), dCurInfo[i].Begin() ); } } template < typename DOCID > void AttrIndexBuilder_t::FinishCollect ( bool bMvaOnly ) { assert ( m_pOutBuffer ); if ( m_iLoop ) FlushComputed ( !bMvaOnly, true ); DWORD * pMinEntry = m_pOutBuffer + 2 * m_uElements * m_uStride; DWORD * pMaxEntry = pMinEntry + m_uStride; CSphRowitem * pMinAttrs = DOCINFO2ATTRS_T ( pMinEntry ); CSphRowitem * pMaxAttrs = DOCINFO2ATTRS_T ( pMaxEntry ); assert ( pMaxEntry+m_uStride<=m_pOutMax ); assert ( pMaxAttrs+m_uStride-DWSIZEOF(DOCID)<=m_pOutMax ); DOCINFOSETID ( pMinEntry, m_uIndexStart ); DOCINFOSETID ( pMaxEntry, m_uIndexLast ); ARRAY_FOREACH ( i, m_dMvaAttrs ) { sphSetRowAttr ( pMinAttrs, m_dMvaAttrs[i], m_dMvaIndexMin[i] ); sphSetRowAttr ( pMaxAttrs, m_dMvaAttrs[i], m_dMvaIndexMax[i] ); } if ( !bMvaOnly ) { ARRAY_FOREACH ( i, m_dIntAttrs ) { sphSetRowAttr ( pMinAttrs, m_dIntAttrs[i], m_dIntIndexMin[i] ); sphSetRowAttr ( pMaxAttrs, m_dIntAttrs[i], m_dIntIndexMax[i] ); } ARRAY_FOREACH ( i, m_dFloatAttrs ) { sphSetRowAttr ( pMinAttrs, m_dFloatAttrs[i], sphF2DW ( m_dFloatIndexMin[i] ) ); sphSetRowAttr ( pMaxAttrs, m_dFloatAttrs[i], sphF2DW ( m_dFloatIndexMax[i] ) ); } m_uElements++; } else { m_uElements = 0; // rewind back for collecting the rest of attributes. 
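// ---------------------------------------------------------------------------
// Illustrative aside (not part of the original source): a typical driving loop
// for the builder is Prepare() once, Collect() per document row, then
// FinishCollect(). A hedged usage sketch; buffer sizing and the row source are
// assumptions made only for the example:
#if 0
static bool SketchBuildMinMax ( const CSphSchema & tSchema, const DWORD * pRows, DWORD uDocs,
	const DWORD * pMvaPool, int64_t iMvaCount, CSphVector<DWORD> & dMinMax, CSphString & sError )
{
	AttrIndexBuilder_c tBuilder ( tSchema );
	dMinMax.Resize ( (int)tBuilder.GetExpectedSize ( uDocs ) );
	tBuilder.Prepare ( dMinMax.Begin(), dMinMax.Begin() + dMinMax.GetLength() );

	DWORD uStride = DOCINFO_IDSIZE + tSchema.GetRowSize();	// docid header plus attributes
	for ( DWORD uDoc=0; uDoc<uDocs; uDoc++ )
		if ( !tBuilder.Collect ( pRows + uDoc*uStride, pMvaPool, iMvaCount, sError, true ) )
			return false;									// broken MVA data, sError explains

	tBuilder.FinishCollect();
	return true;
}
#endif
// ---------------------------------------------------------------------------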
} } ////////////////////////////////////////////////////////////////////////// // INLINES, FIND_XXX() GENERIC FUNCTIONS ////////////////////////////////////////////////////////////////////////// /// find a value-enclosing span in a sorted vector (aka an index at which vec[i] <= val < vec[i+1]) template < typename T > static int FindSpan ( const CSphVector & dVec, T tRef, int iSmallTreshold=8 ) { // empty vector if ( !dVec.GetLength() ) return -1; // check last semi-span if ( dVec.Last()1 ) { if ( tRef<*pStart || *pEnd>= iBits; } iBits >>= 1; uMask >>= iBits; } return iIdx; } ////////////////////////////////////////////////////////////////////////// // INLINES, UTF-8 TOOLS ////////////////////////////////////////////////////////////////////////// /// decode UTF-8 codepoint /// advances buffer ptr in all cases but end of buffer /// /// returns -1 on failure /// returns 0 on end of buffer /// returns codepoint on success inline int sphUTF8Decode ( BYTE * & pBuf ) { BYTE v = *pBuf; if ( !v ) return 0; pBuf++; // check for 7-bit case if ( v<128 ) return v; // get number of bytes int iBytes = 0; while ( v & 0x80 ) { iBytes++; v <<= 1; } // check for valid number of bytes if ( iBytes<2 || iBytes>4 ) return -1; int iCode = ( v >> iBytes ); iBytes--; do { if ( !(*pBuf) ) return 0; // unexpected eof if ( ((*pBuf) & 0xC0)!=0x80 ) return -1; // invalid code iCode = ( iCode<<6 ) + ( (*pBuf) & 0x3F ); iBytes--; pBuf++; } while ( iBytes ); // all good return iCode; } /// encode UTF-8 codepoint to buffer /// returns number of bytes used inline int sphUTF8Encode ( BYTE * pBuf, int iCode ) { if ( iCode<0x80 ) { pBuf[0] = (BYTE)( iCode & 0x7F ); return 1; } else if ( iCode<0x800 ) { pBuf[0] = (BYTE)( ( (iCode>>6) & 0x1F ) | 0xC0 ); pBuf[1] = (BYTE)( ( iCode & 0x3F ) | 0x80 ); return 2; } else { pBuf[0] = (BYTE)( ( (iCode>>12) & 0x0F ) | 0xE0 ); pBuf[1] = (BYTE)( ( (iCode>>6) & 0x3F ) | 0x80 ); pBuf[2] = (BYTE)( ( iCode & 0x3F ) | 0x80 ); return 3; } } /// compute UTF-8 string length in codepoints inline int sphUTF8Len ( const char * pStr ) { if ( !pStr || *pStr=='\0' ) return 0; BYTE * pBuf = (BYTE*) pStr; int iRes = 0, iCode; while ( ( iCode = sphUTF8Decode(pBuf) )!=0 ) if ( iCode>0 ) iRes++; return iRes; } /// compute UTF-8 string length in codepoints inline int sphUTF8Len ( const char * pStr, int iMax ) { if ( !pStr || *pStr=='\0' ) return 0; BYTE * pBuf = (BYTE*) pStr; BYTE * pMax = pBuf + iMax; int iRes = 0, iCode; while ( pBuf0 ) iRes++; return iRes; } ////////////////////////////////////////////////////////////////////////// // MATCHING ENGINE INTERNALS ////////////////////////////////////////////////////////////////////////// /// hit in the stream struct ExtHit_t { SphDocID_t m_uDocid; Hitpos_t m_uHitpos; WORD m_uQuerypos; WORD m_uNodepos; WORD m_uSpanlen; WORD m_uMatchlen; DWORD m_uWeight; }; enum SphZoneHit_e { SPH_ZONE_FOUND, SPH_ZONE_NO_SPAN, SPH_ZONE_NO_DOCUMENT }; class ISphZoneCheck { public: virtual ~ISphZoneCheck () {} virtual SphZoneHit_e IsInZone ( int iZone, const ExtHit_t * pHit ) = 0; }; ////////////////////////////////////////////////////////////////////////// // INLINES, MISC ////////////////////////////////////////////////////////////////////////// inline const char * sphTypeName ( ESphAttr eType ) { switch ( eType ) { case SPH_ATTR_NONE: return "none"; case SPH_ATTR_INTEGER: return "uint"; case SPH_ATTR_TIMESTAMP: return "timestamp"; case SPH_ATTR_ORDINAL: return "ordinal"; case SPH_ATTR_BOOL: return "bool"; case SPH_ATTR_FLOAT: return "float"; case SPH_ATTR_BIGINT: return "bigint"; 
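// ---------------------------------------------------------------------------
// Illustrative aside (not part of the original source): a round trip through
// the UTF-8 helpers above. Note that sphUTF8Encode() as written emits at most
// three bytes (codepoints up to U+FFFF), while sphUTF8Decode() accepts
// sequences of two to four bytes.
#if 0
static void SketchUtf8RoundTrip ()
{
	BYTE dBuf[8];
	int iBytes = sphUTF8Encode ( dBuf, 0x044F );	// CYRILLIC SMALL LETTER YA, two bytes (D1 8F)
	dBuf[iBytes] = '\0';

	BYTE * p = dBuf;
	int iCode = sphUTF8Decode ( p );				// decodes 0x044F, advances p past the sequence
	assert ( iCode==0x044F );
	assert ( sphUTF8Len ( (const char*)dBuf )==1 );	// one codepoint, even though it is two bytes
}
#endif
// ---------------------------------------------------------------------------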
case SPH_ATTR_STRING: return "string"; case SPH_ATTR_WORDCOUNT: return "wordcount"; case SPH_ATTR_UINT32SET: return "mva"; case SPH_ATTR_UINT64SET: return "mva64"; default: return "unknown"; } } inline const char * sphTypeDirective ( ESphAttr eType ) { switch ( eType ) { case SPH_ATTR_NONE: return "???"; case SPH_ATTR_INTEGER: return "sql_attr_uint"; case SPH_ATTR_TIMESTAMP: return "sql_attr_timestamp"; case SPH_ATTR_ORDINAL: return "sql_attr_str2ordinal"; case SPH_ATTR_BOOL: return "sql_attr_bool"; case SPH_ATTR_FLOAT: return "sql_attr_float"; case SPH_ATTR_BIGINT: return "sql_attr_bigint"; case SPH_ATTR_STRING: return "sql_attr_string"; case SPH_ATTR_WORDCOUNT: return "sql_attr_wordcount"; case SPH_ATTR_UINT32SET: return "sql_attr_multi"; case SPH_ATTR_UINT64SET: return "sql_attr_multi bigint"; default: return "???"; } } inline void SqlUnescape ( CSphString & sRes, const char * sEscaped, int iLen ) { assert ( iLen>=2 ); assert ( sEscaped[0]=='\'' ); assert ( sEscaped[iLen-1]=='\'' ); // skip heading and trailing quotes const char * s = sEscaped+1; const char * sMax = s+iLen-2; sRes.Reserve ( iLen ); char * d = (char*) sRes.cstr(); while ( sLoadStopwords ( sFiles, pTokenizer ); } virtual bool LoadWordforms ( const char * sFile, ISphTokenizer * pTokenizer, const char * sIndex ) { return m_pDict->LoadWordforms ( sFile, pTokenizer, sIndex ); } virtual bool SetMorphology ( const char * szMorph, bool bUseUTF8, CSphString & sError ) { return m_pDict->SetMorphology ( szMorph, bUseUTF8, sError ); } virtual SphWordID_t GetWordID ( const BYTE * pWord, int iLen, bool bFilterStops ) { return m_pDict->GetWordID ( pWord, iLen, bFilterStops ); } virtual void Setup ( const CSphDictSettings & ) {} virtual const CSphDictSettings & GetSettings () const { return m_pDict->GetSettings (); } virtual const CSphVector & GetStopwordsFileInfos () { return m_pDict->GetStopwordsFileInfos (); } virtual const CSphSavedFile & GetWordformsFileInfo () { return m_pDict->GetWordformsFileInfo (); } virtual const CSphMultiformContainer * GetMultiWordforms () const { return m_pDict->GetMultiWordforms (); } virtual bool IsStopWord ( const BYTE * pWord ) const { return m_pDict->IsStopWord ( pWord ); } protected: CSphDict * m_pDict; }; /// dict wrapper for star-syntax support in prefix-indexes class CSphDictStar : public CSphDictTraits { public: explicit CSphDictStar ( CSphDict * pDict ) : CSphDictTraits ( pDict ) {} virtual SphWordID_t GetWordID ( BYTE * pWord ); virtual SphWordID_t GetWordIDNonStemmed ( BYTE * pWord ); }; /// star dict for index v.8+ class CSphDictStarV8 : public CSphDictStar { public: CSphDictStarV8 ( CSphDict * pDict, bool bPrefixes, bool bInfixes ); virtual SphWordID_t GetWordID ( BYTE * pWord ); private: bool m_bPrefixes; bool m_bInfixes; }; /// dict wrapper for exact-word syntax class CSphDictExact : public CSphDictTraits { public: explicit CSphDictExact ( CSphDict * pDict ) : CSphDictTraits ( pDict ) {} virtual SphWordID_t GetWordID ( BYTE * pWord ); }; ////////////////////////////////////////////////////////////////////////// // USER VARIABLES ////////////////////////////////////////////////////////////////////////// /// value container for the intset uservar type class UservarIntSet_c : public CSphVector, public ISphRefcountedMT { }; ////////////////////////////////////////////////////////////////////////// // BINLOG INTERNALS ////////////////////////////////////////////////////////////////////////// /// global binlog interface class ISphBinlog : ISphNoncopyable { public: virtual ~ISphBinlog () {} 
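// ---------------------------------------------------------------------------
// Illustrative aside (not part of the original source): CSphDictStar,
// CSphDictStarV8 and CSphDictExact above are thin decorators over a base
// CSphDict; they only override GetWordID() for star / exact-form keyword
// syntax and forward everything else through CSphDictTraits. Where and when
// the engine actually applies the wrapping is decided elsewhere, so treat the
// snippet below purely as an illustration of the decorator pattern:
#if 0
static SphWordID_t SketchStarWordID ( CSphDict * pBase, BYTE * sWord )
{
	CSphDictStar tStarDict ( pBase );		// decorate the base dictionary, do not copy it
	return tStarDict.GetWordID ( sWord );	// star-aware mapping; the base dict does the hashing
}
#endif
// ---------------------------------------------------------------------------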
virtual void BinlogUpdateAttributes ( int64_t * pTID, const char * sIndexName, const CSphAttrUpdate & tUpd ) = 0; virtual void NotifyIndexFlush ( const char * sIndexName, int64_t iTID, bool bShutdown ) = 0; }; ////////////////////////////////////////////////////////////////////////// // MISC FUNCTION PROTOTYPES ////////////////////////////////////////////////////////////////////////// struct SphStringSorterRemap_t { CSphAttrLocator m_tSrc; CSphAttrLocator m_tDst; }; void SafeClose ( int & iFD ); const BYTE * SkipQuoted ( const BYTE * p ); ISphExpr * sphSortSetupExpr ( const CSphString & sName, const CSphSchema & tIndexSchema ); bool sphSortGetStringRemap ( const CSphSchema & tSorterSchema, const CSphSchema & tIndexSchema, CSphVector & dAttrs ); bool sphIsSortStringInternal ( const char * sColumnName ); bool sphWriteThrottled ( int iFD, const void * pBuf, int64_t iCount, const char * sName, CSphString & sError ); void sphMergeStats ( CSphQueryResultMeta & tDstResult, const SmallStringHash_T & hSrc ); bool sphCheckQueryHeight ( const struct XQNode_t * pRoot, CSphString & sError ); void sphTransformExtendedQuery ( XQNode_t ** ppNode ); void WriteSchema ( CSphWriter & fdInfo, const CSphSchema & tSchema ); void ReadSchema ( CSphReader & rdInfo, CSphSchema & m_tSchema, DWORD uVersion, bool bDynamic ); void SaveIndexSettings ( CSphWriter & tWriter, const CSphIndexSettings & m_tSettings ); void LoadIndexSettings ( CSphIndexSettings & tSettings, CSphReader & tReader, DWORD uVersion ); void SaveTokenizerSettings ( CSphWriter & tWriter, ISphTokenizer * pTokenizer ); void LoadTokenizerSettings ( CSphReader & tReader, CSphTokenizerSettings & tSettings, DWORD uVersion, CSphString & sWarning ); void SaveDictionarySettings ( CSphWriter & tWriter, CSphDict * pDict, bool bForceWordDict ); void LoadDictionarySettings ( CSphReader & tReader, CSphDictSettings & tSettings, DWORD uVersion, CSphString & sWarning ); int sphDictCmp ( const char * pStr1, int iLen1, const char * pStr2, int iLen2 ); int sphDictCmpStrictly ( const char * pStr1, int iLen1, const char * pStr2, int iLen2 ); template int sphCheckpointCmp ( const char * sWord, int iLen, SphWordID_t iWordID, bool bWordDict, const CP & tCP ) { if ( bWordDict ) return sphDictCmp ( sWord, iLen, tCP.m_sWord, strlen ( tCP.m_sWord ) ); int iRes = 0; iRes = iWordIDtCP.m_iWordID ? 1 : iRes; return iRes; } template int sphCheckpointCmpStrictly ( const char * sWord, int iLen, SphWordID_t iWordID, bool bWordDict, const CP & tCP ) { if ( bWordDict ) return sphDictCmpStrictly ( sWord, iLen, tCP.m_sWord, strlen ( tCP.m_sWord ) ); int iRes = 0; iRes = iWordIDtCP.m_iWordID ? 
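// ---------------------------------------------------------------------------
// Illustrative aside (not part of the original source): for wordid-based
// (non-keyword) dictionaries, sphCheckpointCmp/sphCheckpointCmpStrictly reduce
// to a plain three-way compare of the word id against the checkpoint's id. A
// minimal sketch of that comparison, with an assumed helper name:
#if 0
static int SketchCmpWordID ( SphWordID_t iWordID, SphWordID_t iCheckpointID )
{
	if ( iWordID<iCheckpointID ) return -1;	// word sorts before the checkpoint
	if ( iWordID>iCheckpointID ) return 1;	// word sorts after the checkpoint
	return 0;								// exact hit
}
#endif
// ---------------------------------------------------------------------------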
1 : iRes; return iRes; } template < typename CP > const CP * sphSearchCheckpoint ( const char * sWord, int iWordLen, SphWordID_t iWordID , bool bStarMode, bool bWordDict , const CP * pFirstCP, const CP * pLastCP ) { assert ( !bWordDict || iWordLen>0 ); const CP * pStart = pFirstCP; const CP * pEnd = pLastCP; if ( bStarMode && sphCheckpointCmp ( sWord, iWordLen, iWordID, bWordDict, *pStart )<0 ) return NULL; if ( !bStarMode && sphCheckpointCmpStrictly ( sWord, iWordLen, iWordID, bWordDict, *pStart )<0 ) return NULL; if ( sphCheckpointCmpStrictly ( sWord, iWordLen, iWordID, bWordDict, *pEnd )>=0 ) pStart = pEnd; else { while ( pEnd-pStart>1 ) { const CP * pMid = pStart + (pEnd-pStart)/2; const int iCmpRes = sphCheckpointCmpStrictly ( sWord, iWordLen, iWordID, bWordDict, *pMid ); if ( iCmpRes==0 ) { pStart = pMid; break; } else if ( iCmpRes<0 ) pEnd = pMid; else pStart = pMid; } assert ( pStart>=pFirstCP ); assert ( pStart<=pLastCP ); assert ( sphCheckpointCmp ( sWord, iWordLen, iWordID, bWordDict, *pStart )>=0 && sphCheckpointCmpStrictly ( sWord, iWordLen, iWordID, bWordDict, *pEnd )<0 ); } return pStart; } class ISphRtDictWraper : public CSphDict { public: virtual const BYTE * GetPackedKeywords () = 0; virtual int GetPackedLen () = 0; virtual void ResetKeywords() = 0; }; ISphRtDictWraper * sphCreateRtKeywordsDictionaryWrapper ( CSphDict * pBase ); class ISphWordlist { public: virtual ~ISphWordlist () {} virtual void GetPrefixedWords ( const char * sWord, int iWordLen, CSphVector & dPrefixedWords, BYTE * pDictBuf, int iFD ) const = 0; }; struct ExpansionContext_t { const ISphWordlist * m_pWordlist; BYTE * m_pBuf; CSphQueryResultMeta * m_pResult; int m_iFD; int m_iMinPrefixLen; int m_iExpansionLimit; bool m_bStarEnabled; bool m_bHasMorphology; }; XQNode_t * sphExpandXQNode ( XQNode_t * pNode, ExpansionContext_t & tCtx ); class CSphKeywordDeltaWriter { private: BYTE m_sLastKeyword [SPH_MAX_WORD_LEN*3+4]; int m_iLastLen; public: CSphKeywordDeltaWriter () { Reset(); } void Reset () { m_iLastLen = 0; } template void PutDelta ( F & WRITER, const BYTE * pWord, int iLen ) { assert ( pWord && iLen ); // how many bytes of a previous keyword can we reuse? BYTE iMatch = 0; int iMinLen = Min ( m_iLastLen, iLen ); assert ( iMinLen<(int)sizeof(m_sLastKeyword) ); while ( iMatch0 ); memcpy ( m_sLastKeyword, pWord, iLen ); m_iLastLen = iLen; // match and delta are usually tiny, pack them together in 1 byte // tricky bit, this byte leads the entry so it must never be 0 (aka eof mark)! if ( iDelta<=8 && iMatch<=15 ) { BYTE uPacked = ( 0x80 + ( (iDelta-1)<<4 ) + iMatch ); WRITER.PutBytes ( &uPacked, 1 ); } else { WRITER.PutBytes ( &iDelta, 1 ); // always greater than 0 WRITER.PutBytes ( &iMatch, 1 ); } WRITER.PutBytes ( pWord + iMatch, iDelta ); } }; BYTE sphDoclistHintPack ( SphOffset_t iDocs, SphOffset_t iLen ); // wordlist checkpoints frequency #define SPH_WORDLIST_CHECKPOINT 64 /// startup mva updates arena const char * sphArenaInit ( int iMaxBytes ); #endif // _sphinxint_ // // $Id: sphinxint.h 3117 2012-02-22 20:30:36Z tomat $ // sphinx-2.0.4-release/src/sphinxstd.cpp0000644000176700017710000007410611723624274017302 0ustar deogardeogar// // $Id: sphinxstd.cpp 3130 2012-03-01 07:43:56Z tomat $ // // // Copyright (c) 2001-2012, Andrew Aksyonoff // Copyright (c) 2008-2012, Sphinx Technologies Inc // All rights reserved // // This program is free software; you can redistribute it and/or modify // it under the terms of the GNU General Public License. 
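// ---------------------------------------------------------------------------
// Illustrative aside (not part of the original source): a possible reader for
// the prefix-delta keyword format produced by CSphKeywordDeltaWriter::PutDelta()
// above. The leading byte either packs (delta,match) into one byte (high bit
// set) or is followed by a separate match byte; the previous keyword supplies
// the shared prefix. READER is assumed to expose GetByte()/GetBytes() in the
// style of CSphReader; this is a sketch, not the engine's actual decoder.
#if 0
template < typename READER >
static int SketchGetDelta ( READER & tReader, BYTE * sKeyword, int iLastLen )
{
	int iFirst = tReader.GetByte();
	if ( !iFirst )
		return 0;							// leading 0 is the end-of-list mark

	int iDelta, iMatch;
	if ( iFirst & 0x80 )
	{
		iDelta = ( ( iFirst>>4 ) & 7 ) + 1;	// packed form: 0x80 + ((delta-1)<<4) + match
		iMatch = iFirst & 15;
	} else
	{
		iDelta = iFirst;					// unpacked form: delta byte, then match byte
		iMatch = tReader.GetByte();
	}

	assert ( iMatch<=iLastLen );
	tReader.GetBytes ( sKeyword + iMatch, iDelta );	// keep iMatch prefix bytes, append the new suffix
	return iMatch + iDelta;					// length of the reconstructed keyword
}
#endif
// ---------------------------------------------------------------------------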
You should have // received a copy of the GPL license along with this program; if you // did not, you can find it at http://www.gnu.org/ // #include "sphinx.h" #include "sphinxint.h" #include "sphinxutils.h" #if !USE_WINDOWS #include // for gettimeofday #endif static int g_iThreadStackSize = 65536; ////////////////////////////////////////////////////////////////////////// #if USE_WINDOWS #ifndef NDEBUG void sphAssert ( const char * sExpr, const char * sFile, int iLine ) { char sBuffer [ 1024 ]; _snprintf ( sBuffer, sizeof(sBuffer), "%s(%d): assertion %s failed\n", sFile, iLine, sExpr ); if ( MessageBox ( NULL, sBuffer, "Assert failed! Cancel to debug.", MB_OKCANCEL | MB_TOPMOST | MB_SYSTEMMODAL | MB_ICONEXCLAMATION )!=IDOK ) { __debugbreak (); } else { fprintf ( stdout, "%s", sBuffer ); exit ( 1 ); } } #endif // !NDEBUG #endif // USE_WINDOWS ///////////////////////////////////////////////////////////////////////////// // DEBUG MEMORY MANAGER ///////////////////////////////////////////////////////////////////////////// #if SPH_DEBUG_LEAKS #undef new #define SPH_DEBUG_DOFREE 1 // 0 will not actually free returned blocks; helps to catch double deletes etc const DWORD MEMORY_MAGIC_PLAIN = 0xbbbbbbbbUL; const DWORD MEMORY_MAGIC_ARRAY = 0xaaaaaaaaUL; const DWORD MEMORY_MAGIC_END = 0xeeeeeeeeUL; const DWORD MEMORY_MAGIC_DELETED = 0xdedededeUL; struct CSphMemHeader { DWORD m_uMagic; const char * m_sFile; int m_iLine; size_t m_iSize; int m_iAllocId; CSphMemHeader * m_pNext; CSphMemHeader * m_pPrev; }; static CSphStaticMutex g_tAllocsMutex; static int g_iCurAllocs = 0; static int g_iAllocsId = 0; static CSphMemHeader * g_pAllocs = NULL; static int64_t g_iCurBytes = 0; static int g_iTotalAllocs = 0; static int g_iPeakAllocs = 0; static int64_t g_iPeakBytes = 0; void * sphDebugNew ( size_t iSize, const char * sFile, int iLine, bool bArray ) { BYTE * pBlock = (BYTE*) ::malloc ( iSize+sizeof(CSphMemHeader)+sizeof(DWORD) ); if ( !pBlock ) sphDie ( "out of memory (unable to allocate "UINT64_FMT" bytes)", (uint64_t)iSize ); // FIXME! this may fail with malloc error too *(DWORD*)( pBlock+iSize+sizeof(CSphMemHeader) ) = MEMORY_MAGIC_END; g_tAllocsMutex.Lock(); CSphMemHeader * pHeader = (CSphMemHeader*) pBlock; pHeader->m_uMagic = bArray ? 
MEMORY_MAGIC_ARRAY : MEMORY_MAGIC_PLAIN; pHeader->m_sFile = sFile; pHeader->m_iLine = iLine; pHeader->m_iSize = iSize; pHeader->m_iAllocId = ++g_iAllocsId; pHeader->m_pNext = g_pAllocs; pHeader->m_pPrev = NULL; if ( g_pAllocs ) { assert ( !g_pAllocs->m_pPrev ); g_pAllocs->m_pPrev = pHeader; } g_pAllocs = pHeader; g_iCurAllocs++; g_iCurBytes += iSize; g_iTotalAllocs++; g_iPeakAllocs = Max ( g_iPeakAllocs, g_iCurAllocs ); g_iPeakBytes = Max ( g_iPeakBytes, g_iCurBytes ); g_tAllocsMutex.Unlock(); return pHeader+1; } void sphDebugDelete ( void * pPtr, bool bArray ) { if ( !pPtr ) return; g_tAllocsMutex.Lock(); CSphMemHeader * pHeader = ((CSphMemHeader*)pPtr)-1; switch ( pHeader->m_uMagic ) { case MEMORY_MAGIC_ARRAY: if ( !bArray ) sphDie ( "delete [] on non-array block %d allocated at %s(%d)", pHeader->m_iAllocId, pHeader->m_sFile, pHeader->m_iLine ); break; case MEMORY_MAGIC_PLAIN: if ( bArray ) sphDie ( "delete on array block %d allocated at %s(%d)", pHeader->m_iAllocId, pHeader->m_sFile, pHeader->m_iLine ); break; case MEMORY_MAGIC_DELETED: sphDie ( "double delete on block %d allocated at %s(%d)", pHeader->m_iAllocId, pHeader->m_sFile, pHeader->m_iLine ); break; default: sphDie ( "delete on unmanaged block at 0x%08x", pPtr ); return; } BYTE * pBlock = (BYTE*) pHeader; if ( *(DWORD*)( pBlock+pHeader->m_iSize+sizeof(CSphMemHeader) )!=MEMORY_MAGIC_END ) sphDie ( "out-of-bounds write beyond block %d allocated at %s(%d)", pHeader->m_iAllocId, pHeader->m_sFile, pHeader->m_iLine ); // unchain if ( pHeader==g_pAllocs ) g_pAllocs = g_pAllocs->m_pNext; if ( pHeader->m_pPrev ) { assert ( pHeader->m_pPrev->m_uMagic==MEMORY_MAGIC_PLAIN || pHeader->m_pPrev->m_uMagic==MEMORY_MAGIC_ARRAY ); pHeader->m_pPrev->m_pNext = pHeader->m_pNext; } if ( pHeader->m_pNext ) { assert ( pHeader->m_pNext->m_uMagic==MEMORY_MAGIC_PLAIN || pHeader->m_pNext->m_uMagic==MEMORY_MAGIC_ARRAY ); pHeader->m_pNext->m_pPrev = pHeader->m_pPrev; } pHeader->m_pPrev = NULL; pHeader->m_pNext = NULL; // mark and delete pHeader->m_uMagic = MEMORY_MAGIC_DELETED; g_iCurAllocs--; g_iCurBytes -= pHeader->m_iSize; #if SPH_DEBUG_DOFREE ::free ( pHeader ); #endif g_tAllocsMutex.Unlock(); } int64_t sphAllocBytes () { return g_iCurBytes; } int sphAllocsCount () { return g_iCurAllocs; } int sphAllocsLastID () { return g_iAllocsId; } void sphAllocsDump ( int iFile, int iSinceID ) { g_tAllocsMutex.Lock(); sphSafeInfo ( iFile, "--- dumping allocs since %d ---\n", iSinceID ); uint64_t iTotalBytes = 0; int iTotal = 0; for ( CSphMemHeader * pHeader = g_pAllocs; pHeader && pHeader->m_iAllocId > iSinceID; pHeader = pHeader->m_pNext ) { sphSafeInfo ( iFile, "alloc %d at %s(%d): %d bytes\n", pHeader->m_iAllocId, pHeader->m_sFile, pHeader->m_iLine, (int)pHeader->m_iSize ); iTotalBytes += pHeader->m_iSize; iTotal++; } sphSafeInfo ( iFile, "total allocs %d: %d.%03d bytes", iTotal, (int)(iTotalBytes/1024), (int)(iTotalBytes%1000) ); sphSafeInfo ( iFile, "--- end of dump ---\n" ); g_tAllocsMutex.Unlock(); } void sphAllocsStats () { fprintf ( stdout, "--- total-allocs=%d, peak-allocs=%d, peak-bytes="INT64_FMT"\n", g_iTotalAllocs, g_iPeakAllocs, g_iPeakBytes ); } void sphAllocsCheck () { g_tAllocsMutex.Lock(); for ( CSphMemHeader * pHeader=g_pAllocs; pHeader; pHeader=pHeader->m_pNext ) { BYTE * pBlock = (BYTE*) pHeader; if (!( pHeader->m_uMagic==MEMORY_MAGIC_ARRAY || pHeader->m_uMagic==MEMORY_MAGIC_PLAIN )) sphDie ( "corrupted header in block %d allocated at %s(%d)", pHeader->m_iAllocId, pHeader->m_sFile, pHeader->m_iLine ); if ( *(DWORD*)( 
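// ---------------------------------------------------------------------------
// Illustrative aside (not part of the original source): the debug allocator
// above lays every block out as
//
//   [ CSphMemHeader ][ iSize user bytes ][ DWORD MEMORY_MAGIC_END ]
//
// sphDebugNew() returns a pointer just past the header; sphDebugDelete() uses
// m_uMagic to catch new/delete[] mismatches and double frees, and the trailing
// magic DWORD to catch writes past the end of the block. For example, under
// SPH_DEBUG_LEAKS a one-byte overrun like the one below is reported on delete
// instead of silently corrupting the heap:
#if 0
static void SketchOverrun ()
{
	char * pBuf = new char[8];
	pBuf[8] = 0;		// one byte past the end tramples MEMORY_MAGIC_END
	delete [] pBuf;		// sphDebugDelete() dies with "out-of-bounds write beyond block ..."
}
#endif
// ---------------------------------------------------------------------------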
pBlock+pHeader->m_iSize+sizeof(CSphMemHeader) )!=MEMORY_MAGIC_END ) sphDie ( "out-of-bounds write beyond block %d allocated at %s(%d)", pHeader->m_iAllocId, pHeader->m_sFile, pHeader->m_iLine ); } g_tAllocsMutex.Unlock(); } void sphMemStatInit () {} void sphMemStatDone () {} void sphMemStatDump ( int ) {} ////////////////////////////////////////////////////////////////////////// void * operator new ( size_t iSize, const char * sFile, int iLine ) { return sphDebugNew ( iSize, sFile, iLine, false ); } void * operator new [] ( size_t iSize, const char * sFile, int iLine ) { return sphDebugNew ( iSize, sFile, iLine, true ); } void operator delete ( void * pPtr ) { sphDebugDelete ( pPtr, false ); } void operator delete [] ( void * pPtr ) { sphDebugDelete ( pPtr, true ); } ////////////////////////////////////////////////////////////////////////////// // ALLOCACTIONS COUNT/SIZE PROFILER ////////////////////////////////////////////////////////////////////////////// #else #if SPH_ALLOCS_PROFILER #undef new static CSphStaticMutex g_tAllocsMutex; static int g_iAllocsId = 0; static int g_iCurAllocs = 0; static int64_t g_iCurBytes = 0; static int g_iTotalAllocs = 0; static int g_iPeakAllocs = 0; static int64_t g_iPeakBytes = 0; // statictic's per memory category struct MemCategorized_t { int64_t m_iSize; int m_iCount; MemCategorized_t() : m_iSize ( 0 ) , m_iCount ( 0 ) { } }; static Memory::Category_e sphMemStatGet (); // memory categories storage static MemCategorized_t g_dMemCategoryStat[Memory::SPH_MEM_TOTAL]; ////////////////////////////////////////////////////////////////////////// // ALLOCATIONS COUNT/SIZE PROFILER ////////////////////////////////////////////////////////////////////////// void * sphDebugNew ( size_t iSize ) { BYTE * pBlock = (BYTE*) ::malloc ( iSize+sizeof(size_t)*2 ); if ( !pBlock ) sphDie ( "out of memory (unable to allocate %"PRIu64" bytes)", (uint64_t)iSize ); // FIXME! 
this may fail with malloc error too const int iMemType = sphMemStatGet(); assert ( iMemType>=0 && iMemType=0 && iMemType=0 && eCategory=0 && eCategory=0 ); assert ( m_dStack[m_iDepth]==eCategory ); m_iDepth--; } Memory::Category_e Top () const { assert ( m_iDepth>= 0 && m_iDepth=0 && m_dStack[m_iDepth]Reset(); Verify ( sphThreadSet ( g_tTLSMemCategory, pTLS ) ); return pTLS; } // per thread cleanup of memory statistic's static void sphMemStatThdCleanup ( MemCategoryStack_t * pTLS ) { sphDebugDelete ( pTLS ); } // init of memory statistic's data static void sphMemStatInit () { Verify ( sphThreadKeyCreate ( &g_tTLSMemCategory ) ); // main thread statistic's creation assert ( g_pMainTLS==NULL ); g_pMainTLS = sphMemStatThdInit(); assert ( g_pMainTLS!=NULL ); } // cleanup of memory statistic's data static void sphMemStatDone () { assert ( g_pMainTLS!=NULL ); sphMemStatThdCleanup ( g_pMainTLS ); sphThreadKeyDelete ( g_tTLSMemCategory ); } // direct access for special category void sphMemStatMMapAdd ( int64_t iSize ) { g_tAllocsMutex.Lock (); g_iCurAllocs++; g_iCurBytes += iSize; g_iTotalAllocs++; g_iPeakAllocs = Max ( g_iCurAllocs, g_iPeakAllocs ); g_iPeakBytes = Max ( g_iCurBytes, g_iPeakBytes ); g_dMemCategoryStat[Memory::SPH_MEM_MMAPED].m_iSize += iSize; g_dMemCategoryStat[Memory::SPH_MEM_MMAPED].m_iCount++; g_tAllocsMutex.Unlock (); } void sphMemStatMMapDel ( int64_t iSize ) { g_tAllocsMutex.Lock (); g_iCurAllocs--; g_iCurBytes -= iSize; g_dMemCategoryStat[Memory::SPH_MEM_MMAPED].m_iSize -= iSize; g_dMemCategoryStat[Memory::SPH_MEM_MMAPED].m_iCount--; g_tAllocsMutex.Unlock (); } // push new category on arrival void sphMemStatPush ( Memory::Category_e eCategory ) { MemCategoryStack_t * pTLS = (MemCategoryStack_t*) sphThreadGet ( g_tTLSMemCategory ); if ( pTLS ) pTLS->Push ( eCategory ); }; // restore last category void sphMemStatPop ( Memory::Category_e eCategory ) { MemCategoryStack_t * pTLS = (MemCategoryStack_t*) sphThreadGet ( g_tTLSMemCategory ); if ( pTLS ) pTLS->Pop ( eCategory ); }; // get current category static Memory::Category_e sphMemStatGet () { MemCategoryStack_t * pTLS = (MemCategoryStack_t*) sphThreadGet ( g_tTLSMemCategory ); return pTLS ? pTLS->Top() : Memory::SPH_MEM_CORE; } // human readable category names static const char* g_dMemCategoryName[] = { "core" , "index_disk", "index_rt", "index_rt_accum" , "mmaped", "binlog" , "hnd_disk", "hnd_sql" , "search_disk", "query_disk", "insert_sql", "select_sql", "delete_sql", "commit_set_sql", "commit_start_t_sql", "commit_sql" , "mquery_disk", "mqueryex_disk", "mquery_rt" , "rt_res_matches", "rt_res_strings" }; STATIC_ASSERT ( sizeof(g_dMemCategoryName)/sizeof(g_dMemCategoryName[0])==Memory::SPH_MEM_TOTAL, MEM_STAT_NAME_MISMATCH ); // output of memory statistic's void sphMemStatDump ( int iFD ) { int64_t iSize = 0; int iCount = 0; for ( int i=0; i0 ) { iSize = (int64_t) g_dMemCategoryStat[i].m_iSize; sphSafeInfo ( iFD, "%-24s allocs-count=%d, mem-total=%d.%d Mb", g_dMemCategoryName[i], g_dMemCategoryStat[i].m_iCount, (int)(iSize/1048576), (int)( (iSize*10/1048576)%10 ) ); } } ////////////////////////////////////////////////////////////////////////////// // PRODUCTION MEMORY MANAGER ////////////////////////////////////////////////////////////////////////////// #else void * operator new ( size_t iSize ) { void * pResult = ::malloc ( iSize ); if ( !pResult ) sphDie ( "out of memory (unable to allocate "UINT64_FMT" bytes)", (uint64_t)iSize ); // FIXME! 
this may fail with malloc error too return pResult; } void * operator new [] ( size_t iSize ) { void * pResult = ::malloc ( iSize ); if ( !pResult ) sphDie ( "out of memory (unable to allocate "UINT64_FMT" bytes)", (uint64_t)iSize ); // FIXME! this may fail with malloc error too return pResult; } void operator delete ( void * pPtr ) { if ( pPtr ) ::free ( pPtr ); } void operator delete [] ( void * pPtr ) { if ( pPtr ) ::free ( pPtr ); } #endif // SPH_ALLOCS_PROFILER #endif // SPH_DEBUG_LEAKS ////////////////////////////////////////////////////////////////////////// // now let the rest of sphinxstd use proper new #if SPH_DEBUG_LEAKS || SPH_ALLOCS_PROFILER #undef new #define new new(__FILE__,__LINE__) #endif ///////////////////////////////////////////////////////////////////////////// // HELPERS ///////////////////////////////////////////////////////////////////////////// static SphDieCallback_t g_pfDieCallback = NULL; void sphSetDieCallback ( SphDieCallback_t pfDieCallback ) { g_pfDieCallback = pfDieCallback; } void sphDie ( const char * sTemplate, ... ) { char sBuf[256]; va_list ap; va_start ( ap, sTemplate ); vsnprintf ( sBuf, sizeof(sBuf), sTemplate, ap ); va_end ( ap ); // if there's no callback, // or if callback returns true, // log to stdout if ( !g_pfDieCallback || g_pfDieCallback ( sBuf ) ) fprintf ( stdout, "FATAL: %s\n", sBuf ); exit ( 1 ); } ////////////////////////////////////////////////////////////////////////// // RANDOM NUMBERS GENERATOR ////////////////////////////////////////////////////////////////////////// /// MWC (Multiply-With-Carry) RNG, invented by George Marsaglia static DWORD g_dRngState[5] = { 0x95d3474bUL, 0x035cf1f7UL, 0xfd43995fUL, 0x5dfc55fbUL, 0x334a9229UL }; /// seed void sphSrand ( DWORD uSeed ) { for ( int i=0; i<5; i++ ) { uSeed = uSeed*29943829 - 1; g_dRngState[i] = uSeed; } for ( int i=0; i<19; i++ ) sphRand(); } /// auto-seed RNG based on time and PID void sphAutoSrand () { // get timestamp #if !USE_WINDOWS struct timeval tv; gettimeofday ( &tv, NULL ); #else #define getpid() GetCurrentProcessId() struct { time_t tv_sec; DWORD tv_usec; } tv; FILETIME ft; GetSystemTimeAsFileTime ( &ft ); uint64_t ts = ( uint64_t(ft.dwHighDateTime)<<32 ) + uint64_t(ft.dwLowDateTime) - 116444736000000000ULL; // Jan 1, 1970 magic ts /= 10; // to microseconds tv.tv_sec = (DWORD)(ts/1000000); tv.tv_usec = (DWORD)(ts%1000000); #endif // twist and shout sphSrand ( sphRand() ^ DWORD(tv.tv_sec) ^ (DWORD(tv.tv_usec) + DWORD(getpid())) ); } /// generate another dword DWORD sphRand () { uint64_t uSum; uSum = (uint64_t)g_dRngState[0] * (uint64_t)5115 + (uint64_t)g_dRngState[1] * (uint64_t)1776 + (uint64_t)g_dRngState[2] * (uint64_t)1492 + (uint64_t)g_dRngState[3] * (uint64_t)2111111111UL + (uint64_t)g_dRngState[4]; g_dRngState[3] = g_dRngState[2]; g_dRngState[2] = g_dRngState[1]; g_dRngState[1] = g_dRngState[0]; g_dRngState[4] = (DWORD)( uSum>>32 ); g_dRngState[0] = (DWORD)uSum; return g_dRngState[0]; } ////////////////////////////////////////////////////////////////////////// #if !USE_WINDOWS CSphProcessSharedMutex::CSphProcessSharedMutex ( int iExtraSize ) { m_pMutex = NULL; pthread_mutexattr_t tAttr; int iRes = pthread_mutexattr_init ( &tAttr ); if ( iRes ) { m_sError.SetSprintf ( "pthread_mutexattr_init, errno=%d", iRes ); return; } iRes = pthread_mutexattr_setpshared ( &tAttr, PTHREAD_PROCESS_SHARED ); if ( iRes ) { m_sError.SetSprintf ( "pthread_mutexattr_setpshared, errno = %d", iRes ); return; } CSphString sError, sWarning; if ( !m_pStorage.Alloc ( sizeof(pthread_mutex_t) 
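// ---------------------------------------------------------------------------
// Illustrative aside (not part of the original source): sphRand() above is a
// Marsaglia multiply-with-carry generator over five 32-bit state words; the
// high half of the 64-bit sum becomes the new carry in g_dRngState[4] and the
// low half is returned. Typical use is to seed once (sphSrand or sphAutoSrand)
// and then draw values; note that a plain modulus introduces a small bias for
// ranges that do not divide 2^32, which is usually acceptable here.
#if 0
static DWORD SketchRandomInRange ( DWORD uRange )
{
	// assumes sphSrand()/sphAutoSrand() was called once at startup
	return sphRand() % uRange;		// slightly biased unless uRange divides 2^32 evenly
}
#endif
// ---------------------------------------------------------------------------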
+ iExtraSize, sError, sWarning ) ) { m_sError.SetSprintf ( "storage.alloc, error='%s', warning='%s'", sError.cstr(), sWarning.cstr() ); return; } m_pMutex = (pthread_mutex_t*) m_pStorage.GetWritePtr (); iRes = pthread_mutex_init ( m_pMutex, &tAttr ); if ( iRes ) { m_sError.SetSprintf ( "pthread_mutex_init, errno=%d ", iRes ); m_pMutex = NULL; m_pStorage.Reset (); return; } } #else CSphProcessSharedMutex::CSphProcessSharedMutex ( int ) {} #endif void CSphProcessSharedMutex::Lock () const { #if !USE_WINDOWS if ( m_pMutex ) pthread_mutex_lock ( m_pMutex ); #endif } void CSphProcessSharedMutex::Unlock () const { #if !USE_WINDOWS if ( m_pMutex ) pthread_mutex_unlock ( m_pMutex ); #endif } #if USE_WINDOWS bool CSphProcessSharedMutex::TimedLock ( int ) const { return false; #else bool CSphProcessSharedMutex::TimedLock ( int tmSpin ) const { if ( !m_pMutex ) return false; #if defined(HAVE_PTHREAD_MUTEX_TIMEDLOCK) && defined(HAVE_CLOCK_GETTIME) struct timespec tp; clock_gettime ( CLOCK_REALTIME, &tp ); tp.tv_nsec += tmSpin * 1000; if ( tp.tv_nsec > 1000000 ) { int iDelta = (int)( tp.tv_nsec / 1000000 ); tp.tv_sec += iDelta * 1000000; tp.tv_nsec -= iDelta * 1000000; } return ( pthread_mutex_timedlock ( m_pMutex, &tp )==0 ); #else int iRes = EBUSY; int64_t tmTill = sphMicroTimer() + tmSpin; do { iRes = pthread_mutex_trylock ( m_pMutex ); if ( iRes==EBUSY ) sphSleepMsec ( 0 ); } while ( iRes==EBUSY && sphMicroTimer()m_pCall ( pCall->m_pArg ); SafeDelete ( pCall ); ThreadCall_t * pCleanup = (ThreadCall_t*) sphThreadGet ( g_tThreadCleanupKey ); while ( pCleanup ) { pCall = pCleanup; pCall->m_pCall ( pCall->m_pArg ); pCleanup = pCall->m_pNext; SafeDelete ( pCall ); } #if SPH_ALLOCS_PROFILER sphMemStatThdCleanup ( pTLS ); #endif return 0; } #if !USE_WINDOWS void * sphThreadInit ( bool bDetached ) #else void * sphThreadInit ( bool ) #endif { static bool bInit = false; #if !USE_WINDOWS static pthread_attr_t tJoinableAttr; static pthread_attr_t tDetachedAttr; #endif if ( !bInit ) { #if SPH_DEBUG_LEAKS || SPH_ALLOCS_PROFILER sphMemStatInit(); #endif // we're single-threaded yet, right?! if ( !sphThreadKeyCreate ( &g_tThreadCleanupKey ) ) sphDie ( "FATAL: sphThreadKeyCreate() failed" ); if ( !sphThreadKeyCreate ( &g_tMyThreadStack ) ) sphDie ( "FATAL: sphThreadKeyCreate() failed" ); #if !USE_WINDOWS if ( pthread_attr_init ( &tJoinableAttr ) ) sphDie ( "FATAL: pthread_attr_init( joinable ) failed" ); if ( pthread_attr_init ( &tDetachedAttr ) ) sphDie ( "FATAL: pthread_attr_init( detached ) failed" ); if ( pthread_attr_setdetachstate ( &tDetachedAttr, PTHREAD_CREATE_DETACHED ) ) sphDie ( "FATAL: pthread_attr_setdetachstate( detached ) failed" ); #endif bInit = true; } #if !USE_WINDOWS if ( pthread_attr_setstacksize ( &tJoinableAttr, sphMyStackSize() ) ) sphDie ( "FATAL: pthread_attr_setstacksize( joinable ) failed" ); if ( pthread_attr_setstacksize ( &tDetachedAttr, sphMyStackSize() ) ) sphDie ( "FATAL: pthread_attr_setstacksize( detached ) failed" ); return bDetached ? 
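// ---------------------------------------------------------------------------
// Illustrative aside (not part of the original source): the portable thread
// wrappers below take a plain function-plus-argument pair; a joinable thread
// is created with bDetached=false and reaped with sphThreadJoin(). A minimal
// usage sketch (the worker payload is an assumption made for the example):
#if 0
static void SketchWorker ( void * pArg )
{
	int * pCounter = (int*)pArg;
	(*pCounter)++;					// trivial payload, stands in for real work
}

static bool SketchRunWorker ( int & iCounter )
{
	SphThread_t tThread;
	if ( !sphThreadCreate ( &tThread, SketchWorker, &iCounter, false ) )
		return false;				// creation failed, errno set on POSIX
	return sphThreadJoin ( &tThread );
}
#endif
// ---------------------------------------------------------------------------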
&tDetachedAttr : &tJoinableAttr; #else return NULL; #endif } #if SPH_DEBUG_LEAKS || SPH_ALLOCS_PROFILER void sphThreadDone ( int iFD ) { sphMemStatDump ( iFD ); sphMemStatDone(); } #else void sphThreadDone ( int ) { } #endif bool sphThreadCreate ( SphThread_t * pThread, void (*fnThread)(void*), void * pArg, bool bDetached ) { // we can not merely put this on current stack // as it might get destroyed before wrapper sees it ThreadCall_t * pCall = new ThreadCall_t; pCall->m_pCall = fnThread; pCall->m_pArg = pArg; pCall->m_pNext = NULL; // create thread #if USE_WINDOWS sphThreadInit ( bDetached ); *pThread = CreateThread ( NULL, sphMyStackSize(), sphThreadProcWrapper, pCall, 0, NULL ); if ( *pThread ) return true; #else void * pAttr = sphThreadInit ( bDetached ); errno = pthread_create ( pThread, (pthread_attr_t*) pAttr, sphThreadProcWrapper, pCall ); if ( !errno ) return true; #endif // thread creation failed so we need to cleanup ourselves SafeDelete ( pCall ); return false; } bool sphThreadJoin ( SphThread_t * pThread ) { #if USE_WINDOWS DWORD uWait = WaitForSingleObject ( *pThread, INFINITE ); CloseHandle ( *pThread ); *pThread = NULL; return ( uWait==WAIT_OBJECT_0 || uWait==WAIT_ABANDONED ); #else return pthread_join ( *pThread, NULL )==0; #endif } void sphThreadOnExit ( void (*fnCleanup)(void*), void * pArg ) { ThreadCall_t * pCleanup = new ThreadCall_t; pCleanup->m_pCall = fnCleanup; pCleanup->m_pArg = pArg; pCleanup->m_pNext = (ThreadCall_t*) sphThreadGet ( g_tThreadCleanupKey ); sphThreadSet ( g_tThreadCleanupKey, pCleanup ); } bool sphThreadKeyCreate ( SphThreadKey_t * pKey ) { #if USE_WINDOWS *pKey = TlsAlloc(); return *pKey!=TLS_OUT_OF_INDEXES; #else return pthread_key_create ( pKey, NULL )==0; #endif } void sphThreadKeyDelete ( SphThreadKey_t tKey ) { #if USE_WINDOWS TlsFree ( tKey ); #else pthread_key_delete ( tKey ); #endif } void * sphThreadGet ( SphThreadKey_t tKey ) { #if USE_WINDOWS return TlsGetValue ( tKey ); #else return pthread_getspecific ( tKey ); #endif } void * sphMyStack () { return sphThreadGet ( g_tMyThreadStack ); } int64_t sphGetStackUsed() { BYTE cStack; BYTE * pStackTop = (BYTE*)sphMyStack(); if ( !pStackTop ) return 0; int64_t iHeight = pStackTop - &cStack; if ( iHeight>=0 ) return iHeight; else return -iHeight; } int sphMyStackSize () { #if USE_WINDOWS return g_iThreadStackSize; #else return PTHREAD_STACK_MIN + g_iThreadStackSize; #endif } void sphSetMyStackSize ( int iStackSize ) { g_iThreadStackSize = iStackSize; sphThreadInit ( false ); } void MemorizeStack ( void* PStack ) { sphThreadSet ( g_tMyThreadStack, PStack ); } bool sphThreadSet ( SphThreadKey_t tKey, void * pValue ) { #if USE_WINDOWS return TlsSetValue ( tKey, pValue )!=FALSE; #else return pthread_setspecific ( tKey, pValue )==0; #endif } #if !USE_WINDOWS bool sphIsLtLib() { #ifndef _CS_GNU_LIBPTHREAD_VERSION return false; #else char buff[64]; confstr ( _CS_GNU_LIBPTHREAD_VERSION, buff, 64 ); if ( !strncasecmp ( buff, "linuxthreads", 12 ) ) return true; return false; #endif } #endif ////////////////////////////////////////////////////////////////////////// // MUTEX ////////////////////////////////////////////////////////////////////////// #if USE_WINDOWS // Windows mutex implementation bool CSphMutex::Init () { m_hMutex = CreateMutex ( NULL, FALSE, NULL ); m_bInitialized = ( m_hMutex!=NULL ); return m_bInitialized; } bool CSphMutex::Done () { if ( !m_bInitialized ) return true; m_bInitialized = false; return CloseHandle ( m_hMutex )==TRUE; } bool CSphMutex::Lock () { DWORD uWait = 
WaitForSingleObject ( m_hMutex, INFINITE ); return ( uWait!=WAIT_FAILED && uWait!=WAIT_TIMEOUT ); } bool CSphMutex::Unlock () { return ReleaseMutex ( m_hMutex )==TRUE; } #else // UNIX mutex implementation bool CSphMutex::Init () { m_bInitialized = ( pthread_mutex_init ( &m_tMutex, NULL )==0 ); return m_bInitialized; } bool CSphMutex::Done () { if ( !m_bInitialized ) return true; m_bInitialized = false; return pthread_mutex_destroy ( &m_tMutex )==0; } bool CSphMutex::Lock () { return ( pthread_mutex_lock ( &m_tMutex )==0 ); } bool CSphMutex::Unlock () { return ( pthread_mutex_unlock ( &m_tMutex )==0 ); } #endif ////////////////////////////////////////////////////////////////////////// // RWLOCK ////////////////////////////////////////////////////////////////////////// #if USE_WINDOWS // Windows rwlock implementation CSphRwlock::CSphRwlock () : m_hWriteMutex ( NULL ) , m_hReadEvent ( NULL ) , m_iReaders ( 0 ) {} bool CSphRwlock::Init () { assert ( !m_hWriteMutex && !m_hReadEvent && !m_iReaders ); m_hReadEvent = CreateEvent ( NULL, TRUE, FALSE, NULL ); if ( !m_hReadEvent ) return false; m_hWriteMutex = CreateMutex ( NULL, FALSE, NULL ); if ( !m_hWriteMutex ) { CloseHandle ( m_hReadEvent ); m_hReadEvent = NULL; return false; } return true; } bool CSphRwlock::Done () { if ( !CloseHandle ( m_hReadEvent ) ) return false; m_hReadEvent = NULL; if ( !CloseHandle ( m_hWriteMutex ) ) return false; m_hWriteMutex = NULL; m_iReaders = 0; return true; } bool CSphRwlock::ReadLock () { DWORD uWait = WaitForSingleObject ( m_hWriteMutex, INFINITE ); if ( uWait==WAIT_FAILED || uWait==WAIT_TIMEOUT ) return false; // got the writer mutex, can't be locked for write // so it's OK to add the reader lock, then free the writer mutex // writer mutex also protects readers counter InterlockedIncrement ( &m_iReaders ); // reset writer lock event, we just got ourselves a reader if ( !ResetEvent ( m_hReadEvent ) ) return false; // release writer lock return ReleaseMutex ( m_hWriteMutex )==TRUE; } bool CSphRwlock::WriteLock () { // try to acquire writer mutex DWORD uWait = WaitForSingleObject ( m_hWriteMutex, INFINITE ); if ( uWait==WAIT_FAILED || uWait==WAIT_TIMEOUT ) return false; // got the writer mutex, no pending readers, rock'n'roll if ( !m_iReaders ) return true; // got the writer mutex, but still have to wait for all readers to complete uWait = WaitForSingleObject ( m_hReadEvent, INFINITE ); if ( uWait==WAIT_FAILED || uWait==WAIT_TIMEOUT ) { // wait failed, well then, release writer mutex ReleaseMutex ( m_hWriteMutex ); return false; } return true; } bool CSphRwlock::Unlock () { // are we unlocking a writer? if ( ReleaseMutex ( m_hWriteMutex ) ) return true; // yes we are if ( GetLastError()!=ERROR_NOT_OWNER ) return false; // some unexpected error // writer mutex wasn't mine; we must have a read lock if ( !m_iReaders ) return true; // could this ever happen? 
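// ---------------------------------------------------------------------------
// Illustrative aside (not part of the original source): both the Windows
// implementation around this point and the pthread wrapper below expose the
// same CSphRwlock surface: Init(), ReadLock()/WriteLock(), a single Unlock()
// for either mode, and Done(). A hedged usage sketch:
#if 0
static void SketchSharedRead ( CSphRwlock & tLock, const CSphVector<int> & dShared, int & iOut )
{
	Verify ( tLock.ReadLock() );	// many readers may hold the lock concurrently
	iOut = dShared.GetLength();
	Verify ( tLock.Unlock() );
}

static void SketchExclusiveWrite ( CSphRwlock & tLock, CSphVector<int> & dShared, int iValue )
{
	Verify ( tLock.WriteLock() );	// the writer waits for all readers to drain
	dShared.Add ( iValue );
	Verify ( tLock.Unlock() );
}
#endif
// ---------------------------------------------------------------------------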
// atomically decrement reader counter if ( InterlockedDecrement ( &m_iReaders ) ) return true; // there still are pending readers // no pending readers, fire the event for write lock return SetEvent ( m_hReadEvent )==TRUE; } #else // UNIX rwlock implementation (pthreads wrapper) CSphRwlock::CSphRwlock () {} bool CSphRwlock::Init () { return pthread_rwlock_init ( &m_tLock, NULL )==0; } bool CSphRwlock::Done () { return pthread_rwlock_destroy ( &m_tLock )==0; } bool CSphRwlock::ReadLock () { return pthread_rwlock_rdlock ( &m_tLock )==0; } bool CSphRwlock::WriteLock () { return pthread_rwlock_wrlock ( &m_tLock )==0; } bool CSphRwlock::Unlock () { return pthread_rwlock_unlock ( &m_tLock )==0; } #endif // // $Id: sphinxstd.cpp 3130 2012-03-01 07:43:56Z tomat $ // sphinx-2.0.4-release/src/svnxrev.sh0000755000176700017710000000263011430743304016604 0ustar deogardeogar#!/bin/sh ## As standalone run from this directory as "./svnxrev.sh .." if [ -d $1/.svn ] ; then svn info --xml $1 | perl $1/src/svnxrev.pl $1/src/sphinxversion.h elif [ -d $1/.hg ] ; then ddr=`pwd` cd $1 target="src/sphinxversion.h" startrev=`hg id -n | sed 's/\\+//'` rev="$startrev" svnrev=`hg log -r$rev --template "{desc}" | grep ^\\\\[svn | sed 's/\\[svn r//; s/\\].*//'` # while [ "z" = "z$svnrev" ] ; do # revplus=`hg log -r$rev --template "{rev}"` # rev=`expr $revplus - 1` # svnrev=`hg log -r$rev --template "{desc}" | grep ^\\\\[svn | sed 's/\\[svn r//; s/\\].*//'` # done rsvnrev="r$svnrev" node=`hg log -r$startrev --template "{node}"` branch=`hg log -r$startrev --template "{branches}"` [ "z$branch" = "ztrunk" ] && branch="" [ ! "z$startrev" = "z$rev" ] && rsvnrev=r"$svnrev/$node" printf "#define SPH_SVN_TAG \"%s\"\n" $branch > $target.tmp printf "#define SPH_SVN_REV %s\n" $svnrev >> $target.tmp printf "#define SPH_SVN_REVSTR \"%s\"\n" $svnrev >> $target.tmp printf "#define SPH_SVN_TAGREV \"%s\"\n" $rsvnrev >> $target.tmp # dont touch it ALL the time if [ -f $target ]; then old=`cat $target` new=`cat $target.tmp` if [ "z$old" != "z$new" ] ; then mv -f $target.tmp $target else rm -f $target.tmp fi else mv -f $target.tmp $target fi cd "$ddr" exit 0 fi if [ ! -f ./sphinxversion.h ] ; then cat *.cpp *.h | perl srcxrev.pl > sphinxversion.h fi sphinx-2.0.4-release/src/llsphinxql.c0000644000176700017710000023352611636132170017107 0ustar deogardeogar#line 2 "llsphinxql.c" #line 4 "llsphinxql.c" #define YY_INT_ALIGNED short int /* A lexical scanner generated by flex */ #define FLEX_SCANNER #define YY_FLEX_MAJOR_VERSION 2 #define YY_FLEX_MINOR_VERSION 5 #define YY_FLEX_SUBMINOR_VERSION 35 #if YY_FLEX_SUBMINOR_VERSION > 0 #define FLEX_BETA #endif /* First, we deal with platform-specific or compiler-specific issues. */ /* begin standard C headers. */ #include #include #include #include /* end standard C headers. */ /* flex integer type definitions */ #ifndef FLEXINT_H #define FLEXINT_H /* C99 systems have . Non-C99 systems may or may not. */ #if defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L /* C99 says to define __STDC_LIMIT_MACROS before including stdint.h, * if you want the limit (max/min) macros for int types. 
*/ #ifndef __STDC_LIMIT_MACROS #define __STDC_LIMIT_MACROS 1 #endif #include typedef int8_t flex_int8_t; typedef uint8_t flex_uint8_t; typedef int16_t flex_int16_t; typedef uint16_t flex_uint16_t; typedef int32_t flex_int32_t; typedef uint32_t flex_uint32_t; #else typedef signed char flex_int8_t; typedef short int flex_int16_t; typedef int flex_int32_t; typedef unsigned char flex_uint8_t; typedef unsigned short int flex_uint16_t; typedef unsigned int flex_uint32_t; #endif /* ! C99 */ /* Limits of integral types. */ #ifndef INT8_MIN #define INT8_MIN (-128) #endif #ifndef INT16_MIN #define INT16_MIN (-32767-1) #endif #ifndef INT32_MIN #define INT32_MIN (-2147483647-1) #endif #ifndef INT8_MAX #define INT8_MAX (127) #endif #ifndef INT16_MAX #define INT16_MAX (32767) #endif #ifndef INT32_MAX #define INT32_MAX (2147483647) #endif #ifndef UINT8_MAX #define UINT8_MAX (255U) #endif #ifndef UINT16_MAX #define UINT16_MAX (65535U) #endif #ifndef UINT32_MAX #define UINT32_MAX (4294967295U) #endif #endif /* ! FLEXINT_H */ #ifdef __cplusplus /* The "const" storage-class-modifier is valid. */ #define YY_USE_CONST #else /* ! __cplusplus */ /* C99 requires __STDC__ to be defined as 1. */ #if defined (__STDC__) #define YY_USE_CONST #endif /* defined (__STDC__) */ #endif /* ! __cplusplus */ #ifdef YY_USE_CONST #define yyconst const #else #define yyconst #endif /* Returned upon end-of-file. */ #define YY_NULL 0 /* Promotes a possibly negative, possibly signed char to an unsigned * integer for use as an array index. If the signed char is negative, * we want to instead treat it as an 8-bit unsigned char, hence the * double cast. */ #define YY_SC_TO_UI(c) ((unsigned int) (unsigned char) c) /* An opaque pointer. */ #ifndef YY_TYPEDEF_YY_SCANNER_T #define YY_TYPEDEF_YY_SCANNER_T typedef void* yyscan_t; #endif /* For convenience, these vars (plus the bison vars far below) are macros in the reentrant scanner. */ #define yyin yyg->yyin_r #define yyout yyg->yyout_r #define yyextra yyg->yyextra_r #define yyleng yyg->yyleng_r #define yytext yyg->yytext_r #define yylineno (YY_CURRENT_BUFFER_LVALUE->yy_bs_lineno) #define yycolumn (YY_CURRENT_BUFFER_LVALUE->yy_bs_column) #define yy_flex_debug yyg->yy_flex_debug_r /* Enter a start condition. This macro really ought to take a parameter, * but we do it the disgusting crufty way forced on us by the ()-less * definition of BEGIN. */ #define BEGIN yyg->yy_start = 1 + 2 * /* Translate the current start state into a value that can be later handed * to BEGIN to return to the state. The YYSTATE alias is for lex * compatibility. */ #define YY_START ((yyg->yy_start - 1) / 2) #define YYSTATE YY_START /* Action number for EOF rule of a given start state. */ #define YY_STATE_EOF(state) (YY_END_OF_BUFFER + state + 1) /* Special action meaning "start processing a new file". */ #define YY_NEW_FILE yyrestart(yyin ,yyscanner ) #define YY_END_OF_BUFFER_CHAR 0 /* Size of default input buffer. */ #ifndef YY_BUF_SIZE #define YY_BUF_SIZE 16384 #endif /* The state buf must be large enough to hold one state per character in the main buffer. */ #define YY_STATE_BUF_SIZE ((YY_BUF_SIZE + 2) * sizeof(yy_state_type)) #ifndef YY_TYPEDEF_YY_BUFFER_STATE #define YY_TYPEDEF_YY_BUFFER_STATE typedef struct yy_buffer_state *YY_BUFFER_STATE; #endif #define EOB_ACT_CONTINUE_SCAN 0 #define EOB_ACT_END_OF_FILE 1 #define EOB_ACT_LAST_MATCH 2 #define YY_LESS_LINENO(n) /* Return all but the first "n" matched characters back to the input stream. */ #define yyless(n) \ do \ { \ /* Undo effects of setting up yytext. 
*/ \ int yyless_macro_arg = (n); \ YY_LESS_LINENO(yyless_macro_arg);\ *yy_cp = yyg->yy_hold_char; \ YY_RESTORE_YY_MORE_OFFSET \ yyg->yy_c_buf_p = yy_cp = yy_bp + yyless_macro_arg - YY_MORE_ADJ; \ YY_DO_BEFORE_ACTION; /* set up yytext again */ \ } \ while ( 0 ) #define unput(c) yyunput( c, yyg->yytext_ptr , yyscanner ) #ifndef YY_TYPEDEF_YY_SIZE_T #define YY_TYPEDEF_YY_SIZE_T typedef size_t yy_size_t; #endif #ifndef YY_STRUCT_YY_BUFFER_STATE #define YY_STRUCT_YY_BUFFER_STATE struct yy_buffer_state { FILE *yy_input_file; char *yy_ch_buf; /* input buffer */ char *yy_buf_pos; /* current position in input buffer */ /* Size of input buffer in bytes, not including room for EOB * characters. */ yy_size_t yy_buf_size; /* Number of characters read into yy_ch_buf, not including EOB * characters. */ int yy_n_chars; /* Whether we "own" the buffer - i.e., we know we created it, * and can realloc() it to grow it, and should free() it to * delete it. */ int yy_is_our_buffer; /* Whether this is an "interactive" input source; if so, and * if we're using stdio for input, then we want to use getc() * instead of fread(), to make sure we stop fetching input after * each newline. */ int yy_is_interactive; /* Whether we're considered to be at the beginning of a line. * If so, '^' rules will be active on the next match, otherwise * not. */ int yy_at_bol; int yy_bs_lineno; /**< The line count. */ int yy_bs_column; /**< The column count. */ /* Whether to try to fill the input buffer when we reach the * end of it. */ int yy_fill_buffer; int yy_buffer_status; #define YY_BUFFER_NEW 0 #define YY_BUFFER_NORMAL 1 /* When an EOF's been seen but there's still some text to process * then we mark the buffer as YY_EOF_PENDING, to indicate that we * shouldn't try reading from the input source any more. We might * still have a bunch of tokens to match, though, because of * possible backing-up. * * When we actually see the EOF, we change the status to "new" * (via yyrestart()), so that the user can continue scanning by * just pointing yyin at a new input file. */ #define YY_BUFFER_EOF_PENDING 2 }; #endif /* !YY_STRUCT_YY_BUFFER_STATE */ /* We provide macros for accessing buffer states in case in the * future we want to put the buffer states in a more general * "scanner state". * * Returns the top of the stack, or NULL. */ #define YY_CURRENT_BUFFER ( yyg->yy_buffer_stack \ ? yyg->yy_buffer_stack[yyg->yy_buffer_stack_top] \ : NULL) /* Same as previous macro, but useful when we know that the buffer stack is not * NULL or when we need an lvalue. For internal use only. 
*/ #define YY_CURRENT_BUFFER_LVALUE yyg->yy_buffer_stack[yyg->yy_buffer_stack_top] void yyrestart (FILE *input_file ,yyscan_t yyscanner ); void yy_switch_to_buffer (YY_BUFFER_STATE new_buffer ,yyscan_t yyscanner ); YY_BUFFER_STATE yy_create_buffer (FILE *file,int size ,yyscan_t yyscanner ); void yy_delete_buffer (YY_BUFFER_STATE b ,yyscan_t yyscanner ); void yy_flush_buffer (YY_BUFFER_STATE b ,yyscan_t yyscanner ); void yypush_buffer_state (YY_BUFFER_STATE new_buffer ,yyscan_t yyscanner ); void yypop_buffer_state (yyscan_t yyscanner ); static void yyensure_buffer_stack (yyscan_t yyscanner ); static void yy_load_buffer_state (yyscan_t yyscanner ); static void yy_init_buffer (YY_BUFFER_STATE b,FILE *file ,yyscan_t yyscanner ); #define YY_FLUSH_BUFFER yy_flush_buffer(YY_CURRENT_BUFFER ,yyscanner) YY_BUFFER_STATE yy_scan_buffer (char *base,yy_size_t size ,yyscan_t yyscanner ); YY_BUFFER_STATE yy_scan_string (yyconst char *yy_str ,yyscan_t yyscanner ); YY_BUFFER_STATE yy_scan_bytes (yyconst char *bytes,int len ,yyscan_t yyscanner ); void *yyalloc (yy_size_t ,yyscan_t yyscanner ); void *yyrealloc (void *,yy_size_t ,yyscan_t yyscanner ); void yyfree (void * ,yyscan_t yyscanner ); #define yy_new_buffer yy_create_buffer #define yy_set_interactive(is_interactive) \ { \ if ( ! YY_CURRENT_BUFFER ){ \ yyensure_buffer_stack (yyscanner); \ YY_CURRENT_BUFFER_LVALUE = \ yy_create_buffer(yyin,YY_BUF_SIZE ,yyscanner); \ } \ YY_CURRENT_BUFFER_LVALUE->yy_is_interactive = is_interactive; \ } #define yy_set_bol(at_bol) \ { \ if ( ! YY_CURRENT_BUFFER ){\ yyensure_buffer_stack (yyscanner); \ YY_CURRENT_BUFFER_LVALUE = \ yy_create_buffer(yyin,YY_BUF_SIZE ,yyscanner); \ } \ YY_CURRENT_BUFFER_LVALUE->yy_at_bol = at_bol; \ } #define YY_AT_BOL() (YY_CURRENT_BUFFER_LVALUE->yy_at_bol) /* Begin user sect3 */ #define yywrap(n) 1 #define YY_SKIP_YYWRAP typedef unsigned char YY_CHAR; typedef int yy_state_type; #define yytext_ptr yytext_r static yy_state_type yy_get_previous_state (yyscan_t yyscanner ); static yy_state_type yy_try_NUL_trans (yy_state_type current_state ,yyscan_t yyscanner); static int yy_get_next_buffer (yyscan_t yyscanner ); static void yy_fatal_error (yyconst char msg[] ,yyscan_t yyscanner ); /* Done after the current pattern has been matched and before the * corresponding action - sets up yytext. */ #define YY_DO_BEFORE_ACTION \ yyg->yytext_ptr = yy_bp; \ yyleng = (size_t) (yy_cp - yy_bp); \ yyg->yy_hold_char = *yy_cp; \ *yy_cp = '\0'; \ yyg->yy_c_buf_p = yy_cp; #define YY_NUM_RULES 96 #define YY_END_OF_BUFFER 97 /* This struct is not used in this scanner, but its presence is necessary. 
*/ struct yy_trans_info { flex_int32_t yy_verify; flex_int32_t yy_nxt; }; static yyconst flex_int16_t yy_accept[374] = { 0, 0, 0, 0, 0, 97, 95, 94, 94, 95, 95, 85, 95, 86, 95, 95, 95, 95, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 95, 2, 96, 2, 94, 79, 0, 84, 0, 85, 1, 85, 86, 83, 81, 80, 82, 0, 91, 91, 91, 91, 90, 90, 5, 90, 90, 90, 11, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 31, 32, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 49, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 68, 90, 90, 90, 90, 90, 90, 90, 90, 0, 3, 0, 92, 91, 91, 87, 91, 4, 6, 90, 8, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 22, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 35, 90, 90, 90, 90, 41, 90, 43, 44, 90, 46, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 60, 90, 90, 90, 66, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 0, 93, 92, 91, 91, 90, 90, 90, 12, 90, 90, 90, 90, 90, 19, 90, 23, 90, 90, 90, 27, 90, 90, 90, 90, 90, 36, 90, 90, 90, 90, 42, 90, 47, 90, 90, 51, 52, 90, 90, 90, 90, 90, 90, 90, 90, 62, 90, 90, 90, 90, 90, 70, 90, 90, 90, 90, 90, 90, 90, 90, 91, 91, 90, 9, 90, 90, 90, 16, 90, 90, 90, 90, 24, 25, 26, 90, 90, 30, 33, 90, 90, 38, 39, 40, 45, 90, 50, 90, 90, 90, 90, 90, 90, 90, 90, 90, 64, 90, 90, 90, 90, 90, 90, 90, 90, 90, 77, 90, 89, 91, 7, 90, 90, 14, 17, 18, 90, 90, 90, 29, 34, 90, 48, 90, 90, 90, 90, 90, 58, 90, 90, 63, 65, 67, 90, 90, 72, 73, 90, 90, 76, 78, 88, 10, 90, 90, 90, 90, 90, 90, 90, 54, 55, 90, 57, 90, 61, 90, 90, 90, 90, 90, 90, 20, 21, 28, 90, 90, 56, 90, 90, 90, 90, 75, 13, 15, 37, 90, 90, 90, 90, 74, 53, 90, 90, 90, 90, 69, 71, 59, 0 } ; static yyconst flex_int32_t yy_ec[256] = { 0, 1, 1, 1, 1, 1, 1, 1, 1, 2, 3, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 4, 1, 1, 1, 1, 1, 5, 1, 1, 6, 1, 1, 1, 7, 8, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 10, 1, 11, 12, 13, 1, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 24, 31, 32, 33, 34, 35, 36, 37, 38, 39, 1, 40, 1, 1, 24, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 24, 51, 52, 53, 54, 55, 56, 24, 57, 58, 59, 60, 61, 62, 63, 64, 65, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 } ; static yyconst flex_int32_t yy_meta[66] = { 0, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 3, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 1, 5, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4 } ; static yyconst flex_int16_t yy_base[383] = { 0, 0, 0, 63, 64, 210, 769, 69, 71, 194, 63, 188, 183, 68, 175, 66, 144, 66, 53, 63, 75, 74, 0, 103, 69, 107, 73, 125, 128, 116, 149, 190, 161, 123, 100, 179, 0, 769, 769, 142, 96, 769, 82, 769, 73, 140, 769, 132, 129, 769, 769, 769, 769, 0, 0, 104, 140, 147, 0, 151, 167, 141, 165, 184, 0, 155, 188, 180, 201, 199, 191, 204, 208, 206, 210, 215, 218, 0, 233, 225, 220, 231, 234, 229, 241, 234, 245, 241, 249, 243, 259, 254, 279, 259, 260, 287, 260, 262, 272, 269, 283, 0, 290, 286, 288, 297, 286, 284, 306, 294, 67, 769, 324, 0, 0, 297, 0, 313, 0, 0, 325, 0, 318, 311, 322, 325, 328, 328, 327, 339, 342, 328, 0, 332, 333, 351, 338, 344, 355, 359, 344, 357, 360, 354, 358, 361, 366, 373, 0, 377, 0, 0, 375, 0, 372, 372, 381, 383, 387, 389, 377, 386, 385, 399, 397, 382, 0, 387, 410, 398, 0, 
404, 405, 407, 405, 421, 408, 420, 418, 423, 417, 428, 53, 769, 0, 426, 430, 441, 433, 443, 0, 449, 442, 434, 436, 440, 444, 453, 0, 460, 448, 460, 0, 451, 471, 458, 453, 463, 0, 481, 472, 467, 480, 0, 472, 0, 476, 477, 0, 0, 496, 497, 484, 501, 500, 502, 506, 499, 0, 498, 494, 494, 513, 503, 0, 509, 504, 514, 525, 519, 521, 528, 527, 519, 533, 534, 0, 538, 531, 532, 0, 539, 547, 545, 543, 0, 0, 0, 549, 549, 0, 0, 543, 546, 0, 0, 0, 0, 552, 0, 548, 556, 558, 572, 569, 559, 568, 567, 570, 0, 566, 571, 589, 581, 590, 579, 597, 589, 586, 0, 593, 0, 590, 0, 597, 603, 595, 0, 0, 614, 615, 606, 0, 0, 610, 0, 623, 620, 609, 625, 607, 0, 623, 620, 0, 0, 0, 633, 629, 0, 0, 629, 635, 0, 0, 0, 0, 629, 643, 644, 635, 643, 643, 648, 0, 0, 650, 0, 637, 0, 645, 647, 663, 653, 658, 669, 0, 0, 0, 664, 667, 0, 673, 672, 666, 668, 0, 0, 0, 0, 677, 687, 676, 688, 0, 0, 684, 685, 690, 697, 0, 0, 0, 769, 743, 748, 751, 754, 87, 80, 757, 760, 764 } ; static yyconst flex_int16_t yy_def[383] = { 0, 373, 1, 374, 374, 373, 373, 373, 373, 373, 375, 373, 373, 373, 373, 373, 373, 376, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 378, 373, 373, 373, 373, 373, 375, 373, 375, 373, 373, 373, 373, 373, 373, 373, 373, 379, 380, 380, 380, 380, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 381, 373, 375, 382, 380, 380, 380, 380, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 381, 373, 382, 380, 380, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 380, 380, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 380, 380, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 380, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 0, 373, 373, 373, 373, 373, 373, 373, 373, 373 } ; static yyconst flex_int16_t yy_nxt[835] = { 0, 6, 7, 8, 9, 10, 6, 11, 12, 13, 14, 15, 6, 16, 17, 18, 19, 20, 21, 22, 23, 24, 22, 25, 22, 22, 26, 27, 28, 29, 22, 30, 31, 32, 33, 34, 35, 22, 22, 22, 6, 36, 18, 19, 20, 21, 22, 23, 24, 22, 25, 22, 26, 27, 28, 29, 22, 30, 31, 32, 33, 34, 35, 22, 22, 22, 38, 38, 43, 39, 39, 40, 40, 40, 40, 47, 373, 48, 50, 51, 53, 59, 63, 55, 113, 60, 61, 43, 62, 56, 65, 110, 80, 68, 183, 75, 81, 69, 40, 40, 76, 64, 57, 44, 66, 70, 67, 59, 183, 63, 55, 60, 61, 112, 62, 105, 56, 65, 71, 80, 68, 75, 44, 81, 69, 77, 76, 64, 57, 72, 66, 70, 67, 115, 73, 78, 47, 74, 48, 79, 82, 45, 105, 86, 83, 71, 89, 90, 84, 45, 111, 103, 
77, 104, 85, 72, 52, 87, 116, 115, 73, 78, 88, 74, 91, 79, 117, 82, 92, 118, 86, 83, 89, 90, 120, 84, 100, 103, 93, 104, 85, 124, 94, 87, 119, 116, 121, 49, 88, 46, 101, 91, 102, 117, 106, 92, 118, 45, 107, 128, 120, 108, 109, 100, 93, 122, 41, 124, 94, 95, 373, 119, 96, 121, 125, 126, 101, 123, 102, 97, 133, 106, 127, 98, 99, 107, 128, 129, 108, 109, 134, 131, 122, 130, 132, 137, 95, 135, 138, 96, 125, 126, 136, 123, 139, 97, 133, 140, 127, 98, 99, 141, 151, 129, 144, 145, 134, 131, 146, 130, 132, 137, 149, 135, 138, 142, 143, 147, 136, 150, 139, 148, 152, 140, 153, 154, 155, 156, 141, 151, 144, 145, 157, 162, 146, 161, 373, 169, 149, 167, 168, 142, 143, 147, 158, 150, 170, 148, 152, 171, 153, 154, 155, 174, 156, 172, 175, 179, 157, 159, 162, 161, 160, 163, 169, 167, 168, 178, 164, 165, 166, 158, 170, 176, 173, 180, 171, 181, 177, 43, 174, 185, 172, 175, 179, 159, 186, 373, 160, 163, 187, 188, 194, 178, 164, 165, 166, 189, 190, 176, 173, 191, 180, 181, 177, 192, 193, 185, 195, 196, 373, 197, 198, 186, 44, 199, 200, 187, 188, 194, 201, 202, 203, 189, 190, 204, 206, 191, 205, 207, 210, 192, 193, 208, 209, 195, 196, 197, 198, 211, 212, 199, 213, 200, 214, 216, 201, 202, 215, 203, 217, 218, 204, 206, 205, 219, 207, 210, 220, 208, 209, 222, 223, 224, 227, 221, 211, 212, 225, 213, 226, 214, 216, 228, 215, 229, 234, 217, 218, 230, 232, 231, 219, 233, 235, 220, 236, 222, 223, 224, 227, 221, 237, 238, 240, 225, 239, 226, 241, 228, 242, 244, 229, 234, 243, 230, 232, 231, 245, 233, 235, 246, 247, 236, 248, 249, 373, 250, 237, 251, 238, 240, 239, 252, 241, 253, 254, 242, 244, 255, 243, 256, 257, 373, 258, 245, 259, 246, 260, 247, 261, 248, 249, 250, 262, 251, 263, 373, 264, 252, 265, 253, 266, 254, 267, 268, 255, 256, 269, 257, 258, 270, 271, 259, 260, 272, 261, 273, 274, 275, 262, 276, 277, 263, 264, 278, 265, 279, 280, 266, 267, 268, 281, 285, 269, 282, 283, 284, 270, 271, 286, 272, 287, 288, 273, 274, 275, 289, 276, 277, 290, 278, 291, 279, 280, 292, 293, 294, 297, 281, 285, 282, 283, 284, 295, 296, 298, 286, 299, 287, 288, 300, 301, 307, 289, 302, 303, 290, 291, 304, 305, 306, 292, 293, 294, 297, 308, 309, 310, 314, 295, 296, 311, 298, 312, 299, 313, 300, 315, 301, 307, 302, 303, 316, 317, 304, 305, 306, 318, 319, 373, 320, 308, 321, 309, 310, 314, 322, 311, 323, 312, 324, 313, 325, 315, 326, 327, 373, 328, 316, 329, 317, 330, 332, 318, 331, 319, 320, 333, 334, 321, 335, 336, 322, 337, 323, 338, 324, 339, 325, 340, 326, 341, 327, 328, 342, 343, 329, 344, 330, 332, 331, 345, 346, 350, 333, 334, 335, 347, 336, 337, 348, 349, 338, 339, 351, 352, 340, 353, 341, 354, 342, 355, 343, 344, 356, 357, 358, 361, 345, 346, 350, 359, 360, 347, 362, 365, 348, 349, 363, 364, 351, 352, 366, 353, 367, 354, 368, 371, 355, 369, 356, 357, 370, 358, 361, 372, 373, 359, 360, 373, 373, 362, 365, 373, 363, 364, 373, 373, 373, 366, 367, 373, 373, 368, 371, 369, 373, 373, 370, 373, 373, 373, 372, 37, 37, 37, 37, 37, 42, 42, 42, 42, 42, 54, 54, 58, 373, 58, 114, 373, 114, 182, 373, 182, 182, 184, 373, 184, 5, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373 } ; static yyconst flex_int16_t yy_chk[835] = { 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 4, 10, 3, 4, 7, 7, 8, 8, 13, 44, 13, 15, 15, 17, 18, 19, 17, 379, 18, 18, 42, 18, 17, 20, 378, 26, 21, 182, 24, 26, 21, 40, 40, 24, 19, 17, 10, 20, 21, 20, 18, 110, 19, 17, 18, 18, 44, 18, 34, 17, 20, 23, 26, 21, 24, 42, 26, 21, 25, 24, 19, 17, 23, 20, 21, 20, 55, 23, 25, 48, 23, 48, 25, 27, 47, 34, 28, 27, 23, 29, 29, 27, 45, 39, 33, 25, 33, 27, 23, 16, 28, 56, 55, 23, 25, 28, 23, 30, 25, 57, 27, 30, 59, 28, 27, 29, 29, 61, 27, 32, 33, 30, 33, 27, 65, 30, 28, 60, 56, 62, 14, 28, 12, 32, 30, 32, 57, 35, 30, 59, 11, 35, 67, 61, 35, 35, 32, 30, 63, 9, 65, 30, 31, 5, 60, 31, 62, 66, 66, 32, 63, 32, 31, 70, 35, 66, 31, 31, 35, 67, 68, 35, 35, 71, 69, 63, 68, 69, 73, 31, 72, 74, 31, 66, 66, 72, 63, 75, 31, 70, 76, 66, 31, 31, 78, 85, 68, 79, 80, 71, 69, 81, 68, 69, 73, 83, 72, 74, 78, 78, 82, 72, 84, 75, 82, 86, 76, 87, 88, 89, 90, 78, 85, 79, 80, 91, 94, 81, 93, 0, 98, 83, 96, 97, 78, 78, 82, 92, 84, 99, 82, 86, 100, 87, 88, 89, 103, 90, 102, 104, 107, 91, 92, 94, 93, 92, 95, 98, 96, 97, 106, 95, 95, 95, 92, 99, 105, 102, 108, 100, 109, 105, 112, 103, 115, 102, 104, 107, 92, 117, 0, 92, 95, 120, 122, 128, 106, 95, 95, 95, 123, 124, 105, 102, 125, 108, 109, 105, 126, 127, 115, 129, 130, 0, 131, 133, 117, 112, 134, 135, 120, 122, 128, 136, 137, 138, 123, 124, 139, 141, 125, 140, 142, 145, 126, 127, 143, 144, 129, 130, 131, 133, 146, 147, 134, 149, 135, 152, 155, 136, 137, 154, 138, 156, 157, 139, 141, 140, 158, 142, 145, 159, 143, 144, 160, 161, 162, 165, 159, 146, 147, 163, 149, 164, 152, 155, 167, 154, 168, 173, 156, 157, 169, 171, 169, 158, 172, 174, 159, 175, 160, 161, 162, 165, 159, 176, 177, 179, 163, 178, 164, 180, 167, 181, 186, 168, 173, 185, 169, 171, 169, 187, 172, 174, 188, 189, 175, 191, 192, 0, 193, 176, 194, 177, 179, 178, 195, 180, 196, 197, 181, 186, 199, 185, 200, 201, 0, 203, 187, 204, 188, 205, 189, 206, 191, 192, 193, 207, 194, 209, 0, 210, 195, 211, 196, 212, 197, 214, 216, 199, 200, 217, 201, 203, 220, 221, 204, 205, 222, 206, 223, 224, 225, 207, 226, 227, 209, 210, 229, 211, 230, 231, 212, 214, 216, 232, 237, 217, 233, 235, 236, 220, 221, 238, 222, 239, 240, 223, 224, 225, 241, 226, 227, 242, 229, 243, 230, 231, 244, 245, 247, 251, 232, 237, 233, 235, 236, 248, 249, 252, 238, 253, 239, 240, 254, 258, 271, 241, 259, 262, 242, 243, 263, 268, 270, 244, 245, 247, 251, 272, 273, 274, 278, 248, 249, 275, 252, 276, 253, 277, 254, 280, 258, 271, 259, 262, 281, 282, 263, 268, 270, 283, 284, 0, 285, 272, 286, 273, 274, 278, 287, 275, 288, 276, 290, 277, 292, 280, 294, 295, 0, 296, 281, 299, 282, 300, 304, 283, 301, 284, 285, 306, 307, 286, 308, 309, 287, 310, 288, 312, 290, 313, 292, 317, 294, 318, 295, 296, 321, 322, 299, 327, 300, 304, 301, 328, 329, 333, 306, 307, 308, 330, 309, 310, 331, 332, 312, 313, 336, 338, 317, 340, 318, 341, 321, 342, 322, 327, 343, 344, 345, 352, 328, 329, 333, 349, 350, 330, 353, 360, 331, 332, 354, 355, 336, 338, 361, 340, 362, 341, 363, 368, 342, 366, 343, 344, 367, 345, 352, 369, 0, 349, 350, 0, 0, 353, 360, 0, 354, 355, 0, 0, 0, 361, 362, 0, 0, 363, 368, 366, 0, 0, 367, 0, 0, 0, 369, 374, 374, 374, 374, 374, 375, 375, 375, 375, 375, 376, 376, 377, 0, 377, 380, 0, 380, 381, 0, 381, 381, 382, 0, 382, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 373, 
373, 373, 373, 373, 373 } ; /* The intent behind this definition is that it'll catch * any uses of REJECT which flex missed. */ #define REJECT reject_used_but_not_detected #define yymore() yymore_used_but_not_detected #define YY_MORE_ADJ 0 #define YY_RESTORE_YY_MORE_OFFSET #line 1 "sphinxql.l" #line 2 "sphinxql.l" #include "yysphinxql.h" #if USE_WINDOWS #pragma warning(push,1) #endif // warning, lexer generator dependent! // this macro relies on that in flex yytext points to the actual location in the buffer #define YYSTOREBOUNDS \ { \ lvalp->m_iStart = yytext - pParser->m_pBuf; \ lvalp->m_iEnd = lvalp->m_iStart + yyleng; \ pParser->m_pLastTokenStart = yytext; \ } #line 759 "llsphinxql.c" #define INITIAL 0 #define ccomment 1 #ifndef YY_NO_UNISTD_H /* Special case for "unistd.h", since it is non-ANSI. We include it way * down here because we want the user's section 1 to have been scanned first. * The user has a chance to override it with an option. */ #if !USE_WINDOWS #include #endif #endif #ifndef YY_EXTRA_TYPE #define YY_EXTRA_TYPE void * #endif /* Holds the entire state of the reentrant scanner. */ struct yyguts_t { /* User-defined. Not touched by flex. */ YY_EXTRA_TYPE yyextra_r; /* The rest are the same as the globals declared in the non-reentrant scanner. */ FILE *yyin_r, *yyout_r; size_t yy_buffer_stack_top; /**< index of top of stack. */ size_t yy_buffer_stack_max; /**< capacity of stack. */ YY_BUFFER_STATE * yy_buffer_stack; /**< Stack as an array. */ char yy_hold_char; int yy_n_chars; int yyleng_r; char *yy_c_buf_p; int yy_init; int yy_start; int yy_did_buffer_switch_on_eof; int yy_start_stack_ptr; int yy_start_stack_depth; int *yy_start_stack; yy_state_type yy_last_accepting_state; char* yy_last_accepting_cpos; int yylineno_r; int yy_flex_debug_r; char *yytext_r; int yy_more_flag; int yy_more_len; }; /* end struct yyguts_t */ static int yy_init_globals (yyscan_t yyscanner ); int yylex_init (yyscan_t* scanner); int yylex_init_extra (YY_EXTRA_TYPE user_defined,yyscan_t* scanner); /* Accessor methods to globals. These are made visible to non-reentrant scanners for convenience. */ int yylex_destroy (yyscan_t yyscanner ); int yyget_debug (yyscan_t yyscanner ); void yyset_debug (int debug_flag ,yyscan_t yyscanner ); YY_EXTRA_TYPE yyget_extra (yyscan_t yyscanner ); void yyset_extra (YY_EXTRA_TYPE user_defined ,yyscan_t yyscanner ); FILE *yyget_in (yyscan_t yyscanner ); void yyset_in (FILE * in_str ,yyscan_t yyscanner ); FILE *yyget_out (yyscan_t yyscanner ); void yyset_out (FILE * out_str ,yyscan_t yyscanner ); int yyget_leng (yyscan_t yyscanner ); char *yyget_text (yyscan_t yyscanner ); int yyget_lineno (yyscan_t yyscanner ); void yyset_lineno (int line_number ,yyscan_t yyscanner ); /* Macros after this point can all be overridden by user definitions in * section 1. */ #ifndef YY_SKIP_YYWRAP #ifdef __cplusplus extern "C" int yywrap (yyscan_t yyscanner ); #else extern int yywrap (yyscan_t yyscanner ); #endif #endif #ifndef yytext_ptr static void yy_flex_strncpy (char *,yyconst char *,int ,yyscan_t yyscanner); #endif #ifdef YY_NEED_STRLEN static int yy_flex_strlen (yyconst char * ,yyscan_t yyscanner); #endif #ifndef YY_NO_INPUT #ifdef __cplusplus static int yyinput (yyscan_t yyscanner ); #else static int input (yyscan_t yyscanner ); #endif #endif /* Amount of stuff to slurp up with each read. */ #ifndef YY_READ_BUF_SIZE #define YY_READ_BUF_SIZE 8192 #endif /* Copy whatever the last rule matched to the standard output. 
*/ #ifndef ECHO /* This used to be an fputs(), but since the string might contain NUL's, * we now use fwrite(). */ #define ECHO fwrite( yytext, yyleng, 1, yyout ) #endif /* Gets input and stuffs it into "buf". number of characters read, or YY_NULL, * is returned in "result". */ #ifndef YY_INPUT #define YY_INPUT(buf,result,max_size) \ if ( YY_CURRENT_BUFFER_LVALUE->yy_is_interactive ) \ { \ int c = '*'; \ int n; \ for ( n = 0; n < max_size && \ (c = getc( yyin )) != EOF && c != '\n'; ++n ) \ buf[n] = (char) c; \ if ( c == '\n' ) \ buf[n++] = (char) c; \ if ( c == EOF && ferror( yyin ) ) \ YY_FATAL_ERROR( "input in flex scanner failed" ); \ result = n; \ } \ else \ { \ errno=0; \ while ( (result = fread(buf, 1, max_size, yyin))==0 && ferror(yyin)) \ { \ if( errno != EINTR) \ { \ YY_FATAL_ERROR( "input in flex scanner failed" ); \ break; \ } \ errno=0; \ clearerr(yyin); \ } \ }\ \ #endif /* No semi-colon after return; correct usage is to write "yyterminate();" - * we don't want an extra ';' after the "return" because that will cause * some compilers to complain about unreachable statements. */ #ifndef yyterminate #define yyterminate() return YY_NULL #endif /* Number of entries by which start-condition stack grows. */ #ifndef YY_START_STACK_INCR #define YY_START_STACK_INCR 25 #endif /* Report a fatal error. */ #ifndef YY_FATAL_ERROR #define YY_FATAL_ERROR(msg) yy_fatal_error( msg , yyscanner) #endif /* end tables serialization structures and prototypes */ /* Default declaration of generated scanner - a define so the user can * easily add parameters. */ #ifndef YY_DECL #define YY_DECL_IS_OURS 1 extern int yylex (yyscan_t yyscanner); #define YY_DECL int yylex (yyscan_t yyscanner) #endif /* !YY_DECL */ /* Code executed at the beginning of each rule, after yytext and yyleng * have been set up. */ #ifndef YY_USER_ACTION #define YY_USER_ACTION #endif /* Code executed at the end of each rule. */ #ifndef YY_BREAK #define YY_BREAK break; #endif #define YY_RULE_SETUP \ YY_USER_ACTION /** The main scanner function which does all the work. */ YY_DECL { register yy_state_type yy_current_state; register char *yy_cp, *yy_bp; register int yy_act; struct yyguts_t * yyg = (struct yyguts_t*)yyscanner; #line 29 "sphinxql.l" #line 982 "llsphinxql.c" if ( !yyg->yy_init ) { yyg->yy_init = 1; #ifdef YY_USER_INIT YY_USER_INIT; #endif if ( ! yyg->yy_start ) yyg->yy_start = 1; /* first start state */ if ( ! yyin ) yyin = stdin; if ( ! yyout ) yyout = stdout; if ( ! YY_CURRENT_BUFFER ) { yyensure_buffer_stack (yyscanner); YY_CURRENT_BUFFER_LVALUE = yy_create_buffer(yyin,YY_BUF_SIZE ,yyscanner); } yy_load_buffer_state(yyscanner ); } while ( 1 ) /* loops until end-of-file is reached */ { yy_cp = yyg->yy_c_buf_p; /* Support of yytext. */ *yy_cp = yyg->yy_hold_char; /* yy_bp points to the position in yy_ch_buf of the start of * the current run. 
*/ yy_bp = yy_cp; yy_current_state = yyg->yy_start; yy_match: do { register YY_CHAR yy_c = yy_ec[YY_SC_TO_UI(*yy_cp)]; if ( yy_accept[yy_current_state] ) { yyg->yy_last_accepting_state = yy_current_state; yyg->yy_last_accepting_cpos = yy_cp; } while ( yy_chk[yy_base[yy_current_state] + yy_c] != yy_current_state ) { yy_current_state = (int) yy_def[yy_current_state]; if ( yy_current_state >= 374 ) yy_c = yy_meta[(unsigned int) yy_c]; } yy_current_state = yy_nxt[yy_base[yy_current_state] + (unsigned int) yy_c]; ++yy_cp; } while ( yy_base[yy_current_state] != 769 ); yy_find_action: yy_act = yy_accept[yy_current_state]; if ( yy_act == 0 ) { /* have to back up */ yy_cp = yyg->yy_last_accepting_cpos; yy_current_state = yyg->yy_last_accepting_state; yy_act = yy_accept[yy_current_state]; } YY_DO_BEFORE_ACTION; do_action: /* This label is used only to access EOF actions. */ switch ( yy_act ) { /* beginning of action switch */ case 0: /* must back up */ /* undo the effects of YY_DO_BEFORE_ACTION */ *yy_cp = yyg->yy_hold_char; yy_cp = yyg->yy_last_accepting_cpos; yy_current_state = yyg->yy_last_accepting_state; goto yy_find_action; case 1: YY_RULE_SETUP #line 31 "sphinxql.l" { BEGIN(ccomment); } YY_BREAK case 2: YY_RULE_SETUP #line 32 "sphinxql.l" { } YY_BREAK case 3: YY_RULE_SETUP #line 33 "sphinxql.l" { BEGIN(INITIAL); } YY_BREAK case 4: YY_RULE_SETUP #line 35 "sphinxql.l" { YYSTOREBOUNDS; return TOK_AND; } YY_BREAK case 5: YY_RULE_SETUP #line 36 "sphinxql.l" { YYSTOREBOUNDS; return TOK_AS; } YY_BREAK case 6: YY_RULE_SETUP #line 37 "sphinxql.l" { YYSTOREBOUNDS; return TOK_ASC; } YY_BREAK case 7: YY_RULE_SETUP #line 38 "sphinxql.l" { YYSTOREBOUNDS; return TOK_ATTACH; } YY_BREAK case 8: YY_RULE_SETUP #line 39 "sphinxql.l" { YYSTOREBOUNDS; return TOK_AVG; } YY_BREAK case 9: YY_RULE_SETUP #line 40 "sphinxql.l" { YYSTOREBOUNDS; return TOK_BEGIN; } YY_BREAK case 10: YY_RULE_SETUP #line 41 "sphinxql.l" { YYSTOREBOUNDS; return TOK_BETWEEN; } YY_BREAK case 11: YY_RULE_SETUP #line 42 "sphinxql.l" { YYSTOREBOUNDS; return TOK_BY; } YY_BREAK case 12: YY_RULE_SETUP #line 43 "sphinxql.l" { YYSTOREBOUNDS; return TOK_CALL; } YY_BREAK case 13: YY_RULE_SETUP #line 44 "sphinxql.l" { YYSTOREBOUNDS; return TOK_COLLATION; } YY_BREAK case 14: YY_RULE_SETUP #line 45 "sphinxql.l" { YYSTOREBOUNDS; return TOK_COMMIT; } YY_BREAK case 15: YY_RULE_SETUP #line 46 "sphinxql.l" { YYSTOREBOUNDS; return TOK_COMMITTED; } YY_BREAK case 16: YY_RULE_SETUP #line 47 "sphinxql.l" { YYSTOREBOUNDS; return TOK_COUNT; } YY_BREAK case 17: YY_RULE_SETUP #line 48 "sphinxql.l" { YYSTOREBOUNDS; return TOK_CREATE; } YY_BREAK case 18: YY_RULE_SETUP #line 49 "sphinxql.l" { YYSTOREBOUNDS; return TOK_DELETE; } YY_BREAK case 19: YY_RULE_SETUP #line 50 "sphinxql.l" { YYSTOREBOUNDS; return TOK_DESC; } YY_BREAK case 20: YY_RULE_SETUP #line 51 "sphinxql.l" { YYSTOREBOUNDS; return TOK_DESCRIBE; } YY_BREAK case 21: YY_RULE_SETUP #line 52 "sphinxql.l" { YYSTOREBOUNDS; return TOK_DISTINCT; } YY_BREAK case 22: YY_RULE_SETUP #line 53 "sphinxql.l" { YYSTOREBOUNDS; return TOK_DIV; } YY_BREAK case 23: YY_RULE_SETUP #line 54 "sphinxql.l" { YYSTOREBOUNDS; return TOK_DROP; } YY_BREAK case 24: YY_RULE_SETUP #line 55 "sphinxql.l" { YYSTOREBOUNDS; return TOK_FALSE; } YY_BREAK case 25: YY_RULE_SETUP #line 56 "sphinxql.l" { YYSTOREBOUNDS; return TOK_FLOAT; } YY_BREAK case 26: YY_RULE_SETUP #line 57 "sphinxql.l" { YYSTOREBOUNDS; return TOK_FLUSH; } YY_BREAK case 27: YY_RULE_SETUP #line 58 "sphinxql.l" { YYSTOREBOUNDS; return TOK_FROM; } YY_BREAK case 28: YY_RULE_SETUP #line 59 
"sphinxql.l" { YYSTOREBOUNDS; return TOK_FUNCTION; } YY_BREAK case 29: YY_RULE_SETUP #line 60 "sphinxql.l" { YYSTOREBOUNDS; return TOK_GLOBAL; } YY_BREAK case 30: YY_RULE_SETUP #line 61 "sphinxql.l" { YYSTOREBOUNDS; return TOK_GROUP; } YY_BREAK case 31: YY_RULE_SETUP #line 62 "sphinxql.l" { YYSTOREBOUNDS; return TOK_ID; } YY_BREAK case 32: YY_RULE_SETUP #line 63 "sphinxql.l" { YYSTOREBOUNDS; return TOK_IN; } YY_BREAK case 33: YY_RULE_SETUP #line 64 "sphinxql.l" { YYSTOREBOUNDS; return TOK_INDEX; } YY_BREAK case 34: YY_RULE_SETUP #line 65 "sphinxql.l" { YYSTOREBOUNDS; return TOK_INSERT; } YY_BREAK case 35: YY_RULE_SETUP #line 66 "sphinxql.l" { YYSTOREBOUNDS; return TOK_INT; } YY_BREAK case 36: YY_RULE_SETUP #line 67 "sphinxql.l" { YYSTOREBOUNDS; return TOK_INTO; } YY_BREAK case 37: YY_RULE_SETUP #line 68 "sphinxql.l" { YYSTOREBOUNDS; return TOK_ISOLATION; } YY_BREAK case 38: YY_RULE_SETUP #line 69 "sphinxql.l" { YYSTOREBOUNDS; return TOK_LEVEL; } YY_BREAK case 39: YY_RULE_SETUP #line 70 "sphinxql.l" { YYSTOREBOUNDS; return TOK_LIMIT; } YY_BREAK case 40: YY_RULE_SETUP #line 71 "sphinxql.l" { YYSTOREBOUNDS; return TOK_MATCH; } YY_BREAK case 41: YY_RULE_SETUP #line 72 "sphinxql.l" { YYSTOREBOUNDS; return TOK_MAX; } YY_BREAK case 42: YY_RULE_SETUP #line 73 "sphinxql.l" { YYSTOREBOUNDS; return TOK_META; } YY_BREAK case 43: YY_RULE_SETUP #line 74 "sphinxql.l" { YYSTOREBOUNDS; return TOK_MIN; } YY_BREAK case 44: YY_RULE_SETUP #line 75 "sphinxql.l" { YYSTOREBOUNDS; return TOK_MOD; } YY_BREAK case 45: YY_RULE_SETUP #line 76 "sphinxql.l" { YYSTOREBOUNDS; return TOK_NAMES; } YY_BREAK case 46: YY_RULE_SETUP #line 77 "sphinxql.l" { YYSTOREBOUNDS; return TOK_NOT; } YY_BREAK case 47: YY_RULE_SETUP #line 78 "sphinxql.l" { YYSTOREBOUNDS; return TOK_NULL; } YY_BREAK case 48: YY_RULE_SETUP #line 79 "sphinxql.l" { YYSTOREBOUNDS; return TOK_OPTION; } YY_BREAK case 49: YY_RULE_SETUP #line 80 "sphinxql.l" { YYSTOREBOUNDS; return TOK_OR; } YY_BREAK case 50: YY_RULE_SETUP #line 81 "sphinxql.l" { YYSTOREBOUNDS; return TOK_ORDER; } YY_BREAK case 51: YY_RULE_SETUP #line 82 "sphinxql.l" { YYSTOREBOUNDS; return TOK_RAND; } YY_BREAK case 52: YY_RULE_SETUP #line 83 "sphinxql.l" { YYSTOREBOUNDS; return TOK_READ; } YY_BREAK case 53: YY_RULE_SETUP #line 84 "sphinxql.l" { YYSTOREBOUNDS; return TOK_REPEATABLE; } YY_BREAK case 54: YY_RULE_SETUP #line 85 "sphinxql.l" { YYSTOREBOUNDS; return TOK_REPLACE; } YY_BREAK case 55: YY_RULE_SETUP #line 86 "sphinxql.l" { YYSTOREBOUNDS; return TOK_RETURNS; } YY_BREAK case 56: YY_RULE_SETUP #line 87 "sphinxql.l" { YYSTOREBOUNDS; return TOK_ROLLBACK; } YY_BREAK case 57: YY_RULE_SETUP #line 88 "sphinxql.l" { YYSTOREBOUNDS; return TOK_RTINDEX; } YY_BREAK case 58: YY_RULE_SETUP #line 89 "sphinxql.l" { YYSTOREBOUNDS; return TOK_SELECT; } YY_BREAK case 59: YY_RULE_SETUP #line 90 "sphinxql.l" { YYSTOREBOUNDS; return TOK_SERIALIZABLE; } YY_BREAK case 60: YY_RULE_SETUP #line 91 "sphinxql.l" { YYSTOREBOUNDS; return TOK_SET; } YY_BREAK case 61: YY_RULE_SETUP #line 92 "sphinxql.l" { YYSTOREBOUNDS; return TOK_SESSION; } YY_BREAK case 62: YY_RULE_SETUP #line 93 "sphinxql.l" { YYSTOREBOUNDS; return TOK_SHOW; } YY_BREAK case 63: YY_RULE_SETUP #line 94 "sphinxql.l" { YYSTOREBOUNDS; return TOK_SONAME; } YY_BREAK case 64: YY_RULE_SETUP #line 95 "sphinxql.l" { YYSTOREBOUNDS; return TOK_START; } YY_BREAK case 65: YY_RULE_SETUP #line 96 "sphinxql.l" { YYSTOREBOUNDS; return TOK_STATUS; } YY_BREAK case 66: YY_RULE_SETUP #line 97 "sphinxql.l" { YYSTOREBOUNDS; return TOK_SUM; } YY_BREAK case 67: YY_RULE_SETUP #line 
98 "sphinxql.l" { YYSTOREBOUNDS; return TOK_TABLES; } YY_BREAK case 68: YY_RULE_SETUP #line 99 "sphinxql.l" { YYSTOREBOUNDS; return TOK_TO; } YY_BREAK case 69: YY_RULE_SETUP #line 100 "sphinxql.l" { YYSTOREBOUNDS; return TOK_TRANSACTION; } YY_BREAK case 70: YY_RULE_SETUP #line 101 "sphinxql.l" { YYSTOREBOUNDS; return TOK_TRUE; } YY_BREAK case 71: YY_RULE_SETUP #line 102 "sphinxql.l" { YYSTOREBOUNDS; return TOK_UNCOMMITTED; } YY_BREAK case 72: YY_RULE_SETUP #line 103 "sphinxql.l" { YYSTOREBOUNDS; return TOK_UPDATE; } YY_BREAK case 73: YY_RULE_SETUP #line 104 "sphinxql.l" { YYSTOREBOUNDS; return TOK_VALUES; } YY_BREAK case 74: YY_RULE_SETUP #line 105 "sphinxql.l" { YYSTOREBOUNDS; return TOK_VARIABLES; } YY_BREAK case 75: YY_RULE_SETUP #line 106 "sphinxql.l" { YYSTOREBOUNDS; return TOK_WARNINGS; } YY_BREAK case 76: YY_RULE_SETUP #line 107 "sphinxql.l" { YYSTOREBOUNDS; return TOK_WEIGHT; } YY_BREAK case 77: YY_RULE_SETUP #line 108 "sphinxql.l" { YYSTOREBOUNDS; return TOK_WHERE; } YY_BREAK case 78: YY_RULE_SETUP #line 109 "sphinxql.l" { YYSTOREBOUNDS; return TOK_WITHIN; } YY_BREAK case 79: YY_RULE_SETUP #line 111 "sphinxql.l" { YYSTOREBOUNDS; return TOK_NE; } YY_BREAK case 80: YY_RULE_SETUP #line 112 "sphinxql.l" { YYSTOREBOUNDS; return TOK_NE; } YY_BREAK case 81: YY_RULE_SETUP #line 113 "sphinxql.l" { YYSTOREBOUNDS; return TOK_LTE; } YY_BREAK case 82: YY_RULE_SETUP #line 114 "sphinxql.l" { YYSTOREBOUNDS; return TOK_GTE; } YY_BREAK case 83: YY_RULE_SETUP #line 115 "sphinxql.l" { YYSTOREBOUNDS; return '='; } YY_BREAK case 84: /* rule 84 can match eol */ YY_RULE_SETUP #line 117 "sphinxql.l" { YYSTOREBOUNDS; SqlUnescape ( lvalp->m_sValue, yytext, yyleng ); return TOK_QUOTED_STRING; } YY_BREAK case 85: YY_RULE_SETUP #line 119 "sphinxql.l" { YYSTOREBOUNDS; lvalp->m_sValue = yytext; lvalp->m_fValue = (float)strtod ( yytext, NULL ); return TOK_CONST_FLOAT; } YY_BREAK case 86: YY_RULE_SETUP #line 120 "sphinxql.l" { YYSTOREBOUNDS; lvalp->m_sValue = yytext; lvalp->m_iValue = strtoll ( yytext, NULL, 10 ); return TOK_CONST_INT; } YY_BREAK case 87: YY_RULE_SETUP #line 122 "sphinxql.l" { YYSTOREBOUNDS; lvalp->m_sValue = yytext; return TOK_ATIDENT; } YY_BREAK case 88: YY_RULE_SETUP #line 123 "sphinxql.l" { YYSTOREBOUNDS; lvalp->m_sValue = yytext; return TOK_ATIDENT; } YY_BREAK case 89: YY_RULE_SETUP #line 124 "sphinxql.l" { YYSTOREBOUNDS; lvalp->m_sValue = yytext; return TOK_ATIDENT; } YY_BREAK case 90: YY_RULE_SETUP #line 125 "sphinxql.l" { YYSTOREBOUNDS; lvalp->m_sValue = yytext; return TOK_IDENT; } YY_BREAK case 91: YY_RULE_SETUP #line 126 "sphinxql.l" { YYSTOREBOUNDS; lvalp->m_sValue = yytext; return TOK_USERVAR; } YY_BREAK case 92: YY_RULE_SETUP #line 127 "sphinxql.l" { YYSTOREBOUNDS; lvalp->m_sValue = yytext; return TOK_SYSVAR; } YY_BREAK case 93: YY_RULE_SETUP #line 128 "sphinxql.l" { YYSTOREBOUNDS; lvalp->m_iStart++; lvalp->m_iEnd--; lvalp->m_sValue.SetBinary ( yytext+1, strlen(yytext)-2 ); return TOK_IDENT; } YY_BREAK case 94: /* rule 94 can match eol */ YY_RULE_SETUP #line 130 "sphinxql.l" { ; } YY_BREAK case 95: YY_RULE_SETUP #line 131 "sphinxql.l" { YYSTOREBOUNDS; return yytext[0]; } YY_BREAK case 96: YY_RULE_SETUP #line 133 "sphinxql.l" ECHO; YY_BREAK #line 1547 "llsphinxql.c" case YY_STATE_EOF(INITIAL): case YY_STATE_EOF(ccomment): yyterminate(); case YY_END_OF_BUFFER: { /* Amount of text matched not including the EOB char. */ int yy_amount_of_matched_text = (int) (yy_cp - yyg->yytext_ptr) - 1; /* Undo the effects of YY_DO_BEFORE_ACTION. 
*/ *yy_cp = yyg->yy_hold_char; YY_RESTORE_YY_MORE_OFFSET if ( YY_CURRENT_BUFFER_LVALUE->yy_buffer_status == YY_BUFFER_NEW ) { /* We're scanning a new file or input source. It's * possible that this happened because the user * just pointed yyin at a new source and called * yylex(). If so, then we have to assure * consistency between YY_CURRENT_BUFFER and our * globals. Here is the right place to do so, because * this is the first action (other than possibly a * back-up) that will match for the new input source. */ yyg->yy_n_chars = YY_CURRENT_BUFFER_LVALUE->yy_n_chars; YY_CURRENT_BUFFER_LVALUE->yy_input_file = yyin; YY_CURRENT_BUFFER_LVALUE->yy_buffer_status = YY_BUFFER_NORMAL; } /* Note that here we test for yy_c_buf_p "<=" to the position * of the first EOB in the buffer, since yy_c_buf_p will * already have been incremented past the NUL character * (since all states make transitions on EOB to the * end-of-buffer state). Contrast this with the test * in input(). */ if ( yyg->yy_c_buf_p <= &YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[yyg->yy_n_chars] ) { /* This was really a NUL. */ yy_state_type yy_next_state; yyg->yy_c_buf_p = yyg->yytext_ptr + yy_amount_of_matched_text; yy_current_state = yy_get_previous_state( yyscanner ); /* Okay, we're now positioned to make the NUL * transition. We couldn't have * yy_get_previous_state() go ahead and do it * for us because it doesn't know how to deal * with the possibility of jamming (and we don't * want to build jamming into it because then it * will run more slowly). */ yy_next_state = yy_try_NUL_trans( yy_current_state , yyscanner); yy_bp = yyg->yytext_ptr + YY_MORE_ADJ; if ( yy_next_state ) { /* Consume the NUL. */ yy_cp = ++yyg->yy_c_buf_p; yy_current_state = yy_next_state; goto yy_match; } else { yy_cp = yyg->yy_c_buf_p; goto yy_find_action; } } else switch ( yy_get_next_buffer( yyscanner ) ) { case EOB_ACT_END_OF_FILE: { yyg->yy_did_buffer_switch_on_eof = 0; if ( yywrap(yyscanner ) ) { /* Note: because we've taken care in * yy_get_next_buffer() to have set up * yytext, we can now set up * yy_c_buf_p so that if some total * hoser (like flex itself) wants to * call the scanner after we return the * YY_NULL, it'll still work - another * YY_NULL will get returned. */ yyg->yy_c_buf_p = yyg->yytext_ptr + YY_MORE_ADJ; yy_act = YY_STATE_EOF(YY_START); goto do_action; } else { if ( ! 
yyg->yy_did_buffer_switch_on_eof ) YY_NEW_FILE; } break; } case EOB_ACT_CONTINUE_SCAN: yyg->yy_c_buf_p = yyg->yytext_ptr + yy_amount_of_matched_text; yy_current_state = yy_get_previous_state( yyscanner ); yy_cp = yyg->yy_c_buf_p; yy_bp = yyg->yytext_ptr + YY_MORE_ADJ; goto yy_match; case EOB_ACT_LAST_MATCH: yyg->yy_c_buf_p = &YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[yyg->yy_n_chars]; yy_current_state = yy_get_previous_state( yyscanner ); yy_cp = yyg->yy_c_buf_p; yy_bp = yyg->yytext_ptr + YY_MORE_ADJ; goto yy_find_action; } break; } default: YY_FATAL_ERROR( "fatal flex scanner internal error--no action found" ); } /* end of action switch */ } /* end of scanning one token */ } /* end of yylex */ /* yy_get_next_buffer - try to read in a new buffer * * Returns a code representing an action: * EOB_ACT_LAST_MATCH - * EOB_ACT_CONTINUE_SCAN - continue scanning from current position * EOB_ACT_END_OF_FILE - end of file */ static int yy_get_next_buffer (yyscan_t yyscanner) { struct yyguts_t * yyg = (struct yyguts_t*)yyscanner; register char *dest = YY_CURRENT_BUFFER_LVALUE->yy_ch_buf; register char *source = yyg->yytext_ptr; register int number_to_move, i; int ret_val; if ( yyg->yy_c_buf_p > &YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[yyg->yy_n_chars + 1] ) YY_FATAL_ERROR( "fatal flex scanner internal error--end of buffer missed" ); if ( YY_CURRENT_BUFFER_LVALUE->yy_fill_buffer == 0 ) { /* Don't try to fill the buffer, so this is an EOF. */ if ( yyg->yy_c_buf_p - yyg->yytext_ptr - YY_MORE_ADJ == 1 ) { /* We matched a single character, the EOB, so * treat this as a final EOF. */ return EOB_ACT_END_OF_FILE; } else { /* We matched some text prior to the EOB, first * process it. */ return EOB_ACT_LAST_MATCH; } } /* Try to read more data. */ /* First move last chars to start of buffer. */ number_to_move = (int) (yyg->yy_c_buf_p - yyg->yytext_ptr) - 1; for ( i = 0; i < number_to_move; ++i ) *(dest++) = *(source++); if ( YY_CURRENT_BUFFER_LVALUE->yy_buffer_status == YY_BUFFER_EOF_PENDING ) /* don't do the read, it's not guaranteed to return an EOF, * just force an EOF */ YY_CURRENT_BUFFER_LVALUE->yy_n_chars = yyg->yy_n_chars = 0; else { int num_to_read = YY_CURRENT_BUFFER_LVALUE->yy_buf_size - number_to_move - 1; while ( num_to_read <= 0 ) { /* Not enough room in the buffer - grow it. */ /* just a shorter name for the current buffer */ YY_BUFFER_STATE b = YY_CURRENT_BUFFER; int yy_c_buf_p_offset = (int) (yyg->yy_c_buf_p - b->yy_ch_buf); if ( b->yy_is_our_buffer ) { int new_size = b->yy_buf_size * 2; if ( new_size <= 0 ) b->yy_buf_size += b->yy_buf_size / 8; else b->yy_buf_size *= 2; b->yy_ch_buf = (char *) /* Include room in for 2 EOB chars. */ yyrealloc((void *) b->yy_ch_buf,b->yy_buf_size + 2 ,yyscanner ); } else /* Can't grow it, we don't own it. */ b->yy_ch_buf = 0; if ( ! b->yy_ch_buf ) YY_FATAL_ERROR( "fatal error - scanner input buffer overflow" ); yyg->yy_c_buf_p = &b->yy_ch_buf[yy_c_buf_p_offset]; num_to_read = YY_CURRENT_BUFFER_LVALUE->yy_buf_size - number_to_move - 1; } if ( num_to_read > YY_READ_BUF_SIZE ) num_to_read = YY_READ_BUF_SIZE; /* Read in more data. 
*/ YY_INPUT( (&YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[number_to_move]), yyg->yy_n_chars, num_to_read ); YY_CURRENT_BUFFER_LVALUE->yy_n_chars = yyg->yy_n_chars; } if ( yyg->yy_n_chars == 0 ) { if ( number_to_move == YY_MORE_ADJ ) { ret_val = EOB_ACT_END_OF_FILE; yyrestart(yyin ,yyscanner); } else { ret_val = EOB_ACT_LAST_MATCH; YY_CURRENT_BUFFER_LVALUE->yy_buffer_status = YY_BUFFER_EOF_PENDING; } } else ret_val = EOB_ACT_CONTINUE_SCAN; if ((yy_size_t) (yyg->yy_n_chars + number_to_move) > YY_CURRENT_BUFFER_LVALUE->yy_buf_size) { /* Extend the array by 50%, plus the number we really need. */ yy_size_t new_size = yyg->yy_n_chars + number_to_move + (yyg->yy_n_chars >> 1); YY_CURRENT_BUFFER_LVALUE->yy_ch_buf = (char *) yyrealloc((void *) YY_CURRENT_BUFFER_LVALUE->yy_ch_buf,new_size ,yyscanner ); if ( ! YY_CURRENT_BUFFER_LVALUE->yy_ch_buf ) YY_FATAL_ERROR( "out of dynamic memory in yy_get_next_buffer()" ); } yyg->yy_n_chars += number_to_move; YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[yyg->yy_n_chars] = YY_END_OF_BUFFER_CHAR; YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[yyg->yy_n_chars + 1] = YY_END_OF_BUFFER_CHAR; yyg->yytext_ptr = &YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[0]; return ret_val; } /* yy_get_previous_state - get the state just before the EOB char was reached */ static yy_state_type yy_get_previous_state (yyscan_t yyscanner) { register yy_state_type yy_current_state; register char *yy_cp; struct yyguts_t * yyg = (struct yyguts_t*)yyscanner; yy_current_state = yyg->yy_start; for ( yy_cp = yyg->yytext_ptr + YY_MORE_ADJ; yy_cp < yyg->yy_c_buf_p; ++yy_cp ) { register YY_CHAR yy_c = (*yy_cp ? yy_ec[YY_SC_TO_UI(*yy_cp)] : 1); if ( yy_accept[yy_current_state] ) { yyg->yy_last_accepting_state = yy_current_state; yyg->yy_last_accepting_cpos = yy_cp; } while ( yy_chk[yy_base[yy_current_state] + yy_c] != yy_current_state ) { yy_current_state = (int) yy_def[yy_current_state]; if ( yy_current_state >= 374 ) yy_c = yy_meta[(unsigned int) yy_c]; } yy_current_state = yy_nxt[yy_base[yy_current_state] + (unsigned int) yy_c]; } return yy_current_state; } /* yy_try_NUL_trans - try to make a transition on the NUL character * * synopsis * next_state = yy_try_NUL_trans( current_state ); */ static yy_state_type yy_try_NUL_trans (yy_state_type yy_current_state , yyscan_t yyscanner) { register int yy_is_jam; struct yyguts_t * yyg = (struct yyguts_t*)yyscanner; /* This var may be unused depending upon options. */ register char *yy_cp = yyg->yy_c_buf_p; register YY_CHAR yy_c = 1; if ( yy_accept[yy_current_state] ) { yyg->yy_last_accepting_state = yy_current_state; yyg->yy_last_accepting_cpos = yy_cp; } while ( yy_chk[yy_base[yy_current_state] + yy_c] != yy_current_state ) { yy_current_state = (int) yy_def[yy_current_state]; if ( yy_current_state >= 374 ) yy_c = yy_meta[(unsigned int) yy_c]; } yy_current_state = yy_nxt[yy_base[yy_current_state] + (unsigned int) yy_c]; yy_is_jam = (yy_current_state == 373); return yy_is_jam ? 0 : yy_current_state; } #ifndef YY_NO_INPUT #ifdef __cplusplus static int yyinput (yyscan_t yyscanner) #else static int input (yyscan_t yyscanner) #endif { int c; struct yyguts_t * yyg = (struct yyguts_t*)yyscanner; *yyg->yy_c_buf_p = yyg->yy_hold_char; if ( *yyg->yy_c_buf_p == YY_END_OF_BUFFER_CHAR ) { /* yy_c_buf_p now points to the character we want to return. * If this occurs *before* the EOB characters, then it's a * valid NUL; if not, then we've hit the end of the buffer. */ if ( yyg->yy_c_buf_p < &YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[yyg->yy_n_chars] ) /* This was really a NUL. 
*/ *yyg->yy_c_buf_p = '\0'; else { /* need more input */ int offset = yyg->yy_c_buf_p - yyg->yytext_ptr; ++yyg->yy_c_buf_p; switch ( yy_get_next_buffer( yyscanner ) ) { case EOB_ACT_LAST_MATCH: /* This happens because yy_g_n_b() * sees that we've accumulated a * token and flags that we need to * try matching the token before * proceeding. But for input(), * there's no matching to consider. * So convert the EOB_ACT_LAST_MATCH * to EOB_ACT_END_OF_FILE. */ /* Reset buffer status. */ yyrestart(yyin ,yyscanner); /*FALLTHROUGH*/ case EOB_ACT_END_OF_FILE: { if ( yywrap(yyscanner ) ) return EOF; if ( ! yyg->yy_did_buffer_switch_on_eof ) YY_NEW_FILE; #ifdef __cplusplus return yyinput(yyscanner); #else return input(yyscanner); #endif } case EOB_ACT_CONTINUE_SCAN: yyg->yy_c_buf_p = yyg->yytext_ptr + offset; break; } } } c = *(unsigned char *) yyg->yy_c_buf_p; /* cast for 8-bit char's */ *yyg->yy_c_buf_p = '\0'; /* preserve yytext */ yyg->yy_hold_char = *++yyg->yy_c_buf_p; return c; } #endif /* ifndef YY_NO_INPUT */ /** Immediately switch to a different input stream. * @param input_file A readable stream. * @param yyscanner The scanner object. * @note This function does not reset the start condition to @c INITIAL . */ void yyrestart (FILE * input_file , yyscan_t yyscanner) { struct yyguts_t * yyg = (struct yyguts_t*)yyscanner; if ( ! YY_CURRENT_BUFFER ){ yyensure_buffer_stack (yyscanner); YY_CURRENT_BUFFER_LVALUE = yy_create_buffer(yyin,YY_BUF_SIZE ,yyscanner); } yy_init_buffer(YY_CURRENT_BUFFER,input_file ,yyscanner); yy_load_buffer_state(yyscanner ); } /** Switch to a different input buffer. * @param new_buffer The new input buffer. * @param yyscanner The scanner object. */ void yy_switch_to_buffer (YY_BUFFER_STATE new_buffer , yyscan_t yyscanner) { struct yyguts_t * yyg = (struct yyguts_t*)yyscanner; /* TODO. We should be able to replace this entire function body * with * yypop_buffer_state(); * yypush_buffer_state(new_buffer); */ yyensure_buffer_stack (yyscanner); if ( YY_CURRENT_BUFFER == new_buffer ) return; if ( YY_CURRENT_BUFFER ) { /* Flush out information for old buffer. */ *yyg->yy_c_buf_p = yyg->yy_hold_char; YY_CURRENT_BUFFER_LVALUE->yy_buf_pos = yyg->yy_c_buf_p; YY_CURRENT_BUFFER_LVALUE->yy_n_chars = yyg->yy_n_chars; } YY_CURRENT_BUFFER_LVALUE = new_buffer; yy_load_buffer_state(yyscanner ); /* We don't actually know whether we did this switch during * EOF (yywrap()) processing, but the only time this flag * is looked at is after yywrap() is called, so it's safe * to go ahead and always set it. */ yyg->yy_did_buffer_switch_on_eof = 1; } static void yy_load_buffer_state (yyscan_t yyscanner) { struct yyguts_t * yyg = (struct yyguts_t*)yyscanner; yyg->yy_n_chars = YY_CURRENT_BUFFER_LVALUE->yy_n_chars; yyg->yytext_ptr = yyg->yy_c_buf_p = YY_CURRENT_BUFFER_LVALUE->yy_buf_pos; yyin = YY_CURRENT_BUFFER_LVALUE->yy_input_file; yyg->yy_hold_char = *yyg->yy_c_buf_p; } /** Allocate and initialize an input buffer state. * @param file A readable stream. * @param size The character buffer size in bytes. When in doubt, use @c YY_BUF_SIZE. * @param yyscanner The scanner object. * @return the allocated buffer state. */ YY_BUFFER_STATE yy_create_buffer (FILE * file, int size , yyscan_t yyscanner) { YY_BUFFER_STATE b; b = (YY_BUFFER_STATE) yyalloc(sizeof( struct yy_buffer_state ) ,yyscanner ); if ( ! b ) YY_FATAL_ERROR( "out of dynamic memory in yy_create_buffer()" ); b->yy_buf_size = size; /* yy_ch_buf has to be 2 characters longer than the size given because * we need to put in 2 end-of-buffer characters. 
*/ b->yy_ch_buf = (char *) yyalloc(b->yy_buf_size + 2 ,yyscanner ); if ( ! b->yy_ch_buf ) YY_FATAL_ERROR( "out of dynamic memory in yy_create_buffer()" ); b->yy_is_our_buffer = 1; yy_init_buffer(b,file ,yyscanner); return b; } /** Destroy the buffer. * @param b a buffer created with yy_create_buffer() * @param yyscanner The scanner object. */ void yy_delete_buffer (YY_BUFFER_STATE b , yyscan_t yyscanner) { struct yyguts_t * yyg = (struct yyguts_t*)yyscanner; if ( ! b ) return; if ( b == YY_CURRENT_BUFFER ) /* Not sure if we should pop here. */ YY_CURRENT_BUFFER_LVALUE = (YY_BUFFER_STATE) 0; if ( b->yy_is_our_buffer ) yyfree((void *) b->yy_ch_buf ,yyscanner ); yyfree((void *) b ,yyscanner ); } #ifndef __cplusplus extern int isatty (int ); #endif /* __cplusplus */ /* Initializes or reinitializes a buffer. * This function is sometimes called more than once on the same buffer, * such as during a yyrestart() or at EOF. */ static void yy_init_buffer (YY_BUFFER_STATE b, FILE * file , yyscan_t yyscanner) { int oerrno = errno; struct yyguts_t * yyg = (struct yyguts_t*)yyscanner; yy_flush_buffer(b ,yyscanner); b->yy_input_file = file; b->yy_fill_buffer = 1; /* If b is the current buffer, then yy_init_buffer was _probably_ * called from yyrestart() or through yy_get_next_buffer. * In that case, we don't want to reset the lineno or column. */ if (b != YY_CURRENT_BUFFER){ b->yy_bs_lineno = 1; b->yy_bs_column = 0; } b->yy_is_interactive = file ? (isatty( fileno(file) ) > 0) : 0; errno = oerrno; } /** Discard all buffered characters. On the next scan, YY_INPUT will be called. * @param b the buffer state to be flushed, usually @c YY_CURRENT_BUFFER. * @param yyscanner The scanner object. */ void yy_flush_buffer (YY_BUFFER_STATE b , yyscan_t yyscanner) { struct yyguts_t * yyg = (struct yyguts_t*)yyscanner; if ( ! b ) return; b->yy_n_chars = 0; /* We always need two end-of-buffer characters. The first causes * a transition to the end-of-buffer state. The second causes * a jam in that state. */ b->yy_ch_buf[0] = YY_END_OF_BUFFER_CHAR; b->yy_ch_buf[1] = YY_END_OF_BUFFER_CHAR; b->yy_buf_pos = &b->yy_ch_buf[0]; b->yy_at_bol = 1; b->yy_buffer_status = YY_BUFFER_NEW; if ( b == YY_CURRENT_BUFFER ) yy_load_buffer_state(yyscanner ); } /** Pushes the new state onto the stack. The new state becomes * the current state. This function will allocate the stack * if necessary. * @param new_buffer The new state. * @param yyscanner The scanner object. */ void yypush_buffer_state (YY_BUFFER_STATE new_buffer , yyscan_t yyscanner) { struct yyguts_t * yyg = (struct yyguts_t*)yyscanner; if (new_buffer == NULL) return; yyensure_buffer_stack(yyscanner); /* This block is copied from yy_switch_to_buffer. */ if ( YY_CURRENT_BUFFER ) { /* Flush out information for old buffer. */ *yyg->yy_c_buf_p = yyg->yy_hold_char; YY_CURRENT_BUFFER_LVALUE->yy_buf_pos = yyg->yy_c_buf_p; YY_CURRENT_BUFFER_LVALUE->yy_n_chars = yyg->yy_n_chars; } /* Only push if top exists. Otherwise, replace top. */ if (YY_CURRENT_BUFFER) yyg->yy_buffer_stack_top++; YY_CURRENT_BUFFER_LVALUE = new_buffer; /* copied from yy_switch_to_buffer. */ yy_load_buffer_state(yyscanner ); yyg->yy_did_buffer_switch_on_eof = 1; } /** Removes and deletes the top of the stack, if present. * The next element becomes the new top. * @param yyscanner The scanner object. 
*/ void yypop_buffer_state (yyscan_t yyscanner) { struct yyguts_t * yyg = (struct yyguts_t*)yyscanner; if (!YY_CURRENT_BUFFER) return; yy_delete_buffer(YY_CURRENT_BUFFER ,yyscanner); YY_CURRENT_BUFFER_LVALUE = NULL; if (yyg->yy_buffer_stack_top > 0) --yyg->yy_buffer_stack_top; if (YY_CURRENT_BUFFER) { yy_load_buffer_state(yyscanner ); yyg->yy_did_buffer_switch_on_eof = 1; } } /* Allocates the stack if it does not exist. * Guarantees space for at least one push. */ static void yyensure_buffer_stack (yyscan_t yyscanner) { int num_to_alloc; struct yyguts_t * yyg = (struct yyguts_t*)yyscanner; if (!yyg->yy_buffer_stack) { /* First allocation is just for 2 elements, since we don't know if this * scanner will even need a stack. We use 2 instead of 1 to avoid an * immediate realloc on the next call. */ num_to_alloc = 1; yyg->yy_buffer_stack = (struct yy_buffer_state**)yyalloc (num_to_alloc * sizeof(struct yy_buffer_state*) , yyscanner); if ( ! yyg->yy_buffer_stack ) YY_FATAL_ERROR( "out of dynamic memory in yyensure_buffer_stack()" ); memset(yyg->yy_buffer_stack, 0, num_to_alloc * sizeof(struct yy_buffer_state*)); yyg->yy_buffer_stack_max = num_to_alloc; yyg->yy_buffer_stack_top = 0; return; } if (yyg->yy_buffer_stack_top >= (yyg->yy_buffer_stack_max) - 1){ /* Increase the buffer to prepare for a possible push. */ int grow_size = 8 /* arbitrary grow size */; num_to_alloc = yyg->yy_buffer_stack_max + grow_size; yyg->yy_buffer_stack = (struct yy_buffer_state**)yyrealloc (yyg->yy_buffer_stack, num_to_alloc * sizeof(struct yy_buffer_state*) , yyscanner); if ( ! yyg->yy_buffer_stack ) YY_FATAL_ERROR( "out of dynamic memory in yyensure_buffer_stack()" ); /* zero only the new slots.*/ memset(yyg->yy_buffer_stack + yyg->yy_buffer_stack_max, 0, grow_size * sizeof(struct yy_buffer_state*)); yyg->yy_buffer_stack_max = num_to_alloc; } } /** Setup the input buffer state to scan directly from a user-specified character buffer. * @param base the character buffer * @param size the size in bytes of the character buffer * @param yyscanner The scanner object. * @return the newly allocated buffer state object. */ YY_BUFFER_STATE yy_scan_buffer (char * base, yy_size_t size , yyscan_t yyscanner) { YY_BUFFER_STATE b; if ( size < 2 || base[size-2] != YY_END_OF_BUFFER_CHAR || base[size-1] != YY_END_OF_BUFFER_CHAR ) /* They forgot to leave room for the EOB's. */ return 0; b = (YY_BUFFER_STATE) yyalloc(sizeof( struct yy_buffer_state ) ,yyscanner ); if ( ! b ) YY_FATAL_ERROR( "out of dynamic memory in yy_scan_buffer()" ); b->yy_buf_size = size - 2; /* "- 2" to take care of EOB's */ b->yy_buf_pos = b->yy_ch_buf = base; b->yy_is_our_buffer = 0; b->yy_input_file = 0; b->yy_n_chars = b->yy_buf_size; b->yy_is_interactive = 0; b->yy_at_bol = 1; b->yy_fill_buffer = 0; b->yy_buffer_status = YY_BUFFER_NEW; yy_switch_to_buffer(b ,yyscanner ); return b; } /** Setup the input buffer state to scan a string. The next call to yylex() will * scan from a @e copy of @a str. * @param yystr a NUL-terminated string to scan * @param yyscanner The scanner object. * @return the newly allocated buffer state object. * @note If you want to scan bytes that may contain NUL values, then use * yy_scan_bytes() instead. */ YY_BUFFER_STATE yy_scan_string (yyconst char * yystr , yyscan_t yyscanner) { return yy_scan_bytes(yystr,strlen(yystr) ,yyscanner); } /** Setup the input buffer state to scan the given bytes. The next call to yylex() will * scan from a @e copy of @a bytes. 
* @param bytes the byte buffer to scan * @param len the number of bytes in the buffer pointed to by @a bytes. * @param yyscanner The scanner object. * @return the newly allocated buffer state object. */ YY_BUFFER_STATE yy_scan_bytes (yyconst char * yybytes, int _yybytes_len , yyscan_t yyscanner) { YY_BUFFER_STATE b; char *buf; yy_size_t n; int i; /* Get memory for full buffer, including space for trailing EOB's. */ n = _yybytes_len + 2; buf = (char *) yyalloc(n ,yyscanner ); if ( ! buf ) YY_FATAL_ERROR( "out of dynamic memory in yy_scan_bytes()" ); for ( i = 0; i < _yybytes_len; ++i ) buf[i] = yybytes[i]; buf[_yybytes_len] = buf[_yybytes_len+1] = YY_END_OF_BUFFER_CHAR; b = yy_scan_buffer(buf,n ,yyscanner); if ( ! b ) YY_FATAL_ERROR( "bad buffer in yy_scan_bytes()" ); /* It's okay to grow etc. this buffer, and we should throw it * away when we're done. */ b->yy_is_our_buffer = 1; return b; } #ifndef YY_EXIT_FAILURE #define YY_EXIT_FAILURE 2 #endif static void yy_fatal_error (yyconst char* msg , yyscan_t yyscanner) { (void) fprintf( stderr, "%s\n", msg ); exit( YY_EXIT_FAILURE ); } /* Redefine yyless() so it works in section 3 code. */ #undef yyless #define yyless(n) \ do \ { \ /* Undo effects of setting up yytext. */ \ int yyless_macro_arg = (n); \ YY_LESS_LINENO(yyless_macro_arg);\ yytext[yyleng] = yyg->yy_hold_char; \ yyg->yy_c_buf_p = yytext + yyless_macro_arg; \ yyg->yy_hold_char = *yyg->yy_c_buf_p; \ *yyg->yy_c_buf_p = '\0'; \ yyleng = yyless_macro_arg; \ } \ while ( 0 ) /* Accessor methods (get/set functions) to struct members. */ /** Get the user-defined data for this scanner. * @param yyscanner The scanner object. */ YY_EXTRA_TYPE yyget_extra (yyscan_t yyscanner) { struct yyguts_t * yyg = (struct yyguts_t*)yyscanner; return yyextra; } /** Get the current line number. * @param yyscanner The scanner object. */ int yyget_lineno (yyscan_t yyscanner) { struct yyguts_t * yyg = (struct yyguts_t*)yyscanner; if (! YY_CURRENT_BUFFER) return 0; return yylineno; } /** Get the current column number. * @param yyscanner The scanner object. */ int yyget_column (yyscan_t yyscanner) { struct yyguts_t * yyg = (struct yyguts_t*)yyscanner; if (! YY_CURRENT_BUFFER) return 0; return yycolumn; } /** Get the input stream. * @param yyscanner The scanner object. */ FILE *yyget_in (yyscan_t yyscanner) { struct yyguts_t * yyg = (struct yyguts_t*)yyscanner; return yyin; } /** Get the output stream. * @param yyscanner The scanner object. */ FILE *yyget_out (yyscan_t yyscanner) { struct yyguts_t * yyg = (struct yyguts_t*)yyscanner; return yyout; } /** Get the length of the current token. * @param yyscanner The scanner object. */ int yyget_leng (yyscan_t yyscanner) { struct yyguts_t * yyg = (struct yyguts_t*)yyscanner; return yyleng; } /** Get the current token. * @param yyscanner The scanner object. */ char *yyget_text (yyscan_t yyscanner) { struct yyguts_t * yyg = (struct yyguts_t*)yyscanner; return yytext; } /** Set the user-defined data. This data is never touched by the scanner. * @param user_defined The data to be associated with this scanner. * @param yyscanner The scanner object. */ void yyset_extra (YY_EXTRA_TYPE user_defined , yyscan_t yyscanner) { struct yyguts_t * yyg = (struct yyguts_t*)yyscanner; yyextra = user_defined ; } /** Set the current line number. * @param line_number * @param yyscanner The scanner object. */ void yyset_lineno (int line_number , yyscan_t yyscanner) { struct yyguts_t * yyg = (struct yyguts_t*)yyscanner; /* lineno is only valid if an input buffer exists. */ if (! 
YY_CURRENT_BUFFER ) yy_fatal_error( "yyset_lineno called with no buffer" , yyscanner); yylineno = line_number; } /** Set the current column. * @param line_number * @param yyscanner The scanner object. */ void yyset_column (int column_no , yyscan_t yyscanner) { struct yyguts_t * yyg = (struct yyguts_t*)yyscanner; /* column is only valid if an input buffer exists. */ if (! YY_CURRENT_BUFFER ) yy_fatal_error( "yyset_column called with no buffer" , yyscanner); yycolumn = column_no; } /** Set the input stream. This does not discard the current * input buffer. * @param in_str A readable stream. * @param yyscanner The scanner object. * @see yy_switch_to_buffer */ void yyset_in (FILE * in_str , yyscan_t yyscanner) { struct yyguts_t * yyg = (struct yyguts_t*)yyscanner; yyin = in_str ; } void yyset_out (FILE * out_str , yyscan_t yyscanner) { struct yyguts_t * yyg = (struct yyguts_t*)yyscanner; yyout = out_str ; } int yyget_debug (yyscan_t yyscanner) { struct yyguts_t * yyg = (struct yyguts_t*)yyscanner; return yy_flex_debug; } void yyset_debug (int bdebug , yyscan_t yyscanner) { struct yyguts_t * yyg = (struct yyguts_t*)yyscanner; yy_flex_debug = bdebug ; } /* Accessor methods for yylval and yylloc */ /* User-visible API */ /* yylex_init is special because it creates the scanner itself, so it is * the ONLY reentrant function that doesn't take the scanner as the last argument. * That's why we explicitly handle the declaration, instead of using our macros. */ int yylex_init(yyscan_t* ptr_yy_globals) { if (ptr_yy_globals == NULL){ errno = EINVAL; return 1; } *ptr_yy_globals = (yyscan_t) yyalloc ( sizeof( struct yyguts_t ), NULL ); if (*ptr_yy_globals == NULL){ errno = ENOMEM; return 1; } /* By setting to 0xAA, we expose bugs in yy_init_globals. Leave at 0x00 for releases. */ memset(*ptr_yy_globals,0x00,sizeof(struct yyguts_t)); return yy_init_globals ( *ptr_yy_globals ); } /* yylex_init_extra has the same functionality as yylex_init, but follows the * convention of taking the scanner as the last argument. Note however, that * this is a *pointer* to a scanner, as it will be allocated by this call (and * is the reason, too, why this function also must handle its own declaration). * The user defined value in the first argument will be available to yyalloc in * the yyextra field. */ int yylex_init_extra(YY_EXTRA_TYPE yy_user_defined,yyscan_t* ptr_yy_globals ) { struct yyguts_t dummy_yyguts; yyset_extra (yy_user_defined, &dummy_yyguts); if (ptr_yy_globals == NULL){ errno = EINVAL; return 1; } *ptr_yy_globals = (yyscan_t) yyalloc ( sizeof( struct yyguts_t ), &dummy_yyguts ); if (*ptr_yy_globals == NULL){ errno = ENOMEM; return 1; } /* By setting to 0xAA, we expose bugs in yy_init_globals. Leave at 0x00 for releases. */ memset(*ptr_yy_globals,0x00,sizeof(struct yyguts_t)); yyset_extra (yy_user_defined, *ptr_yy_globals); return yy_init_globals ( *ptr_yy_globals ); } static int yy_init_globals (yyscan_t yyscanner) { struct yyguts_t * yyg = (struct yyguts_t*)yyscanner; /* Initialization is the same as for the non-reentrant scanner. * This function is called from yylex_destroy(), so don't allocate here. 
*/ yyg->yy_buffer_stack = 0; yyg->yy_buffer_stack_top = 0; yyg->yy_buffer_stack_max = 0; yyg->yy_c_buf_p = (char *) 0; yyg->yy_init = 0; yyg->yy_start = 0; yyg->yy_start_stack_ptr = 0; yyg->yy_start_stack_depth = 0; yyg->yy_start_stack = NULL; /* Defined in main.c */ #ifdef YY_STDINIT yyin = stdin; yyout = stdout; #else yyin = (FILE *) 0; yyout = (FILE *) 0; #endif /* For future reference: Set errno on error, since we are called by * yylex_init() */ return 0; } /* yylex_destroy is for both reentrant and non-reentrant scanners. */ int yylex_destroy (yyscan_t yyscanner) { struct yyguts_t * yyg = (struct yyguts_t*)yyscanner; /* Pop the buffer stack, destroying each element. */ while(YY_CURRENT_BUFFER){ yy_delete_buffer(YY_CURRENT_BUFFER ,yyscanner ); YY_CURRENT_BUFFER_LVALUE = NULL; yypop_buffer_state(yyscanner); } /* Destroy the stack itself. */ yyfree(yyg->yy_buffer_stack ,yyscanner); yyg->yy_buffer_stack = NULL; /* Destroy the start condition stack. */ yyfree(yyg->yy_start_stack ,yyscanner ); yyg->yy_start_stack = NULL; /* Reset the globals. This is important in a non-reentrant scanner so the next time * yylex() is called, initialization will occur. */ yy_init_globals( yyscanner); /* Destroy the main struct (reentrant only). */ yyfree ( yyscanner , yyscanner ); yyscanner = NULL; return 0; } /* * Internal utility routines. */ #ifndef yytext_ptr static void yy_flex_strncpy (char* s1, yyconst char * s2, int n , yyscan_t yyscanner) { register int i; for ( i = 0; i < n; ++i ) s1[i] = s2[i]; } #endif #ifdef YY_NEED_STRLEN static int yy_flex_strlen (yyconst char * s , yyscan_t yyscanner) { register int n; for ( n = 0; s[n]; ++n ) ; return n; } #endif void *yyalloc (yy_size_t size , yyscan_t yyscanner) { return (void *) malloc( size ); } void *yyrealloc (void * ptr, yy_size_t size , yyscan_t yyscanner) { /* The cast to (char *) in the following accommodates both * implementations that use char* generic pointers, and those * that use void* generic pointers. It works with the latter * because both ANSI C and C++ allow castless assignment from * any pointer type to void*, and deal with argument conversions * as though doing an assignment. */ return (void *) realloc( (char *) ptr, size ); } void yyfree (void * ptr , yyscan_t yyscanner) { free( (char *) ptr ); /* see yyrealloc() for (char *) cast */ } #define YYTABLES_NAME "yytables" #line 133 "sphinxql.l" // warning, lexer generator dependent! 
// flex inserts trailing zero as needed into the buffer when lexing
// but we need that rolled back when doing error reporting from yyerror
void yylex_unhold ( yyscan_t yyscanner )
{
	struct yyguts_t * yyg = (struct yyguts_t*)yyscanner;
	if ( YY_CURRENT_BUFFER )
	{
		*yyg->yy_c_buf_p = yyg->yy_hold_char;
		YY_CURRENT_BUFFER_LVALUE->yy_buf_pos = yyg->yy_c_buf_p;
		YY_CURRENT_BUFFER_LVALUE->yy_n_chars = yyg->yy_n_chars;
	}
}

#if USE_WINDOWS
#pragma warning(pop)
#endif

sphinx-2.0.4-release/src/sphinxquery.y

%{
#if USE_WINDOWS
#pragma warning(push,1)
#endif
%}

%lex-param		{ XQParser_t * pParser }
%parse-param	{ XQParser_t * pParser }
%pure-parser
%error-verbose

%union
{
	XQNode_t *		pNode;			// tree node

	struct
	{
		int			iValue;
		int			iStrIndex;
	} tInt;

	struct							// field spec
	{
		CSphSmallBitvec	dMask;		// acceptable fields mask
		int				iMaxPos;	// max allowed position within field
	} tFieldLimit;

	int				iZoneVec;
};

%token <pNode>			TOK_KEYWORD
%token <tInt>			TOK_NEAR
%token <tInt>			TOK_INT
%token <tFieldLimit>	TOK_FIELDLIMIT
%token <iZoneVec>		TOK_ZONE
%token					TOK_BEFORE
%token					TOK_SENTENCE
%token					TOK_PARAGRAPH

%type <pNode>			rawkeyword
%type <pNode>			keyword
%type <pNode>			phrasetoken
%type <pNode>			phrase
%type <pNode>			sp_item
%type <pNode>			sentence
%type <pNode>			paragraph
%type <pNode>			atom
%type <pNode>			orlist
%type <pNode>			orlistf
%type <pNode>			beforelist
%type <pNode>			expr

%left TOK_BEFORE TOK_NEAR

%%

query:
	expr							{ pParser->AddQuery ( $1 ); }
	;

expr:
	beforelist						{ $$ = $1; }
	| expr beforelist				{ $$ = pParser->AddOp ( SPH_QUERY_AND, $1, $2 ); }
	;

tok_limiter:
	// empty
	| TOK_FIELDLIMIT				{ pParser->SetFieldSpec ( $1.dMask, $1.iMaxPos ); }
	| TOK_ZONE						{ pParser->SetZoneVec ( $1 ); }
	;

beforelist:
	orlistf							{ $$ = $1; }
	| beforelist TOK_BEFORE orlistf	{ $$ = pParser->AddOp ( SPH_QUERY_BEFORE, $1, $3 ); }
	| beforelist TOK_NEAR orlistf	{ $$ = pParser->AddOp ( SPH_QUERY_NEAR, $1, $3, $2.iValue ); }
	;

orlistf:
	orlist							{ $$ = $1; }
	| tok_limiter '-' orlist		{ $$ = pParser->AddOp ( SPH_QUERY_NOT, $3, NULL ); }
	;

orlist:
	tok_limiter atom				{ $$ = $2; }
	| orlist '|' tok_limiter atom	{ $$ = pParser->AddOp ( SPH_QUERY_OR, $1, $4 ); }
	;

atom:
	keyword							{ $$ = $1; }
	| sentence						{ $$ = $1; }
	| paragraph						{ $$ = $1; }
	| '"' '"'						{ $$ = NULL; }
	| '"' '"' '~' TOK_INT			{ $$ = NULL; }
	| '"' '"' '/' TOK_INT			{ $$ = NULL; }
	| '"' phrase '"'				{ $$ = $2; if ( $$ ) { assert ( $$->m_dWords.GetLength() ); $$->SetOp ( SPH_QUERY_PHRASE); } }
	| '"' phrase '"' '~' TOK_INT	{ $$ = $2; if ( $$ ) { assert ( $$->m_dWords.GetLength() ); $$->SetOp ( SPH_QUERY_PROXIMITY ); $$->m_iOpArg = $5.iValue; } }
	| '"' phrase '"' '/' TOK_INT	{ $$ = $2; if ( $$ ) { assert ( $$->m_dWords.GetLength() ); $$->SetOp ( SPH_QUERY_QUORUM ); $$->m_iOpArg = $5.iValue; } }
	| '(' expr ')'					{ $$ = $2; if ( $$ ) $$->m_dSpec.Hide(); pParser->m_dStateSpec.Reset(); }
	;

keyword:
	rawkeyword
	| rawkeyword '$'				{ $$ = $1; assert ( $$->m_dWords.GetLength()==1 ); $$->m_dWords[0].m_bFieldEnd = true; }
	| '^' rawkeyword				{ $$ = $2; assert ( $$->m_dWords.GetLength()==1 ); $$->m_dWords[0].m_bFieldStart = true; }
	| '^' rawkeyword '$'			{ $$ = $2; assert ( $$->m_dWords.GetLength()==1 ); $$->m_dWords[0].m_bFieldStart = true; $$->m_dWords[0].m_bFieldEnd = true; }
	;

rawkeyword:
	TOK_KEYWORD						{ $$ = $1; }
	| TOK_INT						{ $$ = pParser->AddKeyword ( ( $1.iStrIndex>=0 ) ?
pParser->m_dIntTokens[$1.iStrIndex].cstr() : NULL ); } ; sentence: sp_item TOK_SENTENCE sp_item { $$ = pParser->AddOp ( SPH_QUERY_SENTENCE, $1, $3 ); } | sentence TOK_SENTENCE sp_item { $$ = pParser->AddOp ( SPH_QUERY_SENTENCE, $1, $3 ); } ; paragraph: sp_item TOK_PARAGRAPH sp_item { $$ = pParser->AddOp ( SPH_QUERY_PARAGRAPH, $1, $3 ); } | paragraph TOK_PARAGRAPH sp_item { $$ = pParser->AddOp ( SPH_QUERY_PARAGRAPH, $1, $3 ); } ; sp_item: keyword { $$ = $1; } | '"' phrase '"' { $$ = $2; if ( $$ ) { assert ( $$->m_dWords.GetLength() ); $$->SetOp ( SPH_QUERY_PHRASE); } } ; phrase: phrasetoken { $$ = $1; } | phrase phrasetoken { $$ = pParser->AddKeyword ( $1, $2 ); } ; phrasetoken: keyword { $$ = $1; } | '(' { $$ = NULL; } | ')' { $$ = NULL; } | '-' { $$ = NULL; } | '|' { $$ = NULL; } | '~' { $$ = NULL; } | '/' { $$ = NULL; } ; %% #if USE_WINDOWS #pragma warning(pop) #endif sphinx-2.0.4-release/src/sphinxfilter.cpp0000644000176700017710000003661011711621267017767 0ustar deogardeogar// // $Id: sphinxfilter.cpp 3087 2012-01-30 23:07:35Z shodan $ // // // Copyright (c) 2001-2012, Andrew Aksyonoff // Copyright (c) 2008-2012, Sphinx Technologies Inc // All rights reserved // // This program is free software; you can redistribute it and/or modify // it under the terms of the GNU General Public License. You should have // received a copy of the GPL license along with this program; if you // did not, you can find it at http://www.gnu.org/ // #include "sphinxfilter.h" #include "sphinxint.h" #if USE_WINDOWS #pragma warning(disable:4250) // inheritance via dominance is our intent #endif /// attribute-based struct IFilter_Attr: virtual ISphFilter { CSphAttrLocator m_tLocator; virtual void SetLocator ( const CSphAttrLocator & tLocator ) { m_tLocator = tLocator; } }; /// values struct IFilter_Values: virtual ISphFilter { const SphAttr_t * m_pValues; int m_iValueCount; IFilter_Values () : m_pValues ( NULL ) , m_iValueCount ( 0 ) {} virtual void SetValues ( const SphAttr_t * pStorage, int iCount ) { assert ( pStorage ); assert ( iCount > 0 ); #ifndef NDEBUG // values must be sorted for ( int i = 1; i < iCount; i++ ) assert ( pStorage[i-1]<=pStorage[i] ); #endif m_pValues = pStorage; m_iValueCount = iCount; } inline const SphAttr_t GetValue ( int iIndex ) const { assert ( iIndex>=0 && iIndex(*pB) ) return false; while ( pB-pA>1 ) { const SphAttr_t * pM = pA + ((pB-pA)/2); if ( uValue==(*pM) ) return true; if ( uValue<(*pM) ) pB = pM; else pA = pM; } return false; } // OPTIMIZE: use binary search bool IFilter_Values::EvalBlockValues ( SphAttr_t uBlockMin, SphAttr_t uBlockMax ) const { // is any of our values inside the block? 
for ( int i = 0; i < m_iValueCount; i++ ) if ( GetValue(i)>=uBlockMin && GetValue(i)<=uBlockMax ) return true; return false; } /// range struct IFilter_Range: virtual ISphFilter { SphAttr_t m_uMinValue; SphAttr_t m_uMaxValue; virtual void SetRange ( SphAttr_t tMin, SphAttr_t tMax ) { m_uMinValue = tMin; m_uMaxValue = tMax; } bool EvalRange ( const SphAttr_t uValue ) const { return uValue>=m_uMinValue && uValue<=m_uMaxValue; } }; /// MVA struct IFilter_MVA: virtual IFilter_Attr { const DWORD * m_pMvaStorage; IFilter_MVA () : m_pMvaStorage ( NULL ) {} virtual void SetMVAStorage ( const DWORD * pMva ) { m_pMvaStorage = pMva; } inline bool LoadMVA ( const CSphMatch & tMatch, const DWORD ** pMva, const DWORD ** pMvaMax ) const { assert ( m_pMvaStorage ); *pMva = tMatch.GetAttrMVA ( m_tLocator, m_pMvaStorage ); if ( !*pMva ) return false; *pMvaMax = *pMva + (**pMva) + 1; (*pMva)++; return true; } }; /// filters // attr struct Filter_Values: public IFilter_Attr, IFilter_Values { virtual bool Eval ( const CSphMatch & tMatch ) const { return EvalValues ( tMatch.GetAttr ( m_tLocator ) ); } virtual bool EvalBlock ( const DWORD * pMinDocinfo, const DWORD * pMaxDocinfo ) const { if ( m_tLocator.m_bDynamic ) return true; // ignore computed attributes SphAttr_t uBlockMin = sphGetRowAttr ( DOCINFO2ATTRS ( pMinDocinfo ), m_tLocator ); SphAttr_t uBlockMax = sphGetRowAttr ( DOCINFO2ATTRS ( pMaxDocinfo ), m_tLocator ); return EvalBlockValues ( uBlockMin, uBlockMax ); } }; struct Filter_Range: public IFilter_Attr, IFilter_Range { virtual bool Eval ( const CSphMatch & tMatch ) const { return EvalRange ( tMatch.GetAttr ( m_tLocator ) ); } virtual bool EvalBlock ( const DWORD * pMinDocinfo, const DWORD * pMaxDocinfo ) const { if ( m_tLocator.m_bDynamic ) return true; // ignore computed attributes SphAttr_t uBlockMin = sphGetRowAttr ( DOCINFO2ATTRS ( pMinDocinfo ), m_tLocator ); SphAttr_t uBlockMax = sphGetRowAttr ( DOCINFO2ATTRS ( pMaxDocinfo ), m_tLocator ); return (!( m_uMaxValueuBlockMax )); // not-reject } }; // float struct Filter_FloatRange: public IFilter_Attr { float m_fMinValue; float m_fMaxValue; virtual void SetRangeFloat ( float fMin, float fMax ) { m_fMinValue = fMin; m_fMaxValue = fMax; } virtual bool Eval ( const CSphMatch & tMatch ) const { const float & fValue = tMatch.GetAttrFloat ( m_tLocator ); return fValue>=m_fMinValue && fValue<=m_fMaxValue; } virtual bool EvalBlock ( const DWORD * pMinDocinfo, const DWORD * pMaxDocinfo ) const { if ( m_tLocator.m_bDynamic ) return true; // ignore computed attributes float fBlockMin = sphDW2F ( (DWORD)sphGetRowAttr ( DOCINFO2ATTRS ( pMinDocinfo ), m_tLocator ) ); float fBlockMax = sphDW2F ( (DWORD)sphGetRowAttr ( DOCINFO2ATTRS ( pMaxDocinfo ), m_tLocator ) ); return (!( m_fMaxValuefBlockMax )); // not-reject } }; // id struct Filter_IdValues: public IFilter_Values { virtual bool Eval ( const CSphMatch & tMatch ) const { return EvalValues ( tMatch.m_iDocID ); } bool EvalBlockValues ( SphAttr_t uBlockMin, SphAttr_t uBlockMax ) const { // is any of our values inside the block? 
for ( int i = 0; i < m_iValueCount; i++ ) if ( (SphDocID_t)GetValue(i)>=(SphDocID_t)uBlockMin && (SphDocID_t)GetValue(i)<=(SphDocID_t)uBlockMax ) return true; return false; } virtual bool EvalBlock ( const DWORD * pMinDocinfo, const DWORD * pMaxDocinfo ) const { const SphAttr_t uBlockMin = DOCINFO2ID ( pMinDocinfo ); const SphAttr_t uBlockMax = DOCINFO2ID ( pMaxDocinfo ); return EvalBlockValues ( uBlockMin, uBlockMax ); } Filter_IdValues () { m_bUsesAttrs = false; } }; struct Filter_IdRange: public IFilter_Range { virtual bool Eval ( const CSphMatch & tMatch ) const { const SphDocID_t uID = tMatch.m_iDocID; return uID>=(SphDocID_t)m_uMinValue && uID<=(SphDocID_t)m_uMaxValue; } virtual bool EvalBlock ( const DWORD * pMinDocinfo, const DWORD * pMaxDocinfo ) const { const SphDocID_t uBlockMin = DOCINFO2ID ( pMinDocinfo ); const SphDocID_t uBlockMax = DOCINFO2ID ( pMaxDocinfo ); return (!( (SphDocID_t)m_uMaxValueuBlockMax )); } Filter_IdRange () { m_bUsesAttrs = false; } }; // weight struct Filter_WeightValues: public IFilter_Values { virtual bool Eval ( const CSphMatch & tMatch ) const { return EvalValues ( tMatch.m_iWeight ); } Filter_WeightValues () { m_bUsesAttrs = false; } }; struct Filter_WeightRange: public IFilter_Range { virtual bool IsEarly () { return false; } virtual bool Eval ( const CSphMatch & tMatch ) const { return EvalRange ( tMatch.m_iWeight ); } Filter_WeightRange () { m_bUsesAttrs = false; } }; // MVA template < bool IS_MVA64 > struct Filter_MVAValues: public IFilter_MVA, IFilter_Values { virtual bool Eval ( const CSphMatch & tMatch ) const { const DWORD * pMva, * pMvaMax; if ( !LoadMVA ( tMatch, &pMva, &pMvaMax ) ) return false; return MvaEval ( pMva, pMvaMax ); } bool MvaEval ( const DWORD * pMva, const DWORD * pMvaMax ) const; }; template<> bool Filter_MVAValues::MvaEval ( const DWORD * pMva, const DWORD * pMvaMax ) const { const SphAttr_t * pFilter = m_pValues; const SphAttr_t * pFilterMax = pFilter + m_iValueCount; const DWORD * L = pMva; const DWORD * R = pMvaMax - 1; for ( ; pFilter < pFilterMax; pFilter++ ) { while ( L<=R ) { const DWORD * m = L + (R - L) / 2; if ( *pFilter > *m ) L = m + 1; else if ( *pFilter < *m ) R = m - 1; else return true; } R = pMvaMax - 1; } return false; } template<> bool Filter_MVAValues::MvaEval ( const DWORD * pMva, const DWORD * pMvaMax ) const { const SphAttr_t * pFilter = m_pValues; const SphAttr_t * pFilterMax = pFilter + m_iValueCount; const uint64_t * L = (const uint64_t *)pMva; const uint64_t * R = (const uint64_t *)( pMvaMax - 2 ); for ( ; pFilter < pFilterMax; pFilter++ ) { uint64_t uFilter = *pFilter; while ( L<=R ) { const uint64_t * pVal = L + (R - L) / 2; uint64_t uMva = MVA_UPSIZE ( (const DWORD *)pVal ); if ( uFilter > uMva ) L = pVal + 1; else if ( uFilter < uMva ) R = pVal - 1; else return true; } R = (const uint64_t *)( pMvaMax - 2 ); } return false; } template < bool IS_MVA64 > struct Filter_MVARange: public IFilter_MVA, IFilter_Range { virtual bool Eval ( const CSphMatch & tMatch ) const { const DWORD * pMva, * pMvaMax; if ( !LoadMVA ( tMatch, &pMva, &pMvaMax ) ) return false; return MvaEval ( pMva, pMvaMax ); } bool MvaEval ( const DWORD * pMva, const DWORD * pMvaMax ) const; }; template<> bool Filter_MVARange::MvaEval ( const DWORD * pMva, const DWORD * pMvaMax ) const { const DWORD * L = pMva; const DWORD * R = pMvaMax - 1; while ( L<=R ) { const DWORD * m = L + (R - L) / 2; if ( m_uMinValue > *m ) L = m + 1; else if ( m_uMinValue < *m ) R = m - 1; else return true; } if ( L==pMvaMax ) return false; return 
*L<=m_uMaxValue; } template<> bool Filter_MVARange::MvaEval ( const DWORD * pMva, const DWORD * pMvaMax ) const { const uint64_t * L = (const uint64_t *)pMva; const uint64_t * R = (const uint64_t *)( pMvaMax - 2 ); while ( L<=R ) { const uint64_t * pVal = L + (R - L) / 2; uint64_t uMva = MVA_UPSIZE ( (const DWORD *)pVal ); if ( (uint64_t)m_uMinValue>uMva ) L = pVal + 1; else if ( (uint64_t)m_uMinValue < uMva ) R = pVal - 1; else return true; } if ( L==(const uint64_t *)pMvaMax ) return false; uint64_t uMvaL = MVA_UPSIZE ( (const DWORD *)L ); return uMvaL<=(uint64_t)m_uMaxValue; } // and struct Filter_And: public ISphFilter { CSphVector m_dFilters; ~Filter_And () { ARRAY_FOREACH ( i, m_dFilters ) SafeDelete ( m_dFilters[i] ); } void Add ( ISphFilter * pFilter ) { m_dFilters.Add ( pFilter ); m_bUsesAttrs |= pFilter->UsesAttrs(); } virtual bool Eval ( const CSphMatch & tMatch ) const { ARRAY_FOREACH ( i, m_dFilters ) if ( !m_dFilters[i]->Eval ( tMatch ) ) return false; return true; } virtual bool EvalBlock ( const DWORD * pMinDocinfo, const DWORD * pMaxDocinfo ) const { ARRAY_FOREACH ( i, m_dFilters ) if ( !m_dFilters[i]->EvalBlock ( pMinDocinfo, pMaxDocinfo ) ) return false; return true; } virtual ISphFilter * Join ( ISphFilter * pFilter ) { Add ( pFilter ); return this; } Filter_And () { m_bUsesAttrs = false; } virtual void SetMVAStorage ( const DWORD * pMva ) { ARRAY_FOREACH ( i, m_dFilters ) m_dFilters[i]->SetMVAStorage ( pMva ); } }; // not struct Filter_Not: public ISphFilter { ISphFilter * m_pFilter; explicit Filter_Not ( ISphFilter * pFilter ) : m_pFilter ( pFilter ) { assert ( pFilter ); m_bUsesAttrs = pFilter->UsesAttrs(); } ~Filter_Not () { SafeDelete ( m_pFilter ); } virtual bool Eval ( const CSphMatch & tMatch ) const { return !m_pFilter->Eval ( tMatch ); } virtual bool EvalBlock ( const DWORD *, const DWORD * ) const { // if block passes through the filter we can't just negate the // result since it's imprecise at this point return true; } virtual void SetMVAStorage ( const DWORD * pMva ) { m_pFilter->SetMVAStorage ( pMva ); } }; /// impl ISphFilter * ISphFilter::Join ( ISphFilter * pFilter ) { Filter_And * pAnd = new Filter_And(); pAnd->Add ( this ); pAnd->Add ( pFilter ); return pAnd; } /// helper functions static ISphFilter * CreateSpecialFilter ( const CSphString & sName, ESphFilter eFilterType ) { if ( sName=="@id" ) { switch ( eFilterType ) { case SPH_FILTER_VALUES: return new Filter_IdValues; case SPH_FILTER_RANGE: return new Filter_IdRange; default: assert ( 0 && "invalid filter on @id" ); return NULL; } } else if ( sName=="@weight" ) { switch ( eFilterType ) { case SPH_FILTER_VALUES: return new Filter_WeightValues; case SPH_FILTER_RANGE: return new Filter_WeightRange; default: assert ( 0 && "invalid filter on @weight" ); return NULL; } } return NULL; } static inline ISphFilter * ReportError ( CSphString & sError, const char * sMessage, ESphFilter eFilterType ) { CSphString sFilterName; switch ( eFilterType ) { case SPH_FILTER_VALUES: sFilterName = "intvalues"; break; case SPH_FILTER_RANGE: sFilterName = "intrange"; break; case SPH_FILTER_FLOATRANGE: sFilterName = "floatrange"; break; default: sFilterName.SetSprintf ( "(filter-type-%d)", eFilterType ); break; } sError.SetSprintf ( sMessage, sFilterName.cstr() ); return NULL; } static ISphFilter * CreateFilter ( ESphAttr eAttrType, ESphFilter eFilterType, CSphString & sError ) { // MVA if ( eAttrType==SPH_ATTR_UINT32SET || eAttrType==SPH_ATTR_UINT64SET ) { switch ( eFilterType ) { case SPH_FILTER_VALUES: if ( 
eAttrType==SPH_ATTR_UINT64SET ) return new Filter_MVAValues(); else return new Filter_MVAValues(); case SPH_FILTER_RANGE: if ( eAttrType==SPH_ATTR_UINT64SET ) return new Filter_MVARange(); else return new Filter_MVARange(); default: return ReportError ( sError, "unsupported filter type '%s' on MVA column", eFilterType ); } } // float if ( eAttrType==SPH_ATTR_FLOAT ) { if ( eFilterType==SPH_FILTER_FLOATRANGE ) return new Filter_FloatRange; return ReportError ( sError, "unsupported filter type '%s' on float column", eFilterType ); } // non-float, non-MVA switch ( eFilterType ) { case SPH_FILTER_VALUES: return new Filter_Values; case SPH_FILTER_RANGE: return new Filter_Range; default: return ReportError ( sError, "unsupported filter type '%s' on int column", eFilterType ); } } ISphFilter * sphCreateFilter ( const CSphFilterSettings & tSettings, const CSphSchema & tSchema, const DWORD * pMvaPool, CSphString & sError ) { ISphFilter * pFilter = 0; // try to create filter on special attribute const CSphString & sAttrName = tSettings.m_sAttrName; if ( sAttrName.Begins("@") ) pFilter = CreateSpecialFilter ( sAttrName, tSettings.m_eType ); // fetch column info const CSphColumnInfo * pAttr = NULL; const int iAttr = tSchema.GetAttrIndex ( sAttrName.cstr() ); if ( iAttr<0 ) { if ( !pFilter ) // might be special { sError.SetSprintf ( "no such filter attribute '%s'", sAttrName.cstr() ); return NULL; // no such attribute } } else { assert ( !pFilter ); pAttr = &tSchema.GetAttr(iAttr); pFilter = CreateFilter ( pAttr->m_eAttrType, tSettings.m_eType, sError ); } // fill filter's properties if ( pFilter ) { if ( pAttr ) pFilter->SetLocator ( pAttr->m_tLocator ); pFilter->SetRange ( tSettings.m_uMinValue, tSettings.m_uMaxValue ); pFilter->SetRangeFloat ( tSettings.m_fMinValue, tSettings.m_fMaxValue ); pFilter->SetMVAStorage ( pMvaPool ); if ( tSettings.GetNumValues() > 0 ) { pFilter->SetValues ( tSettings.GetValueArray(), tSettings.GetNumValues() ); #ifndef NDEBUG // check that the values are actually sorted const SphAttr_t * pValues = tSettings.GetValueArray(); int iValues = tSettings.GetNumValues (); for ( int i=1; i=pValues[i-1] ); #endif } if ( tSettings.m_bExclude ) pFilter = new Filter_Not ( pFilter ); } return pFilter; } ISphFilter * sphJoinFilters ( ISphFilter * pA, ISphFilter * pB ) { if ( pA ) return pB ? pA->Join(pB) : pA; return pB; } // // $Id: sphinxfilter.cpp 3087 2012-01-30 23:07:35Z shodan $ // sphinx-2.0.4-release/src/yysphinxquery.c0000644000176700017710000011641011720727645017675 0ustar deogardeogar/* A Bison parser, made by GNU Bison 1.875. */ /* Skeleton parser for Yacc-like parsing with Bison, Copyright (C) 1984, 1989, 1990, 2000, 2001, 2002 Free Software Foundation, Inc. This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */ /* As a special exception, when this file is copied by Bison into a Bison output file, you may use that output file without restriction. 
This special exception was added by the Free Software Foundation in version 1.24 of Bison. */ /* Written by Richard Stallman by simplifying the original so called ``semantic'' parser. */ /* All symbols defined below should begin with yy or YY, to avoid infringing on user name space. This should be done even for local variables, as they might otherwise be expanded by user macros. There are some unavoidable exceptions within include files to define necessary library symbols; they are noted "INFRINGES ON USER NAME SPACE" below. */ /* Identify Bison output. */ #define YYBISON 1 /* Skeleton name. */ #define YYSKELETON_NAME "yacc.c" /* Pure parsers. */ #define YYPURE 1 /* Using locations. */ #define YYLSP_NEEDED 0 /* Tokens. */ #ifndef YYTOKENTYPE # define YYTOKENTYPE /* Put the tokens into the symbol table, so that GDB and other debuggers know about them. */ enum yytokentype { TOK_KEYWORD = 258, TOK_NEAR = 259, TOK_INT = 260, TOK_FIELDLIMIT = 261, TOK_ZONE = 262, TOK_BEFORE = 263, TOK_SENTENCE = 264, TOK_PARAGRAPH = 265 }; #endif #define TOK_KEYWORD 258 #define TOK_NEAR 259 #define TOK_INT 260 #define TOK_FIELDLIMIT 261 #define TOK_ZONE 262 #define TOK_BEFORE 263 #define TOK_SENTENCE 264 #define TOK_PARAGRAPH 265 /* Copy the first part of user declarations. */ #if USE_WINDOWS #pragma warning(push,1) #endif /* Enabling traces. */ #ifndef YYDEBUG # define YYDEBUG 0 #endif /* Enabling verbose error messages. */ #ifdef YYERROR_VERBOSE # undef YYERROR_VERBOSE # define YYERROR_VERBOSE 1 #else # define YYERROR_VERBOSE 1 #endif #if ! defined (YYSTYPE) && ! defined (YYSTYPE_IS_DECLARED) typedef union YYSTYPE { XQNode_t * pNode; // tree node struct { int iValue; int iStrIndex; } tInt; struct // field spec { CSphSmallBitvec dMask; // acceptable fields mask int iMaxPos; // max allowed position within field } tFieldLimit; int iZoneVec; } YYSTYPE; /* Line 186 of yacc.c. */ # define yystype YYSTYPE /* obsolescent; will be withdrawn */ # define YYSTYPE_IS_DECLARED 1 # define YYSTYPE_IS_TRIVIAL 1 #endif /* Copy the second part of user declarations. */ /* Line 214 of yacc.c. */ #if ! defined (yyoverflow) || YYERROR_VERBOSE /* The parser invokes alloca or malloc; define the necessary symbols. */ # if YYSTACK_USE_ALLOCA # define YYSTACK_ALLOC alloca # else # ifndef YYSTACK_USE_ALLOCA # if defined (alloca) || defined (_ALLOCA_H) # define YYSTACK_ALLOC alloca # else # ifdef __GNUC__ # define YYSTACK_ALLOC __builtin_alloca # endif # endif # endif # endif # ifdef YYSTACK_ALLOC /* Pacify GCC's `empty if-body' warning. */ # define YYSTACK_FREE(Ptr) do { /* empty */; } while (0) # else # if defined (__STDC__) || defined (__cplusplus) # include /* INFRINGES ON USER NAME SPACE */ # define YYSIZE_T size_t # endif # define YYSTACK_ALLOC malloc # define YYSTACK_FREE free # endif #endif /* ! defined (yyoverflow) || YYERROR_VERBOSE */ #if (! defined (yyoverflow) \ && (! defined (__cplusplus) \ || (YYSTYPE_IS_TRIVIAL))) /* A type that is properly aligned for any stack member. */ union yyalloc { short yyss; YYSTYPE yyvs; }; /* The size of the maximum gap between one aligned stack and the next. */ # define YYSTACK_GAP_MAXIMUM (sizeof (union yyalloc) - 1) /* The size of an array large to enough to hold all stacks, each with N elements. */ # define YYSTACK_BYTES(N) \ ((N) * (sizeof (short) + sizeof (YYSTYPE)) \ + YYSTACK_GAP_MAXIMUM) /* Copy COUNT objects from FROM to TO. The source and destination do not overlap. 
*/ # ifndef YYCOPY # if 1 < __GNUC__ # define YYCOPY(To, From, Count) \ __builtin_memcpy (To, From, (Count) * sizeof (*(From))) # else # define YYCOPY(To, From, Count) \ do \ { \ register YYSIZE_T yyi; \ for (yyi = 0; yyi < (Count); yyi++) \ (To)[yyi] = (From)[yyi]; \ } \ while (0) # endif # endif /* Relocate STACK from its old location to the new one. The local variables YYSIZE and YYSTACKSIZE give the old and new number of elements in the stack, and YYPTR gives the new location of the stack. Advance YYPTR to a properly aligned location for the next stack. */ # define YYSTACK_RELOCATE(Stack) \ do \ { \ YYSIZE_T yynewbytes; \ YYCOPY (&yyptr->Stack, Stack, yysize); \ Stack = &yyptr->Stack; \ yynewbytes = yystacksize * sizeof (*Stack) + YYSTACK_GAP_MAXIMUM; \ yyptr += yynewbytes / sizeof (*yyptr); \ } \ while (0) #endif #if defined (__STDC__) || defined (__cplusplus) typedef signed char yysigned_char; #else typedef short yysigned_char; #endif /* YYFINAL -- State number of the termination state. */ #define YYFINAL 9 /* YYLAST -- Last index in YYTABLE. */ #define YYLAST 110 /* YYNTOKENS -- Number of terminals. */ #define YYNTOKENS 20 /* YYNNTS -- Number of nonterminals. */ #define YYNNTS 15 /* YYNRULES -- Number of rules. */ #define YYNRULES 45 /* YYNRULES -- Number of states. */ #define YYNSTATES 69 /* YYTRANSLATE(YYLEX) -- Bison symbol number corresponding to YYLEX. */ #define YYUNDEFTOK 2 #define YYMAXUTOK 265 #define YYTRANSLATE(YYX) \ ((unsigned int) (YYX) <= YYMAXUTOK ? yytranslate[YYX] : YYUNDEFTOK) /* YYTRANSLATE[YYLEX] -- Bison symbol number corresponding to YYLEX. */ static const unsigned char yytranslate[] = { 0, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 13, 2, 18, 2, 2, 2, 16, 17, 2, 2, 2, 11, 2, 15, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 19, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 12, 2, 14, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }; #if YYDEBUG /* YYPRHS[YYN] -- Index of the first RHS symbol of rule number YYN in YYRHS. */ static const unsigned char yyprhs[] = { 0, 0, 3, 5, 7, 10, 11, 13, 15, 17, 21, 25, 27, 31, 34, 39, 41, 43, 45, 48, 53, 58, 62, 68, 74, 78, 80, 83, 86, 90, 92, 94, 98, 102, 106, 110, 112, 116, 118, 121, 123, 125, 127, 129, 131, 133 }; /* YYRHS -- A `-1'-separated list of the rules' RHS. */ static const yysigned_char yyrhs[] = { 21, 0, -1, 22, -1, 24, -1, 22, 24, -1, -1, 6, -1, 7, -1, 25, -1, 24, 8, 25, -1, 24, 4, 25, -1, 26, -1, 23, 11, 26, -1, 23, 27, -1, 26, 12, 23, 27, -1, 28, -1, 30, -1, 31, -1, 13, 13, -1, 13, 13, 14, 5, -1, 13, 13, 15, 5, -1, 13, 33, 13, -1, 13, 33, 13, 14, 5, -1, 13, 33, 13, 15, 5, -1, 16, 22, 17, -1, 29, -1, 29, 18, -1, 19, 29, -1, 19, 29, 18, -1, 3, -1, 5, -1, 32, 9, 32, -1, 30, 9, 32, -1, 32, 10, 32, -1, 31, 10, 32, -1, 28, -1, 13, 33, 13, -1, 34, -1, 33, 34, -1, 28, -1, 16, -1, 17, -1, 11, -1, 12, -1, 14, -1, 15, -1 }; /* YYRLINE[YYN] -- source line where rule number YYN was defined. 
*/ static const unsigned char yyrline[] = { 0, 53, 53, 57, 58, 61, 63, 64, 68, 69, 70, 74, 75, 79, 80, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 98, 99, 100, 101, 105, 106, 110, 111, 115, 116, 120, 121, 125, 126, 130, 131, 132, 133, 134, 135, 136 }; #endif #if YYDEBUG || YYERROR_VERBOSE /* YYTNME[SYMBOL-NUM] -- String name of the symbol SYMBOL-NUM. First, the terminals, then, starting at YYNTOKENS, nonterminals. */ static const char *const yytname[] = { "$end", "error", "$undefined", "TOK_KEYWORD", "TOK_NEAR", "TOK_INT", "TOK_FIELDLIMIT", "TOK_ZONE", "TOK_BEFORE", "TOK_SENTENCE", "TOK_PARAGRAPH", "'-'", "'|'", "'\"'", "'~'", "'/'", "'('", "')'", "'$'", "'^'", "$accept", "query", "expr", "tok_limiter", "beforelist", "orlistf", "orlist", "atom", "keyword", "rawkeyword", "sentence", "paragraph", "sp_item", "phrase", "phrasetoken", 0 }; #endif # ifdef YYPRINT /* YYTOKNUM[YYLEX-NUM] -- Internal token number corresponding to token YYLEX-NUM. */ static const unsigned short yytoknum[] = { 0, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 45, 124, 34, 126, 47, 40, 41, 36, 94 }; # endif /* YYR1[YYN] -- Symbol number of symbol that rule YYN derives. */ static const unsigned char yyr1[] = { 0, 20, 21, 22, 22, 23, 23, 23, 24, 24, 24, 25, 25, 26, 26, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 28, 28, 28, 28, 29, 29, 30, 30, 31, 31, 32, 32, 33, 33, 34, 34, 34, 34, 34, 34, 34 }; /* YYR2[YYN] -- Number of symbols composing right hand side of rule YYN. */ static const unsigned char yyr2[] = { 0, 2, 1, 1, 2, 0, 1, 1, 1, 3, 3, 1, 3, 2, 4, 1, 1, 1, 2, 4, 4, 3, 5, 5, 3, 1, 2, 2, 3, 1, 1, 3, 3, 3, 3, 1, 3, 1, 2, 1, 1, 1, 1, 1, 1, 1 }; /* YYDEFACT[STATE-NAME] -- Default rule to reduce with in state STATE-NUM when YYTABLE doesn't specify something else to do. Zero means the default is an error. */ static const unsigned char yydefact[] = { 5, 6, 7, 0, 5, 0, 3, 8, 11, 1, 4, 29, 30, 5, 0, 5, 0, 13, 15, 25, 16, 17, 0, 5, 5, 5, 0, 12, 42, 43, 18, 44, 45, 40, 41, 39, 0, 37, 5, 27, 26, 0, 0, 0, 0, 10, 9, 0, 0, 0, 21, 38, 24, 28, 0, 35, 32, 34, 31, 33, 14, 19, 20, 0, 0, 0, 22, 23, 36 }; /* YYDEFGOTO[NTERM-NUM]. */ static const yysigned_char yydefgoto[] = { -1, 3, 4, 5, 6, 7, 8, 17, 35, 19, 20, 21, 22, 36, 37 }; /* YYPACT[STATE-NUM] -- Index in YYTABLE of the portion describing STATE-NUM. */ #define YYPACT_NINF -35 static const yysigned_char yypact[] = { 20, -35, -35, 9, 40, 83, 21, -35, 22, -35, 21, -35, -35, 20, 0, 20, 1, -35, 35, -11, 19, 71, 65, 20, 20, 20, 87, 22, -35, -35, 62, -35, -35, -35, -35, -35, 38, -35, 16, 12, -35, 5, 5, 5, 5, -35, -35, 87, 43, 58, 95, -35, -35, -35, 68, -35, -35, -35, -35, -35, -35, -35, -35, 73, 84, 53, -35, -35, -35 }; /* YYPGOTO[NTERM-NUM]. */ static const yysigned_char yypgoto[] = { -35, -35, 76, 7, -3, 74, 80, 48, -5, 85, -35, -35, 18, 54, -34 }; /* YYTABLE[YYPACT[STATE-NUM]]. What to do in state STATE-NUM. If positive, shift that token. If negative, reduce the rule which number is the opposite. If zero, do what YYDEFACT says. If YYTABLE_NINF, syntax error. 
*/ #define YYTABLE_NINF -37 static const yysigned_char yytable[] = { 18, 10, 51, 11, 11, 12, 12, 40, 11, 9, 12, 28, 29, 30, 31, 32, 33, 34, 54, 16, 26, 18, 1, 2, 16, 23, 1, 2, 41, 24, 53, 51, 47, 52, 25, 10, 55, 55, 55, 55, -2, 11, 18, 12, -35, -35, 1, 2, 61, 28, 29, 50, 31, 32, 33, 34, 11, 16, 12, 56, 57, 58, 59, 62, 28, 29, 68, 31, 32, 33, 34, 11, 16, 12, 43, 44, 48, 49, 66, 28, 29, 42, 31, 32, 33, 34, 11, 16, 12, 67, 11, 38, 12, 27, 13, 60, 14, 45, 46, 15, 14, 39, 16, 15, -36, -36, 16, 0, 65, 63, 64 }; static const yysigned_char yycheck[] = { 5, 4, 36, 3, 3, 5, 5, 18, 3, 0, 5, 11, 12, 13, 14, 15, 16, 17, 13, 19, 13, 26, 6, 7, 19, 4, 6, 7, 9, 8, 18, 65, 25, 17, 12, 38, 41, 42, 43, 44, 0, 3, 47, 5, 9, 10, 6, 7, 5, 11, 12, 13, 14, 15, 16, 17, 3, 19, 5, 41, 42, 43, 44, 5, 11, 12, 13, 14, 15, 16, 17, 3, 19, 5, 9, 10, 14, 15, 5, 11, 12, 10, 14, 15, 16, 17, 3, 19, 5, 5, 3, 15, 5, 13, 11, 47, 13, 23, 24, 16, 13, 16, 19, 16, 9, 10, 19, -1, 54, 14, 15 }; /* YYSTOS[STATE-NUM] -- The (internal number of the) accessing symbol of state STATE-NUM. */ static const unsigned char yystos[] = { 0, 6, 7, 21, 22, 23, 24, 25, 26, 0, 24, 3, 5, 11, 13, 16, 19, 27, 28, 29, 30, 31, 32, 4, 8, 12, 23, 26, 11, 12, 13, 14, 15, 16, 17, 28, 33, 34, 22, 29, 18, 9, 10, 9, 10, 25, 25, 23, 14, 15, 13, 34, 17, 18, 13, 28, 32, 32, 32, 32, 27, 5, 5, 14, 15, 33, 5, 5, 13 }; #if ! defined (YYSIZE_T) && defined (__SIZE_TYPE__) # define YYSIZE_T __SIZE_TYPE__ #endif #if ! defined (YYSIZE_T) && defined (size_t) # define YYSIZE_T size_t #endif #if ! defined (YYSIZE_T) # if defined (__STDC__) || defined (__cplusplus) # include /* INFRINGES ON USER NAME SPACE */ # define YYSIZE_T size_t # endif #endif #if ! defined (YYSIZE_T) # define YYSIZE_T unsigned int #endif #define yyerrok (yyerrstatus = 0) #define yyclearin (yychar = YYEMPTY) #define YYEMPTY (-2) #define YYEOF 0 #define YYACCEPT goto yyacceptlab #define YYABORT goto yyabortlab #define YYERROR goto yyerrlab1 /* Like YYERROR except do call yyerror. This remains here temporarily to ease the transition to the new meaning of YYERROR, for GCC. Once GCC version 2 has supplanted version 1, this can go. */ #define YYFAIL goto yyerrlab #define YYRECOVERING() (!!yyerrstatus) #define YYBACKUP(Token, Value) \ do \ if (yychar == YYEMPTY && yylen == 1) \ { \ yychar = (Token); \ yylval = (Value); \ yytoken = YYTRANSLATE (yychar); \ YYPOPSTACK; \ goto yybackup; \ } \ else \ { \ yyerror (pParser, "syntax error: cannot back up");\ YYERROR; \ } \ while (0) #define YYTERROR 1 #define YYERRCODE 256 /* YYLLOC_DEFAULT -- Compute the default location (before the actions are run). */ #ifndef YYLLOC_DEFAULT # define YYLLOC_DEFAULT(Current, Rhs, N) \ Current.first_line = Rhs[1].first_line; \ Current.first_column = Rhs[1].first_column; \ Current.last_line = Rhs[N].last_line; \ Current.last_column = Rhs[N].last_column; #endif /* YYLEX -- calling `yylex' with the right arguments. */ #ifdef YYLEX_PARAM # define YYLEX yylex (&yylval, YYLEX_PARAM) #else # define YYLEX yylex (&yylval, pParser) #endif /* Enable debugging if requested. 
*/ #if YYDEBUG # ifndef YYFPRINTF # include /* INFRINGES ON USER NAME SPACE */ # define YYFPRINTF fprintf # endif # define YYDPRINTF(Args) \ do { \ if (yydebug) \ YYFPRINTF Args; \ } while (0) # define YYDSYMPRINT(Args) \ do { \ if (yydebug) \ yysymprint Args; \ } while (0) # define YYDSYMPRINTF(Title, Token, Value, Location) \ do { \ if (yydebug) \ { \ YYFPRINTF (stderr, "%s ", Title); \ yysymprint (stderr, \ Token, Value); \ YYFPRINTF (stderr, "\n"); \ } \ } while (0) /*------------------------------------------------------------------. | yy_stack_print -- Print the state stack from its BOTTOM up to its | | TOP (cinluded). | `------------------------------------------------------------------*/ #if defined (__STDC__) || defined (__cplusplus) static void yy_stack_print (short *bottom, short *top) #else static void yy_stack_print (bottom, top) short *bottom; short *top; #endif { YYFPRINTF (stderr, "Stack now"); for (/* Nothing. */; bottom <= top; ++bottom) YYFPRINTF (stderr, " %d", *bottom); YYFPRINTF (stderr, "\n"); } # define YY_STACK_PRINT(Bottom, Top) \ do { \ if (yydebug) \ yy_stack_print ((Bottom), (Top)); \ } while (0) /*------------------------------------------------. | Report that the YYRULE is going to be reduced. | `------------------------------------------------*/ #if defined (__STDC__) || defined (__cplusplus) static void yy_reduce_print (int yyrule) #else static void yy_reduce_print (yyrule) int yyrule; #endif { int yyi; unsigned int yylineno = yyrline[yyrule]; YYFPRINTF (stderr, "Reducing stack by rule %d (line %u), ", yyrule - 1, yylineno); /* Print the symbols being reduced, and their result. */ for (yyi = yyprhs[yyrule]; 0 <= yyrhs[yyi]; yyi++) YYFPRINTF (stderr, "%s ", yytname [yyrhs[yyi]]); YYFPRINTF (stderr, "-> %s\n", yytname [yyr1[yyrule]]); } # define YY_REDUCE_PRINT(Rule) \ do { \ if (yydebug) \ yy_reduce_print (Rule); \ } while (0) /* Nonzero means print parse trace. It is left uninitialized so that multiple parsers can coexist. */ int yydebug; #else /* !YYDEBUG */ # define YYDPRINTF(Args) # define YYDSYMPRINT(Args) # define YYDSYMPRINTF(Title, Token, Value, Location) # define YY_STACK_PRINT(Bottom, Top) # define YY_REDUCE_PRINT(Rule) #endif /* !YYDEBUG */ /* YYINITDEPTH -- initial size of the parser's stacks. */ #ifndef YYINITDEPTH # define YYINITDEPTH 200 #endif /* YYMAXDEPTH -- maximum size the stacks can grow to (effective only if the built-in stack extension method is used). Do not make this value too large; the results are undefined if SIZE_MAX < YYSTACK_BYTES (YYMAXDEPTH) evaluated with infinite-precision integer arithmetic. */ #if YYMAXDEPTH == 0 # undef YYMAXDEPTH #endif #ifndef YYMAXDEPTH # define YYMAXDEPTH 10000 #endif #if YYERROR_VERBOSE # ifndef yystrlen # if defined (__GLIBC__) && defined (_STRING_H) # define yystrlen strlen # else /* Return the length of YYSTR. */ static YYSIZE_T # if defined (__STDC__) || defined (__cplusplus) yystrlen (const char *yystr) # else yystrlen (yystr) const char *yystr; # endif { register const char *yys = yystr; while (*yys++ != '\0') continue; return yys - yystr - 1; } # endif # endif # ifndef yystpcpy # if defined (__GLIBC__) && defined (_STRING_H) && defined (_GNU_SOURCE) # define yystpcpy stpcpy # else /* Copy YYSRC to YYDEST, returning the address of the terminating '\0' in YYDEST. 
*/ static char * # if defined (__STDC__) || defined (__cplusplus) yystpcpy (char *yydest, const char *yysrc) # else yystpcpy (yydest, yysrc) char *yydest; const char *yysrc; # endif { register char *yyd = yydest; register const char *yys = yysrc; while ((*yyd++ = *yys++) != '\0') continue; return yyd - 1; } # endif # endif #endif /* !YYERROR_VERBOSE */ #if YYDEBUG /*--------------------------------. | Print this symbol on YYOUTPUT. | `--------------------------------*/ #if defined (__STDC__) || defined (__cplusplus) static void yysymprint (FILE *yyoutput, int yytype, YYSTYPE *yyvaluep) #else static void yysymprint (yyoutput, yytype, yyvaluep) FILE *yyoutput; int yytype; YYSTYPE *yyvaluep; #endif { /* Pacify ``unused variable'' warnings. */ (void) yyvaluep; if (yytype < YYNTOKENS) { YYFPRINTF (yyoutput, "token %s (", yytname[yytype]); # ifdef YYPRINT YYPRINT (yyoutput, yytoknum[yytype], *yyvaluep); # endif } else YYFPRINTF (yyoutput, "nterm %s (", yytname[yytype]); switch (yytype) { default: break; } YYFPRINTF (yyoutput, ")"); } #endif /* ! YYDEBUG */ /*-----------------------------------------------. | Release the memory associated to this symbol. | `-----------------------------------------------*/ #if defined (__STDC__) || defined (__cplusplus) static void yydestruct (int yytype, YYSTYPE *yyvaluep) #else static void yydestruct (yytype, yyvaluep) int yytype; YYSTYPE *yyvaluep; #endif { /* Pacify ``unused variable'' warnings. */ (void) yyvaluep; switch (yytype) { default: break; } } /* Prevent warnings from -Wmissing-prototypes. */ #ifdef YYPARSE_PARAM # if defined (__STDC__) || defined (__cplusplus) int yyparse (void *YYPARSE_PARAM); # else int yyparse (); # endif #else /* ! YYPARSE_PARAM */ #if defined (__STDC__) || defined (__cplusplus) int yyparse ( XQParser_t * pParser ); #else int yyparse (); #endif #endif /* ! YYPARSE_PARAM */ /*----------. | yyparse. | `----------*/ #ifdef YYPARSE_PARAM # if defined (__STDC__) || defined (__cplusplus) int yyparse (void *YYPARSE_PARAM) # else int yyparse (YYPARSE_PARAM) void *YYPARSE_PARAM; # endif #else /* ! YYPARSE_PARAM */ #if defined (__STDC__) || defined (__cplusplus) int yyparse ( XQParser_t * pParser ) #else int yyparse (pParser) XQParser_t * pParser ; #endif #endif { /* The lookahead symbol. */ int yychar; /* The semantic value of the lookahead symbol. */ YYSTYPE yylval; /* Number of syntax errors so far. */ int yynerrs; register int yystate; register int yyn; int yyresult; /* Number of tokens to shift before error messages enabled. */ int yyerrstatus; /* Lookahead token as an internal (translated) token number. */ int yytoken = 0; /* Three stacks and their tools: `yyss': related to states, `yyvs': related to semantic values, `yyls': related to locations. Refer to the stacks thru separate pointers, to allow yyoverflow to reallocate them elsewhere. */ /* The state stack. */ short yyssa[YYINITDEPTH]; short *yyss = yyssa; register short *yyssp; /* The semantic value stack. */ YYSTYPE yyvsa[YYINITDEPTH]; YYSTYPE *yyvs = yyvsa; register YYSTYPE *yyvsp; #define YYPOPSTACK (yyvsp--, yyssp--) YYSIZE_T yystacksize = YYINITDEPTH; /* The variables used to return semantic value and location from the action routines. */ YYSTYPE yyval; /* When reducing, the number of symbols on the RHS of the reduced rule. */ int yylen; YYDPRINTF ((stderr, "Starting parse\n")); yystate = 0; yyerrstatus = 0; yynerrs = 0; yychar = YYEMPTY; /* Cause a token to be read. */ /* Initialize stack pointers. 
Waste one element of value and location stack so that they stay on the same level as the state stack. The wasted elements are never initialized. */ yyssp = yyss; yyvsp = yyvs; goto yysetstate; /*------------------------------------------------------------. | yynewstate -- Push a new state, which is found in yystate. | `------------------------------------------------------------*/ yynewstate: /* In all cases, when you get here, the value and location stacks have just been pushed. so pushing a state here evens the stacks. */ yyssp++; yysetstate: *yyssp = yystate; if (yyss + yystacksize - 1 <= yyssp) { /* Get the current used size of the three stacks, in elements. */ YYSIZE_T yysize = yyssp - yyss + 1; #ifdef yyoverflow { /* Give user a chance to reallocate the stack. Use copies of these so that the &'s don't force the real ones into memory. */ YYSTYPE *yyvs1 = yyvs; short *yyss1 = yyss; /* Each stack pointer address is followed by the size of the data in use in that stack, in bytes. This used to be a conditional around just the two extra args, but that might be undefined if yyoverflow is a macro. */ yyoverflow ("parser stack overflow", &yyss1, yysize * sizeof (*yyssp), &yyvs1, yysize * sizeof (*yyvsp), &yystacksize); yyss = yyss1; yyvs = yyvs1; } #else /* no yyoverflow */ # ifndef YYSTACK_RELOCATE goto yyoverflowlab; # else /* Extend the stack our own way. */ if (YYMAXDEPTH <= yystacksize) goto yyoverflowlab; yystacksize *= 2; if (YYMAXDEPTH < yystacksize) yystacksize = YYMAXDEPTH; { short *yyss1 = yyss; union yyalloc *yyptr = (union yyalloc *) YYSTACK_ALLOC (YYSTACK_BYTES (yystacksize)); if (! yyptr) goto yyoverflowlab; YYSTACK_RELOCATE (yyss); YYSTACK_RELOCATE (yyvs); # undef YYSTACK_RELOCATE if (yyss1 != yyssa) YYSTACK_FREE (yyss1); } # endif #endif /* no yyoverflow */ yyssp = yyss + yysize - 1; yyvsp = yyvs + yysize - 1; YYDPRINTF ((stderr, "Stack size increased to %lu\n", (unsigned long int) yystacksize)); if (yyss + yystacksize - 1 <= yyssp) YYABORT; } YYDPRINTF ((stderr, "Entering state %d\n", yystate)); goto yybackup; /*-----------. | yybackup. | `-----------*/ yybackup: /* Do appropriate processing given the current state. */ /* Read a lookahead token if we need one and don't already have one. */ /* yyresume: */ /* First try to decide what to do without reference to lookahead token. */ yyn = yypact[yystate]; if (yyn == YYPACT_NINF) goto yydefault; /* Not known => get a lookahead token if don't already have one. */ /* YYCHAR is either YYEMPTY or YYEOF or a valid lookahead symbol. */ if (yychar == YYEMPTY) { YYDPRINTF ((stderr, "Reading a token: ")); yychar = YYLEX; } if (yychar <= YYEOF) { yychar = yytoken = YYEOF; YYDPRINTF ((stderr, "Now at end of input.\n")); } else { yytoken = YYTRANSLATE (yychar); YYDSYMPRINTF ("Next token is", yytoken, &yylval, &yylloc); } /* If the proper action on seeing token YYTOKEN is to reduce or to detect an error, take that action. */ yyn += yytoken; if (yyn < 0 || YYLAST < yyn || yycheck[yyn] != yytoken) goto yydefault; yyn = yytable[yyn]; if (yyn <= 0) { if (yyn == 0 || yyn == YYTABLE_NINF) goto yyerrlab; yyn = -yyn; goto yyreduce; } if (yyn == YYFINAL) YYACCEPT; /* Shift the lookahead token. */ YYDPRINTF ((stderr, "Shifting token %s, ", yytname[yytoken])); /* Discard the token being shifted unless it is eof. */ if (yychar != YYEOF) yychar = YYEMPTY; *++yyvsp = yylval; /* Count tokens shifted since error; after three, turn off error status. 
*/ if (yyerrstatus) yyerrstatus--; yystate = yyn; goto yynewstate; /*-----------------------------------------------------------. | yydefault -- do the default action for the current state. | `-----------------------------------------------------------*/ yydefault: yyn = yydefact[yystate]; if (yyn == 0) goto yyerrlab; goto yyreduce; /*-----------------------------. | yyreduce -- Do a reduction. | `-----------------------------*/ yyreduce: /* yyn is the number of a rule to reduce with. */ yylen = yyr2[yyn]; /* If YYLEN is nonzero, implement the default value of the action: `$$ = $1'. Otherwise, the following line sets YYVAL to garbage. This behavior is undocumented and Bison users should not rely upon it. Assigning to YYVAL unconditionally makes the parser a bit smaller, and it avoids a GCC warning that YYVAL may be used uninitialized. */ yyval = yyvsp[1-yylen]; YY_REDUCE_PRINT (yyn); switch (yyn) { case 2: { pParser->AddQuery ( yyvsp[0].pNode ); ;} break; case 3: { yyval.pNode = yyvsp[0].pNode; ;} break; case 4: { yyval.pNode = pParser->AddOp ( SPH_QUERY_AND, yyvsp[-1].pNode, yyvsp[0].pNode ); ;} break; case 6: { pParser->SetFieldSpec ( yyvsp[0].tFieldLimit.dMask, yyvsp[0].tFieldLimit.iMaxPos ); ;} break; case 7: { pParser->SetZoneVec ( yyvsp[0].iZoneVec ); ;} break; case 8: { yyval.pNode = yyvsp[0].pNode; ;} break; case 9: { yyval.pNode = pParser->AddOp ( SPH_QUERY_BEFORE, yyvsp[-2].pNode, yyvsp[0].pNode ); ;} break; case 10: { yyval.pNode = pParser->AddOp ( SPH_QUERY_NEAR, yyvsp[-2].pNode, yyvsp[0].pNode, yyvsp[-1].tInt.iValue ); ;} break; case 11: { yyval.pNode = yyvsp[0].pNode; ;} break; case 12: { yyval.pNode = pParser->AddOp ( SPH_QUERY_NOT, yyvsp[0].pNode, NULL ); ;} break; case 13: { yyval.pNode = yyvsp[0].pNode; ;} break; case 14: { yyval.pNode = pParser->AddOp ( SPH_QUERY_OR, yyvsp[-3].pNode, yyvsp[0].pNode ); ;} break; case 15: { yyval.pNode = yyvsp[0].pNode; ;} break; case 16: { yyval.pNode = yyvsp[0].pNode; ;} break; case 17: { yyval.pNode = yyvsp[0].pNode; ;} break; case 18: { yyval.pNode = NULL; ;} break; case 19: { yyval.pNode = NULL; ;} break; case 20: { yyval.pNode = NULL; ;} break; case 21: { yyval.pNode = yyvsp[-1].pNode; if ( yyval.pNode ) { assert ( yyval.pNode->m_dWords.GetLength() ); yyval.pNode->SetOp ( SPH_QUERY_PHRASE); } ;} break; case 22: { yyval.pNode = yyvsp[-3].pNode; if ( yyval.pNode ) { assert ( yyval.pNode->m_dWords.GetLength() ); yyval.pNode->SetOp ( SPH_QUERY_PROXIMITY ); yyval.pNode->m_iOpArg = yyvsp[0].tInt.iValue; } ;} break; case 23: { yyval.pNode = yyvsp[-3].pNode; if ( yyval.pNode ) { assert ( yyval.pNode->m_dWords.GetLength() ); yyval.pNode->SetOp ( SPH_QUERY_QUORUM ); yyval.pNode->m_iOpArg = yyvsp[0].tInt.iValue; } ;} break; case 24: { yyval.pNode = yyvsp[-1].pNode; if ( yyval.pNode ) yyval.pNode->m_dSpec.Hide(); pParser->m_dStateSpec.Reset(); ;} break; case 26: { yyval.pNode = yyvsp[-1].pNode; assert ( yyval.pNode->m_dWords.GetLength()==1 ); yyval.pNode->m_dWords[0].m_bFieldEnd = true; ;} break; case 27: { yyval.pNode = yyvsp[0].pNode; assert ( yyval.pNode->m_dWords.GetLength()==1 ); yyval.pNode->m_dWords[0].m_bFieldStart = true; ;} break; case 28: { yyval.pNode = yyvsp[-1].pNode; assert ( yyval.pNode->m_dWords.GetLength()==1 ); yyval.pNode->m_dWords[0].m_bFieldStart = true; yyval.pNode->m_dWords[0].m_bFieldEnd = true; ;} break; case 29: { yyval.pNode = yyvsp[0].pNode; ;} break; case 30: { yyval.pNode = pParser->AddKeyword ( ( yyvsp[0].tInt.iStrIndex>=0 ) ? 
pParser->m_dIntTokens[yyvsp[0].tInt.iStrIndex].cstr() : NULL ); ;} break; case 31: { yyval.pNode = pParser->AddOp ( SPH_QUERY_SENTENCE, yyvsp[-2].pNode, yyvsp[0].pNode ); ;} break; case 32: { yyval.pNode = pParser->AddOp ( SPH_QUERY_SENTENCE, yyvsp[-2].pNode, yyvsp[0].pNode ); ;} break; case 33: { yyval.pNode = pParser->AddOp ( SPH_QUERY_PARAGRAPH, yyvsp[-2].pNode, yyvsp[0].pNode ); ;} break; case 34: { yyval.pNode = pParser->AddOp ( SPH_QUERY_PARAGRAPH, yyvsp[-2].pNode, yyvsp[0].pNode ); ;} break; case 35: { yyval.pNode = yyvsp[0].pNode; ;} break; case 36: { yyval.pNode = yyvsp[-1].pNode; if ( yyval.pNode ) { assert ( yyval.pNode->m_dWords.GetLength() ); yyval.pNode->SetOp ( SPH_QUERY_PHRASE); } ;} break; case 37: { yyval.pNode = yyvsp[0].pNode; ;} break; case 38: { yyval.pNode = pParser->AddKeyword ( yyvsp[-1].pNode, yyvsp[0].pNode ); ;} break; case 39: { yyval.pNode = yyvsp[0].pNode; ;} break; case 40: { yyval.pNode = NULL; ;} break; case 41: { yyval.pNode = NULL; ;} break; case 42: { yyval.pNode = NULL; ;} break; case 43: { yyval.pNode = NULL; ;} break; case 44: { yyval.pNode = NULL; ;} break; case 45: { yyval.pNode = NULL; ;} break; } /* Line 991 of yacc.c. */ yyvsp -= yylen; yyssp -= yylen; YY_STACK_PRINT (yyss, yyssp); *++yyvsp = yyval; /* Now `shift' the result of the reduction. Determine what state that goes to, based on the state we popped back to and the rule number reduced by. */ yyn = yyr1[yyn]; yystate = yypgoto[yyn - YYNTOKENS] + *yyssp; if (0 <= yystate && yystate <= YYLAST && yycheck[yystate] == *yyssp) yystate = yytable[yystate]; else yystate = yydefgoto[yyn - YYNTOKENS]; goto yynewstate; /*------------------------------------. | yyerrlab -- here on detecting error | `------------------------------------*/ yyerrlab: /* If not already recovering from an error, report this error. */ if (!yyerrstatus) { ++yynerrs; #if YYERROR_VERBOSE yyn = yypact[yystate]; if (YYPACT_NINF < yyn && yyn < YYLAST) { YYSIZE_T yysize = 0; int yytype = YYTRANSLATE (yychar); char *yymsg; int yyx, yycount; yycount = 0; /* Start YYX at -YYN if negative to avoid negative indexes in YYCHECK. */ for (yyx = yyn < 0 ? -yyn : 0; yyx < (int) (sizeof (yytname) / sizeof (char *)); yyx++) if (yycheck[yyx + yyn] == yyx && yyx != YYTERROR) yysize += yystrlen (yytname[yyx]) + 15, yycount++; yysize += yystrlen ("syntax error, unexpected ") + 1; yysize += yystrlen (yytname[yytype]); yymsg = (char *) YYSTACK_ALLOC (yysize); if (yymsg != 0) { char *yyp = yystpcpy (yymsg, "syntax error, unexpected "); yyp = yystpcpy (yyp, yytname[yytype]); if (yycount < 5) { yycount = 0; for (yyx = yyn < 0 ? -yyn : 0; yyx < (int) (sizeof (yytname) / sizeof (char *)); yyx++) if (yycheck[yyx + yyn] == yyx && yyx != YYTERROR) { const char *yyq = ! yycount ? ", expecting " : " or "; yyp = yystpcpy (yyp, yyq); yyp = yystpcpy (yyp, yytname[yyx]); yycount++; } } yyerror (pParser, yymsg); YYSTACK_FREE (yymsg); } else yyerror (pParser, "syntax error; also virtual memory exhausted"); } else #endif /* YYERROR_VERBOSE */ yyerror (pParser, "syntax error"); } if (yyerrstatus == 3) { /* If just tried and failed to reuse lookahead token after an error, discard it. */ /* Return failure if at end of input. */ if (yychar == YYEOF) { /* Pop the error token. */ YYPOPSTACK; /* Pop the rest of the stack. 
*/ while (yyss < yyssp) { YYDSYMPRINTF ("Error: popping", yystos[*yyssp], yyvsp, yylsp); yydestruct (yystos[*yyssp], yyvsp); YYPOPSTACK; } YYABORT; } YYDSYMPRINTF ("Error: discarding", yytoken, &yylval, &yylloc); yydestruct (yytoken, &yylval); yychar = YYEMPTY; } /* Else will try to reuse lookahead token after shifting the error token. */ goto yyerrlab2; /*----------------------------------------------------. | yyerrlab1 -- error raised explicitly by an action. | `----------------------------------------------------*/ //yyerrlab1: /* Suppress GCC warning that yyerrlab1 is unused when no action invokes YYERROR. */ #if defined (__GNUC_MINOR__) && 2093 <= (__GNUC__ * 1000 + __GNUC_MINOR__) // __attribute__ ((__unused__)) #endif goto yyerrlab2; /*---------------------------------------------------------------. | yyerrlab2 -- pop states until the error token can be shifted. | `---------------------------------------------------------------*/ yyerrlab2: yyerrstatus = 3; /* Each real token shifted decrements this. */ for (;;) { yyn = yypact[yystate]; if (yyn != YYPACT_NINF) { yyn += YYTERROR; if (0 <= yyn && yyn <= YYLAST && yycheck[yyn] == YYTERROR) { yyn = yytable[yyn]; if (0 < yyn) break; } } /* Pop the current state because it cannot handle the error token. */ if (yyssp == yyss) YYABORT; YYDSYMPRINTF ("Error: popping", yystos[*yyssp], yyvsp, yylsp); yydestruct (yystos[yystate], yyvsp); yyvsp--; yystate = *--yyssp; YY_STACK_PRINT (yyss, yyssp); } if (yyn == YYFINAL) YYACCEPT; YYDPRINTF ((stderr, "Shifting error token, ")); *++yyvsp = yylval; yystate = yyn; goto yynewstate; /*-------------------------------------. | yyacceptlab -- YYACCEPT comes here. | `-------------------------------------*/ yyacceptlab: yyresult = 0; goto yyreturn; /*-----------------------------------. | yyabortlab -- YYABORT comes here. | `-----------------------------------*/ yyabortlab: yyresult = 1; goto yyreturn; #ifndef yyoverflow /*----------------------------------------------. | yyoverflowlab -- parser overflow comes here. | `----------------------------------------------*/ yyoverflowlab: yyerror (pParser, "parser stack overflow"); yyresult = 2; /* Fall through. */ #endif yyreturn: #ifndef yyoverflow if (yyss != yyssa) YYSTACK_FREE (yyss); #endif return yyresult; } #if USE_WINDOWS #pragma warning(pop) #endif sphinx-2.0.4-release/src/tests.cpp0000644000176700017710000020130011723624274016404 0ustar deogardeogar// // $Id: tests.cpp 3130 2012-03-01 07:43:56Z tomat $ // // // Copyright (c) 2001-2012, Andrew Aksyonoff // Copyright (c) 2008-2012, Sphinx Technologies Inc // All rights reserved // // This program is free software; you can redistribute it and/or modify // it under the terms of the GNU General Public License. 
You should have // received a copy of the GPL license along with this program; if you // did not, you can find it at http://www.gnu.org/ // #include "sphinx.h" #include "sphinxexpr.h" #include "sphinxutils.h" #include "sphinxquery.h" #include "sphinxrt.h" #include "sphinxint.h" #include "sphinxstem.h" #include #define SNOWBALL 0 #define CROSSCHECK 0 #define PORTER1 0 #if SNOWBALL #include "header.h" #include "api.c" #include "utilities.c" #include "stem.c" #endif #if PORTER1 #include "porter1.c" #endif ////////////////////////////////////////////////////////////////////////// const char * g_sTmpfile = "__libsphinxtest.tmp"; ////////////////////////////////////////////////////////////////////////// bool CreateSynonymsFile ( const char * sMagic ) { FILE * fp = fopen ( g_sTmpfile, "w+" ); if ( !fp ) return false; fprintf ( fp, "AT&T => AT&T\n" " AT & T => AT & T \n" "standarten fuehrer => Standartenfuehrer\n" "standarten fuhrer => Standartenfuehrer\n" "OS/2 => OS/2\n" "Ms-Dos => MS-DOS\n" "MS DOS => MS-DOS\n" "feat. => featuring\n" "U.S. => US\n" "U.S.A. => USA\n" "U.S.B. => USB\n" "U.S.D. => USD\n" "U.S.P. => USP\n" "U.S.A.F. => USAF\n" ); if ( sMagic ) fprintf ( fp, "%s => test\n", sMagic ); fclose ( fp ); return true; } const DWORD TOK_EXCEPTIONS = 1; const DWORD TOK_ESCAPED = 2; const DWORD TOK_NO_DASH = 4; ISphTokenizer * CreateTestTokenizer ( bool bUTF8, DWORD uMode ) { CSphString sError; CSphTokenizerSettings tSettings; tSettings.m_iType = bUTF8 ? TOKENIZER_UTF8 : TOKENIZER_SBCS; tSettings.m_iMinWordLen = 2; ISphTokenizer * pTokenizer = ISphTokenizer::Create ( tSettings, sError ); if (!( uMode & TOK_NO_DASH )) { assert ( pTokenizer->SetCaseFolding ( "-, 0..9, A..Z->a..z, _, a..z, U+80..U+FF", sError ) ); pTokenizer->AddSpecials ( "!-" ); } else { assert ( pTokenizer->SetCaseFolding ( "0..9, A..Z->a..z, _, a..z, U+80..U+FF", sError ) ); pTokenizer->AddSpecials ( "!" ); } pTokenizer->EnableQueryParserMode ( true ); if ( uMode & TOK_EXCEPTIONS ) assert ( pTokenizer->LoadSynonyms ( g_sTmpfile, sError ) ); if ( uMode & TOK_ESCAPED ) { ISphTokenizer * pOldTokenizer = pTokenizer; pTokenizer = pTokenizer->Clone ( true ); SafeDelete ( pOldTokenizer ); } return pTokenizer; } void TestTokenizer ( bool bUTF8 ) { const char * sPrefix = bUTF8 ? "testing UTF8 tokenizer" : "testing SBCS tokenizer"; for ( int iRun=1; iRun<=3; iRun++ ) { // simple "one-line" tests const char * sMagic = bUTF8 ? 
"\xD1\x82\xD0\xB5\xD1\x81\xD1\x82\xD1\x82\xD1\x82" // valid UTF-8 : "\xC0\xC1\xF5\xF6"; // valid SBCS but invalid UTF-8 assert ( CreateSynonymsFile ( sMagic ) ); bool bExceptions = ( iRun>=2 ); bool bEscaped = ( iRun==3 ); ISphTokenizer * pTokenizer = CreateTestTokenizer ( bUTF8, bExceptions*TOK_EXCEPTIONS + bEscaped*TOK_ESCAPED ); const char * dTests[] = { "1", "", NULL, // test that empty strings work "1", "this is my rifle", "this", "is", "my", "rifle", NULL, // test that tokenizing works "1", "This is MY rifle", "this", "is", "my", "rifle", NULL, // test that folding works "1", "i-phone", "i-phone", NULL, // test that duals (specials in the middle of the word) work ok "1", "i phone", "phone", NULL, // test that short words are skipped "1", "this is m", "this", "is", NULL, // test that short words at the end are skipped "1", "the -phone", "the", "-", "phone", NULL, // test that specials work "1", "the!phone", "the", "!", "phone", NULL, // test that specials work "1", "i!phone", "!", "phone", NULL, // test that short words preceding specials are skipped "1", "/-hi", "-", "hi", NULL, // test that synonym-dual but folded-special chars work ok "2", "AT&T", "AT&T", NULL, // test that synonyms work "2", "AT & T", "AT & T", NULL, // test that synonyms with spaces work "2", "AT & T", "AT & T", NULL, // test that synonyms with continuous spaces work "2", "-AT&T", "-", "AT&T", NULL, // test that synonyms with specials work "2", "AT&", "at", NULL, // test that synonyms prefixes are not lost on eof "2", "AT&tee.yo", "at", "tee", "yo", NULL, // test that non-synonyms with partially matching prefixes work "2", "standarten fuehrer", "Standartenfuehrer", NULL, "2", "standarten fuhrer", "Standartenfuehrer", NULL, "2", "standarten fuehrerr", "standarten", "fuehrerr", NULL, "2", "standarten fuehrer Stirlitz", "Standartenfuehrer", "stirlitz", NULL, "2", "OS/2 vs OS/360 vs Ms-Dos", "OS/2", "vs", "os", "360", "vs", "MS-DOS", NULL, "2", "AT ", "at", NULL, // test that prefix-whitespace-eof combo does not hang "2", "AT&T&TT", "AT&T", "tt", NULL, "2", "http://OS/2", "http", "OS/2", NULL, "2", "AT*&*T", "at", NULL, "2", "# OS/2's system install", "OS/2", "system", "install", NULL, "2", "IBM-s/OS/2/Merlin", "ibm-s", "OS/2", "merlin", NULL, "2", "U.S.A", "US", NULL, "2", "AT&T!", "AT&T", "!", NULL, // exceptions vs specials "2", "AT&T!!!", "AT&T", "!", "!", "!", NULL, // exceptions vs specials "2", "U.S.A.!", "USA", "!", NULL, // exceptions vs specials "2", "MS DOSS feat.Deskview.MS DOS", "ms", "doss", "featuring", "deskview", "MS-DOS", NULL, "2", sMagic, "test", NULL, "2", "U.S. U.S.A. U.S.A.F.", "US", "USA", "USAF", NULL, "2", "U.S.AB U.S.A. 
U.S.B.U.S.D.U.S.U.S.A.F.", "US", "ab", "USA", "USB", "USD", "US", "USAF", NULL, "3", "phon\\e", "phone", NULL, "3", "\\thephone", "thephone", NULL, "3", "the\\!phone", "the", "phone", NULL, "3", "\\!phone", "phone", NULL, "3", "\\\\phone", "phone", NULL, // the correct behavior if '\' is not in charset "3", "pho\\\\ne", "pho", "ne", NULL, "3", "phon\\\\e", "phon", NULL, "3", "trailing\\", "trailing", NULL, NULL }; for ( int iCur=0; dTests[iCur] && atoi ( dTests[iCur++] )<=iRun; ) { printf ( "%s, run=%d, line=%s\n", sPrefix, iRun, dTests[iCur] ); pTokenizer->SetBuffer ( (BYTE*)dTests[iCur], strlen ( dTests[iCur] ) ); iCur++; for ( BYTE * pToken=pTokenizer->GetToken(); pToken; pToken=pTokenizer->GetToken() ) { assert ( dTests[iCur] && strcmp ( (const char*)pToken, dTests[iCur] )==0 ); iCur++; } assert ( dTests[iCur]==NULL ); iCur++; } // test misc SBCS-only and UTF8-only one-liners const char * dTests2[] = { "0", "\x80\x81\x82", "\x80\x81\x82", NULL, "1", "\xC2\x80\xC2\x81\xC2\x82", "\xC2\x80\xC2\x81\xC2\x82", NULL, NULL }; for ( int iCur=0; dTests2[iCur] && atoi ( dTests2[iCur++] )==int(bUTF8); ) { printf ( "%s, run=%d, line=%s\n", sPrefix, iRun, dTests2[iCur] ); pTokenizer->SetBuffer ( (BYTE*)dTests2[iCur], strlen ( dTests2[iCur] ) ); iCur++; for ( BYTE * pToken=pTokenizer->GetToken(); pToken; pToken=pTokenizer->GetToken() ) { assert ( dTests2[iCur] && strcmp ( (const char*)pToken, dTests2[iCur] )==0 ); iCur++; } assert ( dTests2[iCur]==NULL ); iCur++; } // test that decoder does not go over the buffer boundary on errors in UTF-8 if ( bUTF8 ) { printf ( "%s for proper UTF-8 error handling\n", sPrefix ); const char * sLine3 = "hi\xd0\xffh"; pTokenizer->SetBuffer ( (BYTE*)sLine3, 4 ); assert ( !strcmp ( (char*)pTokenizer->GetToken(), "hi" ) ); } // test uberlong tokens printf ( "%s for uberlong token handling\n", sPrefix ); const int UBERLONG = 4096; char * sLine4 = new char [ UBERLONG+1 ]; memset ( sLine4, 'a', UBERLONG ); sLine4[UBERLONG] = '\0'; char sTok4[SPH_MAX_WORD_LEN+1]; memset ( sTok4, 'a', SPH_MAX_WORD_LEN ); sTok4[SPH_MAX_WORD_LEN] = '\0'; pTokenizer->SetBuffer ( (BYTE*)sLine4, strlen(sLine4) ); assert ( !strcmp ( (char*)pTokenizer->GetToken(), sTok4 ) ); assert ( pTokenizer->GetToken()==NULL ); // test short word callbacks printf ( "%s for short token handling\n", sPrefix ); ISphTokenizer * pShortTokenizer = pTokenizer->Clone ( bEscaped ); CSphRemapRange tStar ( '*', '*', '*' ); pShortTokenizer->AddCaseFolding ( tStar ); CSphTokenizerSettings tSettings = pShortTokenizer->GetSettings(); tSettings.m_iMinWordLen = 5; pShortTokenizer->Setup ( tSettings ); pShortTokenizer->EnableQueryParserMode ( true ); const char * dTestsShort[] = { "ab*", "ab*", NULL, "*ab", "*ab", NULL, "abcdef", "abcdef", NULL, "ab *ab* abc", "*ab*", NULL, NULL }; for ( int iCur=0; dTestsShort[iCur]; ) { pShortTokenizer->SetBuffer ( (BYTE*)(dTestsShort [iCur]), strlen ( (const char*)dTestsShort [iCur] ) ); iCur++; for ( BYTE * pToken=pShortTokenizer->GetToken(); pToken; pToken=pShortTokenizer->GetToken() ) { assert ( dTestsShort[iCur] && strcmp ( (const char*)pToken, dTestsShort[iCur] )==0 ); iCur++; } assert ( !dTestsShort [iCur] ); iCur++; } SafeDelete ( pShortTokenizer ); // test uberlong synonym-only tokens if ( iRun==2 ) { printf ( "%s for uberlong synonym-only char token handling\n", sPrefix ); memset ( sLine4, '/', UBERLONG ); sLine4[UBERLONG] = '\0'; pTokenizer->SetBuffer ( (BYTE*)sLine4, strlen(sLine4) ); assert ( pTokenizer->GetToken()==NULL ); printf ( "%s for uberlong synonym token handling\n", 
sPrefix ); for ( int i=0; iSetBuffer ( (BYTE*)sLine4, strlen(sLine4) ); for ( int i=0; iGetToken(), "aa" ) ); assert ( pTokenizer->GetToken()==NULL ); } SafeDeleteArray ( sLine4 ); // test boundaries printf ( "%s for boundaries handling, run=%d\n", sPrefix, iRun ); CSphString sError; assert ( pTokenizer->SetBoundary ( "?", sError ) ); char sLine5[] = "hello world? testing boundaries?"; pTokenizer->SetBuffer ( (BYTE*)sLine5, strlen(sLine5) ); assert ( !strcmp ( (const char*)pTokenizer->GetToken(), "hello" ) ); assert ( !pTokenizer->GetBoundary() ); assert ( !strcmp ( (const char*)pTokenizer->GetToken(), "world" ) ); assert ( !pTokenizer->GetBoundary() ); assert ( !strcmp ( (const char*)pTokenizer->GetToken(), "testing" ) ); assert ( pTokenizer->GetBoundary() ); assert ( !strcmp ( (const char*)pTokenizer->GetToken(), "boundaries" ) ); assert ( !pTokenizer->GetBoundary() ); // test specials vs token start/end ptrs printf ( "%s vs specials vs token start/end ptrs\n", sPrefix ); char sLine6[] = "abc!def"; pTokenizer->SetBuffer ( (BYTE*)sLine6, strlen(sLine6) ); assert ( !strcmp ( (const char*)pTokenizer->GetToken(), "abc" ) ); assert ( *pTokenizer->GetTokenStart()=='a' ); assert ( *pTokenizer->GetTokenEnd()=='!' ); assert ( !strcmp ( (const char*)pTokenizer->GetToken(), "!" ) ); assert ( *pTokenizer->GetTokenStart()=='!' ); assert ( *pTokenizer->GetTokenEnd()=='d' ); assert ( !strcmp ( (const char*)pTokenizer->GetToken(), "def" ) ); assert ( *pTokenizer->GetTokenStart()=='d' ); assert ( *pTokenizer->GetTokenEnd()=='\0' ); // done SafeDelete ( pTokenizer ); } // test blended printf ( "%s vs escaping vs blend_chars edge cases\n", sPrefix ); CSphString sError; ISphTokenizer * pTokenizer = CreateTestTokenizer ( bUTF8, TOK_ESCAPED ); pTokenizer->AddSpecials ( "()!-\"" ); assert ( pTokenizer->SetBlendChars ( ".", sError ) ); char sTest1[] = "(texas.\\\")"; pTokenizer->SetBuffer ( (BYTE*)sTest1, strlen(sTest1) ); assert ( !strcmp ( (const char*)pTokenizer->GetToken(), "(" ) ); assert ( !strcmp ( (const char*)pTokenizer->GetToken(), "texas." 
) ); assert ( pTokenizer->TokenIsBlended() ); pTokenizer->SkipBlended (); assert ( !strcmp ( (const char*)pTokenizer->GetToken(), ")" ) ); assert ( pTokenizer->GetToken()==NULL ); char sTest2[] = "\"series 2003\\-\\\"\""; printf ( "test %s\n", sTest2 ); pTokenizer->SetBuffer ( (BYTE*)sTest2, strlen(sTest2) ); assert ( !strcmp ( (const char*)pTokenizer->GetToken(), "\"" ) ); assert ( !strcmp ( (const char*)pTokenizer->GetToken(), "series" ) ); assert ( !strcmp ( (const char*)pTokenizer->GetToken(), "2003-" ) ); assert ( !strcmp ( (const char*)pTokenizer->GetToken(), "\"" ) ); assert ( pTokenizer->GetToken()==NULL ); char sTest3[] = "aa lock.up bb"; printf ( "test %s\n", sTest3 ); pTokenizer->SetBuffer ( (BYTE*)sTest3, strlen(sTest3) ); assert ( !strcmp ( (const char*)pTokenizer->GetToken(), "aa" ) ); assert ( !pTokenizer->TokenIsBlended() ); assert ( !pTokenizer->TokenIsBlendedPart() ); assert ( !strcmp ( (const char*)pTokenizer->GetToken(), "lock.up" ) ); assert ( pTokenizer->TokenIsBlended() ); assert ( !strcmp ( (const char*)pTokenizer->GetToken(), "lock" ) ); assert ( !pTokenizer->TokenIsBlended() ); assert ( pTokenizer->TokenIsBlendedPart() ); assert ( !strcmp ( (const char*)pTokenizer->GetToken(), "up" ) ); assert ( !pTokenizer->TokenIsBlended() ); assert ( pTokenizer->TokenIsBlendedPart() ); assert ( !strcmp ( (const char*)pTokenizer->GetToken(), "bb" ) ); assert ( !pTokenizer->TokenIsBlended() ); assert ( !pTokenizer->TokenIsBlendedPart() ); // blended/special vs query mode vs modifier.. hell, this is complicated CSphRemapRange tModifier ( '=', '=', '=' ); SafeDelete ( pTokenizer ); pTokenizer = CreateTestTokenizer ( bUTF8, TOK_NO_DASH ); assert ( pTokenizer->SetBlendChars ( "., -", sError ) ); pTokenizer->AddSpecials ( "-" ); pTokenizer->AddCaseFolding ( tModifier ); pTokenizer->EnableQueryParserMode ( true ); assert ( pTokenizer->SetBlendMode ( "trim_none, skip_pure", sError ) ); char sTest4[] = "hello =- =world"; printf ( "test %s\n", sTest4 ); pTokenizer->SetBuffer ( (BYTE*)sTest4, strlen(sTest4) ); assert ( !strcmp ( (const char*)pTokenizer->GetToken(), "hello" ) ); assert ( !strcmp ( (const char*)pTokenizer->GetToken(), "=world" ) ); SafeDelete ( pTokenizer ); printf ( "test utf8 len 1\n" ); assert ( sphUTF8Len ( "ab\0cd", 256 )==2 ); printf ( "test utf8 len 2\n" ); assert ( sphUTF8Len ( "", 256 )==0 && sphUTF8Len ( NULL, 256 )==0 ); } void BenchTokenizer ( bool bUTF8 ) { printf ( "benchmarking %s tokenizer\n", bUTF8 ? "UTF8" : "SBCS" ); if ( !CreateSynonymsFile ( NULL ) ) { printf ( "benchmark failed: error writing temp synonyms file\n" ); return; } const char * sTestfile = "./configure"; for ( int iRun=1; iRun<=2; iRun++ ) { FILE * fp = fopen ( sTestfile, "rb" ); if ( !fp ) { printf ( "benchmark failed: error opening %s\n", sTestfile ); return; } const int MAX_DATA = 10485760; char * sData = new char [ MAX_DATA ]; int iData = fread ( sData, 1, MAX_DATA, fp ); fclose ( fp ); if ( iData<=0 ) { printf ( "benchmark failed: error reading %s\n", sTestfile ); SafeDeleteArray ( sData ); return; } CSphString sError; ISphTokenizer * pTokenizer = bUTF8 ? 
sphCreateUTF8Tokenizer () : sphCreateSBCSTokenizer (); pTokenizer->SetCaseFolding ( "-, 0..9, A..Z->a..z, _, a..z", sError ); if ( iRun==2 ) pTokenizer->LoadSynonyms ( g_sTmpfile, sError ); pTokenizer->AddSpecials ( "!-" ); const int iPasses = 10; int iTokens = 0; int64_t tmTime = -sphMicroTimer(); for ( int iPass=0; iPassSetBuffer ( (BYTE*)sData, iData ); while ( pTokenizer->GetToken() ) iTokens++; } tmTime += sphMicroTimer(); iTokens /= iPasses; tmTime /= iPasses; printf ( "run %d: %d bytes, %d tokens, %d.%03d ms, %.3f MB/sec\n", iRun, iData, iTokens, (int)(tmTime/1000), (int)(tmTime%1000), float(iData)/tmTime ); SafeDeleteArray ( sData ); } } ////////////////////////////////////////////////////////////////////////// void TestStripper () { const char * sTests[][4] = { // source-data, index-attrs, remove-elements, expected-results { "trivial test", "", "", " trivial test " }, { "lets \"niceindex attrs", "img=alt", "", " lets nice picture index attrs " }, { " lets alsoremove scripts", "", "script, style", " lets also remove scripts" }, { "testing inline elements", "", "", "testing inline elements" }, { "testing non

inlineelements", "", "", "testing non inline elements" }, { "testing entities&stuff", "", "", "testing entities&stuff" }, { "testing АБВ utf encoding", "", "", "testing \xD0\x90\xD0\x91\xD0\x92 utf encoding" }, { "testing <1 <\" <\x80 <\xe0 <\xff ents", "", "", "testing comments" }, { "< > ϑ &somethingverylong; &the", "", "", "< > \xCF\x91 &somethingverylong; &the" }, { "testing \"niceinline tags vs attr indexing", "img=alt,rel", "", "testing nice picture anotherattr inline tags vs attr indexing" }, { "this away", "", "", "this away" }, { "content1", "a=title", "", "content1" }, { "content2", "a=title", "", "my test title content2" }, { "testing \"niceinline tags vs attr indexing", "img=alt,rel", "", "testing nice picture anotherattr inline tags vs attr indexing" }, { "test", "", "", " test " }, { "cantest", "", "", " test " }, { "
ohai
", "", "", " ohai " }, { "ohai23", "", "", "ohai 3" }, { "ohai
4", "", "", "ohai 4" }, { "ohai
5", "", "", "ohai 5" }, { "ohai
6
some more content", "", "", "ohai 6 some more content" }, { "ohaib\">7", "", "", "ohai 7" }, { "ohai
b\">8", "", "", "ohai 8" }, { "ohai
b\">9", "", "", "ohai 9" }, { "ohai
b\">10", "", "", "ohai 10" }, { "ohai
611
gimme more", "", "", "ohai 11 gimme more" }, { "

Commission File Number: 333-155507", "", "", " Commission File Number: 333-155507" }, { "SGX", "", "", " SGX" }, { "tango & cash", "", "", "tango & cash" }, { "ahoy\"mate", "font=zzz", "", "ahoy\"mate" }, { "ahoy2", "font=zzz", "", "ahoy2" }, { "ahoy3there", "font=zzz", "", "ahoy3there" }, { "ahoyb\">4", "font=zzz", "", "ahoy4" }, { "ahoyb\">5", "font=zzz", "", "ahoy5" }, { "ahoy6seveneight", "font=zzz", "", "ahoyseveneight" } }; int nTests = (int)(sizeof(sTests)/sizeof(sTests[0])); for ( int iTest=0; iTest=2,3,4)", 3.0f }, { "pow(7,5)", 16807.f }, { "sqrt(3)", 1.7320508f }, { "log2((2+2)*(2+2))", 4.0f }, { "min(3,15)", 3.0f }, { "max(3,15)", 15.0f }, { "if(3<15,bbb,ccc)", 2.0f }, { "@id+@weight", 579.0f }, { "abs(-3-ccc)", 6.0f }, { "(aaa+bbb)*(ccc-aaa)", 6.0f }, { "(((aaa)))", 1.0f }, { "aaa-bbb*ccc", -5.0f }, { " aaa -\tbbb *\t\t\tccc ", -5.0f }, { "bbb+123*aaa", 125.0f }, { "2.000*2e+1+2", 42.0f }, { "3<5", 1.0f }, { "1 + 2*3 > 4*4", 0.0f }, { "aaa/-bbb", -0.5f, }, { "-10*-10", 100.0f }, { "aaa+-bbb*-5", 11.0f }, { "-aaa>-bbb", 1.0f }, { "1-aaa+2-3+4", 3.0f }, { "bbb/1*2/6*3", 2.0f }, { "(aaa+bbb)/sqrt(3)/sqrt(3)", 1.0f }, { "aaa-bbb-2", -3.0f }, { "ccc/2*4/bbb", 3.0f }, { "(2+(aaa*bbb))+3", 7.0f } }; const int nTests = sizeof(dTests)/sizeof(dTests[0]); for ( int iTest=0; iTest pExpr ( sphExprParse ( dTests[iTest].m_sExpr, tSchema, NULL, NULL, sError ) ); if ( !pExpr.Ptr() ) { printf ( "FAILED; %s\n", sError.cstr() ); assert ( 0 ); } float fValue = pExpr->Eval(tMatch); if ( fabs ( fValue - dTests[iTest].m_fValue )>=0.0001f ) { printf ( "FAILED; expected %.3f, got %.3f\n", dTests[iTest].m_fValue, fValue ); assert ( 0 ); } printf ( "ok\n" ); } SafeDeleteArray ( pRow ); } #if USE_WINDOWS #define NOINLINE __declspec(noinline) #else #define NOINLINE #endif #define AAA float(tMatch.m_pStatic[0]) #define BBB float(tMatch.m_pStatic[1]) #define CCC float(tMatch.m_pStatic[2]) NOINLINE float ExprNative1 ( const CSphMatch & tMatch ) { return AAA+BBB*CCC-1.0f;} NOINLINE float ExprNative2 ( const CSphMatch & tMatch ) { return AAA+BBB*CCC*2.0f-3.0f/4.0f*5.0f/6.0f*BBB; } NOINLINE float ExprNative3 ( const CSphMatch & ) { return (float)sqrt ( 2.0f ); } void BenchExpr () { printf ( "benchmarking expressions\n" ); CSphColumnInfo tCol; tCol.m_eAttrType = SPH_ATTR_INTEGER; CSphSchema tSchema; tCol.m_sName = "aaa"; tSchema.AddAttr ( tCol, false ); tCol.m_sName = "bbb"; tSchema.AddAttr ( tCol, false ); tCol.m_sName = "ccc"; tSchema.AddAttr ( tCol, false ); CSphRowitem * pRow = new CSphRowitem [ tSchema.GetRowSize() ]; for ( int i=0; i pExpr ( sphExprParse ( dBench[iRun].m_sExpr, tSchema, &uType, NULL, sError ) ); if ( !pExpr.Ptr() ) { printf ( "FAILED; %s\n", sError.cstr() ); return; } const int NRUNS = 1000000; volatile float fValue = 0.0f; int64_t tmTime = sphMicroTimer(); for ( int i=0; iEval(tMatch); tmTime = sphMicroTimer() - tmTime; int64_t tmTimeInt = sphMicroTimer(); if ( uType==SPH_ATTR_INTEGER ) { int uValue = 0; for ( int i=0; iIntEval(tMatch); } tmTimeInt = sphMicroTimer() - tmTimeInt; int64_t tmTimeNative = sphMicroTimer(); for ( int i=0; im_dWords.GetLength() ) { // say just words to me const CSphVector & dWords = pNode->m_dWords; ARRAY_FOREACH ( i, dWords ) sRes.SetSprintf ( "%s %s", sRes.cstr(), dWords[i].m_sWord.cstr() ); sRes.Chop (); switch ( pNode->GetOp() ) { case SPH_QUERY_AND: break; case SPH_QUERY_PHRASE: sRes.SetSprintf ( "\"%s\"", sRes.cstr() ); break; case SPH_QUERY_PROXIMITY: sRes.SetSprintf ( "\"%s\"~%d", sRes.cstr(), pNode->m_iOpArg ); break; case SPH_QUERY_QUORUM: 
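// Note: ReconstructNode() renders operators back in extended query syntax;
// the proximity case above comes out as "w1 w2"~N, and the quorum case below
// comes out as "w1 w2 w3"/N, i.e. a document must match at least N of the
// quoted keywords.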
sRes.SetSprintf ( "\"%s\"/%d", sRes.cstr(), pNode->m_iOpArg ); break; case SPH_QUERY_NEAR: sRes.SetSprintf ( "\"%s\"NEAR/%d", sRes.cstr(), pNode->m_iOpArg ); break; default: assert ( 0 && "unexpected op in ReconstructNode()" ); break; } if ( !pNode->m_dSpec.m_dFieldMask.TestAll(true) ) { CSphString sFields ( "" ); for ( int i=0; im_dSpec.m_dFieldMask.Test(i) ) sFields.SetSprintf ( "%s,%s", sFields.cstr(), tSchema.m_dFields[i].m_sName.cstr() ); sRes.SetSprintf ( "( @%s: %s )", sFields.cstr()+1, sRes.cstr() ); } else { if ( pNode->GetOp()==SPH_QUERY_AND && dWords.GetLength()>1 ) sRes.SetSprintf ( "( %s )", sRes.cstr() ); // wrap bag of words } } else { ARRAY_FOREACH ( i, pNode->m_dChildren ) { if ( !i ) sRes = ReconstructNode ( pNode->m_dChildren[i], tSchema ); else { const char * sOp = "(unknown-op)"; switch ( pNode->GetOp() ) { case SPH_QUERY_AND: sOp = "AND"; break; case SPH_QUERY_OR: sOp = "OR"; break; case SPH_QUERY_NOT: sOp = "NOT"; break; case SPH_QUERY_ANDNOT: sOp = "AND NOT"; break; case SPH_QUERY_BEFORE: sOp = "BEFORE"; break; case SPH_QUERY_NEAR: sOp = "NEAR"; break; default: assert ( 0 && "unexpected op in ReconstructNode()" ); break; } sRes.SetSprintf ( "%s %s %s", sRes.cstr(), sOp, ReconstructNode ( pNode->m_dChildren[i], tSchema ).cstr() ); } } if ( pNode->m_dChildren.GetLength()>1 ) sRes.SetSprintf ( "( %s )", sRes.cstr() ); } return sRes; } void TestQueryParser () { CSphString sTmp; CSphSchema tSchema; CSphColumnInfo tCol; tCol.m_sName = "title"; tSchema.m_dFields.Add ( tCol ); tCol.m_sName = "body"; tSchema.m_dFields.Add ( tCol ); CSphDictSettings tDictSettings; CSphScopedPtr pTokenizer ( sphCreateSBCSTokenizer () ); CSphScopedPtr pDict ( sphCreateDictionaryCRC ( tDictSettings, pTokenizer.Ptr(), sTmp, "query" ) ); assert ( pTokenizer.Ptr() ); assert ( pDict.Ptr() ); CSphTokenizerSettings tTokenizerSetup; tTokenizerSetup.m_iMinWordLen = 2; tTokenizerSetup.m_sSynonymsFile = g_sTmpfile; pTokenizer->Setup ( tTokenizerSetup ); CSphString sError; assert ( CreateSynonymsFile ( NULL ) ); assert ( pTokenizer->LoadSynonyms ( g_sTmpfile, sError ) ); struct QueryTest_t { const char * m_sQuery; const char * m_sReconst; }; const QueryTest_t dTest[] = { { "aaa bbb ccc", "( aaa AND bbb AND ccc )" }, { "aaa|bbb ccc", "( ( aaa OR bbb ) AND ccc )" }, { "aaa bbb|ccc", "( aaa AND ( bbb OR ccc ) )" }, { "aaa (bbb ccc)|ddd", "( aaa AND ( ( bbb AND ccc ) OR ddd ) )" }, { "aaa bbb|(ccc ddd)", "( aaa AND ( bbb OR ( ccc AND ddd ) ) )" }, { "aaa bbb|(ccc ddd)|eee|(fff)", "( aaa AND ( bbb OR ( ccc AND ddd ) OR eee OR fff ) )" }, { "aaa bbb|(ccc ddd) eee|(fff)", "( aaa AND ( bbb OR ( ccc AND ddd ) ) AND ( eee OR fff ) )" }, { "aaa (ccc ddd)|bbb|eee|(fff)", "( aaa AND ( ( ccc AND ddd ) OR bbb OR eee OR fff ) )" }, { "aaa (ccc ddd)|bbb eee|(fff)", "( aaa AND ( ( ccc AND ddd ) OR bbb ) AND ( eee OR fff ) )" }, { "aaa \"bbb ccc\"~5|ddd", "( aaa AND ( \"bbb ccc\"~5 OR ddd ) )" }, { "aaa bbb|\"ccc ddd\"~5", "( aaa AND ( bbb OR \"ccc ddd\"~5 ) )" }, { "aaa ( ( \"bbb ccc\"~3|ddd ) eee | ( fff -ggg ) )", "( aaa AND ( ( \"bbb ccc\"~3 OR ddd ) AND ( eee OR ( fff AND NOT ggg ) ) ) )" }, { "@title aaa @body ccc|(@title ddd eee)|fff ggg", "( ( @title: aaa ) AND ( ( @body: ccc ) OR ( ( @title: ddd ) AND ( @title: eee ) ) OR ( @body: fff ) ) AND ( @body: ggg ) )" }, { "@title hello world | @body sample program", "( ( @title: hello ) AND ( ( @title: world ) OR ( @body: sample ) ) AND ( @body: program ) )" }, { "@title one two three four", "( ( @title: one ) AND ( @title: two ) AND ( @title: three ) AND ( @title: four ) 
)" }, { "@title one (@body two three) four", "( ( @title: one ) AND ( ( @body: two ) AND ( @body: three ) ) AND ( @title: four ) )" }, { "windows 7 2000", "( windows AND 2000 )" }, { "aaa a|bbb", "( aaa AND bbb )" }, { "aaa bbb|x y z|ccc", "( aaa AND bbb AND ccc )" }, { "a", "" }, { "hello -world", "( hello AND NOT world )" }, { "-hello world", "( world AND NOT hello )" }, { "\"phrase (query)/3 ~on steroids\"", "\"phrase query on steroids\"" }, { "hello a world", "( hello AND world )" }, { "-one", "" }, { "-one -two", "" }, { "\"\"", "" }, { "\"()\"", "" }, { "\"]\"", "" }, { "@title hello @body -world", "( ( @title: hello ) AND NOT ( @body: world ) )" }, { "Ms-Dos", "MS-DOS" } }; int nTests = sizeof(dTest)/sizeof(dTest[0]); for ( int i=0; i=0; i++ ) pRow += sphPackStrlen ( pRow, dValues[i] ); const BYTE * pUnp = dBuffer; for ( int i=0; dValues[i]>=0; i++ ) { int iUnp = sphUnpackStr ( pUnp, &pUnp ); assert ( iUnp==dValues[i] ); } printf ( "ok\n" ); } #endif ////////////////////////////////////////////////////////////////////////// void BenchLocators () { const int MAX_ITEMS = 10; const int NUM_MATCHES = 1000; const int NUM_RUNS = 100000; CSphRowitem dStatic[MAX_ITEMS]; CSphRowitem dDynamic[MAX_ITEMS]; CSphAttrLocator tLoc[NUM_MATCHES]; CSphMatch tMatch[NUM_MATCHES]; for ( int i=0; i(1+i) ) ); } for ( int i=0; iLock(); for ( int i=0; i<100; i++ ) g_iMutexBench++; g_iMutexBench -= 99; pMutex->Unlock(); } void BenchThreads () { printf ( "benchmarking threads\n" ); const int BATCHES = 100; const int BATCH_THREADS = 100; const int TOTAL_THREADS = BATCHES*BATCH_THREADS; SphThread_t * pThd = new SphThread_t [ BATCH_THREADS ]; CSphMutex tMutex; if ( !tMutex.Init() ) sphDie ( "failed to init mutex" ); for ( int iRun=1; iRun<=2; iRun++ ) { int64_t tmThd = sphMicroTimer(); for ( int iBatch=0; iBatch>3 ) & 1; } typedef void (*SortDataGen_fn)( DWORD *, int ); struct SortDataGenDesc_t { SortDataGen_fn m_fnGen; const char * m_sName; }; SortDataGenDesc_t g_dSortDataGens[] = { { SortDataRepeat1245, "repeat1245" }, { SortDataEnd0, "end0" }, { SortDataIdentical, "identical" }, { SortDataMed3Killer, "med3killer" }, { SortDataMidKiller, "midkiller" }, { SortDataRandDupes, "randdupes" }, { SortDataRandUniq, "randuniq" }, { SortDataRandSteps, "randsteps" }, { SortDataRevEnds, "revends" }, { SortDataRevPartial, "revpartial" }, { SortDataRevSaw, "revsaw" }, { SortDataReverse, "reverse" }, { SortDataStart1000, "start1000" }, { SortDataSeqPartial, "seqpartial" }, { SortDataSeqSaw, "seqsaw" }, { SortDataSeq, "sequential" }, { SortDataAscDesc, "ascdesc" }, { SortDataDescAsc, "descasc" }, { SortDataRand01, "rand01" }, }; struct SortPayload_t { DWORD m_uKey; DWORD m_uPayload[3]; bool operator < ( const SortPayload_t & rhs ) const { return m_uKey < rhs.m_uKey; } }; inline bool operator < ( const CSphWordHit & a, const CSphWordHit & b ) { return ( a.m_iWordID int64_t BenchSort ( T * pData, int iCount, bool bCheck ) { int64_t tmSort = sphMicroTimer(); sphSort ( pData, iCount ); tmSort = sphMicroTimer() - tmSort; if ( bCheck ) { for ( int i=0; i 1 ? 
sphCRC32 ( ( ( const BYTE * ) ( pData + 1 ) ), ( m_iStride - 1 ) * 4 ) : ( *pData ); } }; #ifndef NDEBUG static bool IsSorted ( DWORD * pData, int iCount, const TestAccCmp_fn & fn ) { if ( iCount<1 ) return true; const DWORD * pPrev = pData; if ( !fn.IsKeyDataSynced ( pPrev ) ) return false; if ( iCount<2 ) return true; for ( int i = 1; i < iCount; ++i ) { const DWORD * pCurr = fn.Add ( pData, i ); if ( fn.IsLess ( *pCurr , *pPrev ) || !fn.IsKeyDataSynced ( pCurr ) ) return false; pPrev = pCurr; } return true; } #endif void RandomFill ( DWORD * pData, int iCount, const TestAccCmp_fn & fn, bool bChainsaw ) { for ( int i = 0; i < iCount; ++i ) { DWORD * pCurr = fn.Add ( pData, i ); const DWORD * pNext = fn.Add ( pData, i + 1 ); DWORD * pElem = pCurr; DWORD * pChainHill = bChainsaw && ( i % 2 ) ? fn.Add ( pData, i -1 ) : NULL; do { *pElem = pChainHill ? *pChainHill / 2 : sphRand(); ++pElem; pChainHill = pChainHill ? pChainHill + 1 : pChainHill; } while ( pElem!=pNext ); *pCurr = fn.GenerateKey ( pCurr ); } } void TestStridedSortPass ( int iStride, int iCount ) { printf ( "testing strided sort, stride=%d, count=%d... ", iStride, iCount ); assert ( iStride && iCount ); DWORD * pData = new DWORD [ iCount * iStride ]; assert ( pData ); // checked elements are random memset ( pData, 0, sizeof ( DWORD ) * iCount * iStride ); TestAccCmp_fn fnSort ( iStride ); RandomFill ( pData, iCount, fnSort, false ); // crash on sort of mini-arrays TestAccCmp_fn fnSortDummy ( 1 ); DWORD dMini[1] = { 1 }; sphSort ( dMini, 1, fnSortDummy, fnSortDummy ); sphSort ( dMini, 0, fnSortDummy, fnSortDummy ); assert ( IsSorted ( dMini, 1, fnSortDummy ) ); // random sort sphSort ( pData, iCount, fnSort, fnSort ); assert ( IsSorted ( pData, iCount, fnSort ) ); // already sorted sort sphSort ( pData, iCount, fnSort, fnSort ); assert ( IsSorted ( pData, iCount, fnSort ) ); // reverse order sort for ( int i = 0; i < iCount; ++i ) { ::Swap ( pData[i], pData [ iCount - i - 1 ] ); } sphSort ( pData, iCount, fnSort, fnSort ); assert ( IsSorted ( pData, iCount, fnSort ) ); // random chainsaw sort RandomFill ( pData, iCount, fnSort, true ); sphSort ( pData, iCount, fnSort, fnSort ); assert ( IsSorted ( pData, iCount, fnSort ) ); printf ( "ok\n" ); SafeDeleteArray ( pData ); } void TestStridedSort () { TestStridedSortPass ( 1, 2 ); TestStridedSortPass ( 3, 2 ); TestStridedSortPass ( 37, 2 ); // SMALL_THRESH case TestStridedSortPass ( 1, 30 ); TestStridedSortPass ( 7, 13 ); TestStridedSortPass ( 113, 5 ); TestStridedSortPass ( 1, 1000 ); TestStridedSortPass ( 5, 1000 ); TestStridedSortPass ( 17, 50 ); TestStridedSortPass ( 31, 1367 ); // rand cases for ( int i = 0; i < 10; ++i ) { const int iRndStride = sphRand() % 64; const int iNrmStride = Max ( iRndStride, 1 ); const int iRndCount = sphRand() % 1000; const int iNrmCount = Max ( iRndCount, 1 ); TestStridedSortPass ( iNrmStride, iNrmCount ); } } ////////////////////////////////////////////////////////////////////////// const char * g_sFieldsData[] = { "33", "1033", "If I were a cat...", "We are the greatest cat" }; class SphTestDoc_c : public CSphSource_Document { public: explicit SphTestDoc_c ( const CSphSchema & tSchema ) : CSphSource_Document ( "test_doc" ) { m_tSchema = tSchema; } virtual BYTE ** NextDocument ( CSphString & ) { if ( m_tDocInfo.m_iDocID ) { m_tDocInfo.m_iDocID = 0; return NULL; } m_tDocInfo.m_iDocID++; return (BYTE **) &g_sFieldsData[2]; } bool Connect ( CSphString & ) { return true; } void Disconnect () {} bool HasAttrsConfigured () { return true; } bool 
IterateStart ( CSphString & ) { m_tDocInfo.Reset ( m_tSchema.GetRowSize() ); return true; } bool IterateMultivaluedStart ( int, CSphString & ) { return false; } bool IterateMultivaluedNext () { return false; } bool IterateFieldMVAStart ( int, CSphString & ) { return false; } bool IterateFieldMVANext () { return false; } bool IterateKillListStart ( CSphString & ) { return false; } bool IterateKillListNext ( SphDocID_t & ) { return false; } }; #ifndef NDEBUG static void CheckRT ( int iVal, int iRef, const char * sMsg ) { #if 1 assert ( iRef==iVal && sMsg ); #else if ( iRef!=iVal ) printf ( "\t%s=%d ( %d )\n", sMsg, iVal, iRef ); #endif } static void DeleteIndexFiles ( const char * sIndex ) { if ( !sIndex ) return; CSphString sName; sName.SetSprintf ( "%s.kill", sIndex ); unlink ( sName.cstr() ); sName.SetSprintf ( "%s.lock", sIndex ); unlink ( sName.cstr() ); sName.SetSprintf ( "%s.meta", sIndex ); unlink ( sName.cstr() ); sName.SetSprintf ( "%s.ram", sIndex ); unlink ( sName.cstr() ); sName.SetSprintf ( "%s.0.spa", sIndex ); unlink ( sName.cstr() ); sName.SetSprintf ( "%s.0.spd", sIndex ); unlink ( sName.cstr() ); sName.SetSprintf ( "%s.0.sph", sIndex ); unlink ( sName.cstr() ); sName.SetSprintf ( "%s.0.spi", sIndex ); unlink ( sName.cstr() ); sName.SetSprintf ( "%s.0.spk", sIndex ); unlink ( sName.cstr() ); sName.SetSprintf ( "%s.0.spm", sIndex ); unlink ( sName.cstr() ); sName.SetSprintf ( "%s.0.spp", sIndex ); unlink ( sName.cstr() ); sName.SetSprintf ( "%s.0.sps", sIndex ); unlink ( sName.cstr() ); } void TestRTInit () { CSphConfigSection tRTConfig; sphRTInit(); sphRTConfigure ( tRTConfig, true ); SmallStringHash_T hIndexes; sphReplayBinlog ( hIndexes, 0 ); } #define RT_INDEX_FILE_NAME "test_temp" #define RT_PASS_COUNT 5 static const int g_iWeights[RT_PASS_COUNT] = { 1500, 1500, 1500, 1500, 1500 }; // { 1500, 1302, 1252, 1230, 1219 }; void TestRTWeightBoundary () { DeleteIndexFiles ( RT_INDEX_FILE_NAME ); for ( int iPass = 0; iPass < RT_PASS_COUNT; ++iPass ) { printf ( "testing rt indexing, test %d/%d... 
", 1+iPass, RT_PASS_COUNT ); TestRTInit (); CSphString sError; CSphDictSettings tDictSettings; ISphTokenizer * pTok = sphCreateUTF8Tokenizer(); CSphDict * pDict = sphCreateDictionaryCRC ( tDictSettings, pTok, sError, "weight" ); CSphColumnInfo tCol; CSphSchema tSrcSchema; CSphSourceSettings tParams; tSrcSchema.Reset(); tCol.m_sName = "channel_id"; tCol.m_eAttrType = SPH_ATTR_INTEGER; tSrcSchema.AddAttr ( tCol, true ); tCol.m_sName = "title"; tSrcSchema.m_dFields.Add ( tCol ); tCol.m_sName = "content"; tSrcSchema.m_dFields.Add ( tCol ); SphTestDoc_c * pSrc = new SphTestDoc_c ( tSrcSchema ); pSrc->SetTokenizer ( pTok ); pSrc->SetDict ( pDict ); pSrc->Setup ( tParams ); Verify ( pSrc->Connect ( sError ) ); Verify ( pSrc->IterateStart ( sError ) ); Verify ( pSrc->UpdateSchema ( &tSrcSchema, sError ) ); CSphSchema tSchema; // source schema must be all dynamic attrs; but index ones must be static tSchema.m_dFields = tSrcSchema.m_dFields; for ( int i=0; iSetTokenizer ( pTok->Clone ( false ) ); pIndex->SetDictionary ( pDict->Clone() ); Verify ( pIndex->Prealloc ( false, false, sError ) ); ISphHits * pHits; CSphVector dMvas; for ( ;; ) { Verify ( pSrc->IterateDocument ( sError ) ); if ( !pSrc->m_tDocInfo.m_iDocID ) break; pHits = pSrc->IterateHits ( sError ); if ( !pHits ) break; pIndex->AddDocument ( pHits, pSrc->m_tDocInfo, NULL, dMvas, sError ); pIndex->Commit (); } pSrc->Disconnect(); CheckRT ( pSrc->GetStats().m_iTotalDocuments, 1, "docs committed" ); CSphQuery tQuery; CSphQueryResult tResult; tQuery.m_sQuery = "@title cat"; ISphMatchSorter * pSorter = sphCreateQueue ( &tQuery, pIndex->GetMatchSchema(), tResult.m_sError, false ); assert ( pSorter ); Verify ( pIndex->MultiQuery ( &tQuery, &tResult, 1, &pSorter, NULL ) ); sphFlattenQueue ( pSorter, &tResult, 0 ); CheckRT ( tResult.m_dMatches.GetLength(), 1, "results found" ); CheckRT ( (int)tResult.m_dMatches[0].m_iDocID, 1, "docID" ); CheckRT ( tResult.m_dMatches[0].m_iWeight, g_iWeights[iPass], "weight" ); SafeDelete ( pSorter ); SafeDelete ( pIndex ); SafeDelete ( pDict ); SafeDelete ( pTok ); sphRTDone (); printf ( "ok\n" ); } DeleteIndexFiles ( RT_INDEX_FILE_NAME ); } void TestWriter() { printf ( "testing CSphWriter... 
" ); const CSphString sTmpWriteout = "__writeout.tmp"; CSphString sErr; #define WRITE_OUT_DATA_SIZE 0x40000 BYTE * pData = new BYTE[WRITE_OUT_DATA_SIZE]; memset ( pData, 0xfe, WRITE_OUT_DATA_SIZE ); { CSphWriter tWrDef; tWrDef.OpenFile ( sTmpWriteout, sErr ); tWrDef.PutBytes ( pData, WRITE_OUT_DATA_SIZE ); tWrDef.PutByte ( 0xff ); } { CSphWriter tWr; tWr.SetBufferSize ( WRITE_OUT_DATA_SIZE ); tWr.OpenFile ( sTmpWriteout, sErr ); tWr.PutBytes ( pData, WRITE_OUT_DATA_SIZE ); tWr.PutByte ( 0xff ); } unlink ( sTmpWriteout.cstr() ); printf ( "ok\n" ); } class SphDocRandomizer_c : public CSphSource_Document { static const int m_iMaxFields = 2; static const int m_iMaxFieldLen = 512; char m_dFields[m_iMaxFields][m_iMaxFieldLen]; BYTE * m_ppFields[m_iMaxFields]; public: explicit SphDocRandomizer_c ( const CSphSchema & tSchema ) : CSphSource_Document ( "test_doc" ) { m_tSchema = tSchema; for ( int i=0; i800 ) { m_tDocInfo.m_iDocID = 0; return NULL; } m_tDocInfo.m_iDocID++; m_tDocInfo.SetAttr ( m_tSchema.GetAttr(0).m_tLocator, m_tDocInfo.m_iDocID+1000 ); m_tDocInfo.SetAttr ( m_tSchema.GetAttr(1).m_tLocator, 1313 ); snprintf ( m_dFields[0], m_iMaxFieldLen, "cat title%d title%d title%d title%d title%d" , sphRand(), sphRand(), sphRand(), sphRand(), sphRand() ); snprintf ( m_dFields[1], m_iMaxFieldLen, "dog contentwashere%d contentwashere%d contentwashere%d contentwashere%d contentwashere%d" , sphRand(), sphRand(), sphRand(), sphRand(), sphRand() ); return &m_ppFields[0]; } bool Connect ( CSphString & ) { return true; } void Disconnect () {} bool HasAttrsConfigured () { return true; } bool IterateStart ( CSphString & ) { m_tDocInfo.Reset ( m_tSchema.GetRowSize() ); return true; } bool IterateMultivaluedStart ( int, CSphString & ) { return false; } bool IterateMultivaluedNext () { return false; } bool IterateFieldMVAStart ( int, CSphString & ) { return false; } bool IterateFieldMVANext () { return false; } bool IterateKillListStart ( CSphString & ) { return false; } bool IterateKillListNext ( SphDocID_t & ) { return false; } }; void TestRTSendVsMerge () { DeleteIndexFiles ( RT_INDEX_FILE_NAME ); printf ( "testing rt send result during merge... 
" ); TestRTInit (); CSphString sError; CSphDictSettings tDictSettings; ISphTokenizer * pTok = sphCreateUTF8Tokenizer(); CSphDict * pDict = sphCreateDictionaryCRC ( tDictSettings, pTok, sError, "rt" ); CSphColumnInfo tCol; CSphSchema tSrcSchema; CSphSourceSettings tParams; tSrcSchema.Reset(); tCol.m_sName = "title"; tSrcSchema.m_dFields.Add ( tCol ); tCol.m_sName = "content"; tSrcSchema.m_dFields.Add ( tCol ); tCol.m_sName = "tag1"; tCol.m_eAttrType = SPH_ATTR_INTEGER; tSrcSchema.AddAttr ( tCol, true ); tCol.m_sName = "tag2"; tCol.m_eAttrType = SPH_ATTR_INTEGER; tSrcSchema.AddAttr ( tCol, true ); SphDocRandomizer_c * pSrc = new SphDocRandomizer_c ( tSrcSchema ); pSrc->SetTokenizer ( pTok ); pSrc->SetDict ( pDict ); pSrc->Setup ( tParams ); Verify ( pSrc->Connect ( sError ) ); Verify ( pSrc->IterateStart ( sError ) ); Verify ( pSrc->UpdateSchema ( &tSrcSchema, sError ) ); CSphSchema tSchema; // source schema must be all dynamic attrs; but index ones must be static tSchema.m_dFields = tSrcSchema.m_dFields; for ( int i=0; iSetTokenizer ( pTok ); // index will own this pair from now on pIndex->SetDictionary ( pDict ); Verify ( pIndex->Prealloc ( false, false, sError ) ); CSphQuery tQuery; CSphQueryResult tResult; tQuery.m_sQuery = "@title cat"; ISphMatchSorter * pSorter = sphCreateQueue ( &tQuery, pIndex->GetMatchSchema(), tResult.m_sError, false ); assert ( pSorter ); CSphVector dMvas; for ( ;; ) { Verify ( pSrc->IterateDocument ( sError ) ); if ( !pSrc->m_tDocInfo.m_iDocID ) break; ISphHits * pHits = pSrc->IterateHits ( sError ); if ( !pHits ) break; pIndex->AddDocument ( pHits, pSrc->m_tDocInfo, NULL, dMvas, sError ); if ( pSrc->m_tDocInfo.m_iDocID==350 ) { pIndex->Commit (); Verify ( pIndex->MultiQuery ( &tQuery, &tResult, 1, &pSorter, NULL ) ); sphFlattenQueue ( pSorter, &tResult, 0 ); } } pIndex->Commit (); pSrc->Disconnect(); for ( int i=0; iSetCaseFolding ( "-, 0..9, A..Z->a..z, _, a..z, U+80..U+FF", sError ) ); // assert ( pTok->SetBlendChars ( "., &", sError ) ); // NOLINT assert ( pTok->EnableSentenceIndexing ( sError ) ); const char * SENTENCE = "\2"; // MUST be in sync with sphinx.cpp const char * sTest[] = { "Bill Gates Jr. attended", "bill", "gates", "jr", "attended", NULL, "Very good, Dr. Watson", "very", "good", "dr", "watson", NULL, "VERY GOOD, DR. WATSON", "very", "good", "dr", "watson", NULL, "He left US. Went abroad", "he", "left", "us", SENTENCE, "went", "abroad", NULL, "Known as Mr. Doe", "known", "as", "mr", "doe", NULL, "Survived by Mrs. Doe", "survived", "by", "mrs", "doe", NULL, "J. R. R. Tolkien", "j", "r", "r", "tolkien", NULL, "That is it. A boundary", "that", "is", "it", SENTENCE, "a", "boundary", NULL, "Just a sentence. And then some.", "just", "a", "sentence", SENTENCE, "and", "then", "some", SENTENCE, NULL, "Right, guy number two? Yes, guy number one!", "right", "guy", "number", "two", SENTENCE, "yes", "guy", "number", "one", SENTENCE, NULL, "S.T.A.L.K.E.R. sold well in the U.K and elsewhere. Including Russia.", "s", "t", "a", "l", "k", "e", "r", "sold", "well", "in", "the", "u", "k", "and", "elsewhere", SENTENCE, "including", "russia", SENTENCE, NULL, "Yoyodine Inc. exists since 1800", "yoyodine", "inc", "exists", "since", "1800", NULL, "John D. Doe, our CEO", "john", "d", "doe", "our", "ceo", NULL, "Yoyodine Inc. 
(the Company)", "yoyodine", "inc", "the", "company", NULL, NULL }; int i = 0; while ( sTest[i] ) { pTok->SetBuffer ( (BYTE*)sTest[i], strlen ( sTest[i] ) ); i++; BYTE * sTok; while ( ( sTok = pTok->GetToken() )!=NULL ) { assert ( !strcmp ( (char*)sTok, sTest[i] ) ); i++; } assert ( sTest[i]==NULL ); i++; } printf ( "ok\n" ); } ////////////////////////////////////////////////////////////////////////// void TestSpanSearch() { printf ( "testing span search... " ); CSphVector dVec; dVec.Add ( 1 ); dVec.Add ( 3 ); dVec.Add ( 4 ); assert ( FindSpan ( dVec, 1, 5 )==0 ); assert ( FindSpan ( dVec, 3, 5 )==1 ); assert ( FindSpan ( dVec, 4, 5 )==2 ); dVec.Add ( 15 ); dVec.Add ( 17 ); dVec.Add ( 22 ); dVec.Add ( 23 ); assert ( FindSpan ( dVec, 1, 5 )==0 ); assert ( FindSpan ( dVec, 18, 5 )==4 ); assert ( FindSpan ( dVec, 23, 5 )==6 ); printf ( "ok\n" ); } ////////////////////////////////////////////////////////////////////////// const char * CORPUS = "corpus.txt"; const int POOLSIZE = 80*1048576; const int GAP = 4; void BenchStemmer () { CSphString sError; #if SNOWBALL SN_env * pSnow = english_ISO_8859_1_create_env(); #if 1 char test[] = "this"; SN_set_current ( pSnow, strlen(test), (const symbol *)test ); pSnow->p [ pSnow->l ] = 0; english_ISO_8859_1_stem ( pSnow ); stem_en ( (BYTE*)test, strlen(test) ); #endif #endif #if PORTER1 struct stemmer * z = create_stemmer(); #endif BYTE * pRaw = new BYTE [ POOLSIZE ]; FILE * fp = fopen ( CORPUS, "rb" ); if ( !fp ) sphDie ( "fopen %s failed", CORPUS ); int iLen = fread ( pRaw, 1, POOLSIZE, fp ); printf ( "read %d bytes\n", iLen ); fclose ( fp ); ISphTokenizer * pTok = sphCreateSBCSTokenizer(); if ( !pTok->SetCaseFolding ( "A..Z->a..z, a..z", sError ) ) sphDie ( "oops: %s", sError.cstr() ); pTok->SetBuffer ( pRaw, iLen ); BYTE * pTokens = new BYTE [ POOLSIZE ]; BYTE * p = pTokens; BYTE * sTok; int iToks = 0; int iBytes = 0; int iStemmed = 0; while ( ( sTok = pTok->GetToken() )!=NULL ) { BYTE * pStart = p++; // 1 byte for length while ( *sTok ) *p++ = *sTok++; *pStart = (BYTE)( p-pStart-1 ); // store length for ( int i=0; i=pTokens+POOLSIZE ) sphDie ( "out of buffer at tok %d", iToks ); iToks++; } *p++ = '\0'; iBytes = (int)( p - pTokens ); printf ( "tokenized %d tokens\n", iToks ); #if 0 int dCharStats[256]; memset ( dCharStats, 0, sizeof(dCharStats) ); for ( BYTE * t = pTokens; tp, pSnow->l ); p[pSnow->l+1] = 0; #else // crosscheck char buf[256]; memcpy ( buf, p+1, *p+1 ); stem_en ( p+1, *p ); int ll = strlen ( (char*)p+1 ); if ( ll!=pSnow->l || memcmp ( p+1, pSnow->p, ll ) ) { pSnow->p[pSnow->l] = 0; printf ( "%s[%d] vs %s[%d] for orig %s\n", p+1, ll, pSnow->p, pSnow->l, buf ); iDiff++; } #endif #endif #if PORTER1 p [ stem ( z, (char*)p+1, *p-1 )+2 ] = 0; #endif p += *p + GAP + 1; iToks++; } tmStem = sphMicroTimer() - tmStem; if ( iDiff ) printf ( "%d tokens are different\n", iDiff ); if ( iStemmed ) printf ( "%d data bytes stemmed\n", iStemmed ); #if SNOWBALL english_ISO_8859_1_close_env ( pSnow ); #endif uint64_t uHash = sphFNV64 ( pTokens, iBytes ); printf ( "stemmed %d tokens (%d bytes) in %d msec, hash %08x %08x\n", iToks, iBytes, (int)(tmStem/1000), (DWORD)( uHash>>32 ), (DWORD)( uHash & 0xffffffffUL ) ); if ( uHash!=U64C ( 0x54ef4f21994b67db ) ) printf ( "ERROR, HASH MISMATCH\n" ); SafeDelete ( pTok ); SafeDeleteArray ( pRaw ); } int main () { // threads should be initialized before memory allocations char cTopOfMainStack; sphThreadInit(); MemorizeStack ( &cTopOfMainStack ); printf ( "RUNNING INTERNAL LIBSPHINX TESTS\n\n" ); #if 0 BenchSort (); 
#endif #ifdef NDEBUG BenchStripper (); BenchTokenizer ( false ); BenchTokenizer ( true ); BenchExpr (); BenchLocators (); BenchThreads (); #else TestQueryParser (); TestStripper (); TestTokenizer ( false ); TestTokenizer ( true ); TestExpr (); TestMisc (); TestRwlock (); TestCleanup (); TestStridedSort (); TestRTWeightBoundary (); TestWriter(); TestRTSendVsMerge (); TestSentenceTokenizer (); TestSpanSearch (); #endif unlink ( g_sTmpfile ); printf ( "\nSUCCESS\n" ); return 0; } // // $Id: tests.cpp 3130 2012-03-01 07:43:56Z tomat $ // sphinx-2.0.4-release/src/testrt.cpp0000644000176700017710000001767211723624274016610 0ustar deogardeogar// // $Id: testrt.cpp 3130 2012-03-01 07:43:56Z tomat $ // // // Copyright (c) 2001-2012, Andrew Aksyonoff // Copyright (c) 2008-2012, Sphinx Technologies Inc // All rights reserved // // This program is free software; you can redistribute it and/or modify // it under the terms of the GNU General Public License. You should have // received a copy of the GPL license along with this program; if you // did not, you can find it at http://www.gnu.org/ // #include "sphinx.h" #include "sphinxrt.h" #include "sphinxutils.h" #if USE_WINDOWS #include "psapi.h" #pragma comment(linker, "/defaultlib:psapi.lib") #pragma message("Automatically linking with psapi.lib") #endif const int COMMIT_STEP = 1; float g_fTotalMB = 0.0f; void SetupIndexing ( CSphSource_MySQL * pSrc, const CSphSourceParams_MySQL & tParams ) { CSphString sError; if ( !pSrc->Setup ( tParams ) ) sphDie ( "setup failed" ); if ( !pSrc->Connect ( sError ) ) sphDie ( "connect failed: %s", sError.cstr() ); if ( !pSrc->IterateStart ( sError ) ) sphDie ( "iterate-start failed: %s", sError.cstr() ); } void DoSearch ( CSphIndex * pIndex ) { printf ( "---\nsearching... " ); CSphQuery tQuery; CSphQueryResult tResult; tQuery.m_sQuery = "@title cat"; ISphMatchSorter * pSorter = sphCreateQueue ( &tQuery, pIndex->GetMatchSchema(), tResult.m_sError, false ); if ( !pSorter ) { printf ( "failed to create sorter; error=%s", tResult.m_sError.cstr() ); } else if ( !pIndex->MultiQuery ( &tQuery, &tResult, 1, &pSorter, NULL ) ) { printf ( "query failed; error=%s", pIndex->GetLastError().cstr() ); } else { sphFlattenQueue ( pSorter, &tResult, 0 ); printf ( "%d results found in %d.%03d sec!\n", tResult.m_dMatches.GetLength(), tResult.m_iQueryTime/1000, tResult.m_iQueryTime%1000 ); ARRAY_FOREACH ( i, tResult.m_dMatches ) printf ( "%d. 
id=" DOCID_FMT ", weight=%d\n", 1+i, tResult.m_dMatches[i].m_iDocID, tResult.m_dMatches[i].m_iWeight ); } SafeDelete ( pSorter ); printf ( "---\n" ); } void DoIndexing ( CSphSource * pSrc, ISphRtIndex * pIndex ) { CSphString sError; CSphVector dMvas; int64_t tmStart = sphMicroTimer (); int64_t tmAvgCommit = 0; int64_t tmMaxCommit = 0; int iCommits = 0; for ( ;; ) { if ( !pSrc->IterateDocument ( sError ) ) sphDie ( "iterate-document failed: %s", sError.cstr() ); ISphHits * pHitsNext = pSrc->IterateHits ( sError ); if ( !sError.IsEmpty() ) sphDie ( "iterate-hits failed: %s", sError.cstr() ); if ( pSrc->m_tDocInfo.m_iDocID ) pIndex->AddDocument ( pHitsNext, pSrc->m_tDocInfo, NULL, dMvas, sError ); if ( ( pSrc->GetStats().m_iTotalDocuments % COMMIT_STEP )==0 || !pSrc->m_tDocInfo.m_iDocID ) { int64_t tmCommit = sphMicroTimer(); pIndex->Commit (); tmCommit = sphMicroTimer()-tmCommit; iCommits++; tmAvgCommit += tmCommit; tmMaxCommit = Max ( tmMaxCommit, tmCommit ); if ( !pSrc->m_tDocInfo.m_iDocID ) { tmAvgCommit /= iCommits; break; } } if (!( pSrc->GetStats().m_iTotalDocuments % 100 )) printf ( "%d docs\r", (int)pSrc->GetStats().m_iTotalDocuments ); static bool bOnce = true; if ( iCommits*COMMIT_STEP>=5000 && bOnce ) { printf ( "\n" ); DoSearch ( pIndex ); bOnce = false; } } pSrc->Disconnect(); int64_t tmEnd = sphMicroTimer (); float fTotalMB = (float)pSrc->GetStats().m_iTotalBytes/1000000.0f; printf ( "commit-step %d, %d docs, %d bytes, %d.%03d sec, %.2f MB/sec\n", COMMIT_STEP, (int)pSrc->GetStats().m_iTotalDocuments, (int)pSrc->GetStats().m_iTotalBytes, (int)((tmEnd-tmStart)/1000000), (int)(((tmEnd-tmStart)%1000000)/1000), fTotalMB*1000000.0f/(tmEnd-tmStart) ); printf ( "commit-docs %d, avg %d.%03d msec, max %d.%03d msec\n", COMMIT_STEP, (int)(tmAvgCommit/1000), (int)(tmAvgCommit%1000), (int)(tmMaxCommit/1000), (int)(tmMaxCommit%1000) ); g_fTotalMB += fTotalMB; } CSphSource * SpawnSource ( const char * sQuery, ISphTokenizer * pTok, CSphDict * pDict ) { CSphSource_MySQL * pSrc = new CSphSource_MySQL ( "test" ); pSrc->SetTokenizer ( pTok ); pSrc->SetDict ( pDict ); CSphSourceParams_MySQL tParams; tParams.m_sHost = "localhost"; tParams.m_sUser = "root"; tParams.m_sDB = "lj"; tParams.m_dQueryPre.Add ( "SET NAMES utf8" ); tParams.m_sQuery = sQuery; CSphColumnInfo tCol; tCol.m_eAttrType = SPH_ATTR_INTEGER; tCol.m_sName = "channel_id"; tParams.m_dAttrs.Add ( tCol ); tCol.m_eAttrType = SPH_ATTR_TIMESTAMP; tCol.m_sName = "published"; tParams.m_dAttrs.Add ( tCol ); SetupIndexing ( pSrc, tParams ); return pSrc; } static ISphRtIndex * g_pIndex = NULL; void IndexingThread ( void * pArg ) { CSphSource * pSrc = (CSphSource *) pArg; DoIndexing ( pSrc, g_pIndex ); } int main () { // threads should be initialized before memory allocations char cTopOfMainStack; sphThreadInit(); MemorizeStack ( &cTopOfMainStack ); CSphString sError; CSphDictSettings tDictSettings; ISphTokenizer * pTok = sphCreateUTF8Tokenizer(); CSphDict * pDict = sphCreateDictionaryCRC ( tDictSettings, pTok, sError, "rt1" ); CSphSource * pSrc = SpawnSource ( "SELECT id, channel_id, UNIX_TIMESTAMP(published) published, title, UNCOMPRESS(content) content FROM posting WHERE id<=10000 AND id%2=0", pTok, pDict ); ISphTokenizer * pTok2 = sphCreateUTF8Tokenizer(); CSphDict * pDict2 = sphCreateDictionaryCRC ( tDictSettings, pTok, sError, "rt2" ); CSphSource * pSrc2 = SpawnSource ( "SELECT id, channel_id, UNIX_TIMESTAMP(published) published, title, UNCOMPRESS(content) content FROM posting WHERE id<=10000 AND id%2=1", pTok2, pDict2 ); CSphSchema 
tSrcSchema; if ( !pSrc->UpdateSchema ( &tSrcSchema, sError ) ) sphDie ( "update-schema failed: %s", sError.cstr() ); CSphSchema tSchema; // source schema must be all dynamic attrs; but index ones must be static tSchema.m_dFields = tSrcSchema.m_dFields; for ( int i=0; i dTemp; sphReplayBinlog ( dTemp, 0 ); ISphRtIndex * pIndex = sphCreateIndexRT ( tSchema, "testrt", 32*1024*1024, "data/dump", false ); pIndex->SetTokenizer ( pTok ); // index will own this pair from now on pIndex->SetDictionary ( pDict ); if ( !pIndex->Prealloc ( false, false, sError ) ) sphDie ( "prealloc failed: %s", pIndex->GetLastError().cstr() ); g_pIndex = pIndex; // initial indexing int64_t tmStart = sphMicroTimer(); SphThread_t t1, t2; sphThreadCreate ( &t1, IndexingThread, pSrc ); sphThreadCreate ( &t2, IndexingThread, pSrc2 ); sphThreadJoin ( &t1 ); sphThreadJoin ( &t2 ); #if 0 // update tParams.m_sQuery = "SELECT id, channel_id, UNIX_TIMESTAMP(published) published, title, UNCOMPRESS(content) content FROM rt2 WHERE id<=10000"; SetupIndexing ( pSrc, tParams ); DoIndexing ( pSrc, pIndex ); #endif // search DoSearch ( pIndex ); // shutdown index (should cause dump) int64_t tmShutdown = sphMicroTimer(); #if SPH_ALLOCS_PROFILER printf ( "pre-shutdown allocs=%d, bytes="INT64_FMT"\n", sphAllocsCount(), sphAllocBytes() ); #endif SafeDelete ( pIndex ); #if SPH_ALLOCS_PROFILER printf ( "post-shutdown allocs=%d, bytes="INT64_FMT"\n", sphAllocsCount(), sphAllocBytes() ); #endif int64_t tmEnd = sphMicroTimer(); printf ( "shutdown done in %d.%03d sec\n", (int)((tmEnd-tmShutdown)/1000000), (int)(((tmEnd-tmShutdown)%1000000)/1000) ); printf ( "total with shutdown %d.%03d sec, %.2f MB/sec\n", (int)((tmEnd-tmStart)/1000000), (int)(((tmEnd-tmStart)%1000000)/1000), g_fTotalMB*1000000.0f/(tmEnd-tmStart) ); #if SPH_DEBUG_LEAKS || SPH_ALLOCS_PROFILER sphAllocsStats(); #endif #if USE_WINDOWS PROCESS_MEMORY_COUNTERS pmc; HANDLE hProcess = OpenProcess ( PROCESS_QUERY_INFORMATION | PROCESS_VM_READ, FALSE, GetCurrentProcessId() ); if ( hProcess && GetProcessMemoryInfo ( hProcess, &pmc, sizeof(pmc)) ) { printf ( "--- peak-wss=%d, peak-pagefile=%d\n", (int)pmc.PeakWorkingSetSize, (int)pmc.PeakPagefileUsage ); } #endif SafeDelete ( pIndex ); sphRTDone (); } // // $Id: testrt.cpp 3130 2012-03-01 07:43:56Z tomat $ // sphinx-2.0.4-release/src/sphinxsort.cpp0000644000176700017710000026471211711621267017477 0ustar deogardeogar// // $Id: sphinxsort.cpp 3087 2012-01-30 23:07:35Z shodan $ // // // Copyright (c) 2001-2012, Andrew Aksyonoff // Copyright (c) 2008-2012, Sphinx Technologies Inc // All rights reserved // // This program is free software; you can redistribute it and/or modify // it under the terms of the GNU General Public License. 
You should have // received a copy of the GPL license along with this program; if you // did not, you can find it at http://www.gnu.org/ // #include "sphinx.h" #include "sphinxint.h" #include #include #if !USE_WINDOWS #include #include #endif ////////////////////////////////////////////////////////////////////////// // TRAITS ////////////////////////////////////////////////////////////////////////// /// groupby key type typedef int64_t SphGroupKey_t; /// base grouper (class that computes groupby key) class CSphGrouper { public: virtual ~CSphGrouper () {} virtual SphGroupKey_t KeyFromValue ( SphAttr_t uValue ) const = 0; virtual SphGroupKey_t KeyFromMatch ( const CSphMatch & tMatch ) const = 0; virtual void GetLocator ( CSphAttrLocator & tOut ) const = 0; virtual ESphAttr GetResultType () const = 0; virtual void SetStringPool ( const BYTE * ) {} }; /// match-sorting priority queue traits class CSphMatchQueueTraits : public ISphMatchSorter, ISphNoncopyable { protected: CSphMatch * m_pData; int m_iUsed; int m_iSize; CSphMatchComparatorState m_tState; const bool m_bUsesAttrs; public: /// ctor CSphMatchQueueTraits ( int iSize, bool bUsesAttrs ) : m_iUsed ( 0 ) , m_iSize ( iSize ) , m_bUsesAttrs ( bUsesAttrs ) { assert ( iSize>0 ); m_pData = new CSphMatch [ iSize ]; assert ( m_pData ); m_tState.m_iNow = (DWORD) time ( NULL ); } /// dtor ~CSphMatchQueueTraits () { SafeDeleteArray ( m_pData ); } public: void SetState ( const CSphMatchComparatorState & tState ) { m_tState = tState; m_tState.m_iNow = (DWORD) time ( NULL ); } bool UsesAttrs () const { return m_bUsesAttrs; } virtual CSphMatch * Finalize () { return m_pData; } virtual int GetLength () const { return m_iUsed; } }; ////////////////////////////////////////////////////////////////////////// // PLAIN SORTING QUEUE ////////////////////////////////////////////////////////////////////////// /// normal match-sorting priority queue template < typename COMP > class CSphMatchQueue : public CSphMatchQueueTraits { public: /// ctor CSphMatchQueue ( int iSize, bool bUsesAttrs ) : CSphMatchQueueTraits ( iSize, bUsesAttrs ) {} /// check if this sorter does groupby virtual bool IsGroupby () const { return false; } /// add entry to the queue virtual bool Push ( const CSphMatch & tEntry ) { m_iTotal++; if ( m_iUsed==m_iSize ) { // if it's worse that current min, reject it, else pop off current min if ( COMP::IsLess ( tEntry, m_pData[0], m_tState ) ) return true; else Pop (); } // do add m_pData[m_iUsed].Clone ( tEntry, m_tSchema.GetDynamicSize() ); int iEntry = m_iUsed++; // sift up if needed, so that worst (lesser) ones float to the top while ( iEntry ) { int iParent = ( iEntry-1 ) >> 1; if ( !COMP::IsLess ( m_pData[iEntry], m_pData[iParent], m_tState ) ) break; // entry is less than parent, should float to the top Swap ( m_pData[iEntry], m_pData[iParent] ); iEntry = iParent; } return true; } /// add grouped entry (must not happen) virtual bool PushGrouped ( const CSphMatch & ) { assert ( 0 ); return false; } /// remove root (ie. top priority) entry virtual void Pop () { assert ( m_iUsed ); if ( !(--m_iUsed) ) // empty queue? just return return; // make the last entry my new root Swap ( m_pData[0], m_pData[m_iUsed] ); // sift down if needed int iEntry = 0; for ( ;; ) { // select child int iChild = (iEntry<<1) + 1; if ( iChild>=m_iUsed ) break; // select smallest child if ( iChild+1=0 ); pTo += m_iUsed; while ( m_iUsed>0 ) { --pTo; pTo[0].Clone ( m_pData[0], m_tSchema.GetDynamicSize() ); // OPTIMIZE? reset dst + swap? 
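// m_pData[0] always holds the worst match still in the heap, so popping
// worst-first while stepping pTo backwards from the end of the output slice
// leaves the flattened result ordered best to worst for the caller.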
if ( iTag>=0 ) pTo->m_iTag = iTag; Pop (); } m_iTotal = 0; } }; /// collector for UPDATE statement class CSphUpdateQueue : public CSphMatchQueueTraits { CSphAttrUpdateEx* m_pUpdate; private: void DoUpdate() { if ( !m_iUsed ) return; CSphAttrUpdate tSet; tSet.m_dAttrs = m_pUpdate->m_pUpdate->m_dAttrs; tSet.m_dPool = m_pUpdate->m_pUpdate->m_dPool; tSet.m_dRowOffset.Resize ( m_iUsed ); if ( !DOCINFO2ID ( STATIC2DOCINFO ( m_pData->m_pStatic ) ) ) // if static attrs were copied, so, they actually dynamic { tSet.m_dDocids.Resize ( m_iUsed ); ARRAY_FOREACH ( i, tSet.m_dDocids ) { tSet.m_dDocids[i] = m_pData[i].m_iDocID; tSet.m_dRowOffset[i] = 0; } } else // static attrs points to the active indexes - so, no lookup, 5 times faster update. { tSet.m_dRows.Resize ( m_iUsed ); ARRAY_FOREACH ( i, tSet.m_dRows ) { tSet.m_dRows[i] = m_pData[i].m_pStatic - ( sizeof(SphDocID_t) / sizeof(CSphRowitem) ); tSet.m_dRowOffset[i] = 0; } } m_pUpdate->m_iAffected += m_pUpdate->m_pIndex->UpdateAttributes ( tSet, -1, *m_pUpdate->m_pError ); m_iUsed = 0; } public: /// ctor CSphUpdateQueue ( int iSize, CSphAttrUpdateEx* pUpdate ) : CSphMatchQueueTraits ( iSize, true ) , m_pUpdate ( pUpdate ) {} /// check if this sorter does groupby virtual bool IsGroupby () const { return false; } /// add entry to the queue virtual bool Push ( const CSphMatch & tEntry ) { m_iTotal++; if ( m_iUsed==m_iSize ) DoUpdate(); // do add m_pData[m_iUsed++].Clone ( tEntry, m_tSchema.GetDynamicSize() ); return true; } /// add grouped entry (must not happen) virtual bool PushGrouped ( const CSphMatch & ) { assert ( 0 ); return false; } /// store all entries into specified location in sorted order, and remove them from queue void Flatten ( CSphMatch *, int ) { assert ( m_iUsed>=0 ); DoUpdate(); m_iTotal = 0; } }; ////////////////////////////////////////////////////////////////////////// // SORTING+GROUPING QUEUE ////////////////////////////////////////////////////////////////////////// static bool IsCount ( const CSphString & s ) { return s=="@count" || s=="count(*)"; } static bool IsGroupby ( const CSphString & s ) { return s=="@groupby" || s=="@distinct"; } static bool IsGroupbyMagic ( const CSphString & s ) { return IsGroupby ( s ) || IsCount ( s ); } /// groupers #define GROUPER_BEGIN(_name) \ class _name : public CSphGrouper \ { \ protected: \ CSphAttrLocator m_tLocator; \ public: \ explicit _name ( const CSphAttrLocator & tLoc ) : m_tLocator ( tLoc ) {} \ virtual void GetLocator ( CSphAttrLocator & tOut ) const { tOut = m_tLocator; } \ virtual ESphAttr GetResultType () const { return m_tLocator.m_iBitCount>8*(int)sizeof(DWORD) ? 
SPH_ATTR_BIGINT : SPH_ATTR_INTEGER; } \ virtual SphGroupKey_t KeyFromMatch ( const CSphMatch & tMatch ) const { return KeyFromValue ( tMatch.GetAttr ( m_tLocator ) ); } \ virtual SphGroupKey_t KeyFromValue ( SphAttr_t uValue ) const \ { // NOLINT #define GROUPER_END \ } \ }; #define GROUPER_BEGIN_SPLIT(_name) \ GROUPER_BEGIN(_name) \ time_t tStamp = (time_t)uValue; \ struct tm * pSplit = localtime ( &tStamp ); GROUPER_BEGIN ( CSphGrouperAttr ) return uValue; GROUPER_END GROUPER_BEGIN_SPLIT ( CSphGrouperDay ) return (pSplit->tm_year+1900)*10000 + (1+pSplit->tm_mon)*100 + pSplit->tm_mday; GROUPER_END GROUPER_BEGIN_SPLIT ( CSphGrouperWeek ) int iPrevSunday = (1+pSplit->tm_yday) - pSplit->tm_wday; // prev Sunday day of year, base 1 int iYear = pSplit->tm_year+1900; if ( iPrevSunday<=0 ) // check if we crossed year boundary { // adjust day and year iPrevSunday += 365; iYear--; // adjust for leap years if ( iYear%4==0 && ( iYear%100!=0 || iYear%400==0 ) ) iPrevSunday++; } return iYear*1000 + iPrevSunday; GROUPER_END GROUPER_BEGIN_SPLIT ( CSphGrouperMonth ) return (pSplit->tm_year+1900)*100 + (1+pSplit->tm_mon); GROUPER_END GROUPER_BEGIN_SPLIT ( CSphGrouperYear ) return (pSplit->tm_year+1900); GROUPER_END template class CSphGrouperString : public CSphGrouperAttr, public PRED { private: const BYTE * m_pStringBase; public: explicit CSphGrouperString ( const CSphAttrLocator & tLoc ) : CSphGrouperAttr ( tLoc ) , m_pStringBase ( NULL ) { } virtual ESphAttr GetResultType () const { return SPH_ATTR_BIGINT; } virtual SphGroupKey_t KeyFromValue ( SphAttr_t uValue ) const { if ( !m_pStringBase || !uValue ) return 0; const BYTE * pStr = NULL; int iLen = sphUnpackStr ( m_pStringBase+uValue, &pStr ); if ( !pStr || !iLen ) return 0; return PRED::Hash ( pStr, iLen ); } virtual void SetStringPool ( const BYTE * pStrings ) { m_pStringBase = pStrings; } }; ////////////////////////////////////////////////////////////////////////// /// simple fixed-size hash /// doesn't keep the order template < typename T, typename KEY, typename HASHFUNC > class CSphFixedHash : ISphNoncopyable { protected: static const int HASH_LIST_END = -1; static const int HASH_DELETED = -2; struct HashEntry_t { KEY m_tKey; T m_tValue; int m_iNext; }; protected: CSphVector m_dEntries; ///< key-value pairs storage pool CSphVector m_dHash; ///< hash into m_dEntries pool int m_iFree; ///< free pairs count CSphVector m_dFree; ///< free pair indexes public: /// ctor explicit CSphFixedHash ( int iLength ) { int iBuckets = ( 2 << sphLog2 ( iLength-1 ) ); // less than 50% bucket usage guaranteed assert ( iLength>0 ); assert ( iLength<=iBuckets ); m_dEntries.Resize ( iLength ); m_dHash.Resize ( iBuckets ); m_dFree.Resize ( iLength ); Reset (); } /// cleanup void Reset () { ARRAY_FOREACH ( i, m_dEntries ) m_dEntries[i].m_iNext = HASH_DELETED; ARRAY_FOREACH ( i, m_dHash ) m_dHash[i] = HASH_LIST_END; m_iFree = m_dFree.GetLength(); ARRAY_FOREACH ( i, m_dFree ) m_dFree[i] = i; } /// add new entry /// returns NULL on success /// returns pointer to value if already hashed T * Add ( const T & tValue, const KEY & tKey ) { assert ( m_iFree>0 && "hash overflow" ); // check if it's already hashed DWORD uHash = DWORD ( HASHFUNC::Hash ( tKey ) ) & ( m_dHash.GetLength()-1 ); int iPrev = -1, iEntry; for ( iEntry=m_dHash[uHash]; iEntry>=0; iPrev=iEntry, iEntry=m_dEntries[iEntry].m_iNext ) if ( m_dEntries[iEntry].m_tKey==tKey ) return &m_dEntries[iEntry].m_tValue; assert ( iEntry!=HASH_DELETED ); // if it's not, do add int iNew = m_dFree [ --m_iFree ]; HashEntry_t & tNew 
= m_dEntries[iNew]; assert ( tNew.m_iNext==HASH_DELETED ); tNew.m_tKey = tKey; tNew.m_tValue = tValue; tNew.m_iNext = HASH_LIST_END; if ( iPrev>=0 ) { assert ( m_dEntries[iPrev].m_iNext==HASH_LIST_END ); m_dEntries[iPrev].m_iNext = iNew; } else { assert ( m_dHash[uHash]==HASH_LIST_END ); m_dHash[uHash] = iNew; } return NULL; } /// remove entry from hash void Remove ( const KEY & tKey ) { // check if it's already hashed DWORD uHash = DWORD ( HASHFUNC::Hash ( tKey ) ) & ( m_dHash.GetLength()-1 ); int iPrev = -1, iEntry; for ( iEntry=m_dHash[uHash]; iEntry>=0; iPrev=iEntry, iEntry=m_dEntries[iEntry].m_iNext ) if ( m_dEntries[iEntry].m_tKey==tKey ) { // found, remove it assert ( m_dEntries[iEntry].m_iNext!=HASH_DELETED ); if ( iPrev>=0 ) m_dEntries[iPrev].m_iNext = m_dEntries[iEntry].m_iNext; else m_dHash[uHash] = m_dEntries[iEntry].m_iNext; #ifndef NDEBUG m_dEntries[iEntry].m_iNext = HASH_DELETED; #endif m_dFree [ m_iFree++ ] = iEntry; return; } assert ( iEntry!=HASH_DELETED ); } /// get value pointer by key T * operator () ( const KEY & tKey ) const { DWORD uHash = DWORD ( HASHFUNC::Hash ( tKey ) ) & ( m_dHash.GetLength()-1 ); int iEntry; for ( iEntry=m_dHash[uHash]; iEntry>=0; iEntry=m_dEntries[iEntry].m_iNext ) if ( m_dEntries[iEntry].m_tKey==tKey ) return (T*)&m_dEntries[iEntry].m_tValue; assert ( iEntry!=HASH_DELETED ); return NULL; } }; ///////////////////////////////////////////////////////////////////////////// /// (group,attrvalue) pair struct SphGroupedValue_t { public: SphGroupKey_t m_uGroup; SphAttr_t m_uValue; public: SphGroupedValue_t () {} SphGroupedValue_t ( SphGroupKey_t uGroup, SphAttr_t uValue ) : m_uGroup ( uGroup ) , m_uValue ( uValue ) {} inline bool operator < ( const SphGroupedValue_t & rhs ) const { if ( m_uGrouprhs.m_uGroup ) return false; return m_uValue { public: #ifndef NDEBUG CSphUniqounter () : m_iCountPos ( 0 ), m_bSorted ( true ) { Reserve ( 16384 ); } void Add ( const SphGroupedValue_t & tValue ) { CSphVector::Add ( tValue ); m_bSorted = false; } void Sort () { CSphVector::Sort (); m_bSorted = true; } #else CSphUniqounter () : m_iCountPos ( 0 ) {} #endif public: int CountStart ( SphGroupKey_t * pOutGroup ); ///< starting counting distinct values, returns count and group key (0 if empty) int CountNext ( SphGroupKey_t * pOutGroup ); ///< continues counting distinct values, returns count and group key (0 if done) void Compact ( SphGroupKey_t * pRemoveGroups, int iRemoveGroups ); protected: int m_iCountPos; #ifndef NDEBUG bool m_bSorted; #endif }; int CSphUniqounter::CountStart ( SphGroupKey_t * pOutGroup ) { m_iCountPos = 0; return CountNext ( pOutGroup ); } int CSphUniqounter::CountNext ( SphGroupKey_t * pOutGroup ) { assert ( m_bSorted ); if ( m_iCountPos>=m_iLength ) return 0; SphGroupKey_t uGroup = m_pData[m_iCountPos].m_uGroup; SphAttr_t uValue = m_pData[m_iCountPos].m_uValue; *pOutGroup = uGroup; int iCount = 1; while ( m_iCountPosm_uGroup ) { pRemoveGroups++; iRemoveGroups--; } for ( ; pSrcm_uGroup ) { pRemoveGroups++; iRemoveGroups--; } if ( iRemoveGroups && pSrc->m_uGroup==*pRemoveGroups ) continue; // check if it's a dupe if ( pDst>m_pData && pDst[-1]==pSrc[0] ) continue; *pDst++ = *pSrc; } assert ( pDst-m_pData<=m_iLength ); m_iLength = pDst-m_pData; } ///////////////////////////////////////////////////////////////////////////// /// attribute magic enum { SPH_VATTR_ID = -1, ///< tells match sorter to use doc id SPH_VATTR_RELEVANCE = -2, ///< tells match sorter to use match weight SPH_VATTR_FLOAT = 10000 ///< tells match sorter to compare floats }; /// 
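// A minimal standalone sketch of the COUNT(DISTINCT) scheme that CSphUniqounter
// implements: buffer (group key, attribute value) pairs, sort them once so equal
// pairs become adjacent, then count per group in a single linear pass. Standard
// containers only; the GroupValue_t / CountDistinctPerGroup names below are ad-hoc
// illustrations, not Sphinx API.

#include <algorithm>
#include <cstddef>
#include <map>
#include <utility>
#include <vector>

typedef std::pair<long long, long long> GroupValue_t; // (group key, attribute value)

static std::map<long long, int> CountDistinctPerGroup ( std::vector<GroupValue_t> dPairs )
{
	// sort once; duplicates of the same (group, value) pair become adjacent
	std::sort ( dPairs.begin(), dPairs.end() );
	dPairs.erase ( std::unique ( dPairs.begin(), dPairs.end() ), dPairs.end() );

	// every surviving pair is exactly one distinct value of its group
	std::map<long long, int> hCounts;
	for ( size_t i=0; i<dPairs.size(); i++ )
		hCounts [ dPairs[i].first ]++;
	return hCounts;
}

// Keeping the counter as one flat sort-then-scan array, rather than a hash of
// per-group sets, is also what lets Compact() drop evicted groups and dupes in
// a single merge-style pass over the same storage.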
match comparator interface from group-by sorter point of view struct ISphMatchComparator { virtual ~ISphMatchComparator () {} virtual bool VirtualIsLess ( const CSphMatch & a, const CSphMatch & b, const CSphMatchComparatorState & tState ) const = 0; }; /// additional group-by sorter settings struct CSphGroupSorterSettings { CSphAttrLocator m_tLocGroupby; ///< locator for @groupby CSphAttrLocator m_tLocCount; ///< locator for @count CSphAttrLocator m_tLocDistinct; ///< locator for @distinct CSphAttrLocator m_tDistinctLoc; ///< locator for attribute to compute count(distinct) for bool m_bDistinct; ///< whether we need distinct bool m_bMVA; ///< whether we're grouping by MVA attribute bool m_bMva64; CSphGrouper * m_pGrouper; ///< group key calculator CSphGroupSorterSettings () : m_bDistinct ( false ) , m_bMVA ( false ) , m_bMva64 ( false ) , m_pGrouper ( NULL ) {} }; #if USE_WINDOWS #pragma warning(disable:4127) #endif /// aggregate function interface class IAggrFunc { public: virtual ~IAggrFunc() {} virtual void Ungroup ( CSphMatch * ) {} virtual void Update ( CSphMatch * pDst, const CSphMatch * pSrc, bool bGrouped ) = 0; virtual void Finalize ( CSphMatch * ) {} }; /// aggregate traits for different attribute types template < typename T > class IAggrFuncTraits : public IAggrFunc { public: explicit IAggrFuncTraits ( const CSphAttrLocator & tLocator ) : m_tLocator ( tLocator ) {} inline T GetValue ( const CSphMatch * pRow ); inline void SetValue ( CSphMatch * pRow, T val ); protected: CSphAttrLocator m_tLocator; }; template<> DWORD IAggrFuncTraits::GetValue ( const CSphMatch * pRow ) { return (DWORD)pRow->GetAttr ( m_tLocator ); } template<> void IAggrFuncTraits::SetValue ( CSphMatch * pRow, DWORD val ) { pRow->SetAttr ( m_tLocator, val ); } template<> int64_t IAggrFuncTraits::GetValue ( const CSphMatch * pRow ) { return pRow->GetAttr ( m_tLocator ); } template<> void IAggrFuncTraits::SetValue ( CSphMatch * pRow, int64_t val ) { pRow->SetAttr ( m_tLocator, val ); } template<> float IAggrFuncTraits::GetValue ( const CSphMatch * pRow ) { return pRow->GetAttrFloat ( m_tLocator ); } template<> void IAggrFuncTraits::SetValue ( CSphMatch * pRow, float val ) { pRow->SetAttrFloat ( m_tLocator, val ); } /// SUM() implementation template < typename T > class AggrSum_t : public IAggrFuncTraits { public: explicit AggrSum_t ( const CSphAttrLocator & tLoc ) : IAggrFuncTraits ( tLoc ) {} virtual void Update ( CSphMatch * pDst, const CSphMatch * pSrc, bool ) { this->SetValue ( pDst, this->GetValue(pDst)+this->GetValue(pSrc) ); } }; /// AVG() implementation template < typename T > class AggrAvg_t : public IAggrFuncTraits { protected: CSphAttrLocator m_tCountLoc; public: AggrAvg_t ( const CSphAttrLocator & tLoc, const CSphAttrLocator & tCountLoc ) : IAggrFuncTraits ( tLoc ), m_tCountLoc ( tCountLoc ) {} virtual void Ungroup ( CSphMatch * pDst ) { this->SetValue ( pDst, T ( this->GetValue ( pDst ) * pDst->GetAttr ( m_tCountLoc ) ) ); } virtual void Update ( CSphMatch * pDst, const CSphMatch * pSrc, bool bGrouped ) { if ( bGrouped ) this->SetValue ( pDst, T ( this->GetValue ( pDst ) + this->GetValue ( pSrc ) * pSrc->GetAttr ( m_tCountLoc ) ) ); else this->SetValue ( pDst, this->GetValue ( pDst ) + this->GetValue ( pSrc ) ); } virtual void Finalize ( CSphMatch * pDst ) { this->SetValue ( pDst, T ( this->GetValue ( pDst ) / pDst->GetAttr ( m_tCountLoc ) ) ); } }; /// MAX() implementation template < typename T > class AggrMax_t : public IAggrFuncTraits { public: explicit AggrMax_t ( const CSphAttrLocator & tLoc ) : 
IAggrFuncTraits ( tLoc ) {} virtual void Update ( CSphMatch * pDst, const CSphMatch * pSrc, bool ) { this->SetValue ( pDst, Max ( this->GetValue(pDst), this->GetValue(pSrc) ) ); } }; /// MIN() implementation template < typename T > class AggrMin_t : public IAggrFuncTraits { public: explicit AggrMin_t ( const CSphAttrLocator & tLoc ) : IAggrFuncTraits ( tLoc ) {} virtual void Update ( CSphMatch * pDst, const CSphMatch * pSrc, bool ) { this->SetValue ( pDst, Min ( this->GetValue(pDst), this->GetValue(pSrc) ) ); } }; /// group sorting functor template < typename COMPGROUP > struct GroupSorter_fn : public CSphMatchComparatorState, public SphAccessor_T { typedef CSphMatch MEDIAN_TYPE; int m_iDynamic; GroupSorter_fn () { m_iDynamic = 0; } void CopyKey ( MEDIAN_TYPE * pMed, CSphMatch * pVal ) const { pMed->Clone ( *pVal, m_iDynamic ); } bool IsLess ( const CSphMatch & a, const CSphMatch & b ) const { return COMPGROUP::IsLess ( b, a, *this ); } // inherited swap does not work on gcc void Swap ( CSphMatch * a, CSphMatch * b ) const { ::Swap ( *a, *b ); } }; /// match sorter with k-buffering and group-by template < typename COMPGROUP, bool DISTINCT > class CSphKBufferGroupSorter : public CSphMatchQueueTraits { protected: ESphGroupBy m_eGroupBy; ///< group-by function CSphGrouper * m_pGrouper; CSphFixedHash < CSphMatch *, SphGroupKey_t, IdentityHash_fn > m_hGroup2Match; protected: int m_iLimit; ///< max matches to be retrieved CSphUniqounter m_tUniq; bool m_bSortByDistinct; GroupSorter_fn m_tGroupSorter; const ISphMatchComparator * m_pComp; CSphGroupSorterSettings m_tSettings; CSphVector m_dAggregates; CSphVector m_dAvgs; int m_iPregroupDynamic; ///< how much dynamic attributes are computed by the index (before groupby sorter) static const int GROUPBY_FACTOR = 4; ///< allocate this times more storage when doing group-by (k, as in k-buffer) public: /// ctor CSphKBufferGroupSorter ( const ISphMatchComparator * pComp, const CSphQuery * pQuery, const CSphGroupSorterSettings & tSettings ) // FIXME! 
make k configurable : CSphMatchQueueTraits ( pQuery->m_iMaxMatches*GROUPBY_FACTOR, true ) , m_eGroupBy ( pQuery->m_eGroupFunc ) , m_pGrouper ( tSettings.m_pGrouper ) , m_hGroup2Match ( pQuery->m_iMaxMatches*GROUPBY_FACTOR ) , m_iLimit ( pQuery->m_iMaxMatches ) , m_bSortByDistinct ( false ) , m_pComp ( pComp ) , m_tSettings ( tSettings ) , m_iPregroupDynamic ( 0 ) { assert ( GROUPBY_FACTOR>1 ); assert ( DISTINCT==false || tSettings.m_tDistinctLoc.m_iBitOffset>=0 ); } /// schema setup virtual void SetSchema ( const CSphSchema & tSchema ) { m_tSchema = tSchema; m_tGroupSorter.m_iDynamic = m_tSchema.GetDynamicSize(); bool bAggrStarted = false; for ( int i=0; i ( tAttr.m_tLocator ) ); break; case SPH_ATTR_BIGINT: m_dAggregates.Add ( new AggrSum_t ( tAttr.m_tLocator ) ); break; case SPH_ATTR_FLOAT: m_dAggregates.Add ( new AggrSum_t ( tAttr.m_tLocator ) ); break; default: assert ( 0 && "internal error: unhandled aggregate type" ); break; } break; case SPH_AGGR_AVG: switch ( tAttr.m_eAttrType ) { case SPH_ATTR_INTEGER: m_dAggregates.Add ( new AggrAvg_t ( tAttr.m_tLocator, m_tSettings.m_tLocCount ) ); break; case SPH_ATTR_BIGINT: m_dAggregates.Add ( new AggrAvg_t ( tAttr.m_tLocator, m_tSettings.m_tLocCount ) ); break; case SPH_ATTR_FLOAT: m_dAggregates.Add ( new AggrAvg_t ( tAttr.m_tLocator, m_tSettings.m_tLocCount ) ); break; default: assert ( 0 && "internal error: unhandled aggregate type" ); break; } // store avg to calculate these attributes prior to groups sort for ( int iState=0; iState ( tAttr.m_tLocator ) ); break; case SPH_ATTR_BIGINT: m_dAggregates.Add ( new AggrMin_t ( tAttr.m_tLocator ) ); break; case SPH_ATTR_FLOAT: m_dAggregates.Add ( new AggrMin_t ( tAttr.m_tLocator ) ); break; default: assert ( 0 && "internal error: unhandled aggregate type" ); break; } break; case SPH_AGGR_MAX: switch ( tAttr.m_eAttrType ) { case SPH_ATTR_INTEGER: m_dAggregates.Add ( new AggrMax_t ( tAttr.m_tLocator ) ); break; case SPH_ATTR_BIGINT: m_dAggregates.Add ( new AggrMax_t ( tAttr.m_tLocator ) ); break; case SPH_ATTR_FLOAT: m_dAggregates.Add ( new AggrMax_t ( tAttr.m_tLocator ) ); break; default: assert ( 0 && "internal error: unhandled aggregate type" ); break; } break; default: assert ( 0 && "internal error: unhandled aggregate function" ); break; } } } /// dtor ~CSphKBufferGroupSorter () { SafeDelete ( m_pComp ); SafeDelete ( m_pGrouper ); } /// check if this sorter does groupby virtual bool IsGroupby () const { return true; } /// set string pool pointer (for string+groupby sorters) void SetStringPool ( const BYTE * pStrings ) { m_pGrouper->SetStringPool ( pStrings ); } /// add entry to the queue virtual bool Push ( const CSphMatch & tEntry ) { SphGroupKey_t uGroupKey = m_pGrouper->KeyFromMatch ( tEntry ); return PushEx ( tEntry, uGroupKey, false ); } /// add grouped entry to the queue virtual bool PushGrouped ( const CSphMatch & tEntry ) { return PushEx ( tEntry, tEntry.GetAttr ( m_tSettings.m_tLocGroupby ), true ); } /// add entry to the queue virtual bool PushEx ( const CSphMatch & tEntry, const SphGroupKey_t uGroupKey, bool bGrouped ) { // if this group is already hashed, we only need to update the corresponding match CSphMatch ** ppMatch = m_hGroup2Match ( uGroupKey ); if ( ppMatch ) { CSphMatch * pMatch = (*ppMatch); assert ( pMatch ); assert ( pMatch->GetAttr ( m_tSettings.m_tLocGroupby )==uGroupKey ); assert ( pMatch->m_pDynamic[-1]==tEntry.m_pDynamic[-1] ); if ( bGrouped ) { // it's already grouped match // sum grouped matches count pMatch->SetAttr ( m_tSettings.m_tLocCount, pMatch->GetAttr ( 
m_tSettings.m_tLocCount ) + tEntry.GetAttr ( m_tSettings.m_tLocCount ) ); // OPTIMIZE! AddAttr()? if ( DISTINCT ) pMatch->SetAttr ( m_tSettings.m_tLocDistinct, pMatch->GetAttr ( m_tSettings.m_tLocDistinct ) + tEntry.GetAttr ( m_tSettings.m_tLocDistinct ) ); } else { // it's a simple match // increase grouped matches count pMatch->SetAttr ( m_tSettings.m_tLocCount, 1 + pMatch->GetAttr ( m_tSettings.m_tLocCount ) ); // OPTIMIZE! IncAttr()? } // update aggregates ARRAY_FOREACH ( i, m_dAggregates ) m_dAggregates[i]->Update ( pMatch, &tEntry, bGrouped ); // if new entry is more relevant, update from it if ( m_pComp->VirtualIsLess ( *pMatch, tEntry, m_tState ) ) { // can't use Clone() here; must keep current aggregate values pMatch->m_iDocID = tEntry.m_iDocID; pMatch->m_iWeight = tEntry.m_iWeight; pMatch->m_pStatic = tEntry.m_pStatic; pMatch->m_iTag = tEntry.m_iTag; if ( m_iPregroupDynamic ) { assert ( pMatch->m_pDynamic ); assert ( tEntry.m_pDynamic ); assert ( pMatch->m_pDynamic[-1]==tEntry.m_pDynamic[-1] ); for ( int i=0; im_pDynamic[i] = tEntry.m_pDynamic[i]; } } } // submit actual distinct value in all cases if ( DISTINCT && !bGrouped ) m_tUniq.Add ( SphGroupedValue_t ( uGroupKey, tEntry.GetAttr ( m_tSettings.m_tDistinctLoc ) ) ); // OPTIMIZE! use simpler locator here? // it's a dupe anyway, so we shouldn't update total matches count if ( ppMatch ) return false; // if we're full, let's cut off some worst groups if ( m_iUsed==m_iSize ) CutWorst ( m_iLimit * (int)(GROUPBY_FACTOR/2) ); // do add assert ( m_iUsedUngroup ( &tNew ); } m_hGroup2Match.Add ( &tNew, uGroupKey ); m_iTotal++; return true; } void CalcAvg ( bool bGroup ) { if ( !m_dAvgs.GetLength() ) return; CSphMatch * pMatch = m_pData; CSphMatch * pEnd = pMatch + m_iUsed; while ( pMatchFinalize ( pMatch ); else m_dAvgs[j]->Ungroup ( pMatch ); } ++pMatch; } } /// store all entries into specified location in sorted order, and remove them from queue void Flatten ( CSphMatch * pTo, int iTag ) { CountDistinct (); CalcAvg ( true ); SortGroups (); CSphVector dAggrs; if ( m_dAggregates.GetLength()!=m_dAvgs.GetLength() ) { dAggrs = m_dAggregates; ARRAY_FOREACH ( i, m_dAvgs ) dAggrs.RemoveValue ( m_dAvgs[i] ); } int iLen = GetLength (); for ( int i=0; iFinalize ( &m_pData[i] ); pTo->Clone ( m_pData[i], m_tSchema.GetDynamicSize() ); if ( iTag>=0 ) pTo->m_iTag = iTag; } m_iUsed = 0; m_iTotal = 0; m_hGroup2Match.Reset (); if ( DISTINCT ) m_tUniq.Resize ( 0 ); } /// get entries count int GetLength () const { return Min ( m_iUsed, m_iLimit ); } /// set group comparator state void SetGroupState ( const CSphMatchComparatorState & tState ) { m_tGroupSorter.m_fnStrCmp = tState.m_fnStrCmp; // FIXME! manual bitwise copying.. 
yuck for ( int i=0; i=0 ) for ( int i=0; iSetAttr ( m_tSettings.m_tLocDistinct, iCount ); } } } /// cut worst N groups off the buffer tail void CutWorst ( int iCut ) { // sort groups if ( m_bSortByDistinct ) CountDistinct (); CalcAvg ( true ); SortGroups (); CalcAvg ( false ); // cut groups m_iUsed -= iCut; // cleanup unused distinct stuff if ( DISTINCT ) { // build kill-list CSphVector dRemove; dRemove.Resize ( iCut ); for ( int i=0; im_iLimit ) CutWorst ( m_iUsed - m_iLimit ); return m_pData; } }; /// match sorter with k-buffering and group-by for MVAs template < typename COMPGROUP, bool DISTINCT > class CSphKBufferMVAGroupSorter : public CSphKBufferGroupSorter < COMPGROUP, DISTINCT > { protected: const DWORD * m_pMva; ///< pointer to MVA pool for incoming matches CSphAttrLocator m_tMvaLocator; bool m_bMva64; public: /// ctor CSphKBufferMVAGroupSorter ( const ISphMatchComparator * pComp, const CSphQuery * pQuery, const CSphGroupSorterSettings & tSettings ) : CSphKBufferGroupSorter < COMPGROUP, DISTINCT > ( pComp, pQuery, tSettings ) , m_pMva ( NULL ) , m_bMva64 ( tSettings.m_bMva64 ) { this->m_pGrouper->GetLocator ( m_tMvaLocator ); } /// check if this sorter does groupby virtual bool IsGroupby () { return true; } /// set MVA pool for subsequent matches void SetMVAPool ( const DWORD * pMva ) { m_pMva = pMva; } /// add entry to the queue virtual bool Push ( const CSphMatch & tEntry ) { assert ( m_pMva ); if ( !m_pMva ) return false; // get that list // FIXME! OPTIMIZE! use simpler locator than full bits/count here // FIXME! hardcoded MVA type, so here's MVA_DOWNSIZE marker for searching const DWORD * pValues = tEntry.GetAttrMVA ( this->m_tMvaLocator, m_pMva ); // (this pointer is for gcc; it doesn't work otherwise) if ( !pValues ) return false; DWORD iValues = *pValues++; bool bRes = false; if ( m_bMva64 ) { assert ( ( iValues%2 )==0 ); for ( ;iValues>0; iValues-=2, pValues+=2 ) { uint64_t uMva = MVA_UPSIZE ( pValues ); SphGroupKey_t uGroupkey = this->m_pGrouper->KeyFromValue ( uMva ); bRes |= this->PushEx ( tEntry, uGroupkey, false ); } } else { while ( iValues-- ) { SphGroupKey_t uGroupkey = this->m_pGrouper->KeyFromValue ( *pValues++ ); bRes |= this->PushEx ( tEntry, uGroupkey, false ); } } return bRes; } /// add pre-grouped entry to the queue virtual bool PushGrouped ( const CSphMatch & tEntry ) { // re-group it based on the group key // (first 'this' is for icc; second 'this' is for gcc) return this->PushEx ( tEntry, tEntry.GetAttr ( this->m_tSettings.m_tLocGroupby ), true ); } }; #if USE_WINDOWS #pragma warning(default:4127) #endif ////////////////////////////////////////////////////////////////////////// // PLAIN SORTING FUNCTORS ////////////////////////////////////////////////////////////////////////// /// match sorter struct MatchRelevanceLt_fn : public ISphMatchComparator { virtual bool VirtualIsLess ( const CSphMatch & a, const CSphMatch & b, const CSphMatchComparatorState & t ) const { return IsLess ( a, b, t ); } static bool IsLess ( const CSphMatch & a, const CSphMatch & b, const CSphMatchComparatorState & ) { if ( a.m_iWeight!=b.m_iWeight ) return a.m_iWeight < b.m_iWeight; return a.m_iDocID > b.m_iDocID; }; }; /// match sorter struct MatchAttrLt_fn : public ISphMatchComparator { virtual bool VirtualIsLess ( const CSphMatch & a, const CSphMatch & b, const CSphMatchComparatorState & t ) const { return IsLess ( a, b, t ); } static inline bool IsLess ( const CSphMatch & a, const CSphMatch & b, const CSphMatchComparatorState & t ) { if ( t.m_eKeypart[0]!=SPH_KEYPART_STRING ) { 
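// A minimal standalone sketch of the comparator convention shared by the plain
// sorting functors in this file (MatchRelevanceLt_fn and friends): IsLess()
// answers "is a worse than b" -- lower weight loses and, on equal weights, the
// larger document id loses -- so the queue keeps the worst match at its heap root,
// and a best-first ordering is the same predicate with its arguments swapped
// (the trick GroupSorter_fn uses). Standard library only; the Toy* names below
// are ad-hoc illustrations, not Sphinx API.

#include <algorithm>
#include <vector>

struct ToyMatch_t
{
	int m_iWeight;
	unsigned int m_uDocID;
};

// "less" means "worse": lower weight first, then larger document id
static bool ToyIsLess ( const ToyMatch_t & a, const ToyMatch_t & b )
{
	if ( a.m_iWeight!=b.m_iWeight )
		return a.m_iWeight < b.m_iWeight;
	return a.m_uDocID > b.m_uDocID;
}

// best-first order: swap the arguments of the "worse-than" predicate
struct ToyBestFirst_fn
{
	bool operator() ( const ToyMatch_t & a, const ToyMatch_t & b ) const
	{
		return ToyIsLess ( b, a );
	}
};

static void SortBestFirst ( std::vector<ToyMatch_t> & dMatches )
{
	std::sort ( dMatches.begin(), dMatches.end(), ToyBestFirst_fn() );
}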
SphAttr_t aa = a.GetAttr ( t.m_tLocator[0] ); SphAttr_t bb = b.GetAttr ( t.m_tLocator[0] ); if ( aa!=bb ) return aa b.m_iDocID; }; }; /// match sorter struct MatchAttrGt_fn : public ISphMatchComparator { virtual bool VirtualIsLess ( const CSphMatch & a, const CSphMatch & b, const CSphMatchComparatorState & t ) const { return IsLess ( a, b, t ); } static inline bool IsLess ( const CSphMatch & a, const CSphMatch & b, const CSphMatchComparatorState & t ) { if ( t.m_eKeypart[0]!=SPH_KEYPART_STRING ) { SphAttr_t aa = a.GetAttr ( t.m_tLocator[0] ); SphAttr_t bb = b.GetAttr ( t.m_tLocator[0] ); if ( aa!=bb ) return aa>bb; } else { int iCmp = t.CmpStrings ( a, b, 0 ); if ( iCmp!=0 ) return iCmp>0; } if ( a.m_iWeight!=b.m_iWeight ) return a.m_iWeight < b.m_iWeight; return a.m_iDocID > b.m_iDocID; }; }; /// match sorter struct MatchTimeSegments_fn : public ISphMatchComparator { virtual bool VirtualIsLess ( const CSphMatch & a, const CSphMatch & b, const CSphMatchComparatorState & t ) const { return IsLess ( a, b, t ); } static inline bool IsLess ( const CSphMatch & a, const CSphMatch & b, const CSphMatchComparatorState & t ) { SphAttr_t aa = a.GetAttr ( t.m_tLocator[0] ); SphAttr_t bb = b.GetAttr ( t.m_tLocator[0] ); int iA = GetSegment ( aa, t.m_iNow ); int iB = GetSegment ( bb, t.m_iNow ); if ( iA!=iB ) return iA > iB; if ( a.m_iWeight!=b.m_iWeight ) return a.m_iWeight < b.m_iWeight; if ( aa!=bb ) return aa b.m_iDocID; }; protected: static inline int GetSegment ( SphAttr_t iStamp, SphAttr_t iNow ) { if ( iStamp>=iNow-3600 ) return 0; // last hour if ( iStamp>=iNow-24*3600 ) return 1; // last day if ( iStamp>=iNow-7*24*3600 ) return 2; // last week if ( iStamp>=iNow-30*24*3600 ) return 3; // last month if ( iStamp>=iNow-90*24*3600 ) return 4; // last 3 months return 5; // everything else } }; /// match sorter struct MatchExpr_fn : public ISphMatchComparator { virtual bool VirtualIsLess ( const CSphMatch & a, const CSphMatch & b, const CSphMatchComparatorState & t ) const { return IsLess ( a, b, t ); } static inline bool IsLess ( const CSphMatch & a, const CSphMatch & b, const CSphMatchComparatorState & t ) { float aa = a.GetAttrFloat ( t.m_tLocator[0] ); // FIXME! OPTIMIZE!!! 
simplified (dword-granular) getter could be used here float bb = b.GetAttrFloat ( t.m_tLocator[0] ); if ( aa!=bb ) return aab.m_iDocID; } }; ///////////////////////////////////////////////////////////////////////////// #define SPH_TEST_PAIR(_aa,_bb,_idx ) \ if ( (_aa)!=(_bb) ) \ return ( (t.m_uAttrDesc >> (_idx)) & 1 ) ^ ( (_aa) > (_bb) ); #define SPH_TEST_KEYPART(_idx) \ switch ( t.m_eKeypart[_idx] ) \ { \ case SPH_KEYPART_ID: SPH_TEST_PAIR ( a.m_iDocID, b.m_iDocID, _idx ); break; \ case SPH_KEYPART_WEIGHT: SPH_TEST_PAIR ( a.m_iWeight, b.m_iWeight, _idx ); break; \ case SPH_KEYPART_INT: \ { \ register SphAttr_t aa = a.GetAttr ( t.m_tLocator[_idx] ); \ register SphAttr_t bb = b.GetAttr ( t.m_tLocator[_idx] ); \ SPH_TEST_PAIR ( aa, bb, _idx ); \ break; \ } \ case SPH_KEYPART_FLOAT: \ { \ register float aa = a.GetAttrFloat ( t.m_tLocator[_idx] ); \ register float bb = b.GetAttrFloat ( t.m_tLocator[_idx] ); \ SPH_TEST_PAIR ( aa, bb, _idx ) \ break; \ } \ case SPH_KEYPART_STRING: \ { \ int iCmp = t.CmpStrings ( a, b, _idx ); \ if ( iCmp!=0 ) \ return ( ( t.m_uAttrDesc >> (_idx) ) & 1 ) ^ ( iCmp>0 ); \ break; \ } \ } struct MatchGeneric2_fn : public ISphMatchComparator { virtual bool VirtualIsLess ( const CSphMatch & a, const CSphMatch & b, const CSphMatchComparatorState & t ) const { return IsLess ( a, b, t ); } static inline bool IsLess ( const CSphMatch & a, const CSphMatch & b, const CSphMatchComparatorState & t ) { SPH_TEST_KEYPART(0); SPH_TEST_KEYPART(1); return false; }; }; struct MatchGeneric3_fn : public ISphMatchComparator { virtual bool VirtualIsLess ( const CSphMatch & a, const CSphMatch & b, const CSphMatchComparatorState & t ) const { return IsLess ( a, b, t ); } static inline bool IsLess ( const CSphMatch & a, const CSphMatch & b, const CSphMatchComparatorState & t ) { SPH_TEST_KEYPART(0); SPH_TEST_KEYPART(1); SPH_TEST_KEYPART(2); return false; }; }; struct MatchGeneric4_fn : public ISphMatchComparator { virtual bool VirtualIsLess ( const CSphMatch & a, const CSphMatch & b, const CSphMatchComparatorState & t ) const { return IsLess ( a, b, t ); } static inline bool IsLess ( const CSphMatch & a, const CSphMatch & b, const CSphMatchComparatorState & t ) { SPH_TEST_KEYPART(0); SPH_TEST_KEYPART(1); SPH_TEST_KEYPART(2); SPH_TEST_KEYPART(3); return false; }; }; struct MatchGeneric5_fn : public ISphMatchComparator { virtual bool VirtualIsLess ( const CSphMatch & a, const CSphMatch & b, const CSphMatchComparatorState & t ) const { return IsLess ( a, b, t ); } static inline bool IsLess ( const CSphMatch & a, const CSphMatch & b, const CSphMatchComparatorState & t ) { SPH_TEST_KEYPART(0); SPH_TEST_KEYPART(1); SPH_TEST_KEYPART(2); SPH_TEST_KEYPART(3); SPH_TEST_KEYPART(4); return false; }; }; ////////////////////////////////////////////////////////////////////////// struct MatchCustom_fn : public ISphMatchComparator { virtual bool VirtualIsLess ( const CSphMatch & a, const CSphMatch & b, const CSphMatchComparatorState & t ) const { return IsLess ( a, b, t ); } // setup sorting state static bool SetupAttr ( const CSphSchema & tSchema, CSphMatchComparatorState & tState, CSphString & sError, int iIdx, const char * sAttr ) { if ( iIdx>=CSphMatchComparatorState::MAX_ATTRS ) { sError.SetSprintf ( "custom sort: too many attributes declared" ); return false; } int iAttr = tSchema.GetAttrIndex(sAttr); if ( iAttr<0 ) { sError.SetSprintf ( "custom sort: attr '%s' not found in schema", sAttr ); return false; } const CSphColumnInfo & tAttr = tSchema.GetAttr(iAttr); tState.m_eKeypart[iIdx] = 
tAttr.m_eAttrType==SPH_ATTR_FLOAT ? SPH_KEYPART_FLOAT : SPH_KEYPART_INT; tState.m_tLocator[iIdx] = tAttr.m_tLocator; return true; } // setup sorting state static bool Setup ( const CSphSchema & tSchema, CSphMatchComparatorState & tState, CSphString & sError ) { float fTmp; int iAttr = 0; #define MATCH_FUNCTION fTmp #define MATCH_WEIGHT 1.0f #define MATCH_NOW 1.0f #define MATCH_ATTR(_idx) 1.0f #define MATCH_DECLARE_ATTR(_name) if ( !SetupAttr ( tSchema, tState, sError, iAttr++, _name ) ) return false; #include "sphinxcustomsort.inl" ; // NOLINT return true; } // calc function and compare matches // OPTIMIZE? could calc once per match on submit static inline bool IsLess ( const CSphMatch & a, const CSphMatch & b, const CSphMatchComparatorState & t ) { #undef MATCH_DECLARE_ATTR #undef MATCH_WEIGHT #undef MATCH_NOW #undef MATCH_ATTR #define MATCH_DECLARE_ATTR(_name) ; // NOLINT #define MATCH_WEIGHT float(MATCH_VAR.m_iWeight) #define MATCH_NOW float(t.m_iNow) #define MATCH_ATTR(_idx) float(MATCH_VAR.GetAttr(t.m_tLocator[_idx])) float aa, bb; #undef MATCH_FUNCTION #undef MATCH_VAR #define MATCH_FUNCTION aa #define MATCH_VAR a #include "sphinxcustomsort.inl" // NOLINT ; // NOLINT #undef MATCH_FUNCTION #undef MATCH_VAR #define MATCH_FUNCTION bb #define MATCH_VAR b #include "sphinxcustomsort.inl" // NOLINT ; // NOLINT return aaa..z, _, a..z, @ if ( ( c>='0' && c<='9' ) || ( c>='a' && c<='z' ) || c=='_' || c=='@' ) return c; if ( c>='A' && c<='Z' ) return c-'A'+'a'; return 0; } public: explicit SortClauseTokenizer_t ( const char * sBuffer ) { int iLen = strlen(sBuffer); m_pBuf = new char [ iLen+1 ]; m_pMax = m_pBuf+iLen; m_pCur = m_pBuf; for ( int i=0; i<=iLen; i++ ) m_pBuf[i] = ToLower ( sBuffer[i] ); } ~SortClauseTokenizer_t () { SafeDeleteArray ( m_pBuf ); } const char * GetToken () { // skip spaces while ( m_pCur=m_pMax ) return NULL; // memorize token start, and move pointer forward const char * sRes = m_pCur; while ( *m_pCur ) m_pCur++; return sRes; } }; static inline ESphSortKeyPart Attr2Keypart ( ESphAttr eType ) { switch ( eType ) { case SPH_ATTR_FLOAT: return SPH_KEYPART_FLOAT; case SPH_ATTR_STRING: return SPH_KEYPART_STRING; default: return SPH_KEYPART_INT; } } static ESortClauseParseResult sphParseSortClause ( const CSphQuery * pQuery, const char * sClause, const CSphSchema & tSchema, ESphSortFunc & eFunc, CSphMatchComparatorState & tState, int * dAttrs, CSphString & sError, CSphSchema * pExtra = NULL ) { assert ( dAttrs ); for ( int i=0; im_dItems ) { const CSphQueryItem & tItem = pQuery->m_dItems[i]; if ( !tItem.m_sAlias.cstr() || strcasecmp ( tItem.m_sAlias.cstr(), pTok ) ) continue; if ( tItem.m_sExpr.Begins("@") ) iAttr = tSchema.GetAttrIndex ( tItem.m_sExpr.cstr() ); break; // break in any case; because we did match the alias } } // epic fail if ( iAttr<0 ) { sError.SetSprintf ( "sort-by attribute '%s' not found", pTok ); return SORT_CLAUSE_ERROR; } const CSphColumnInfo & tCol = tSchema.GetAttr(iAttr); if ( pExtra ) pExtra->AddAttr ( tCol, true ); tState.m_eKeypart[iField] = Attr2Keypart ( tCol.m_eAttrType ); tState.m_tLocator[iField] = tSchema.GetAttr(iAttr).m_tLocator; dAttrs[iField] = iAttr; } } if ( iField==0 ) { sError.SetSprintf ( "no sort order defined" ); return SORT_CLAUSE_ERROR; } if ( iField==1 ) tState.m_eKeypart[iField++] = SPH_KEYPART_ID; // add "id ASC" switch ( iField ) { case 2: eFunc = FUNC_GENERIC2; break; case 3: eFunc = FUNC_GENERIC3; break; case 4: eFunc = FUNC_GENERIC4; break; case 5: eFunc = FUNC_GENERIC5; break; default: sError.SetSprintf ( "INTERNAL ERROR: 
%d fields in sphParseSortClause()", iField ); return SORT_CLAUSE_ERROR; } return SORT_CLAUSE_OK; } ////////////////////////////////////////////////////////////////////////// // SORTING+GROUPING INSTANTIATION ////////////////////////////////////////////////////////////////////////// template < typename COMPGROUP > static ISphMatchSorter * sphCreateSorter3rd ( const ISphMatchComparator * pComp, const CSphQuery * pQuery, const CSphGroupSorterSettings & tSettings ) { if ( tSettings.m_bMVA ) { if ( tSettings.m_bDistinct==true ) return new CSphKBufferMVAGroupSorter < COMPGROUP, true > ( pComp, pQuery, tSettings); else return new CSphKBufferMVAGroupSorter < COMPGROUP, false > ( pComp, pQuery, tSettings ); } else { if ( tSettings.m_bDistinct==true ) return new CSphKBufferGroupSorter < COMPGROUP, true > ( pComp, pQuery, tSettings ); else return new CSphKBufferGroupSorter < COMPGROUP, false > ( pComp, pQuery, tSettings ); } } static ISphMatchSorter * sphCreateSorter2nd ( ESphSortFunc eGroupFunc, const ISphMatchComparator * pComp, const CSphQuery * pQuery, const CSphGroupSorterSettings & tSettings ) { switch ( eGroupFunc ) { case FUNC_GENERIC2: return sphCreateSorter3rd ( pComp, pQuery, tSettings ); break; case FUNC_GENERIC3: return sphCreateSorter3rd ( pComp, pQuery, tSettings ); break; case FUNC_GENERIC4: return sphCreateSorter3rd ( pComp, pQuery, tSettings ); break; case FUNC_GENERIC5: return sphCreateSorter3rd ( pComp, pQuery, tSettings ); break; case FUNC_CUSTOM: return sphCreateSorter3rd ( pComp, pQuery, tSettings ); break; case FUNC_EXPR: return sphCreateSorter3rd ( pComp, pQuery, tSettings ); break; default: return NULL; } } static ISphMatchSorter * sphCreateSorter1st ( ESphSortFunc eMatchFunc, ESphSortFunc eGroupFunc, const CSphQuery * pQuery, const CSphGroupSorterSettings & tSettings ) { ISphMatchComparator * pComp = NULL; switch ( eMatchFunc ) { case FUNC_REL_DESC: pComp = new MatchRelevanceLt_fn(); break; case FUNC_ATTR_DESC: pComp = new MatchAttrLt_fn(); break; case FUNC_ATTR_ASC: pComp = new MatchAttrGt_fn(); break; case FUNC_TIMESEGS: pComp = new MatchTimeSegments_fn(); break; case FUNC_GENERIC2: pComp = new MatchGeneric2_fn(); break; case FUNC_GENERIC3: pComp = new MatchGeneric3_fn(); break; case FUNC_GENERIC4: pComp = new MatchGeneric4_fn(); break; case FUNC_GENERIC5: pComp = new MatchGeneric5_fn(); break; case FUNC_CUSTOM: pComp = new MatchCustom_fn(); break; case FUNC_EXPR: pComp = new MatchExpr_fn(); break; // only for non-bitfields, obviously } assert ( pComp ); return sphCreateSorter2nd ( eGroupFunc, pComp, pQuery, tSettings ); } ////////////////////////////////////////////////////////////////////////// // GEODIST ////////////////////////////////////////////////////////////////////////// struct ExprGeodist_t : public ISphExpr { public: ExprGeodist_t () {} bool Setup ( const CSphQuery * pQuery, const CSphSchema & tSchema, CSphString & sError ); virtual float Eval ( const CSphMatch & tMatch ) const; virtual void SetMVAPool ( const DWORD * ) {} virtual void GetDependencyColumns ( CSphVector & dColumns ) const; protected: CSphAttrLocator m_tGeoLatLoc; CSphAttrLocator m_tGeoLongLoc; float m_fGeoAnchorLat; float m_fGeoAnchorLong; int m_iLat; int m_iLon; }; bool ExprGeodist_t::Setup ( const CSphQuery * pQuery, const CSphSchema & tSchema, CSphString & sError ) { if ( !pQuery->m_bGeoAnchor ) { sError.SetSprintf ( "INTERNAL ERROR: no geoanchor, can not create geodist evaluator" ); return false; } int iLat = tSchema.GetAttrIndex ( pQuery->m_sGeoLatAttr.cstr() ); if ( iLat<0 ) { 
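// A minimal standalone sketch of the factory pattern used by sphCreateSorter1st,
// sphCreateSorter2nd and sphCreateSorter3rd above: each runtime choice (match
// comparator, group comparator, distinct, MVA) is turned into a template argument
// through one switch per level, so the heavily used comparison paths end up as
// ordinary compile-time instantiations. The Toy* names below are ad-hoc
// illustrations, not Sphinx API.

#include <cstddef>

// minimal sorter interface, standing in for ISphMatchSorter
struct ToySorter_i
{
	virtual ~ToySorter_i () {}
	virtual bool Before ( int a, int b ) const = 0;
};

struct ToyAsc_fn { static bool IsLess ( int a, int b ) { return a<b; } };
struct ToyDesc_fn { static bool IsLess ( int a, int b ) { return a>b; } };

template < typename COMP >
struct ToySorter_T : public ToySorter_i
{
	virtual bool Before ( int a, int b ) const { return COMP::IsLess ( a, b ); }
};

enum ToyFunc_e { TOY_ASC, TOY_DESC };

// one switch per template argument: the runtime enum picks a comparator type,
// and everything downstream is a fully resolved template instantiation
static ToySorter_i * sphCreateToySorter ( ToyFunc_e eFunc )
{
	switch ( eFunc )
	{
		case TOY_ASC: return new ToySorter_T<ToyAsc_fn> ();
		case TOY_DESC: return new ToySorter_T<ToyDesc_fn> ();
	}
	return NULL;
}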
sError.SetSprintf ( "unknown latitude attribute '%s'", pQuery->m_sGeoLatAttr.cstr() ); return false; } int iLong = tSchema.GetAttrIndex ( pQuery->m_sGeoLongAttr.cstr() ); if ( iLong<0 ) { sError.SetSprintf ( "unknown latitude attribute '%s'", pQuery->m_sGeoLongAttr.cstr() ); return false; } m_tGeoLatLoc = tSchema.GetAttr(iLat).m_tLocator; m_tGeoLongLoc = tSchema.GetAttr(iLong).m_tLocator; m_fGeoAnchorLat = pQuery->m_fGeoLatitude; m_fGeoAnchorLong = pQuery->m_fGeoLongitude; m_iLat = iLat; m_iLon = iLong; return true; } static inline double sphSqr ( double v ) { return v*v; } float ExprGeodist_t::Eval ( const CSphMatch & tMatch ) const { const double R = 6384000; float plat = tMatch.GetAttrFloat ( m_tGeoLatLoc ); float plon = tMatch.GetAttrFloat ( m_tGeoLongLoc ); double dlat = plat - m_fGeoAnchorLat; double dlon = plon - m_fGeoAnchorLong; double a = sphSqr ( sin ( dlat/2 ) ) + cos(plat)*cos(m_fGeoAnchorLat)*sphSqr(sin(dlon/2)); double c = 2*asin ( Min ( 1, sqrt(a) ) ); return (float)(R*c); } void ExprGeodist_t::GetDependencyColumns ( CSphVector & dColumns ) const { dColumns.Add ( m_iLat ); dColumns.Add ( m_iLon ); } ////////////////////////////////////////////////////////////////////////// // PUBLIC FUNCTIONS (FACTORY AND FLATTENING) ////////////////////////////////////////////////////////////////////////// static CSphGrouper * sphCreateGrouperString ( const CSphAttrLocator & tLoc, ESphCollation eCollation ); static bool SetupGroupbySettings ( const CSphQuery * pQuery, const CSphSchema & tSchema, CSphGroupSorterSettings & tSettings, CSphString & sError ) { tSettings.m_tDistinctLoc.m_iBitOffset = -1; if ( pQuery->m_sGroupBy.IsEmpty() ) return true; if ( pQuery->m_eGroupFunc==SPH_GROUPBY_ATTRPAIR ) { sError.SetSprintf ( "SPH_GROUPBY_ATTRPAIR is not supported any more (just group on 'bigint' attribute)" ); return false; } // setup groupby attr int iGroupBy = tSchema.GetAttrIndex ( pQuery->m_sGroupBy.cstr() ); if ( iGroupBy<0 ) { sError.SetSprintf ( "group-by attribute '%s' not found", pQuery->m_sGroupBy.cstr() ); return false; } ESphAttr eType = tSchema.GetAttr ( iGroupBy ).m_eAttrType; CSphAttrLocator tLoc = tSchema.GetAttr ( iGroupBy ).m_tLocator; switch ( pQuery->m_eGroupFunc ) { case SPH_GROUPBY_DAY: tSettings.m_pGrouper = new CSphGrouperDay ( tLoc ); break; case SPH_GROUPBY_WEEK: tSettings.m_pGrouper = new CSphGrouperWeek ( tLoc ); break; case SPH_GROUPBY_MONTH: tSettings.m_pGrouper = new CSphGrouperMonth ( tLoc ); break; case SPH_GROUPBY_YEAR: tSettings.m_pGrouper = new CSphGrouperYear ( tLoc ); break; case SPH_GROUPBY_ATTR: { if ( eType!=SPH_ATTR_STRING ) tSettings.m_pGrouper = new CSphGrouperAttr ( tLoc ); else tSettings.m_pGrouper = sphCreateGrouperString ( tLoc, pQuery->m_eCollation ); } break; default: sError.SetSprintf ( "invalid group-by mode (mode=%d)", pQuery->m_eGroupFunc ); return false; } tSettings.m_bMVA = ( eType==SPH_ATTR_UINT32SET || eType==SPH_ATTR_UINT64SET ); tSettings.m_bMva64 = ( eType==SPH_ATTR_UINT64SET ); // setup distinct attr if ( !pQuery->m_sGroupDistinct.IsEmpty() ) { int iDistinct = tSchema.GetAttrIndex ( pQuery->m_sGroupDistinct.cstr() ); if ( iDistinct<0 ) { sError.SetSprintf ( "group-count-distinct attribute '%s' not found", pQuery->m_sGroupDistinct.cstr() ); return false; } tSettings.m_tDistinctLoc = tSchema.GetAttr ( iDistinct ).m_tLocator; } return true; } static bool FixupDependency ( CSphSchema & tSchema, const int * pAttrs, int iAttrCount ) { assert ( pAttrs ); CSphVector dCur; // add valid attributes to processing list for ( int i=0; i=0 ) dCur.Add 
( pAttrs[i] ); int iInitialAttrs = dCur.GetLength(); // collect columns which affect current expressions for ( int i=0; iSPH_EVAL_PRESORT && tCol.m_pExpr.Ptr()!=NULL ) tCol.m_pExpr->GetDependencyColumns ( dCur ); } // get rid of dupes dCur.Uniq(); // fix up of attributes stages ARRAY_FOREACH ( i, dCur ) { int iAttr = dCur[i]; if ( iAttr<0 ) continue; CSphColumnInfo & tCol = const_cast < CSphColumnInfo & > ( tSchema.GetAttr ( iAttr ) ); if ( tCol.m_eStage==SPH_EVAL_FINAL ) tCol.m_eStage = SPH_EVAL_PRESORT; } // it uses attributes if it has dependencies from other attributes return ( iInitialAttrs>dCur.GetLength() ); } // expression that transform string pool base + offset -> ptr struct ExprSortStringAttrFixup_c : public ISphExpr { const BYTE * m_pStrings; ///< string pool; base for offset of string attributes const CSphAttrLocator m_tLocator; ///< string attribute to fix explicit ExprSortStringAttrFixup_c ( const CSphAttrLocator & tLocator ) : m_pStrings ( NULL ) , m_tLocator ( tLocator ) { } virtual float Eval ( const CSphMatch & ) const { assert ( 0 ); return 0.0f; } virtual int64_t Int64Eval ( const CSphMatch & tMatch ) const { SphAttr_t uOff = tMatch.GetAttr ( m_tLocator ); return (int64_t)( m_pStrings && uOff ? m_pStrings + uOff : NULL ); } virtual void SetStringPool ( const BYTE * pStrings ) { m_pStrings = pStrings; } }; static const char g_sIntAttrPrefix[] = "@int_str2ptr_"; bool sphIsSortStringInternal ( const char * sColumnName ) { assert ( sColumnName ); return ( strncmp ( sColumnName, g_sIntAttrPrefix, sizeof(g_sIntAttrPrefix)-1 )==0 ); } static bool SetupSortStringRemap ( CSphSchema & tSorterSchema, CSphMatchComparatorState & tState, const int * dAttr ) { #ifndef NDEBUG int iColWasCount = tSorterSchema.GetAttrsCount(); #endif bool bUsesAtrrs = false; for ( int i=0; i=0 && dAttr[i]m_tLocator ); } bool sphSortGetStringRemap ( const CSphSchema & tSorterSchema, const CSphSchema & tIndexSchema, CSphVector & dAttrs ) { dAttrs.Resize ( 0 ); for ( int i=0; im_tLocator; tRemap.m_tDst = tDst.m_tLocator; } return ( dAttrs.GetLength()>0 ); } //////////////////// // BINARY COLLATION //////////////////// int CollateBinary ( const BYTE * pStr1, const BYTE * pStr2 ) { int iLen1 = sphUnpackStr ( pStr1, &pStr1 ); int iLen2 = sphUnpackStr ( pStr2, &pStr2 ); int iRes = memcmp ( (const char *)pStr1, (const char *)pStr2, Min ( iLen1, iLen2 ) ); return iRes ? iRes : ( iLen1-iLen2 ); } /////////////////////////////// // LIBC_CI, LIBC_CS COLLATIONS /////////////////////////////// /// libc_ci, wrapper for strcasecmp int CollateLibcCI ( const BYTE * pStr1, const BYTE * pStr2 ) { int iLen1 = sphUnpackStr ( pStr1, &pStr1 ); int iLen2 = sphUnpackStr ( pStr2, &pStr2 ); int iRes = strncasecmp ( (const char *)pStr1, (const char *)pStr2, Min ( iLen1, iLen2 ) ); return iRes ? 
iRes : ( iLen1-iLen2 ); } /// libc_cs, wrapper for strcoll int CollateLibcCS ( const BYTE * pStr1, const BYTE * pStr2 ) { #define COLLATE_STACK_BUFFER 1024 int iLen1 = sphUnpackStr ( pStr1, &pStr1 ); int iLen2 = sphUnpackStr ( pStr2, &pStr2 ); // strcoll wants asciiz strings, so we would have to copy them over // lets use stack buffer for smaller ones, and allocate from heap for bigger ones int iRes = 0; int iLen = Min ( iLen1, iLen2 ); if ( iLen=0x70 && i<=0x7f )*16 ); // 2170..217f, -16 g_dCollWeights_UTF8CI[i+0x900] = (unsigned short)( 0x2400 + i - ( i>=0xd0 && i<=0xe9 )*26 ); // 24d0..24e9, -26 g_dCollWeights_UTF8CI[i+0xa00] = (unsigned short)( 0xff00 + i - ( i>=0x41 && i<=0x5a )*32 ); // ff41..ff5a, -32 } // generate planes table for ( int i=0; i<0x100; i++ ) g_dCollPlanes_UTF8CI[i] = NULL; for ( int i=0; i<0x0b; i++ ) g_dCollPlanes_UTF8CI [ dWeightPlane[i] ] = g_dCollWeights_UTF8CI + 0x100*i; } /// collate a single codepoint static inline int CollateUTF8CI ( int iCode ) { return ( ( iCode>>16 ) || !g_dCollPlanes_UTF8CI [ iCode>>8 ] ) ? iCode : g_dCollPlanes_UTF8CI [ iCode>>8 ][ iCode&0xff ]; } /// utf8_general_ci int CollateUtf8GeneralCI ( const BYTE * pArg1, const BYTE * pArg2 ) { // some const breakage and mess // we MUST NOT actually modify the data // but sphUTF8Decode() calls currently need non-const pointers BYTE * pStr1 = (BYTE*) pArg1; BYTE * pStr2 = (BYTE*) pArg2; int iLen1 = sphUnpackStr ( pStr1, (const BYTE**)&pStr1 ); int iLen2 = sphUnpackStr ( pStr2, (const BYTE**)&pStr2 ); const BYTE * pMax1 = pStr1 + iLen1; const BYTE * pMax2 = pStr2 + iLen2; while ( pStr1=pMax1 && pStr2>=pMax2 ) return 0; return ( pStr1==pMax1 ) ? 1 : -1; } ///////////////////////////// // hashing functions ///////////////////////////// class LibcCSHash_fn { public: mutable CSphTightVector m_dBuf; static const int LOCALE_SAFE_GAP = 16; LibcCSHash_fn() { m_dBuf.Resize ( COLLATE_STACK_BUFFER ); } uint64_t Hash ( const BYTE * pStr, int iLen ) const { assert ( pStr && iLen ); int iCompositeLen = iLen + 1 + (int)( 3.0f * iLen ) + LOCALE_SAFE_GAP; if ( m_dBuf.GetLength() ( tLoc ); else if ( eCollation==SPH_COLLATION_LIBC_CI ) return new CSphGrouperString ( tLoc ); else if ( eCollation==SPH_COLLATION_LIBC_CS ) return new CSphGrouperString ( tLoc ); else return new CSphGrouperString ( tLoc ); } ///////////////////////// // SORTING QUEUE FACTORY ///////////////////////// ISphMatchSorter * sphCreateQueue ( const CSphQuery * pQuery, const CSphSchema & tSchema, CSphString & sError, bool bComputeItems, CSphSchema * pExtra, CSphAttrUpdateEx* pUpdate ) { // prepare for descent ISphMatchSorter * pTop = NULL; CSphMatchComparatorState tStateMatch, tStateGroup; sError = ""; /////////////////////////////////////// // build incoming and outgoing schemas /////////////////////////////////////// // sorter schema // adds computed expressions and groupby stuff on top of the original index schema CSphSchema tSorterSchema = tSchema; // setup overrides, detach them into dynamic part ARRAY_FOREACH ( i, pQuery->m_dOverrides ) { const char * sAttr = pQuery->m_dOverrides[i].m_sAttr.cstr(); int iIndex = tSorterSchema.GetAttrIndex ( sAttr ); if ( iIndex<0 ) { sError.SetSprintf ( "override attribute '%s' not found", sAttr ); return NULL; } CSphColumnInfo tCol = tSorterSchema.GetAttr ( iIndex ); tCol.m_eStage = SPH_EVAL_OVERRIDE; tSorterSchema.AddAttr ( tCol, true ); if ( pExtra ) pExtra->AddAttr ( tCol, true ); tSorterSchema.RemoveAttr ( iIndex ); } // setup @geodist if ( pQuery->m_bGeoAnchor && tSorterSchema.GetAttrIndex ( "@geodist" 
)<0 ) { ExprGeodist_t * pExpr = new ExprGeodist_t (); if ( !pExpr->Setup ( pQuery, tSorterSchema, sError ) ) { pExpr->Release (); return NULL; } CSphColumnInfo tCol ( "@geodist", SPH_ATTR_FLOAT ); tCol.m_pExpr = pExpr; // takes ownership, no need to for explicit pExpr release tCol.m_eStage = SPH_EVAL_PREFILTER; // OPTIMIZE? actual stage depends on usage tSorterSchema.AddAttr ( tCol, true ); if ( pExtra ) pExtra->AddAttr ( tCol, true ); } // setup @expr if ( pQuery->m_eSort==SPH_SORT_EXPR && tSorterSchema.GetAttrIndex ( "@expr" )<0 ) { CSphColumnInfo tCol ( "@expr", SPH_ATTR_FLOAT ); // enforce float type for backwards compatibility (ie. too lazy to fix those tests right now) tCol.m_pExpr = sphExprParse ( pQuery->m_sSortBy.cstr(), tSorterSchema, NULL, NULL, sError, pExtra ); if ( !tCol.m_pExpr ) return NULL; tCol.m_eStage = SPH_EVAL_PRESORT; tSorterSchema.AddAttr ( tCol, true ); } // expressions from select items CSphVector dAggregates; bool bHasCount = false; if ( bComputeItems ) ARRAY_FOREACH ( iItem, pQuery->m_dItems ) { const CSphQueryItem & tItem = pQuery->m_dItems[iItem]; const CSphString & sExpr = tItem.m_sExpr; bool bIsCount = IsCount(sExpr); bHasCount |= bIsCount; if ( bIsCount && sExpr.cstr()[0]!='@' ) { CSphString & sExprW = const_cast < CSphString & > ( sExpr ); sExprW = "@count"; } // for now, just always pass "plain" attrs from index to sorter; they will be filtered on searchd level if ( sExpr=="*" || ( tSchema.GetAttrIndex ( sExpr.cstr() )>=0 && tItem.m_eAggrFunc==SPH_AGGR_NONE ) || IsGroupby(sExpr) || bIsCount ) { continue; } // not an attribute? must be an expression, and must be aliased if ( tItem.m_sAlias.IsEmpty() ) { sError.SetSprintf ( "expression '%s' must be aliased (use 'expr AS alias' syntax)", tItem.m_sExpr.cstr() ); return NULL; } // tricky part // we might be fed with precomputed matches, but it's all or nothing // the incoming match either does not have anything computed, or it has everything if ( tSchema.GetAttrsCount()==tSorterSchema.GetAttrsCount() ) { // so far we had everything, so we might be precomputed, and the alias just might already exist int iSuspect = tSchema.GetAttrIndex ( tItem.m_sAlias.cstr() ); if ( iSuspect>=0 ) { // however, let's ensure that it was an expression if ( tSchema.GetAttr ( iSuspect ).m_pExpr.Ptr()!=NULL ) continue; // otherwise we're not precomputed, *and* have a duplicate name sError.SetSprintf ( "alias '%s' must be unique (conflicts with an index attribute)", tItem.m_sAlias.cstr() ); return NULL; } } else { // we are adding stuff, must not be precomputed, check for both kinds of dupes if ( tSchema.GetAttrIndex ( tItem.m_sAlias.cstr() )>=0 ) { sError.SetSprintf ( "alias '%s' must be unique (conflicts with an index attribute)", tItem.m_sAlias.cstr() ); return NULL; } if ( tSorterSchema.GetAttrIndex ( tItem.m_sAlias.cstr() )>=0 ) { sError.SetSprintf ( "alias '%s' must be unique (conflicts with another alias)", tItem.m_sAlias.cstr() ); return NULL; } } // a new and shiny expression, lets parse bool bUsesWeight; CSphColumnInfo tExprCol ( tItem.m_sAlias.cstr(), SPH_ATTR_NONE ); tExprCol.m_pExpr = sphExprParse ( sExpr.cstr(), tSorterSchema, &tExprCol.m_eAttrType, &bUsesWeight, sError, pExtra ); tExprCol.m_eAggrFunc = tItem.m_eAggrFunc; if ( !tExprCol.m_pExpr ) { sError.SetSprintf ( "parse error: %s", sError.cstr() ); return NULL; } // force AVG() to be computed in floats if ( tExprCol.m_eAggrFunc==SPH_AGGR_AVG ) { tExprCol.m_eAttrType = SPH_ATTR_FLOAT; tExprCol.m_tLocator.m_iBitCount = 32; } // postpone aggregates, add non-aggregates 
if ( tExprCol.m_eAggrFunc==SPH_AGGR_NONE ) { // by default, lets be lazy and compute expressions as late as possible tExprCol.m_eStage = SPH_EVAL_FINAL; // is this expression used in filter? // OPTIMIZE? hash filters and do hash lookups? ARRAY_FOREACH ( i, pQuery->m_dFilters ) if ( pQuery->m_dFilters[i].m_sAttrName==tExprCol.m_sName ) { if ( bUsesWeight ) { tExprCol.m_eStage = SPH_EVAL_PRESORT; // special, weight filter break; } // usual filter tExprCol.m_eStage = SPH_EVAL_PREFILTER; // so we are about to add a filter condition // but it might depend on some preceding columns // lets detect those and move them to prefilter phase too CSphVector dCur; tExprCol.m_pExpr->GetDependencyColumns ( dCur ); ARRAY_FOREACH ( i, dCur ) { CSphColumnInfo & tDep = const_cast < CSphColumnInfo & > ( tSorterSchema.GetAttr ( dCur[i] ) ); if ( tDep.m_eStage>SPH_EVAL_PREFILTER ) tDep.m_eStage = SPH_EVAL_PREFILTER; } break; } // add it! // NOTE, "final" stage might need to be fixed up later // we'll do that when parsing sorting clause tSorterSchema.AddAttr ( tExprCol, true ); } else { tExprCol.m_eStage = SPH_EVAL_PRESORT; // sorter expects computed expression dAggregates.Add ( tExprCol ); } } // expressions wrapped in aggregates must be at the very end of pre-groupby match ARRAY_FOREACH ( i, dAggregates ) { tSorterSchema.AddAttr ( dAggregates[i], true ); if ( pExtra ) pExtra->AddAttr ( dAggregates[i], true ); } //////////////////////////////////////////// // setup groupby settings, create shortcuts //////////////////////////////////////////// CSphGroupSorterSettings tSettings; if ( !SetupGroupbySettings ( pQuery, tSorterSchema, tSettings, sError ) ) return NULL; const bool bGotGroupby = !pQuery->m_sGroupBy.IsEmpty(); // or else, check in SetupGroupbySettings() would already fail const bool bGotDistinct = ( tSettings.m_tDistinctLoc.m_iBitOffset>=0 ); // now lets add @groupby etc if needed if ( bGotGroupby && tSorterSchema.GetAttrIndex ( "@groupby" )<0 ) { CSphColumnInfo tGroupby ( "@groupby", tSettings.m_pGrouper->GetResultType() ); CSphColumnInfo tCount ( "@count", SPH_ATTR_INTEGER ); CSphColumnInfo tDistinct ( "@distinct", SPH_ATTR_INTEGER ); tGroupby.m_eStage = SPH_EVAL_SORTER; tCount.m_eStage = SPH_EVAL_SORTER; tDistinct.m_eStage = SPH_EVAL_SORTER; tSorterSchema.AddAttr ( tGroupby, true ); if ( pExtra ) pExtra->AddAttr ( tGroupby, true ); tSorterSchema.AddAttr ( tCount, true ); if ( pExtra ) pExtra->AddAttr ( tCount, true ); if ( bGotDistinct ) { tSorterSchema.AddAttr ( tDistinct, true ); if ( pExtra ) pExtra->AddAttr ( tDistinct, true ); } } #define LOC_CHECK(_cond,_msg) if (!(_cond)) { sError = "invalid schema: " _msg; return false; } int iGroupby = tSorterSchema.GetAttrIndex ( "@groupby" ); if ( iGroupby>=0 ) { tSettings.m_bDistinct = bGotDistinct; tSettings.m_tLocGroupby = tSorterSchema.GetAttr ( iGroupby ).m_tLocator; LOC_CHECK ( tSettings.m_tLocGroupby.m_bDynamic, "@groupby must be dynamic" ); int iCount = tSorterSchema.GetAttrIndex ( "@count" ); LOC_CHECK ( iCount>=0, "missing @count" ); tSettings.m_tLocCount = tSorterSchema.GetAttr ( iCount ).m_tLocator; LOC_CHECK ( tSettings.m_tLocCount.m_bDynamic, "@count must be dynamic" ); int iDistinct = tSorterSchema.GetAttrIndex ( "@distinct" ); if ( bGotDistinct ) { LOC_CHECK ( iDistinct>=0, "missing @distinct" ); tSettings.m_tLocDistinct = tSorterSchema.GetAttr ( iDistinct ).m_tLocator; LOC_CHECK ( tSettings.m_tLocDistinct.m_bDynamic, "@distinct must be dynamic" ); } else { LOC_CHECK ( iDistinct<=0, "unexpected @distinct" ); } } if ( bHasCount ) { LOC_CHECK ( 
tSorterSchema.GetAttrIndex ( "@count" )>=0, "Count(*) or @count is queried, but not available in the schema" ); } #undef LOC_CHECK //////////////////////////////////// // choose and setup sorting functor //////////////////////////////////// ESphSortFunc eMatchFunc = FUNC_REL_DESC; ESphSortFunc eGroupFunc = FUNC_REL_DESC; bool bUsesAttrs = false; bool bRandomize = false; // matches sorting function if ( pQuery->m_eSort==SPH_SORT_EXTENDED ) { int dAttrs [ CSphMatchComparatorState::MAX_ATTRS ]; ESortClauseParseResult eRes = sphParseSortClause ( pQuery, pQuery->m_sSortBy.cstr(), tSorterSchema, eMatchFunc, tStateMatch, dAttrs, sError, pExtra ); if ( eRes==SORT_CLAUSE_ERROR ) return NULL; if ( eRes==SORT_CLAUSE_RANDOM ) bRandomize = true; bUsesAttrs = FixupDependency ( tSorterSchema, dAttrs, CSphMatchComparatorState::MAX_ATTRS ); if ( !bUsesAttrs ) { for ( int i=0; im_eSort==SPH_SORT_EXPR ) { tStateMatch.m_eKeypart[0] = SPH_KEYPART_INT; tStateMatch.m_tLocator[0] = tSorterSchema.GetAttr ( tSorterSchema.GetAttrIndex ( "@expr" ) ).m_tLocator; tStateMatch.m_eKeypart[1] = SPH_KEYPART_ID; tStateMatch.m_uAttrDesc = 1; eMatchFunc = FUNC_EXPR; bUsesAttrs = true; } else { // check sort-by attribute if ( pQuery->m_eSort!=SPH_SORT_RELEVANCE ) { int iSortAttr = tSorterSchema.GetAttrIndex ( pQuery->m_sSortBy.cstr() ); if ( iSortAttr<0 ) { sError.SetSprintf ( "sort-by attribute '%s' not found", pQuery->m_sSortBy.cstr() ); return NULL; } const CSphColumnInfo & tAttr = tSorterSchema.GetAttr ( iSortAttr ); tStateMatch.m_eKeypart[0] = Attr2Keypart ( tAttr.m_eAttrType ); tStateMatch.m_tLocator[0] = tAttr.m_tLocator; int dAttrs [ CSphMatchComparatorState::MAX_ATTRS ]; dAttrs[0] = iSortAttr; bUsesAttrs |= SetupSortStringRemap ( tSorterSchema, tStateMatch, dAttrs ); } // find out what function to use and whether it needs attributes bUsesAttrs = true; switch ( pQuery->m_eSort ) { case SPH_SORT_ATTR_DESC: eMatchFunc = FUNC_ATTR_DESC; break; case SPH_SORT_ATTR_ASC: eMatchFunc = FUNC_ATTR_ASC; break; case SPH_SORT_TIME_SEGMENTS: eMatchFunc = FUNC_TIMESEGS; break; case SPH_SORT_RELEVANCE: eMatchFunc = FUNC_REL_DESC; bUsesAttrs = false; break; default: sError.SetSprintf ( "unknown sorting mode %d", pQuery->m_eSort ); return NULL; } } // groups sorting function if ( bGotGroupby ) { int dAttrs [ CSphMatchComparatorState::MAX_ATTRS ]; ESortClauseParseResult eRes = sphParseSortClause ( pQuery, pQuery->m_sGroupSortBy.cstr(), tSorterSchema, eGroupFunc, tStateGroup, dAttrs, sError, pExtra ); if ( eRes==SORT_CLAUSE_ERROR || eRes==SORT_CLAUSE_RANDOM ) { if ( eRes==SORT_CLAUSE_RANDOM ) sError.SetSprintf ( "groups can not be sorted by @random" ); return NULL; } int idx = tSorterSchema.GetAttrIndex ( pQuery->m_sGroupBy.cstr() ); if ( pExtra ) pExtra->AddAttr ( tSorterSchema.GetAttr ( idx ), true ); FixupDependency ( tSorterSchema, &idx, 1 ); FixupDependency ( tSorterSchema, dAttrs, CSphMatchComparatorState::MAX_ATTRS ); // GroupSortBy str attributes setup bUsesAttrs |= SetupSortStringRemap ( tSorterSchema, tStateGroup, dAttrs ); } /////////////////// // spawn the queue /////////////////// if ( !bGotGroupby ) { if ( pUpdate ) pTop = new CSphUpdateQueue ( pQuery->m_iMaxMatches, pUpdate ); else switch ( eMatchFunc ) { case FUNC_REL_DESC: pTop = new CSphMatchQueue ( pQuery->m_iMaxMatches, bUsesAttrs ); break; case FUNC_ATTR_DESC:pTop = new CSphMatchQueue ( pQuery->m_iMaxMatches, bUsesAttrs ); break; case FUNC_ATTR_ASC: pTop = new CSphMatchQueue ( pQuery->m_iMaxMatches, bUsesAttrs ); break; case FUNC_TIMESEGS: pTop = new CSphMatchQueue ( 
pQuery->m_iMaxMatches, bUsesAttrs ); break; case FUNC_GENERIC2: pTop = new CSphMatchQueue ( pQuery->m_iMaxMatches, bUsesAttrs ); break; case FUNC_GENERIC3: pTop = new CSphMatchQueue ( pQuery->m_iMaxMatches, bUsesAttrs ); break; case FUNC_GENERIC4: pTop = new CSphMatchQueue ( pQuery->m_iMaxMatches, bUsesAttrs ); break; case FUNC_GENERIC5: pTop = new CSphMatchQueue ( pQuery->m_iMaxMatches, bUsesAttrs ); break; case FUNC_CUSTOM: pTop = new CSphMatchQueue ( pQuery->m_iMaxMatches, bUsesAttrs ); break; case FUNC_EXPR: pTop = new CSphMatchQueue ( pQuery->m_iMaxMatches, bUsesAttrs ); break; default: pTop = NULL; } } else { pTop = sphCreateSorter1st ( eMatchFunc, eGroupFunc, pQuery, tSettings ); } if ( !pTop ) { sError.SetSprintf ( "internal error: unhandled sorting mode (match-sort=%d, group=%d, group-sort=%d)", eMatchFunc, bGotGroupby, eGroupFunc ); return NULL; } switch ( pQuery->m_eCollation ) { case SPH_COLLATION_LIBC_CI: tStateMatch.m_fnStrCmp = CollateLibcCI; tStateGroup.m_fnStrCmp = CollateLibcCI; break; case SPH_COLLATION_LIBC_CS: tStateMatch.m_fnStrCmp = CollateLibcCS; tStateGroup.m_fnStrCmp = CollateLibcCS; break; case SPH_COLLATION_UTF8_GENERAL_CI: tStateMatch.m_fnStrCmp = CollateUtf8GeneralCI; tStateGroup.m_fnStrCmp = CollateUtf8GeneralCI; break; case SPH_COLLATION_BINARY: tStateMatch.m_fnStrCmp = CollateBinary; tStateGroup.m_fnStrCmp = CollateBinary; break; } assert ( pTop ); pTop->SetState ( tStateMatch ); pTop->SetGroupState ( tStateGroup ); pTop->SetSchema ( tSorterSchema ); pTop->m_bRandomize = bRandomize; if ( bRandomize ) sphAutoSrand (); return pTop; } void sphFlattenQueue ( ISphMatchSorter * pQueue, CSphQueryResult * pResult, int iTag ) { if ( pQueue && pQueue->GetLength() ) { int iOffset = pResult->m_dMatches.GetLength (); pResult->m_dMatches.Resize ( iOffset + pQueue->GetLength() ); pQueue->Flatten ( &pResult->m_dMatches[iOffset], iTag ); } } bool sphHasExpressions ( const CSphQuery & tQuery, const CSphSchema & tSchema ) { ARRAY_FOREACH ( i, tQuery.m_dItems ) { const CSphString & sExpr = tQuery.m_dItems[i].m_sExpr; if ( !( sExpr=="*" || ( tSchema.GetAttrIndex ( sExpr.cstr() )>=0 && tQuery.m_dItems[i].m_eAggrFunc==SPH_AGGR_NONE && tQuery.m_dItems[i].m_sAlias.IsEmpty() ) || IsGroupbyMagic(sExpr) ) ) return true; } return false; } // // $Id: sphinxsort.cpp 3087 2012-01-30 23:07:35Z shodan $ // sphinx-2.0.4-release/src/svnxrev.wsf0000644000176700017710000000465511101660531016772 0ustar deogardeogar sphinx-2.0.4-release/src/sphinxql.l0000644000176700017710000001353111636132170016560 0ustar deogardeogar%{ #include "yysphinxql.h" #if USE_WINDOWS #pragma warning(push,1) #endif // warning, lexer generator dependent! // this macro relies on that in flex yytext points to the actual location in the buffer #define YYSTOREBOUNDS \ { \ lvalp->m_iStart = yytext - pParser->m_pBuf; \ lvalp->m_iEnd = lvalp->m_iStart + yyleng; \ pParser->m_pLastTokenStart = yytext; \ } %} DIGIT [0-9] ID [a-zA-Z_][a-zA-Z_0-9]* SPACE [ \t\n\r] %option noyywrap %option nounput %option reentrant %x ccomment %% "/*" { BEGIN(ccomment); } . 
{ } "*/" { BEGIN(INITIAL); } "AND" { YYSTOREBOUNDS; return TOK_AND; } "AS" { YYSTOREBOUNDS; return TOK_AS; } "ASC" { YYSTOREBOUNDS; return TOK_ASC; } "ATTACH" { YYSTOREBOUNDS; return TOK_ATTACH; } "AVG" { YYSTOREBOUNDS; return TOK_AVG; } "BEGIN" { YYSTOREBOUNDS; return TOK_BEGIN; } "BETWEEN" { YYSTOREBOUNDS; return TOK_BETWEEN; } "BY" { YYSTOREBOUNDS; return TOK_BY; } "CALL" { YYSTOREBOUNDS; return TOK_CALL; } "COLLATION" { YYSTOREBOUNDS; return TOK_COLLATION; } "COMMIT" { YYSTOREBOUNDS; return TOK_COMMIT; } "COMMITTED" { YYSTOREBOUNDS; return TOK_COMMITTED; } "COUNT" { YYSTOREBOUNDS; return TOK_COUNT; } "CREATE" { YYSTOREBOUNDS; return TOK_CREATE; } "DELETE" { YYSTOREBOUNDS; return TOK_DELETE; } "DESC" { YYSTOREBOUNDS; return TOK_DESC; } "DESCRIBE" { YYSTOREBOUNDS; return TOK_DESCRIBE; } "DISTINCT" { YYSTOREBOUNDS; return TOK_DISTINCT; } "DIV" { YYSTOREBOUNDS; return TOK_DIV; } "DROP" { YYSTOREBOUNDS; return TOK_DROP; } "FALSE" { YYSTOREBOUNDS; return TOK_FALSE; } "FLOAT" { YYSTOREBOUNDS; return TOK_FLOAT; } "FLUSH" { YYSTOREBOUNDS; return TOK_FLUSH; } "FROM" { YYSTOREBOUNDS; return TOK_FROM; } "FUNCTION" { YYSTOREBOUNDS; return TOK_FUNCTION; } "GLOBAL" { YYSTOREBOUNDS; return TOK_GLOBAL; } "GROUP" { YYSTOREBOUNDS; return TOK_GROUP; } "ID" { YYSTOREBOUNDS; return TOK_ID; } "IN" { YYSTOREBOUNDS; return TOK_IN; } "INDEX" { YYSTOREBOUNDS; return TOK_INDEX; } "INSERT" { YYSTOREBOUNDS; return TOK_INSERT; } "INT" { YYSTOREBOUNDS; return TOK_INT; } "INTO" { YYSTOREBOUNDS; return TOK_INTO; } "ISOLATION" { YYSTOREBOUNDS; return TOK_ISOLATION; } "LEVEL" { YYSTOREBOUNDS; return TOK_LEVEL; } "LIMIT" { YYSTOREBOUNDS; return TOK_LIMIT; } "MATCH" { YYSTOREBOUNDS; return TOK_MATCH; } "MAX" { YYSTOREBOUNDS; return TOK_MAX; } "META" { YYSTOREBOUNDS; return TOK_META; } "MIN" { YYSTOREBOUNDS; return TOK_MIN; } "MOD" { YYSTOREBOUNDS; return TOK_MOD; } "NAMES" { YYSTOREBOUNDS; return TOK_NAMES; } "NOT" { YYSTOREBOUNDS; return TOK_NOT; } "NULL" { YYSTOREBOUNDS; return TOK_NULL; } "OPTION" { YYSTOREBOUNDS; return TOK_OPTION; } "OR" { YYSTOREBOUNDS; return TOK_OR; } "ORDER" { YYSTOREBOUNDS; return TOK_ORDER; } "RAND" { YYSTOREBOUNDS; return TOK_RAND; } "READ" { YYSTOREBOUNDS; return TOK_READ; } "REPEATABLE" { YYSTOREBOUNDS; return TOK_REPEATABLE; } "REPLACE" { YYSTOREBOUNDS; return TOK_REPLACE; } "RETURNS" { YYSTOREBOUNDS; return TOK_RETURNS; } "ROLLBACK" { YYSTOREBOUNDS; return TOK_ROLLBACK; } "RTINDEX" { YYSTOREBOUNDS; return TOK_RTINDEX; } "SELECT" { YYSTOREBOUNDS; return TOK_SELECT; } "SERIALIZABLE" { YYSTOREBOUNDS; return TOK_SERIALIZABLE; } "SET" { YYSTOREBOUNDS; return TOK_SET; } "SESSION" { YYSTOREBOUNDS; return TOK_SESSION; } "SHOW" { YYSTOREBOUNDS; return TOK_SHOW; } "SONAME" { YYSTOREBOUNDS; return TOK_SONAME; } "START" { YYSTOREBOUNDS; return TOK_START; } "STATUS" { YYSTOREBOUNDS; return TOK_STATUS; } "SUM" { YYSTOREBOUNDS; return TOK_SUM; } "TABLES" { YYSTOREBOUNDS; return TOK_TABLES; } "TO" { YYSTOREBOUNDS; return TOK_TO; } "TRANSACTION" { YYSTOREBOUNDS; return TOK_TRANSACTION; } "TRUE" { YYSTOREBOUNDS; return TOK_TRUE; } "UNCOMMITTED" { YYSTOREBOUNDS; return TOK_UNCOMMITTED; } "UPDATE" { YYSTOREBOUNDS; return TOK_UPDATE; } "VALUES" { YYSTOREBOUNDS; return TOK_VALUES; } "VARIABLES" { YYSTOREBOUNDS; return TOK_VARIABLES; } "WARNINGS" { YYSTOREBOUNDS; return TOK_WARNINGS; } "WEIGHT" { YYSTOREBOUNDS; return TOK_WEIGHT; } "WHERE" { YYSTOREBOUNDS; return TOK_WHERE; } "WITHIN" { YYSTOREBOUNDS; return TOK_WITHIN; } "!=" { YYSTOREBOUNDS; return TOK_NE; } "<>" { YYSTOREBOUNDS; return TOK_NE; } "<=" { 
YYSTOREBOUNDS; return TOK_LTE; } ">=" { YYSTOREBOUNDS; return TOK_GTE; } ":=" { YYSTOREBOUNDS; return '='; } '([^'\\]|\\.|\\\\)*' { YYSTOREBOUNDS; SqlUnescape ( lvalp->m_sValue, yytext, yyleng ); return TOK_QUOTED_STRING; } {DIGIT}*\.{DIGIT}* { YYSTOREBOUNDS; lvalp->m_sValue = yytext; lvalp->m_fValue = (float)strtod ( yytext, NULL ); return TOK_CONST_FLOAT; } {DIGIT}+ { YYSTOREBOUNDS; lvalp->m_sValue = yytext; lvalp->m_iValue = strtoll ( yytext, NULL, 10 ); return TOK_CONST_INT; } "@id" { YYSTOREBOUNDS; lvalp->m_sValue = yytext; return TOK_ATIDENT; } "@weight" { YYSTOREBOUNDS; lvalp->m_sValue = yytext; return TOK_ATIDENT; } "@count" { YYSTOREBOUNDS; lvalp->m_sValue = yytext; return TOK_ATIDENT; } {ID} { YYSTOREBOUNDS; lvalp->m_sValue = yytext; return TOK_IDENT; } @{ID} { YYSTOREBOUNDS; lvalp->m_sValue = yytext; return TOK_USERVAR; } @@{ID} { YYSTOREBOUNDS; lvalp->m_sValue = yytext; return TOK_SYSVAR; } `{ID}` { YYSTOREBOUNDS; lvalp->m_iStart++; lvalp->m_iEnd--; lvalp->m_sValue.SetBinary ( yytext+1, strlen(yytext)-2 ); return TOK_IDENT; } {SPACE}+ { ; } . { YYSTOREBOUNDS; return yytext[0]; } %% // warning, lexer generator dependent! // flex inserts trailing zero as needed into the buffer when lexing // but we need that rolled back when doing error reporting from yyerror void yylex_unhold ( yyscan_t yyscanner ) { struct yyguts_t * yyg = (struct yyguts_t*)yyscanner; if ( YY_CURRENT_BUFFER ) { *yyg->yy_c_buf_p = yyg->yy_hold_char; YY_CURRENT_BUFFER_LVALUE->yy_buf_pos = yyg->yy_c_buf_p; YY_CURRENT_BUFFER_LVALUE->yy_n_chars = yyg->yy_n_chars; } } #if USE_WINDOWS #pragma warning(pop) #endif sphinx-2.0.4-release/src/Makefile.am0000644000176700017710000000213011421071150016552 0ustar deogardeogarSRC_SPHINX = sphinx.cpp sphinxexcerpt.cpp sphinxquery.cpp \ sphinxsoundex.cpp sphinxmetaphone.cpp sphinxstemen.cpp sphinxstemru.cpp sphinxstemcz.cpp \ sphinxutils.cpp md5.cpp sphinxstd.cpp sphinxsort.cpp sphinxexpr.cpp sphinxfilter.cpp \ sphinxsearch.cpp sphinxrt.cpp noinst_LIBRARIES = libsphinx.a libsphinx_a_SOURCES = $(SRC_SPHINX) bin_PROGRAMS = indexer searchd search spelldump indextool noinst_PROGRAMS = tests indexer_SOURCES = indexer.cpp searchd_SOURCES = searchd.cpp search_SOURCES = search.cpp spelldump_SOURCES = spelldump.cpp indextool_SOURCES = indextool.cpp tests_SOURCES = tests.cpp BUILT_SOURCES = extract-version .PHONY: extract-version extract-version: /bin/sh svnxrev.sh .. if USE_LIBSTEMMER LIBSTEMMER_LIBS = $(top_srcdir)/libstemmer_c/libstemmer.a AM_CPPFLAGS = -I$(top_srcdir)/libstemmer_c/include -DSYSCONFDIR="\"$(sysconfdir)\"" -DDATADIR="\"$(localstatedir)/data\"" else LIBSTEMMER_LIBS = AM_CPPFLAGS = -DSYSCONFDIR="\"$(sysconfdir)\"" -DDATADIR="\"$(localstatedir)/data\"" endif COMMON_LIBS = libsphinx.a $(LIBSTEMMER_LIBS) $(MYSQL_LIBS) $(PGSQL_LIBS) LDADD = $(COMMON_LIBS) sphinx-2.0.4-release/src/sphinxsoundex.cpp0000644000176700017710000000200611711621267020157 0ustar deogardeogar// // $Id: sphinxsoundex.cpp 3087 2012-01-30 23:07:35Z shodan $ // // // Copyright (c) 2001-2012, Andrew Aksyonoff // Copyright (c) 2008-2012, Sphinx Technologies Inc // All rights reserved // // This program is free software; you can redistribute it and/or modify // it under the terms of the GNU General Public License. 
// You should have
// received a copy of the GPL license along with this program; if you
// did not, you can find it at http://www.gnu.org/
//

#include "sphinx.h"

void stem_soundex ( BYTE * pWord )
{
	static BYTE dLetter2Code[27] = "01230120022455012623010202";

	// check if the word only contains lowercase English letters
	BYTE * p = pWord;
	while ( *p>='a' && *p<='z' )
		p++;
	if ( *p )
		return;

	// do soundex
	p = pWord+1;
	BYTE * pOut = pWord+1;
	while ( *p )
	{
		BYTE c = dLetter2Code [ (*p)-'a' ];
		if ( c!='0' && pOut[-1]!=c )
			*pOut++ = c;
		p++;
	}
	while ( pOut-pWord<4 && pOut

=Word.iLengthPadded ) return false; char * szPos = (char *)Word.pWord + iStart; return !strncmp ( szPos, szStr1, iLength ); } static bool StrAt ( const CurrentWord_t & Word, int iStart, int iLength, const char * szStr1, const char * szStr2 ) { if ( iStart<0 || iStart>=Word.iLengthPadded ) return false; char * szPos = (char *)Word.pWord + iStart; return !strncmp ( szPos, szStr1, iLength ) || !strncmp ( szPos, szStr2, iLength ); } static bool StrAt ( const CurrentWord_t & Word, int iStart, int iLength, const char * szStr1, const char * szStr2, const char * szStr3 ) { if ( iStart<0 || iStart>=Word.iLengthPadded ) return false; char * szPos = (char *)Word.pWord + iStart; return !strncmp ( szPos, szStr1, iLength ) || !strncmp ( szPos, szStr2, iLength ) || !strncmp ( szPos, szStr3, iLength ); } static bool StrAt ( const CurrentWord_t & Word, int iStart, int iLength, const char * szStr1, const char * szStr2, const char * szStr3, const char * szStr4 ) { if ( iStart<0 || iStart>=Word.iLengthPadded ) return false; char * szPos = (char *)Word.pWord + iStart; return !strncmp ( szPos, szStr1, iLength ) || !strncmp ( szPos, szStr2, iLength ) || !strncmp ( szPos, szStr3, iLength ) || !strncmp ( szPos, szStr4, iLength ); } static bool StrAt ( const CurrentWord_t & Word, int iStart, int iLength, const char * szStr1, const char * szStr2, const char * szStr3, const char * szStr4, const char * szStr5 ) { if ( iStart<0 || iStart>=Word.iLengthPadded ) return false; char * szPos = (char *)Word.pWord + iStart; return !strncmp ( szPos, szStr1, iLength ) || !strncmp ( szPos, szStr2, iLength ) || !strncmp ( szPos, szStr3, iLength ) || !strncmp ( szPos, szStr4, iLength ) || !strncmp ( szPos, szStr5, iLength ); } static void MetaphAdd ( BYTE * pPrimary, BYTE * pSecondary, const char * szAddPrimary, const char * szAddSecondary ) { strcat ( (char*)pPrimary, szAddPrimary ); // NOLINT strcat ( (char*)pSecondary, szAddSecondary ); // NOLINT } #define ADD_RET(prim,sec,adv)\ {\ MetaphAdd ( sPrimary, sSecondary, prim, sec );\ return (adv);\ } #define ADD(prim,sec)\ MetaphAdd ( sPrimary, sSecondary, prim, sec ) static int ProcessCode ( int iCode, int iCur, CurrentWord_t & Word, BYTE * sPrimary, BYTE * sSecondary ) { BYTE * pWord = Word.pWord; // codepoints, not bytes int iAdvance = 1; // bytes, not codepoints int iLast = Word.iLength - 1; switch ( iCode ) { case 'A': case 'E': case 'I': case 'O': case 'U': case 'Y': // all init vowels now map to 'A' if ( !iCur ) ADD ( "A", "A" ); break; case 'B': // "-mb", e.g", "dumb", already skipped over... ADD_RET ( "P", "P", ( pWord[iCur+1]=='B' ) ? 2 : 1 ) case 0xC7: case 0xE7: ADD_RET ( "S", "S", 1 ) case 'C': // various germanic if ( iCur > 1 && !IsVowel ( pWord[iCur-2] ) && StrAt ( Word, iCur-1, 3, "ACH" ) && ( pWord[iCur+2]!='I' && ( pWord[iCur+2]!='E' || StrAt ( Word, iCur-2, 6, "BACHER", "MACHER" ) ) ) ) ADD_RET ( "K", "K", 2 ) // special case 'caesar' if ( iCur==0 && StrAt ( Word, 0, 6, "CAESAR" ) ) ADD_RET ( "S", "S", 2 ) // italian 'chianti' if ( StrAt ( Word, iCur, 4, "CHIA" ) ) ADD_RET ( "K", "K", 2 ) if ( StrAt ( Word, iCur, 2, "CH" ) ) { // find 'michael' if ( iCur > 0 && StrAt ( Word, iCur, 4, "CHAE" ) ) ADD_RET ( "K", "X", 2 ) // greek roots e.g. 
'chemistry', 'chorus' if ( iCur==0 && ( StrAt ( Word, iCur+1, 5, "HARAC", "HARIS" ) || StrAt ( Word, iCur+1, 3, "HOR", "HYM", "HIA", "HEM" ) ) && !StrAt ( Word, 0, 5, "CHORE" ) ) ADD_RET ( "K", "K", 2 ) // germanic, greek, or otherwise 'ch' for 'kh' sound if ( ( StrAt ( Word, 0, 4, "VAN ", "VON " ) || StrAt ( Word, 0, 3, "SCH" ) ) // 'architect but not 'arch', 'orchestra', 'orchid' || StrAt ( Word, iCur-2, 6, "ORCHES", "ARCHIT", "ORCHID" ) || StrAt ( Word, iCur+2, 1, "T", "S" ) || ( ( StrAt ( Word, iCur-1, 1, "A", "O", "U", "E" ) || iCur==0 ) // e.g., 'wachtler', 'wechsler', but not 'tichner' && ( StrAt ( Word, iCur+2, 1, "L", "R", "N", "M" ) || StrAt ( Word, iCur+2, 1, "B", "H", "F", "V" ) || StrAt ( Word, iCur+2, 1, "W", " " ) ) ) ) { ADD ( "K", "K" ); } else { if ( iCur > 0 ) { if ( StrAt ( Word, 0, 2, "MC" ) ) // e.g., "McHugh" ADD ( "K", "K" ); else ADD ( "X", "K" ); } else ADD ( "X", "X" ); } return 2; } // e.g, 'czerny' if ( StrAt ( Word, iCur, 2, "CZ" ) && !StrAt ( Word, iCur-2, 4, "WICZ" ) ) ADD_RET ( "S", "X", 2 ) // e.g., 'focaccia' if ( StrAt ( Word, iCur+1, 3, "CIA" ) ) ADD_RET ( "X", "X", 3 ) // double 'C', but not if e.g. 'McClellan' if ( StrAt ( Word, iCur, 2, "CC" ) && !( iCur==1 && pWord[0]=='M' ) ) { // 'bellocchio' but not 'bacchus' if ( StrAt ( Word, iCur+2, 1, "I", "E", "H" ) && !StrAt ( Word, iCur+2, 2, "HU" ) ) { // 'accident', 'accede' 'succeed' if ( ( iCur==1 && pWord[iCur-1]=='A' ) || StrAt ( Word, iCur-1, 5, "UCCEE", "UCCES" ) ) ADD_RET ( "KS", "KS", 2 ) else // 'bacci', 'bertucci', other italian ADD_RET ( "X", "X", 2 ) } else // Pierce's rule ADD_RET ( "K", "K", 2 ) } if ( StrAt ( Word, iCur, 2, "CK", "CG", "CQ" ) ) ADD_RET ( "K", "K", 2 ) if ( StrAt ( Word, iCur, 2, "CI", "CE", "CY" ) ) { // italian vs. english if ( StrAt ( Word, iCur, 3, "CIO", "CIE", "CIA" ) ) ADD_RET ( "S", "X", 2 ) else ADD_RET ( "S", "S", 2 ) } // else ADD ( "K", "K" ); // name sent in 'mac caffrey', 'mac gregor if ( StrAt ( Word, iCur+1, 2, " C", " Q", " G" ) ) return 3; else { if ( StrAt ( Word, iCur+1, 1, "C", "K", "Q" ) && !StrAt ( Word, iCur+1, 2, "CE", "CI" ) ) return 2; } break; case 'D': if ( StrAt ( Word, iCur, 2, "DG" ) ) { if ( StrAt ( Word, iCur+2, 1, "I", "E", "Y" ) ) // e.g. 'edge' ADD_RET ( "J", "J", 3 ) else // e.g. 'edgar' ADD_RET ( "TK", "TK", 2 ) } if ( StrAt ( Word, iCur, 2, "DT", "DD" ) ) ADD_RET ( "T", "T", 2 ) // else ADD_RET ( "T", "T", 1 ) case 'F': ADD_RET ( "F", "F", pWord[iCur+1]=='F' ? 2 : 1 ) case 'G': if ( pWord[iCur+1]=='H' ) { if ( iCur > 0 && !IsVowel ( pWord[iCur-1] ) ) ADD_RET ( "K", "K", 2 ) if ( iCur < 3 ) { // 'ghislane', ghiradelli if ( iCur==0 ) { if ( pWord[iCur+2]=='I' ) ADD_RET ( "J", "J", 2 ) else ADD_RET ( "K", "K", 2 ) } } // Parker's rule (with some further refinements) - e.g., 'hugh' if ( ( iCur > 1 && StrAt ( Word, iCur-2, 1, "B", "H", "D" ) ) || ( iCur > 2 && StrAt ( Word, iCur-3, 1, "B", "H", "D" ) ) // e.g., 'bough' || ( iCur > 3 && StrAt ( Word, iCur-4, 1, "B", "H" ) ) ) // e.g., 'broughton' return 2; else { // e.g., 'laugh', 'McLaughlin', 'cough', 'gough', 'rough', 'tough' if ( iCur > 2 && pWord[iCur-1]=='U' && StrAt ( Word, iCur-3, 1, "C", "G", "L", "R", "T" ) ) ADD ( "F", "F" ); else if ( iCur > 0 && pWord[iCur-1]!='I' ) ADD ( "K", "K" ); return 2; } } if ( pWord[iCur+1]=='N' ) { if ( iCur==1 && IsVowel ( pWord[0] ) && !SlavoGermanic ( pWord ) ) ADD ( "KN", "N" ); else // not e.g. 
'cagney' if ( !StrAt ( Word, iCur+2, 2, "EY" ) && pWord[iCur+1]!='Y' && !SlavoGermanic ( pWord ) ) ADD ( "N", "KN" ); else ADD ( "KN", "KN" ); return 2; } // 'tagliaro' if ( StrAt ( Word, iCur+1, 2, "LI" ) && !SlavoGermanic ( pWord ) ) ADD_RET ( "KL", "L", 2 ) // -ges-,-gep-,-gel-, -gie- at beginning if ( iCur==0 && ( pWord[iCur+1]=='Y' || StrAt ( Word, iCur+1, 2, "ES", "EP", "EB", "EL" ) || StrAt ( Word, iCur+1, 2, "EY", "IB", "IL", "IN" ) || StrAt ( Word, iCur+1, 2, "IE", "EI", "ER" ) ) ) ADD_RET ( "K", "J", 2 ) // -ger-, -gy- if ( ( StrAt ( Word, iCur+1, 2, "ER" ) || pWord[iCur+1]=='Y' ) && !StrAt ( Word, 0, 6, "DANGER", "RANGER", "MANGER" ) && !StrAt ( Word, iCur-1, 1, "E", "I" ) && !StrAt ( Word, iCur-1, 3, "RGY", "OGY" ) ) ADD_RET ( "K", "J", 2 ) // italian e.g, 'biaggi' if ( StrAt ( Word, iCur+1, 1, "E", "I", "Y" ) || StrAt ( Word, iCur-1, 4, "AGGI", "OGGI" ) ) { // obvious germanic if ( StrAt ( Word, 0, 4, "VAN ", "VON " ) || StrAt ( Word, 0, 3, "SCH" ) || StrAt ( Word, iCur+1, 2, "ET" ) ) ADD ( "K", "K" ); else { // always soft if french ending if ( StrAt ( Word, iCur+1, 4, "IER " ) ) ADD ( "J", "J" ); else ADD ( "J", "K" ); } return 2; } ADD_RET ( "K", "K", pWord[iCur+1]=='G' ? 2 : 1 ) case 'H': // only keep if first & before vowel or btw. 2 vowels if ( ( iCur==0 || IsVowel ( pWord[iCur-1] ) ) && IsVowel ( pWord[iCur+1] ) ) ADD_RET ( "H", "H", 2 ) break; // also takes care of 'HH' case 'J': // obvious spanish, 'jose', 'san jacinto' if ( StrAt ( Word, iCur, 4, "JOSE" ) || StrAt ( Word, 0, 4, "SAN " ) ) { if ( ( iCur==0 && pWord[iCur+4]==' ' ) || StrAt ( Word, 0, 4, "SAN " ) ) ADD_RET ( "H", "H", 1 ) else ADD_RET ( "J", "H", 1 ) } if ( iCur==0 && !StrAt ( Word, iCur, 4, "JOSE" ) ) ADD ( "J", "A" ); // Yankelovich/Jankelowicz else { // spanish pron. of e.g. 'bajador' if ( ( iCur>0 && IsVowel ( pWord[iCur-1] ) )&& !SlavoGermanic ( pWord ) && ( pWord[iCur+1]=='A' || pWord[iCur+1]=='O' ) ) ADD ( "J", "H" ); else { if ( iCur==iLast ) ADD ( "J", "" ); else if ( !StrAt ( Word, iCur+1, 1, "L", "T", "K", "S" ) && !StrAt ( Word, iCur+1, 1, "N", "M", "B", "Z" ) && !StrAt ( Word, iCur-1, 1, "S", "K", "L" ) ) ADD ( "J", "J" ); } } if ( pWord[iCur+1]=='J' ) // it could happen! return 2; break; case 'K': ADD_RET ( "K", "K", pWord[iCur+1]=='K' ? 2 : 1 ) case 'L': if ( pWord[iCur+1]=='L' ) { // spanish e.g. 'cabrillo', 'gallegos' if ( ( iCur==iLast-2 && StrAt ( Word, iCur-1, 4, "ILLO", "ILLA", "ALLE" ) ) || ( ( StrAt ( Word, iLast - 1, 2, "AS", "OS" ) || StrAt ( Word, iLast, 1, "A", "O" ) ) && StrAt ( Word, iCur-1, 4, "ALLE" ) ) ) ADD_RET ( "L", "", 2 ) iAdvance = 2; } ADD ( "L", "L" ); break; case 'M': ADD ( "M", "M" ); // 'dumb','thumb' if ( ( StrAt ( Word, iCur-1, 3, "UMB" ) && ( iCur+1==iLast || StrAt ( Word, iCur+2, 2, "ER" ) ) ) || pWord[iCur+1]=='M' ) return 2; break; case 'N': ADD_RET ( "N", "N", pWord[iCur+1]=='N' ? 2 : 1 ) case 0xD1: case 0xF1: ADD_RET ( "N", "N", 1 ) case 'P': if ( pWord[iCur+1]=='H' ) ADD_RET ( "F", "F", 2 ) // also account for "campbell", "raspberry" ADD_RET ( "P", "P", StrAt ( Word, iCur+1, 1, "P", "B" ) ? 2 : 1 ) case 'Q': ADD_RET ( "K", "K", pWord[iCur+1]=='Q' ? 2 : 1 ) case 'R': // french e.g. 'rogier', but exclude 'hochmeier' if ( iCur==iLast && !SlavoGermanic ( pWord ) && StrAt ( Word, iCur-2, 2, "IE" ) && !StrAt ( Word, iCur-4, 2, "ME", "MA" ) ) ADD ( "", "R" ); else ADD ( "R", "R" ); return pWord[iCur+1]=='R' ? 
2 : 1; case 'S': // special cases 'island', 'isle', 'carlisle', 'carlysle' if ( StrAt ( Word, iCur-1, 3, "ISL", "YSL" ) ) return 1; // special case 'sugar-' if ( iCur==0 && StrAt ( Word, iCur, 5, "SUGAR" ) ) ADD_RET ( "X", "S", 1 ) if ( StrAt ( Word, iCur, 2, "SH" ) ) { // germanic if ( StrAt ( Word, iCur+1, 4, "HEIM", "HOEK", "HOLM", "HOLZ" ) ) ADD_RET ( "S", "S", 2 ) else ADD_RET ( "X", "X", 2 ) } // italian & armenian if ( StrAt ( Word, iCur, 3, "SIO", "SIA" ) || StrAt ( Word, iCur, 4, "SIAN" ) ) { if ( !SlavoGermanic ( pWord ) ) ADD_RET ( "S", "X", 3 ) else ADD_RET ( "S", "S", 3 ) } // german & anglicisations, e.g. 'smith' match 'schmidt', 'snider' match 'schneider' // also, -sz- in slavic language altho in hungarian it is pronounced 's' if ( ( iCur==0 && StrAt ( Word, iCur+1, 1, "M", "N", "L", "W" ) ) || StrAt ( Word, iCur+1, 1, "Z" ) ) ADD_RET ( "S", "X", StrAt ( Word, iCur+1, 1, "Z" ) ? 2 : 1 ) if ( StrAt ( Word, iCur, 2, "SC" ) ) { // Schlesinger's rule if ( pWord[iCur+2]=='H' ) { if ( StrAt ( Word, iCur+3, 2, "OO", "ER", "EN", "UY" ) || StrAt ( Word, iCur+3, 2, "ED", "EM" ) ) // dutch origin, e.g. 'school', 'schooner' { // 'schermerhorn', 'schenker' if ( StrAt ( Word, iCur+3, 2, "ER", "EN" ) ) ADD_RET ( "X", "SK", 3 ) else ADD_RET ( "SK", "SK", 3 ) } else { if ( iCur==0 && !IsVowel ( pWord[3] ) && pWord[3]!='W' ) ADD_RET ( "X", "S", 3 ) else ADD_RET ( "X", "X", 3 ) } } if ( StrAt ( Word, iCur+2, 1, "I", "E", "Y" ) ) ADD_RET ( "S", "S", 3 ) // else ADD_RET ( "SK", "SK", 3 ) } // french e.g. 'resnais', 'artois' if ( iCur==iLast && StrAt ( Word, iCur-2, 2, "AI", "OI" ) ) ADD ( "", "S" ); else ADD ( "S", "S" ); return StrAt ( Word, iCur+1, 1, "S", "Z" ) ? 2 : 1; case 'T': if ( StrAt ( Word, iCur, 4, "TION" ) ) ADD_RET ( "X", "X", 3 ) if ( StrAt ( Word, iCur, 3, "TIA", "TCH" ) ) ADD_RET ( "X", "X", 3 ) if ( StrAt ( Word, iCur, 2, "TH" ) || StrAt ( Word, iCur, 3, "TTH" ) ) { // special case 'thomas', 'thames' or germanic if ( StrAt ( Word, iCur+2, 2, "OM", "AM" ) || StrAt ( Word, 0, 4, "VAN ", "VON " ) || StrAt ( Word, 0, 3, "SCH" ) ) ADD_RET ( "T", "T", 2 ) else ADD_RET ( "0", "T", 2 ) // yes, zero } ADD_RET ( "T", "T", StrAt ( Word, iCur+1, 1, "T", "D" ) ? 2 : 1 ) case 'V': ADD_RET ( "F", "F", pWord[iCur+1]=='V' ? 2 : 1 ) case 'W': // can also be in middle of word if ( StrAt ( Word, iCur, 2, "WR" ) ) ADD_RET ( "R", "R", 2 ) if ( iCur==0 && ( IsVowel ( pWord[iCur+1] ) || StrAt ( Word, iCur, 2, "WH" ) ) ) { // Wasserman should match Vasserman if ( IsVowel ( pWord[iCur+1] ) ) ADD ( "A", "F" ); else // need Uomo to match Womo ADD ( "A", "A" ); } // Arnow should match Arnoff if ( ( iCur==iLast && iCur > 0 && IsVowel ( pWord[iCur-1] ) ) || StrAt ( Word, iCur-1, 5, "EWSKI", "EWSKY", "OWSKI", "OWSKY" ) || StrAt ( Word, 0, 3, "SCH" ) ) ADD_RET ( "", "F", 1 ) // polish e.g. 'filipowicz' if ( StrAt ( Word, iCur, 4, "WICZ", "WITZ" ) ) ADD_RET ( "TS", "FX", 4 ) break; case 'X': // french e.g. breaux if ( !( iCur==iLast && ( StrAt ( Word, iCur-3, 3, "IAU", "EAU" ) || StrAt ( Word, iCur-2, 2, "AU", "OU" ) ) ) ) ADD ( "KS", "KS" ); return ( pWord[iCur+1]=='C' || pWord[iCur+1]=='X' ) ? 2 : 1; case 'Z': // chinese pinyin e.g. 'zhao' if ( pWord[iCur+1]=='H' ) ADD_RET ( "J", "J", 2 ) else if ( StrAt ( Word, iCur+1, 2, "ZO", "ZI", "ZA" ) || ( SlavoGermanic ( pWord ) && ( iCur > 0 && pWord[iCur-1]!='T' ) ) ) MetaphAdd ( sPrimary, sSecondary, "S", "TS" ); else MetaphAdd ( sPrimary, sSecondary, "S", "S" ); return pWord[iCur+1]=='Z' ? 
2 : 1; } return iAdvance; } void stem_dmetaphone ( BYTE * pWord, bool bUTF8 ) { BYTE sOriginal [3*SPH_MAX_WORD_LEN+3]; BYTE sPrimary [3*SPH_MAX_WORD_LEN+3]; BYTE sSecondary [ 3*SPH_MAX_WORD_LEN+3 ]; int iLength = strlen ( (const char *)pWord ); memcpy ( sOriginal, pWord, iLength + 1 ); sPrimary[0] = '\0'; sSecondary[0] = '\0'; BYTE * pStart = sOriginal; while ( *pStart ) { if ( *pStart>='a' && *pStart<='z' ) *pStart = (BYTE) toupper ( *pStart ); ++pStart; } strcat ( (char *) sOriginal, " " ); // NOLINT int iAdvance = 0; CurrentWord_t Word; Word.pWord = sOriginal; Word.iLength = iLength; Word.iLengthPadded = strlen ( (const char *)sOriginal ); // skip these when at start of word if ( StrAt ( Word, 0, 2, "GN", "KN", "PN", "WR", "PS" ) ) iAdvance = 1; // Initial 'X' is pronounced 'Z' e.g. 'Xavier' if ( sOriginal[0]=='X' ) { ADD ( "S", "S" ); // 'Z' maps to 'S' iAdvance = 1; } BYTE * pPtr = sOriginal; BYTE * pLastPtr = sOriginal; int iCode = -1; if ( bUTF8 ) iCode = sphUTF8Decode ( pPtr ); while ( iCode!=0 ) { int iCur = ( bUTF8 ? pLastPtr : pPtr ) - sOriginal; if ( iCur>=iLength ) break; if ( bUTF8 ) { for ( int i = 0; i < iAdvance; ++i ) { pLastPtr = pPtr; iCode = sphUTF8Decode ( pPtr ); } } else { pPtr += iAdvance; iCode = *pPtr; } if ( iCode<=0 ) break; // unknown code: don't copy, just return if ( bUTF8 && iCode>128 && iCode!=0xC7 && iCode!=0xE7 && iCode!=0xD1 && iCode!=0xF1 ) return; iAdvance = ProcessCode ( iCode, ( bUTF8 ? pLastPtr : pPtr ) - sOriginal, Word, sPrimary, sSecondary ); } if ( !pWord[0] || sPrimary [0] ) strcpy ( (char*)pWord, (char*)sPrimary ); // NOLINT // TODO: handle secondary too } // // $Id: sphinxmetaphone.cpp 3087 2012-01-30 23:07:35Z shodan $ // sphinx-2.0.4-release/src/sphinxquery.cpp0000644000176700017710000013026311720727645017655 0ustar deogardeogar// // $Id: sphinxquery.cpp 3114 2012-02-21 14:52:21Z klirichek $ // // // Copyright (c) 2001-2012, Andrew Aksyonoff // Copyright (c) 2008-2012, Sphinx Technologies Inc // All rights reserved // // This program is free software; you can redistribute it and/or modify // it under the terms of the GNU General Public License. You should have // received a copy of the GPL license along with this program; if you // did not, you can find it at http://www.gnu.org/ // #include "sphinx.h" #include "sphinxquery.h" #include "sphinxutils.h" #include ////////////////////////////////////////////////////////////////////////// // EXTENDED PARSER RELOADED ////////////////////////////////////////////////////////////////////////// #include "yysphinxquery.h" ////////////////////////////////////////////////////////////////////////// class XQParser_t { public: XQParser_t (); ~XQParser_t () {} public: bool Parse ( XQQuery_t & tQuery, const char * sQuery, const ISphTokenizer * pTokenizer, const CSphSchema * pSchema, CSphDict * pDict, int iStopwordStep ); bool Error ( const char * sTemplate, ... ) __attribute__ ( ( format ( printf, 2, 3 ) ) ); void Warning ( const char * sTemplate, ... 
) __attribute__ ( ( format ( printf, 2, 3 ) ) ); bool AddField ( CSphSmallBitvec & dFields, const char * szField, int iLen ); bool ParseFields ( CSphSmallBitvec & uFields, int & iMaxFieldPos ); int ParseZone ( const char * pZone ); bool IsSpecial ( char c ); int GetToken ( YYSTYPE * lvalp ); void AddQuery ( XQNode_t * pNode ); XQNode_t * AddKeyword ( const char * sKeyword, DWORD uStar = STAR_NONE ); XQNode_t * AddKeyword ( XQNode_t * pLeft, XQNode_t * pRight ); XQNode_t * AddOp ( XQOperator_e eOp, XQNode_t * pLeft, XQNode_t * pRight, int iOpArg=0 ); void Cleanup (); XQNode_t * SweepNulls ( XQNode_t * pNode ); bool FixupNots ( XQNode_t * pNode ); inline void SetFieldSpec ( const CSphSmallBitvec& uMask, int iMaxPos ) { m_dStateSpec.SetFieldSpec ( uMask, iMaxPos ); } inline void SetZoneVec ( int iZoneVec ) { m_dStateSpec.SetZoneSpec ( m_dZoneVecs[iZoneVec] ); } public: const CSphVector & GetZoneVec ( int iZoneVec ) const { return m_dZoneVecs[iZoneVec]; } public: XQQuery_t * m_pParsed; BYTE * m_sQuery; int m_iQueryLen; const char * m_pLastTokenStart; const CSphSchema * m_pSchema; ISphTokenizer * m_pTokenizer; CSphDict * m_pDict; const char * m_pCur; CSphVector m_dSpawned; XQNode_t * m_pRoot; bool m_bStopOnInvalid; int m_iAtomPos; int m_iPendingNulls; int m_iPendingType; YYSTYPE m_tPendingToken; bool m_bWasBlended; bool m_bEmpty; bool m_bQuoted; bool m_bEmptyStopword; CSphVector m_dIntTokens; CSphVector < CSphVector > m_dZoneVecs; XQLimitSpec_t m_dStateSpec; }; ////////////////////////////////////////////////////////////////////////// int yylex ( YYSTYPE * lvalp, XQParser_t * pParser ) { return pParser->GetToken ( lvalp ); } void yyerror ( XQParser_t * pParser, const char * sMessage ) { if ( pParser->m_pParsed->m_sParseError.IsEmpty() ) pParser->m_pParsed->m_sParseError.SetSprintf ( "%s near '%s'", sMessage, pParser->m_pLastTokenStart ); } #include "yysphinxquery.c" ////////////////////////////////////////////////////////////////////////// void XQLimitSpec_t::SetFieldSpec ( const CSphSmallBitvec& uMask, int iMaxPos ) { m_bFieldSpec = true; m_dFieldMask = uMask; m_iFieldMaxPos = iMaxPos; } void XQNode_t::SetFieldSpec ( const CSphSmallBitvec& uMask, int iMaxPos ) { // set it, if we do not yet have one if ( !m_dSpec.m_bFieldSpec ) m_dSpec.SetFieldSpec ( uMask, iMaxPos ); // some of the children might not yet have a spec, even if the node itself has // eg. 'hello @title world' (whole node has '@title' spec but 'hello' node does not have any!) 
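	// hence the unconditional recursion below: children that already carry their own field spec
	// keep it (the guard at the top of this method), the rest inherit the parent mask and max position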
ARRAY_FOREACH ( i, m_dChildren ) m_dChildren[i]->SetFieldSpec ( uMask, iMaxPos ); } void XQLimitSpec_t::SetZoneSpec ( const CSphVector & dZones ) { m_dZones = dZones; } void XQNode_t::SetZoneSpec ( const CSphVector & dZones ) { // set it, if we do not yet have one if ( !m_dSpec.m_dZones.GetLength() ) m_dSpec.SetZoneSpec ( dZones ); // some of the children might not yet have a spec, even if the node itself has ARRAY_FOREACH ( i, m_dChildren ) m_dChildren[i]->SetZoneSpec ( dZones ); } void XQNode_t::CopySpecs ( const XQNode_t * pSpecs ) { if ( !pSpecs ) return; if ( !m_dSpec.m_bFieldSpec ) m_dSpec.SetFieldSpec ( pSpecs->m_dSpec.m_dFieldMask, pSpecs->m_dSpec.m_iFieldMaxPos ); if ( !m_dSpec.m_dZones.GetLength() ) m_dSpec.SetZoneSpec ( pSpecs->m_dSpec.m_dZones ); } void XQNode_t::ClearFieldMask () { m_dSpec.m_dFieldMask.Set(); ARRAY_FOREACH ( i, m_dChildren ) m_dChildren[i]->ClearFieldMask(); } bool XQNode_t::IsEqualTo ( const XQNode_t * pNode ) { if ( !pNode || pNode->GetHash()!=GetHash() || pNode->GetOp()!=GetOp() ) return false; if ( m_dWords.GetLength() ) { // two plain nodes. let's compare the keywords if ( pNode->m_dWords.GetLength()!=m_dWords.GetLength() ) return false; if ( !m_dWords.GetLength() ) return true; SmallStringHash_T hSortedWords; ARRAY_FOREACH ( i, pNode->m_dWords ) hSortedWords.Add ( 0, pNode->m_dWords[i].m_sWord ); ARRAY_FOREACH ( i, m_dWords ) if ( !hSortedWords.Exists ( m_dWords[i].m_sWord ) ) return false; return true; } // two non-plain nodes. let's compare the children if ( pNode->m_dChildren.GetLength()!=m_dChildren.GetLength() ) return false; if ( !m_dChildren.GetLength() ) return true; ARRAY_FOREACH ( i, m_dChildren ) if ( !pNode->m_dChildren[i]->IsEqualTo ( m_dChildren[i] ) ) return false; return true; } uint64_t XQNode_t::GetHash() const { if ( m_iMagicHash ) return m_iMagicHash; XQOperator_e dZeroOp[2]; dZeroOp[0] = m_eOp; dZeroOp[1] = (XQOperator_e) 0; ARRAY_FOREACH ( i, m_dWords ) m_iMagicHash = 100 + ( m_iMagicHash ^ sphFNV64 ( (const BYTE*)m_dWords[i].m_sWord.cstr() ) ); ///< +100 to make it non-transitive ARRAY_FOREACH ( j, m_dChildren ) m_iMagicHash = 100 + ( m_iMagicHash ^ m_dChildren[j]->GetHash() ); ///< +100 to make it non-transitive m_iMagicHash += 1000000; ///< to immerse difference between parents and children m_iMagicHash ^= sphFNV64 ( (const BYTE*)dZeroOp ); return m_iMagicHash; } void XQNode_t::SetOp ( XQOperator_e eOp, XQNode_t * pArg1, XQNode_t * pArg2 ) { m_eOp = eOp; m_dChildren.Reset(); if ( pArg1 ) m_dChildren.Add ( pArg1 ); if ( pArg2 ) m_dChildren.Add ( pArg2 ); } ////////////////////////////////////////////////////////////////////////// XQParser_t::XQParser_t () : m_pParsed ( NULL ) , m_pLastTokenStart ( NULL ) , m_pRoot ( NULL ) , m_bStopOnInvalid ( true ) , m_bWasBlended ( false ) , m_bQuoted ( false ) , m_bEmptyStopword ( false ) { } /// cleanup spawned nodes (for bailing out on errors) void XQParser_t::Cleanup () { m_dSpawned.Uniq(); // FIXME! should eliminate this by testing ARRAY_FOREACH ( i, m_dSpawned ) { m_dSpawned[i]->m_dChildren.Reset (); SafeDelete ( m_dSpawned[i] ); } m_dSpawned.Reset (); } bool XQParser_t::Error ( const char * sTemplate, ... ) { assert ( m_pParsed ); char sBuf[256]; const char * sPrefix = "query error: "; int iPrefix = strlen(sPrefix); memcpy ( sBuf, sPrefix, iPrefix ); va_list ap; va_start ( ap, sTemplate ); vsnprintf ( sBuf+iPrefix, sizeof(sBuf)-iPrefix, sTemplate, ap ); va_end ( ap ); m_pParsed->m_sParseError = sBuf; return false; } void XQParser_t::Warning ( const char * sTemplate, ... 
) { assert ( m_pParsed ); char sBuf[256]; const char * sPrefix = "query warning: "; int iPrefix = strlen(sPrefix); memcpy ( sBuf, sPrefix, iPrefix ); va_list ap; va_start ( ap, sTemplate ); vsnprintf ( sBuf+iPrefix, sizeof(sBuf)-iPrefix, sTemplate, ap ); va_end ( ap ); m_pParsed->m_sParseWarning = sBuf; } /// my special chars bool XQParser_t::IsSpecial ( char c ) { return c=='(' || c==')' || c=='|' || c=='-' || c=='!' || c=='@' || c=='~' || c=='"' || c=='/'; } /// lookup field and add it into mask bool XQParser_t::AddField ( CSphSmallBitvec & dFields, const char * szField, int iLen ) { CSphString sField; sField.SetBinary ( szField, iLen ); int iField = m_pSchema->GetFieldIndex ( sField.cstr () ); if ( iField < 0 ) { if ( m_bStopOnInvalid ) return Error ( "no field '%s' found in schema", sField.cstr () ); else Warning ( "no field '%s' found in schema", sField.cstr () ); } else { if ( iField>=SPH_MAX_FIELDS ) return Error ( " max %d fields allowed", SPH_MAX_FIELDS ); dFields.Set(iField); } return true; } /// parse fields block bool XQParser_t::ParseFields ( CSphSmallBitvec & dFields, int & iMaxFieldPos ) { dFields.Unset(); iMaxFieldPos = 0; const char * pPtr = m_pTokenizer->GetBufferPtr (); const char * pLastPtr = m_pTokenizer->GetBufferEnd (); if ( pPtr==pLastPtr ) return true; // silently ignore trailing field operator bool bNegate = false; bool bBlock = false; // handle special modifiers if ( *pPtr=='!' ) { // handle @! and @!( bNegate = true; pPtr++; if ( *pPtr=='(' ) { bBlock = true; pPtr++; } } else if ( *pPtr=='*' ) { // handle @* dFields.Set(); m_pTokenizer->SetBufferPtr ( pPtr+1 ); return true; } else if ( *pPtr=='(' ) { // handle @( bBlock = true; pPtr++; } // handle invalid chars if ( !sphIsAlpha(*pPtr) ) { m_pTokenizer->SetBufferPtr ( pPtr ); // ignore and re-parse (FIXME! maybe warn?) return true; } assert ( sphIsAlpha(*pPtr) ); // i think i'm paranoid // handle field specification if ( !bBlock ) { // handle standalone field specification const char * pFieldStart = pPtr; while ( sphIsAlpha(*pPtr) && pPtr0 ); if ( !AddField ( dFields, pFieldStart, pPtr-pFieldStart ) ) return false; m_pTokenizer->SetBufferPtr ( pPtr ); if ( bNegate && ( !dFields.TestAll() ) ) dFields.Negate(); } else { // handle fields block specification assert ( sphIsAlpha(*pPtr) && bBlock ); // and complicated bool bOK = false; const char * pFieldStart = NULL; while ( pPtrSetBufferPtr ( ++pPtr ); if ( bNegate && ( !dFields.TestAll() ) ) dFields.Negate(); bOK = true; break; } else { return Error ( "invalid character '%c' in field block operator", *pPtr ); } } if ( !bOK ) return Error ( "missing closing ')' in field block operator" ); } // handle optional position range modifier if ( pPtr[0]=='[' && isdigit ( pPtr[1] ) ) { // skip '[' and digits const char * p = pPtr+1; while ( *p && isdigit(*p) ) p++; // check that the range ends with ']' (FIXME! maybe report an error if it does not?) if ( *p!=']' ) return true; // fetch my value iMaxFieldPos = strtoul ( pPtr+1, NULL, 10 ); m_pTokenizer->SetBufferPtr ( p+1 ); } // well done return true; } /// helper find-or-add (make it generic and move to sphinxstd?) 
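/// returns the position of sZone inside pQuery->m_dZones, appending the zone name first
/// if it has not been registered yet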
static int GetZoneIndex ( XQQuery_t * pQuery, const CSphString & sZone ) { ARRAY_FOREACH ( i, pQuery->m_dZones ) if ( pQuery->m_dZones[i]==sZone ) return i; pQuery->m_dZones.Add ( sZone ); return pQuery->m_dZones.GetLength()-1; } /// parse zone int XQParser_t::ParseZone ( const char * pZone ) { const char * p = pZone; // case one, just a single zone name if ( sphIsAlpha ( *pZone ) ) { // find zone name while ( sphIsAlpha(*p) ) p++; m_pTokenizer->SetBufferPtr ( p ); // extract and lowercase it CSphString sZone; sZone.SetBinary ( pZone, p-pZone ); sZone.ToLower(); // register it in zones list int iZone = GetZoneIndex ( m_pParsed, sZone ); // create new 1-zone vector m_dZoneVecs.Add().Add ( iZone ); return m_dZoneVecs.GetLength()-1; } // case two, zone block // it must follow strict (name1,name2,...) syntax if ( *pZone=='(' ) { // create new zone vector CSphVector & dZones = m_dZoneVecs.Add(); p = ++pZone; // scan names for ( ;; ) { // syntax error, name expected! if ( !sphIsAlpha(*p) ) { Error ( "unexpected character '%c' in zone block operator", *p ); return -1; } // scan next name while ( sphIsAlpha(*p) ) p++; // extract and lowercase it CSphString sZone; sZone.SetBinary ( pZone, p-pZone ); sZone.ToLower(); // register it in zones list dZones.Add ( GetZoneIndex ( m_pParsed, sZone ) ); // must be either followed by comma, or closing paren // everything else will cause syntax error if ( *p==')' ) { m_pTokenizer->SetBufferPtr ( p+1 ); break; } if ( *p==',' ) pZone = ++p; } return m_dZoneVecs.GetLength()-1; } // unhandled case Error ( "internal error, unhandled case in ParseZone()" ); return -1; } /// a lexer of my own int XQParser_t::GetToken ( YYSTYPE * lvalp ) { // what, noone's pending for a bending?! if ( !m_iPendingType ) for ( ;; ) { assert ( m_iPendingNulls==0 ); if ( m_bWasBlended ) m_iAtomPos += m_pTokenizer->SkipBlended(); // tricky stuff // we need to manually check for numbers in certain states (currently, just after proximity or quorum operator) // required because if 0-9 are not in charset_table, or min_word_len is too high, // the tokenizer will *not* return the number as a token! 
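		// so the code below scans the raw buffer itself: a short run of digits followed by
		// whitespace or a special character gets turned into a TOK_INT by hand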
m_pLastTokenStart = m_pTokenizer->GetBufferPtr (); const char * sEnd = m_pTokenizer->GetBufferEnd (); const char * p = m_pLastTokenStart; while ( psToken && p-sTokenGetToken() && m_pTokenizer->TokenIsBlended() ) // number with blended should be tokenized as usual { m_pTokenizer->SkipBlended(); m_pTokenizer->SetBufferPtr ( m_pLastTokenStart ); } else { // got not a very long number followed by a whitespace or special, handle it char sNumberBuf[NUMBER_BUF_LEN]; int iNumberLen = Min ( (int)sizeof(sNumberBuf)-1, int(p-sToken) ); memcpy ( sNumberBuf, sToken, iNumberLen ); sNumberBuf[iNumberLen] = '\0'; m_tPendingToken.tInt.iValue = atoi ( sNumberBuf ); // check if it can be used as a keyword too m_pTokenizer->SetBuffer ( (BYTE*)sNumberBuf, iNumberLen ); sToken = (const char*) m_pTokenizer->GetToken(); m_pTokenizer->SetBuffer ( m_sQuery, m_iQueryLen ); m_pTokenizer->SetBufferPtr ( p ); m_tPendingToken.tInt.iStrIndex = -1; if ( sToken ) { m_dIntTokens.Add ( sToken ); if ( m_pDict->GetWordID ( (BYTE*)sToken ) ) m_tPendingToken.tInt.iStrIndex = m_dIntTokens.GetLength()-1; else m_dIntTokens.Pop(); m_iAtomPos++; } m_iPendingNulls = 0; m_iPendingType = TOK_INT; break; } } // not a number, long number, or number not followed by a whitespace, so fallback to regular tokenizing sToken = (const char *) m_pTokenizer->GetToken (); if ( !sToken ) { m_iPendingNulls = m_pTokenizer->GetOvershortCount (); if ( !m_iPendingNulls ) return 0; m_iPendingNulls = 0; lvalp->pNode = AddKeyword ( NULL ); return TOK_KEYWORD; } // now let's do some token post-processing m_bWasBlended = m_pTokenizer->TokenIsBlended(); m_bEmpty = false; m_iPendingNulls = m_pTokenizer->GetOvershortCount (); m_iAtomPos += 1+m_iPendingNulls; // handle NEAR (must be case-sensitive, and immediately followed by slash and int) if ( sToken && p && !m_pTokenizer->m_bPhrase && strncmp ( p, "NEAR/", 5 )==0 && isdigit(p[5]) ) { // extract that int int iVal = 0; for ( p=p+5; isdigit(*p); p++ ) iVal = iVal*10 + (*p) - '0'; // FIXME! check for overflow? 
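		// the parsed distance is carried along with the queued token: TOK_NEAR goes into
		// m_iPendingType and the integer value into m_tPendingToken.tInt.iValue just below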
m_pTokenizer->SetBufferPtr ( p ); // we just lexed our next token m_iPendingType = TOK_NEAR; m_tPendingToken.tInt.iValue = iVal; m_tPendingToken.tInt.iStrIndex = -1; m_iAtomPos -= 1; // skip NEAR break; } // handle SENTENCE if ( sToken && p && !m_pTokenizer->m_bPhrase && !strcasecmp ( sToken, "sentence" ) && !strncmp ( p, "SENTENCE", 8 ) ) { // we just lexed our next token m_iPendingType = TOK_SENTENCE; m_iAtomPos -= 1; break; } // handle PARAGRAPH if ( sToken && p && !m_pTokenizer->m_bPhrase && !strcasecmp ( sToken, "paragraph" ) && !strncmp ( p, "PARAGRAPH", 9 ) ) { // we just lexed our next token m_iPendingType = TOK_PARAGRAPH; m_iAtomPos -= 1; break; } // handle ZONE if ( sToken && p && !m_pTokenizer->m_bPhrase && !strncmp ( p, "ZONE:", 5 ) && ( sphIsAlpha(p[5]) || p[5]=='(' ) ) { // ParseZone() will update tokenizer buffer ptr as needed int iVec = ParseZone ( p+5 ); if ( iVec<0 ) return -1; // we just lexed our next token m_iPendingType = TOK_ZONE; m_tPendingToken.iZoneVec = iVec; m_iAtomPos -= 1; break; } // handle specials if ( m_pTokenizer->WasTokenSpecial() ) { // specials must not affect pos m_iAtomPos--; // some specials are especially special if ( sToken[0]=='@' ) { // parse fields operator if ( !ParseFields ( m_tPendingToken.tFieldLimit.dMask, m_tPendingToken.tFieldLimit.iMaxPos ) ) return -1; if ( m_pSchema->m_dFields.GetLength()!=SPH_MAX_FIELDS ) m_tPendingToken.tFieldLimit.dMask.LimitBits ( m_pSchema->m_dFields.GetLength() ); m_iPendingType = TOK_FIELDLIMIT; break; } else if ( sToken[0]=='<' ) { if ( *m_pTokenizer->GetBufferPtr()=='<' ) { // got "<<", aka operator BEFORE m_iPendingType = TOK_BEFORE; break; } else { // got stray '<', ignore continue; } } else { // all the other specials are passed to parser verbatim if ( sToken[0]=='"' ) m_bQuoted = !m_bQuoted; m_iPendingType = sToken[0]=='!' ? '-' : sToken[0]; m_pTokenizer->m_bPhrase = m_bQuoted; break; } } // check for stopword, and create that node // temp buffer is required, because GetWordID() might expand (!) the keyword in-place const int MAX_BYTES = 3*SPH_MAX_WORD_LEN + 16; BYTE sTmp [ MAX_BYTES ]; strncpy ( (char*)sTmp, sToken, MAX_BYTES ); sTmp[MAX_BYTES-1] = '\0'; if ( !m_pDict->GetWordID ( sTmp ) ) { sToken = NULL; // stopwords with step=0 must not affect pos if ( m_bEmptyStopword ) m_iAtomPos--; } // information about stars is lost after this point, so was have to save it now DWORD uStarPosition = STAR_NONE; uStarPosition |= *m_pTokenizer->GetTokenEnd()=='*' ? STAR_BACK : 0; uStarPosition |= ( m_pTokenizer->GetTokenStart()!=m_pTokenizer->GetBufferPtr() ) && m_pTokenizer->GetTokenStart()[-1]=='*' ? STAR_FRONT : 0; m_tPendingToken.pNode = AddKeyword ( sToken, uStarPosition ); m_iPendingType = TOK_KEYWORD; if ( m_pTokenizer->TokenIsBlended() ) m_iAtomPos--; break; } // someone must be pending now! 
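	// GetToken() hands the parser exactly one token per call: any pending "null" keywords from
	// overshort tokens are drained first, one per call, and only then the pending token itself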
assert ( m_iPendingType ); m_bEmpty = false; // ladies first, though if ( m_iPendingNulls>0 ) { m_iPendingNulls--; lvalp->pNode = AddKeyword ( NULL ); return TOK_KEYWORD; } // pending the offending int iRes = m_iPendingType; m_iPendingType = 0; *lvalp = m_tPendingToken; return iRes; } void XQParser_t::AddQuery ( XQNode_t * pNode ) { m_pRoot = pNode; } XQNode_t * XQParser_t::AddKeyword ( const char * sKeyword, DWORD uStarPosition ) { XQKeyword_t tAW ( sKeyword, m_iAtomPos ); tAW.m_uStarPosition = uStarPosition; XQNode_t * pNode = new XQNode_t ( m_dStateSpec ); pNode->m_dWords.Add ( tAW ); m_dSpawned.Add ( pNode ); return pNode; } XQNode_t * XQParser_t::AddKeyword ( XQNode_t * pLeft, XQNode_t * pRight ) { if ( !pLeft || !pRight ) return pLeft ? pLeft : pRight; assert ( pLeft->m_dWords.GetLength()>0 ); assert ( pRight->m_dWords.GetLength()==1 ); pLeft->m_dWords.Add ( pRight->m_dWords[0] ); m_dSpawned.RemoveValue ( pRight ); SafeDelete ( pRight ); return pLeft; } XQNode_t * XQParser_t::AddOp ( XQOperator_e eOp, XQNode_t * pLeft, XQNode_t * pRight, int iOpArg ) { ///////// // unary ///////// if ( eOp==SPH_QUERY_NOT ) { XQNode_t * pNode = new XQNode_t ( m_dStateSpec ); pNode->SetOp ( SPH_QUERY_NOT, pLeft ); m_dSpawned.Add ( pNode ); return pNode; } ////////// // binary ////////// if ( !pLeft || !pRight ) return pLeft ? pLeft : pRight; // left spec always tries to infect the nodes to the right, only brackets can stop it // eg. '@title hello' vs 'world' pRight->CopySpecs ( pLeft ); XQNode_t * pDonor = pRight; if ( pRight->m_dSpec.m_bInvisible ) pDonor = pLeft; m_dStateSpec = pDonor->m_dSpec; // build a new node XQNode_t * pResult = NULL; if ( pLeft->m_dChildren.GetLength() && pLeft->GetOp()==eOp && pLeft->m_iOpArg==iOpArg ) { pLeft->m_dChildren.Add ( pRight ); pResult = pLeft; if ( pRight->m_dSpec.m_bFieldSpec ) pResult->m_dSpec.SetFieldSpec ( pRight->m_dSpec.m_dFieldMask, pRight->m_dSpec.m_iFieldMaxPos ); if ( pRight->m_dSpec.m_dZones.GetLength() ) pResult->m_dSpec.SetZoneSpec ( pRight->m_dSpec.m_dZones ); } else { // however, it's right (!) spec which is chosen for the resulting node, // eg. '@title hello' + 'world @body program' XQNode_t * pNode = new XQNode_t ( pDonor->m_dSpec ); pNode->SetOp ( eOp, pLeft, pRight ); pNode->m_iOpArg = iOpArg; m_dSpawned.Add ( pNode ); pResult = pNode; } return pResult; } XQNode_t * XQParser_t::SweepNulls ( XQNode_t * pNode ) { if ( !pNode ) return NULL; // sweep plain node if ( pNode->m_dWords.GetLength() ) { ARRAY_FOREACH ( i, pNode->m_dWords ) if ( pNode->m_dWords[i].m_sWord.cstr()==NULL ) pNode->m_dWords.Remove ( i-- ); if ( pNode->m_dWords.GetLength()==0 ) { m_dSpawned.RemoveValue ( pNode ); // OPTIMIZE! SafeDelete ( pNode ); return NULL; } return pNode; } // sweep op node ARRAY_FOREACH ( i, pNode->m_dChildren ) { pNode->m_dChildren[i] = SweepNulls ( pNode->m_dChildren[i] ); if ( pNode->m_dChildren[i]==NULL ) pNode->m_dChildren.Remove ( i-- ); } if ( pNode->m_dChildren.GetLength()==0 ) { m_dSpawned.RemoveValue ( pNode ); // OPTIMIZE! SafeDelete ( pNode ); return NULL; } // remove redundancies if needed if ( pNode->GetOp()!=SPH_QUERY_NOT && pNode->m_dChildren.GetLength()==1 ) { XQNode_t * pRet = pNode->m_dChildren[0]; pNode->m_dChildren.Reset (); m_dSpawned.RemoveValue ( pNode ); // OPTIMIZE! 
SafeDelete ( pNode ); return pRet; } // done return pNode; } bool XQParser_t::FixupNots ( XQNode_t * pNode ) { // no processing for plain nodes if ( !pNode || pNode->m_dWords.GetLength() ) return true; // process 'em children ARRAY_FOREACH ( i, pNode->m_dChildren ) if ( !FixupNots ( pNode->m_dChildren[i] ) ) return false; // extract NOT subnodes CSphVector dNots; ARRAY_FOREACH ( i, pNode->m_dChildren ) if ( pNode->m_dChildren[i]->GetOp()==SPH_QUERY_NOT ) { dNots.Add ( pNode->m_dChildren[i] ); pNode->m_dChildren.RemoveFast ( i-- ); } // no NOTs? we're square if ( !dNots.GetLength() ) return true; // nothing but NOTs? we can't compute that if ( !pNode->m_dChildren.GetLength() ) { m_pParsed->m_sParseError.SetSprintf ( "query is non-computable (node consists of NOT operators only)" ); return false; } // NOT within OR? we can't compute that if ( pNode->GetOp()==SPH_QUERY_OR ) { m_pParsed->m_sParseError.SetSprintf ( "query is non-computable (NOT is not allowed within OR)" ); return false; } // NOT used in before operator if ( pNode->GetOp()==SPH_QUERY_BEFORE ) { m_pParsed->m_sParseError.SetSprintf ( "query is non-computable (NOT cannot be used as before operand)" ); return false; } // must be some NOTs within AND at this point, convert this node to ANDNOT assert ( pNode->GetOp()==SPH_QUERY_AND && pNode->m_dChildren.GetLength() && dNots.GetLength() ); XQNode_t * pAnd = new XQNode_t ( pNode->m_dSpec ); pAnd->SetOp ( SPH_QUERY_AND, pNode->m_dChildren ); m_dSpawned.Add ( pAnd ); XQNode_t * pNot = NULL; if ( dNots.GetLength()==1 ) { pNot = dNots[0]; } else { pNot = new XQNode_t ( pNode->m_dSpec ); pNot->SetOp ( SPH_QUERY_OR, dNots ); m_dSpawned.Add ( pNot ); } pNode->SetOp ( SPH_QUERY_ANDNOT, pAnd, pNot ); return true; } static void DeleteNodesWOFields ( XQNode_t * pNode ) { if ( !pNode ) return; for ( int i = 0; i < pNode->m_dChildren.GetLength (); ) { if ( pNode->m_dChildren[i]->m_dSpec.m_dFieldMask.TestAll() ) { // this should be a leaf node assert ( pNode->m_dChildren[i]->m_dChildren.GetLength()==0 ); SafeDelete ( pNode->m_dChildren[i] ); pNode->m_dChildren.RemoveFast ( i ); } else { DeleteNodesWOFields ( pNode->m_dChildren[i] ); i++; } } } static bool CheckQuorum ( XQNode_t * pNode, CSphString * pError ) { assert ( pError ); if ( !pNode ) return true; if ( pNode->GetOp()==SPH_QUERY_QUORUM && pNode->m_iOpArg<=0 ) { pError->SetSprintf ( "quorum threshold too low (%d)", pNode->m_iOpArg ); return false; } bool bValid = true; ARRAY_FOREACH_COND ( i, pNode->m_dChildren, bValid ) { bValid &= CheckQuorum ( pNode->m_dChildren[i], pError ); } return bValid; } static void FixupDegenerates ( XQNode_t * pNode ) { if ( !pNode ) return; if ( pNode->m_dWords.GetLength()==1 && ( pNode->GetOp()==SPH_QUERY_PHRASE || pNode->GetOp()==SPH_QUERY_PROXIMITY || pNode->GetOp()==SPH_QUERY_QUORUM ) ) { pNode->SetOp ( SPH_QUERY_AND ); return; } ARRAY_FOREACH ( i, pNode->m_dChildren ) FixupDegenerates ( pNode->m_dChildren[i] ); } bool XQParser_t::Parse ( XQQuery_t & tParsed, const char * sQuery, const ISphTokenizer * pTokenizer, const CSphSchema * pSchema, CSphDict * pDict, int iStopwordStep ) { CSphScopedPtr pMyTokenizer ( pTokenizer->Clone ( true ) ); pMyTokenizer->AddSpecials ( "()|-!@~\"/^$<" ); pMyTokenizer->EnableQueryParserMode ( true ); // most outcomes are errors SafeDelete ( tParsed.m_pRoot ); // check for relaxed syntax const char * OPTION_RELAXED = "@@relaxed"; const int OPTION_RELAXED_LEN = strlen ( OPTION_RELAXED ); m_bStopOnInvalid = true; if ( sQuery && strncmp ( sQuery, OPTION_RELAXED, OPTION_RELAXED_LEN )==0 
&& !sphIsAlpha ( sQuery[OPTION_RELAXED_LEN] ) ) { sQuery += OPTION_RELAXED_LEN; m_bStopOnInvalid = false; } // setup parser m_pParsed = &tParsed; m_sQuery = (BYTE*) sQuery; m_iQueryLen = sQuery ? strlen(sQuery) : 0; m_pTokenizer = pMyTokenizer.Ptr(); m_pSchema = pSchema; m_pDict = pDict; m_pCur = sQuery; m_iAtomPos = 0; m_iPendingNulls = 0; m_iPendingType = 0; m_pRoot = NULL; m_bEmpty = true; m_bEmptyStopword = ( iStopwordStep==0 ); m_pTokenizer->SetBuffer ( m_sQuery, m_iQueryLen ); int iRes = yyparse ( this ); if ( ( iRes || !m_pParsed->m_sParseError.IsEmpty() ) && !m_bEmpty ) { Cleanup (); return false; } DeleteNodesWOFields ( m_pRoot ); m_pRoot = SweepNulls ( m_pRoot ); FixupDegenerates ( m_pRoot ); if ( !FixupNots ( m_pRoot ) ) { Cleanup (); return false; } if ( !CheckQuorum ( m_pRoot, &m_pParsed->m_sParseError ) ) { Cleanup(); return false; } if ( m_pRoot && m_pRoot->GetOp()==SPH_QUERY_NOT ) { Cleanup (); m_pParsed->m_sParseError.SetSprintf ( "query is non-computable (single NOT operator)" ); return false; } // all ok; might want to create a dummy node to indicate that m_dSpawned.Reset(); tParsed.m_pRoot = m_pRoot ? m_pRoot : new XQNode_t ( m_dStateSpec ); return true; } ////////////////////////////////////////////////////////////////////////// #define XQDEBUG 0 #if XQDEBUG static void xqIndent ( int iIndent ) { iIndent *= 2; while ( iIndent-- ) printf ( " " ); } static void xqDump ( XQNode_t * pNode, const CSphSchema & tSch, int iIndent ) { if ( pNode->m_dChildren.GetLength() ) { xqIndent ( iIndent ); switch ( pNode->GetOp() ) { case SPH_QUERY_AND: printf ( "AND:\n" ); break; case SPH_QUERY_OR: printf ( "OR:\n" ); break; case SPH_QUERY_NOT: printf ( "NOT:\n" ); break; case SPH_QUERY_ANDNOT: printf ( "ANDNOT:\n" ); break; case SPH_QUERY_BEFORE: printf ( "BEFORE:\n" ); break; default: printf ( "unknown-op-%d:\n", pNode->GetOp() ); break; } ARRAY_FOREACH ( i, pNode->m_dChildren ) xqDump ( pNode->m_dChildren[i], tSch, iIndent+1 ); } else { xqIndent ( iIndent ); printf ( "MATCH(%d,%d):", pNode->m_uFieldMask, pNode->m_iOpArg ); ARRAY_FOREACH ( i, pNode->m_dWords ) { const XQKeyword_t & tWord = pNode->m_dWords[i]; const char * sLocTag = ""; if ( tWord.m_bFieldStart ) sLocTag = ", start"; if ( tWord.m_bFieldEnd ) sLocTag = ", end"; printf ( " %s (qpos %d%s)", tWord.m_sWord.cstr(), tWord.m_iAtomPos, sLocTag ); } printf ( "\n" ); } } #endif bool sphParseExtendedQuery ( XQQuery_t & tParsed, const char * sQuery, const ISphTokenizer * pTokenizer, const CSphSchema * pSchema, CSphDict * pDict, int iStopwordStep ) { XQParser_t qp; bool bRes = qp.Parse ( tParsed, sQuery, pTokenizer, pSchema, pDict, iStopwordStep ); #ifndef NDEBUG if ( bRes && tParsed.m_pRoot ) tParsed.m_pRoot->Check ( true ); #endif #if XQDEBUG if ( bRes ) { printf ( "--- query ---\n" ); xqDump ( tParsed.m_pRoot, *pSchema, 0 ); printf ( "---\n" ); } #endif return bRes; } ////////////////////////////////////////////////////////////////////////// // COMMON SUBTREES DETECTION ////////////////////////////////////////////////////////////////////////// /// Decides if given pTree is appropriate for caching or not. Currently we don't cache /// the end values (leafs). static bool IsAppropriate ( XQNode_t * pTree ) { if ( !pTree ) return false; // skip nodes that actually are leaves (eg. 
"AND smth" node instead of merely "smth") return !( pTree->m_dWords.GetLength()==1 && pTree->GetOp()!=SPH_QUERY_NOT ); } typedef CSphOrderedHash < DWORD, uint64_t, IdentityHash_fn, 128 > CDwordHash; // stores the pair of a tree, and the bitmask of common nodes // which contains the tree. class BitMask_t { XQNode_t * m_pTree; uint64_t m_uMask; public: BitMask_t () : m_pTree ( NULL ) , m_uMask ( 0ull ) {} void Init ( XQNode_t * pTree, uint64_t uMask ) { m_pTree = pTree; m_uMask = uMask; } inline uint64_t GetMask() const { return m_uMask; } inline XQNode_t * GetTree() const { return m_pTree; } }; // a list of unique values. class Associations_t : public CDwordHash { public: // returns true when add the second member. // The reason is that only one is not interesting for us, // but more than two will flood the caller. bool Associate2nd ( uint64_t uTree ) { if ( Exists ( uTree ) ) return false; Add ( 0, uTree ); return GetLength()==2; } // merge with another similar void Merge ( const Associations_t& parents ) { parents.IterateStart(); while ( parents.IterateNext() ) Associate2nd ( parents.IterateGetKey() ); } }; // associate set of nodes, common bitmask for these nodes, // and gives the < to compare different pairs class BitAssociation_t { private: const Associations_t * m_pAssociations; mutable int m_iBits; // The key method of subtree selection. // Most 'heavy' subtrees will be extracted first. inline int GetWeight() const { assert ( m_pAssociations ); int iNodes = m_pAssociations->GetLength(); if ( m_iBits==0 && m_uMask!=0 ) { for ( uint64_t dMask = m_uMask; dMask; dMask >>=1 ) m_iBits += (int)( dMask & 1 ); } // current working formula is num_nodes^2 * num_hits return iNodes * iNodes * m_iBits; } public: uint64_t m_uMask; BitAssociation_t() : m_pAssociations ( NULL ) , m_iBits ( 0 ) , m_uMask ( 0 ) {} void Init ( uint64_t uMask, const Associations_t* dNodes ) { m_uMask = uMask; m_pAssociations = dNodes; m_iBits = 0; } bool operator< (const BitAssociation_t& second) const { return GetWeight() < second.GetWeight(); } }; // for pairs of values builds and stores the association "key -> list of values" class CAssociations_t : public CSphOrderedHash < Associations_t, uint64_t, IdentityHash_fn, 128 > { int m_iBits; // number of non-unique associations public: CAssociations_t() : m_iBits ( 0 ) {} // Add the given pTree into the list of pTrees, associated with given uHash int Associate ( XQNode_t * pTree, uint64_t uHash ) { if ( !Exists ( uHash ) ) Add ( Associations_t(), uHash ); if ( operator[]( uHash ).Associate2nd ( pTree->GetHash() ) ) m_iBits++; return m_iBits; } // merge the existing association of uHash with given chain void MergeAssociations ( const Associations_t & chain, uint64_t uHash ) { if ( !Exists ( uHash ) ) Add ( chain, uHash ); else operator[]( uHash ).Merge ( chain ); } inline int GetBits() const { return m_iBits; } }; // The main class for working with common subtrees class RevealCommon_t : ISphNoncopyable { private: static const int MAX_MULTINODES = 64; CSphVector m_dBitmasks; // all bitmasks for all the nodes CSphVector m_dSubQueries; // final vector with roadmap for tree division. CAssociations_t m_hNodes; // initial accumulator for nodes CAssociations_t m_hInterSections; // initial accumulator for nodes CDwordHash m_hBitOrders; // order numbers for found common subnodes XQOperator_e m_eOp; // my operator which I process private: // returns the order for given uHash (if any). 
inline int GetBitOrder ( uint64_t uHash ) const { if ( !m_hBitOrders.Exists ( uHash ) ) return -1; return m_hBitOrders[uHash]; } // recursively scans the whole tree and builds the maps // where a list of parents associated with every "leaf" nodes (i.e. with children) bool BuildAssociations ( XQNode_t * pTree ) { if ( IsAppropriate ( pTree ) ) { ARRAY_FOREACH ( i, pTree->m_dChildren ) if ( ( !BuildAssociations ( pTree->m_dChildren[i] ) ) || ( ( m_eOp==pTree->GetOp() ) && ( m_hNodes.Associate ( pTree, pTree->m_dChildren[i]->GetHash() )>=MAX_MULTINODES ) ) ) { return false; } } return true; } // Find all leafs, non-unique across the tree, // and associate the order number with every of them bool CalcCommonNodes () { if ( !m_hNodes.GetBits() ) return false; // there is totally no non-unique leaves int iBit = 0; m_hNodes.IterateStart(); while ( m_hNodes.IterateNext() ) if ( m_hNodes.IterateGet().GetLength() > 1 ) m_hBitOrders.Add ( iBit++, m_hNodes.IterateGetKey() ); assert ( m_hNodes.GetBits()==m_hBitOrders.GetLength() ); m_hNodes.Reset(); ///< since from now we don't need this data anymore return true; } // recursively builds for every node the bitmaks // of common nodes it has as children void BuildBitmasks ( XQNode_t * pTree ) { if ( !IsAppropriate ( pTree ) ) return; if ( m_eOp==pTree->GetOp() ) { // calculate the bitmask int iOrder; uint64_t dMask = 0; ARRAY_FOREACH ( i, pTree->m_dChildren ) { iOrder = GetBitOrder ( pTree->m_dChildren[i]->GetHash() ); if ( iOrder>=0 ) dMask |= 1ull << iOrder; } // add the bitmask into the array if ( dMask ) m_dBitmasks.Add().Init( pTree, dMask ); } // recursively process all the children ARRAY_FOREACH ( i, pTree->m_dChildren ) BuildBitmasks ( pTree->m_dChildren[i] ); } // Collect all possible intersections of Bitmasks. // For every non-zero intersection we collect the list of trees which contain it. void CalcIntersections () { // Round 1. Intersect all content of bitmasks one-by-one. ARRAY_FOREACH ( i, m_dBitmasks ) for ( int j = i+1; j dSubnodes; // masks for our selected subnodes dSubnodes.Reserve ( m_hInterSections.GetLength() ); m_hInterSections.IterateStart(); while ( m_hInterSections.IterateNext() ) dSubnodes.Add().Init( m_hInterSections.IterateGetKey(), &m_hInterSections.IterateGet() ); // sort by weight descending (weight sorting is hold by operator <) dSubnodes.RSort(); m_dSubQueries.Reset(); // make the final subtrees vector: get one-by-one from the beginning, // intresect with all the next and throw out zeros. // The final subqueries will not be intersected between each other. int j; uint64_t uMask; ARRAY_FOREACH ( i, dSubnodes ) { uMask = dSubnodes[i].m_uMask; m_dSubQueries.Add ( uMask ); j = i+1; while ( j < dSubnodes.GetLength() ) { if ( !( dSubnodes[j].m_uMask &= ~uMask ) ) dSubnodes.Remove(j); else j++; } } } // Now we finally extract the common subtrees from original tree // and (recursively) from it's children void Reorganize ( XQNode_t * pTree ) { if ( !IsAppropriate ( pTree ) ) return; if ( m_eOp==pTree->GetOp() ) { // pBranch is for common subset of children, pOtherChildren is for the rest. CSphOrderedHash < XQNode_t*, int, IdentityHash_fn, 64 > hBranches; XQNode_t * pOtherChildren = NULL; int iBit; int iOptimizations = 0; ARRAY_FOREACH ( i, pTree->m_dChildren ) { iBit = GetBitOrder ( pTree->m_dChildren[i]->GetHash() ); // works only with children which are actually common with somebody else if ( iBit>=0 ) { // since subqueries doesn't intersected between each other, // the first hit we found in this loop is exactly what we searched. 
ARRAY_FOREACH ( j, m_dSubQueries ) if ( ( 1ull << iBit ) & m_dSubQueries[j] ) { XQNode_t * pNode; if ( !hBranches.Exists(j) ) { pNode = new XQNode_t ( pTree->m_dSpec ); pNode->SetOp ( m_eOp, pTree->m_dChildren[i] ); hBranches.Add ( pNode, j ); } else { pNode = hBranches[j]; pNode->m_dChildren.Add ( pTree->m_dChildren[i] ); // Count essential subtrees (with at least 2 children) if ( pNode->m_dChildren.GetLength()==2 ) iOptimizations++; } break; } // another nodes add to the set of "other" children } else { if ( !pOtherChildren ) { pOtherChildren = new XQNode_t ( pTree->m_dSpec ); pOtherChildren->SetOp ( m_eOp, pTree->m_dChildren[i] ); } else pOtherChildren->m_dChildren.Add ( pTree->m_dChildren[i] ); } } // we don't reorganize explicit simple case - as no "others" and only one common. // Also reject optimization if there is nothing to optimize. if ( ( iOptimizations==0 ) | ( !pOtherChildren && ( hBranches.GetLength()==1 ) ) ) { if ( pOtherChildren ) pOtherChildren->m_dChildren.Reset(); hBranches.IterateStart(); while ( hBranches.IterateNext() ) { assert ( hBranches.IterateGet() ); hBranches.IterateGet()->m_dChildren.Reset(); SafeDelete ( hBranches.IterateGet() ); } } else { // reorganize the tree: replace the common subset to explicit node with // only common members inside. This will give the the possibility // to cache the node. pTree->m_dChildren.Reset(); if ( pOtherChildren ) pTree->m_dChildren.SwapData ( pOtherChildren->m_dChildren ); hBranches.IterateStart(); while ( hBranches.IterateNext() ) { if ( hBranches.IterateGet()->m_dChildren.GetLength()==1 ) { pTree->m_dChildren.Add ( hBranches.IterateGet()->m_dChildren[0] ); hBranches.IterateGet()->m_dChildren.Reset(); SafeDelete ( hBranches.IterateGet() ); } else pTree->m_dChildren.Add ( hBranches.IterateGet() ); } } SafeDelete ( pOtherChildren ); } // recursively process all the children ARRAY_FOREACH ( i, pTree->m_dChildren ) Reorganize ( pTree->m_dChildren[i] ); } public: explicit RevealCommon_t ( XQOperator_e eOp ) : m_eOp ( eOp ) {} // actual method for processing tree and reveal (extract) common subtrees void Transform ( int iXQ, const XQQuery_t * pXQ ) { // collect all non-unique nodes for ( int i=0; i1 ) m_iCounter--; if ( m_iCounter<2 ) m_bMarked = false; } }; typedef CSphOrderedHash < MarkedNode_t, uint64_t, IdentityHash_fn, 128 > CSubtreeHash; /// check hashes, then check subtrees, then flag static void FlagCommonSubtrees ( XQNode_t * pTree, CSubtreeHash & hSubTrees, bool bFlag=true, bool bMarkIt=true ) { if ( !IsAppropriate ( pTree ) ) return; // we do not yet have any collisions stats, // but chances are we don't actually need IsEqualTo() at all uint64_t iHash = pTree->GetHash(); if ( bFlag && hSubTrees.Exists ( iHash ) && hSubTrees [ iHash ].m_pTree->IsEqualTo ( pTree ) ) { hSubTrees[iHash].MarkIt (); // we just add all the children but do NOT mark them as common // so that only the subtree root is marked. 
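// (bFlag=false below keeps descendants from being counted as separate common
// roots in their own right; bMarkIt=false additionally un-marks a descendant
// whose hash is already registered, because the bigger enclosing subtree is
// the one worth caching)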
// also we unmark all the cases which were eaten by bigger trees ARRAY_FOREACH ( i, pTree->m_dChildren ) if ( !hSubTrees.Exists ( pTree->m_dChildren[i]->GetHash() ) ) FlagCommonSubtrees ( pTree->m_dChildren[i], hSubTrees, false, bMarkIt ); else FlagCommonSubtrees ( pTree->m_dChildren[i], hSubTrees, false, false ); } else { if ( !bMarkIt ) hSubTrees[iHash].MarkIt(false); else hSubTrees.Add ( MarkedNode_t ( pTree ), iHash ); ARRAY_FOREACH ( i, pTree->m_dChildren ) FlagCommonSubtrees ( pTree->m_dChildren[i], hSubTrees, bFlag, bMarkIt ); } } static void SignCommonSubtrees ( XQNode_t * pTree, CSubtreeHash & hSubTrees ) { if ( !pTree ) return; uint64_t iHash = pTree->GetHash(); if ( hSubTrees.Exists(iHash) && hSubTrees[iHash].m_bMarked ) pTree->TagAsCommon ( hSubTrees[iHash].m_iOrder, hSubTrees[iHash].m_iCounter ); ARRAY_FOREACH ( i, pTree->m_dChildren ) SignCommonSubtrees ( pTree->m_dChildren[i], hSubTrees ); } int sphMarkCommonSubtrees ( int iXQ, const XQQuery_t * pXQ ) { if ( iXQ<=0 || !pXQ ) return 0; { // Optional reorganize tree to extract common parts RevealCommon_t ( SPH_QUERY_AND ).Transform ( iXQ, pXQ ); RevealCommon_t ( SPH_QUERY_OR ).Transform ( iXQ, pXQ ); } // flag common subtrees and refcount them CSubtreeHash hSubtrees; for ( int i=0; i m_dZones; ///< zone indexes in per-query zones list public: XQLimitSpec_t () { Reset(); } inline void Reset () { m_bInvisible = false; m_bFieldSpec = false; m_iFieldMaxPos = 0; m_dFieldMask.Set(); m_dZones.Reset(); } inline void Hide () { m_bInvisible = true; } XQLimitSpec_t ( const XQLimitSpec_t& dLimit ) { if ( this==&dLimit ) return; Reset(); *this = dLimit; } XQLimitSpec_t & operator = ( const XQLimitSpec_t& dLimit ) { if ( this==&dLimit ) return *this; if ( dLimit.m_bFieldSpec ) SetFieldSpec ( dLimit.m_dFieldMask, dLimit.m_iFieldMaxPos ); if ( dLimit.m_dZones.GetLength() ) SetZoneSpec ( dLimit.m_dZones ); return *this; } public: void SetZoneSpec ( const CSphVector & dZones ); void SetFieldSpec ( const CSphSmallBitvec& uMask, int iMaxPos ); }; /// extended query node /// plain nodes are just an atom /// non-plain nodes are a logical function over children nodes struct XQNode_t : public ISphNoncopyable { XQNode_t * m_pParent; ///< my parent node (NULL for root ones) private: XQOperator_e m_eOp; ///< operation over childen int m_iOrder; int m_iCounter; private: mutable uint64_t m_iMagicHash; public: CSphVector m_dChildren; ///< non-plain node children XQLimitSpec_t m_dSpec; ///< specification by field, zone(s), etc. 
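// Illustrative example (a sketch, not part of the parser): the query
// "( cat | dog ) mouse" roughly becomes an AND root with two children -- an
// OR node over two single-keyword leaves, plus a plain leaf whose m_dWords
// holds "mouse". Built by hand it would look something like the following,
// where pCat/pDog/pMouse are hypothetical, already-created leaf nodes and
// ownership/spec details are omitted:
//
//   XQLimitSpec_t tSpec;
//   XQNode_t * pOr = new XQNode_t ( tSpec );
//   pOr->SetOp ( SPH_QUERY_OR, pCat, pDog );      // OR over the two keyword leaves
//   XQNode_t * pRoot = new XQNode_t ( tSpec );
//   pRoot->SetOp ( SPH_QUERY_AND, pOr, pMouse );  // implicit AND at the top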
CSphVector m_dWords; ///< query words (plain node) int m_iOpArg; ///< operator argument (proximity distance, quorum count) int m_iAtomPos; ///< atom position override (currently only used within expanded nodes) bool m_bVirtuallyPlain; ///< "virtually plain" flag (currently only used by expanded nodes) bool m_bNotWeighted; ///< this our expanded but empty word's node public: /// ctor explicit XQNode_t ( const XQLimitSpec_t& dSpec ) : m_pParent ( NULL ) , m_eOp ( SPH_QUERY_AND ) , m_iOrder ( 0 ) , m_iCounter ( 0 ) , m_iMagicHash ( 0 ) , m_dSpec ( dSpec ) , m_iOpArg ( 0 ) , m_iAtomPos ( -1 ) , m_bVirtuallyPlain ( false ) , m_bNotWeighted ( false ) {} /// dtor ~XQNode_t () { ARRAY_FOREACH ( i, m_dChildren ) SafeDelete ( m_dChildren[i] ); } /// check if i'm empty bool IsEmpty () const { assert ( m_dWords.GetLength()==0 || m_dChildren.GetLength()==0 ); return m_dWords.GetLength()==0 && m_dChildren.GetLength()==0; } /// setup field limits void SetFieldSpec ( const CSphSmallBitvec& uMask, int iMaxPos ); /// setup zone limits void SetZoneSpec ( const CSphVector & dZones ); /// copy field/zone limits from another node void CopySpecs ( const XQNode_t * pSpecs ); /// unconditionally clear field mask void ClearFieldMask (); public: /// get my operator XQOperator_e GetOp () const { return m_eOp; } /// get my cache order DWORD GetOrder () const { return m_iOrder; } /// get my cache counter int GetCount () const { return m_iCounter; } /// setup common nodes for caching void TagAsCommon ( int iOrder, int iCounter ) { m_iCounter = iCounter; m_iOrder = iOrder; } /// precise comparison bool IsEqualTo ( const XQNode_t * pNode ); /// hash me uint64_t GetHash () const; /// setup new operator and args void SetOp ( XQOperator_e eOp, XQNode_t * pArg1, XQNode_t * pArg2=NULL ); /// setup new operator and args void SetOp ( XQOperator_e eOp, CSphVector & dArgs ) { m_eOp = eOp; m_dChildren.SwapData(dArgs); } /// setup new operator (careful parser/transform use only) void SetOp ( XQOperator_e eOp ) { m_eOp = eOp; } #ifndef NDEBUG /// consistency check void Check ( bool bRoot ) { assert ( bRoot || !IsEmpty() ); // empty leaves must be removed from the final tree; empty root is allowed assert (!( m_dWords.GetLength() && m_eOp!=SPH_QUERY_AND && m_eOp!=SPH_QUERY_PHRASE && m_eOp!=SPH_QUERY_PROXIMITY && m_eOp!=SPH_QUERY_QUORUM )); // words are only allowed in these node types assert (!( m_dWords.GetLength()==1 && m_eOp!=SPH_QUERY_AND )); // 1-word leaves must be of AND type ARRAY_FOREACH ( i, m_dChildren ) m_dChildren[i]->Check ( false ); } #endif }; /// extended query struct XQQuery_t : public ISphNoncopyable { CSphString m_sParseError; CSphString m_sParseWarning; CSphVector m_dZones; XQNode_t * m_pRoot; /// ctor XQQuery_t () { m_pRoot = NULL; } /// dtor ~XQQuery_t () { SafeDelete ( m_pRoot ); } }; ////////////////////////////////////////////////////////////////////////////// /// parses the query and returns the resulting tree /// return false and fills tQuery.m_sParseError on error /// WARNING, parsed tree might be NULL (eg. 
if query was empty) bool sphParseExtendedQuery ( XQQuery_t & tQuery, const char * sQuery, const ISphTokenizer * pTokenizer, const CSphSchema * pSchema, CSphDict * pDict, int iStopwordStep ); /// analyse vector of trees and tag common parts of them (to cache them later) int sphMarkCommonSubtrees ( int iXQ, const XQQuery_t * pXQ ); #endif // _sphinxquery_ // // $Id: sphinxquery.h 3114 2012-02-21 14:52:21Z klirichek $ // sphinx-2.0.4-release/src/sphinxversion.h0000644000176700017710000000022011724063146017620 0ustar deogardeogar#define SPH_SVN_TAG "rel20" #define SPH_SVN_REV 3135 #define SPH_SVN_REVSTR "3135" #define SPH_SVN_TAGREV "r3135" #define SPHINX_TAG "-release" sphinx-2.0.4-release/src/yysphinxselect.h0000644000176700017710000000443711677266637020033 0ustar deogardeogar/* A Bison parser, made by GNU Bison 1.875. */ /* Skeleton parser for Yacc-like parsing with Bison, Copyright (C) 1984, 1989, 1990, 2000, 2001, 2002 Free Software Foundation, Inc. This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */ /* As a special exception, when this file is copied by Bison into a Bison output file, you may use that output file without restriction. This special exception was added by the Free Software Foundation in version 1.24 of Bison. */ /* Tokens. */ #ifndef YYTOKENTYPE # define YYTOKENTYPE /* Put the tokens into the symbol table, so that GDB and other debuggers know about them. */ enum yytokentype { SEL_TOKEN = 258, SEL_ID = 259, SEL_AS = 260, SEL_AVG = 261, SEL_MAX = 262, SEL_MIN = 263, SEL_SUM = 264, SEL_COUNT = 265, SEL_WEIGHT = 266, SEL_DISTINCT = 267, TOK_NEG = 268, TOK_LTE = 269, TOK_GTE = 270, TOK_EQ = 271, TOK_NE = 272, TOK_CONST_STRING = 273, TOK_OR = 274, TOK_AND = 275, TOK_NOT = 276 }; #endif #define SEL_TOKEN 258 #define SEL_ID 259 #define SEL_AS 260 #define SEL_AVG 261 #define SEL_MAX 262 #define SEL_MIN 263 #define SEL_SUM 264 #define SEL_COUNT 265 #define SEL_WEIGHT 266 #define SEL_DISTINCT 267 #define TOK_NEG 268 #define TOK_LTE 269 #define TOK_GTE 270 #define TOK_EQ 271 #define TOK_NE 272 #define TOK_CONST_STRING 273 #define TOK_OR 274 #define TOK_AND 275 #define TOK_NOT 276 #if ! defined (YYSTYPE) && ! defined (YYSTYPE_IS_DECLARED) typedef int YYSTYPE; # define yystype YYSTYPE /* obsolescent; will be withdrawn */ # define YYSTYPE_IS_DECLARED 1 # define YYSTYPE_IS_TRIVIAL 1 #endif sphinx-2.0.4-release/src/sphinxudf.h0000644000176700017710000000420111714110631016703 0ustar deogardeogar// // $Id$ // // // Copyright (c) 2011, Andrew Aksyonoff // Copyright (c) 2011, Sphinx Technologies Inc // All rights reserved // // This program is free software; you can redistribute it and/or modify // it under the terms of the GNU General Public License. 
You should have // received a copy of the GPL license along with this program; if you // did not, you can find it at http://www.gnu.org/ // // // Sphinx UDF interface header // // This file will be included by UDF implementations, so it should be // portable plain C, stay standalone, and change as rarely as possible. // #ifndef _sphinxudf_ #define _sphinxudf_ /// error buffer size #define SPH_UDF_ERROR_LEN 256 /// UDF argument and result value types enum sphinx_udf_argtype { SPH_UDF_TYPE_UINT32 = 1, ///< unsigned 32-bit integer SPH_UDF_TYPE_UINT32SET = 2, ///< sorted set of unsigned 32-bit integers SPH_UDF_TYPE_INT64 = 3, ///< signed 64-bit integer SPH_UDF_TYPE_FLOAT = 4, ///< single-precision IEEE 754 float SPH_UDF_TYPE_STRING = 5, ///< non-ASCIIZ string, with a separately stored length SPH_UDF_TYPE_UINT64SET = 6 ///< sorted set of unsigned 64-bit integers }; /// UDF call arguments typedef struct st_sphinx_udf_args { int arg_count; ///< number of arguments enum sphinx_udf_argtype * arg_types; ///< argument types char ** arg_values; ///< argument values (strings are not (!) ASCIIZ; see str_lengths below) char ** arg_names; ///< argument names (ASCIIZ argname in 'expr AS argname' case; NULL otherwise) int * str_lengths; ///< string argument lengths } SPH_UDF_ARGS; /// UDF initialization typedef struct st_sphinx_udf_init { void * func_data; ///< function data (will be passed to calls, deinit) char is_const; ///< whether a function returns a constant } SPH_UDF_INIT; /// integer return types #if defined(_MSC_VER) || defined(__WIN__) typedef __int64 sphinx_int64_t; typedef unsigned __int64 sphinx_uint64_t; #else typedef long long sphinx_int64_t; typedef unsigned long long sphinx_uint64_t; #endif #endif // _sphinxudf_ // // $Id$ // sphinx-2.0.4-release/src/sphinxfilter.h0000644000176700017710000000344011711621267017427 0ustar deogardeogar// // $Id: sphinxfilter.h 3087 2012-01-30 23:07:35Z shodan $ // // // Copyright (c) 2001-2012, Andrew Aksyonoff // Copyright (c) 2008-2012, Sphinx Technologies Inc // All rights reserved // // This program is free software; you can redistribute it and/or modify // it under the terms of the GNU General Public License. You should have // received a copy of the GPL license along with this program; if you // did not, you can find it at http://www.gnu.org/ // #ifndef _sphinxfilter_ #define _sphinxfilter_ #include "sphinx.h" struct ISphFilter { virtual void SetLocator ( const CSphAttrLocator & ) {} virtual void SetRange ( SphAttr_t, SphAttr_t ) {} virtual void SetRangeFloat ( float, float ) {} virtual void SetValues ( const SphAttr_t *, int ) {} virtual void SetMVAStorage ( const DWORD * ) {} virtual ~ISphFilter () {} /// evaluate filter for a given match /// returns true if match satisfies the filter critertia (i.e. 
in range, found in values list etc) virtual bool Eval ( const CSphMatch & tMatch ) const = 0; /// evaluate filter for a given block /// args are pMinDocinfo and pMaxDocinfo /// returns false if no document in block can possibly pass through the filter virtual bool EvalBlock ( const DWORD *, const DWORD * ) const { // if filter does not implement block-level evaluation we assume the block will pass return true; } virtual ISphFilter * Join ( ISphFilter * pFilter ); bool UsesAttrs() const { return m_bUsesAttrs; } ISphFilter() : m_bUsesAttrs ( true ) {} protected : bool m_bUsesAttrs; }; ISphFilter * sphCreateFilter ( const CSphFilterSettings &, const CSphSchema &, const DWORD * pMva, CSphString & sError ); ISphFilter * sphJoinFilters ( ISphFilter *, ISphFilter * ); #endif // _sphinxfilter_ // // $Id: sphinxfilter.h 3087 2012-01-30 23:07:35Z shodan $ // sphinx-2.0.4-release/src/sphinxtimers.h0000644000176700017710000000174211711621267017450 0ustar deogardeogar// // $Id: sphinxtimers.h 3087 2012-01-30 23:07:35Z shodan $ // // // Copyright (c) 2001-2012, Andrew Aksyonoff // Copyright (c) 2008-2012, Sphinx Technologies Inc // All rights reserved // // This program is free software; you can redistribute it and/or modify // it under the terms of the GNU General Public License. You should have // received a copy of the GPL license along with this program; if you // did not, you can find it at http://www.gnu.org/ // DECLARE_TIMER ( collect_hits ) DECLARE_TIMER ( sort_hits ) DECLARE_TIMER ( write_hits ) DECLARE_TIMER ( invert_hits ) DECLARE_TIMER ( read_hits ) DECLARE_TIMER ( src_document ) DECLARE_TIMER ( src_sql ) DECLARE_TIMER ( src_xmlpipe ) DECLARE_TIMER ( query_init ) DECLARE_TIMER ( query_load_dir ) DECLARE_TIMER ( query_load_words ) DECLARE_TIMER ( query_match ) DECLARE_TIMER ( query_sort ) DECLARE_TIMER ( debug1 ) DECLARE_TIMER ( debug2 ) DECLARE_TIMER ( debug3 ) // // $Id: sphinxtimers.h 3087 2012-01-30 23:07:35Z shodan $ // sphinx-2.0.4-release/src/yy.cmd0000644000176700017710000000122011405303332015645 0ustar deogardeogar@echo off bison -l -d -o yysphinxexpr.c sphinxexpr.y bison -l -d -o yysphinxselect.c sphinxselect.y bison -l -d -o yysphinxquery.c sphinxquery.y bison -l -d -o yysphinxql.c sphinxql.y flex -i -ollsphinxql.c sphinxql.l perl -npe "s/ __attr/\/\/ __attr/" -i.bak yysphinxexpr.c perl -npe "s/^yyerrlab1:/\/\/yyerrlab1:/m;s/ __attr/\/\/ __attr/" -i.bak yysphinxselect.c perl -npe "s/^yyerrlab1:/\/\/yyerrlab1:/m;s/ __attr/\/\/ __attr/" -i.bak yysphinxquery.c perl -npe "s/(#include )/#if !USE_WINDOWS\n\1\n#endif/;s/\(size_t\) num_to_read/num_to_read/" -i.bak llsphinxql.c patch -s -p0 -i yysphinxql.patch rm -f *.bak rm -f yysphinxql.c.orig sphinx-2.0.4-release/src/Makefile.in0000644000176700017710000004760711421071150016605 0ustar deogardeogar# Makefile.in generated by automake 1.11.1 from Makefile.am. # @configure_input@ # Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, # 2003, 2004, 2005, 2006, 2007, 2008, 2009 Free Software Foundation, # Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. 
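# ---------------------------------------------------------------------------
# Illustrative note (assumptions, not automake output): ./config.status turns
# this template into src/Makefile, so after the top-level configure step the
# usual build is simply
#
#   ./configure
#   make
#
# run from the release root. Everything from here down to the Sphinx-specific
# SRC_SPHINX / *_SOURCES variables is standard automake boilerplate.
# ---------------------------------------------------------------------------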
@SET_MAKE@ VPATH = @srcdir@ pkgdatadir = $(datadir)/@PACKAGE@ pkgincludedir = $(includedir)/@PACKAGE@ pkglibdir = $(libdir)/@PACKAGE@ pkglibexecdir = $(libexecdir)/@PACKAGE@ am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : bin_PROGRAMS = indexer$(EXEEXT) searchd$(EXEEXT) search$(EXEEXT) \ spelldump$(EXEEXT) indextool$(EXEEXT) noinst_PROGRAMS = tests$(EXEEXT) subdir = src DIST_COMMON = $(srcdir)/Makefile.am $(srcdir)/Makefile.in ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = $(top_srcdir)/acinclude.m4 \ $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = LIBRARIES = $(noinst_LIBRARIES) AR = ar ARFLAGS = cru libsphinx_a_AR = $(AR) $(ARFLAGS) libsphinx_a_LIBADD = am__objects_1 = sphinx.$(OBJEXT) sphinxexcerpt.$(OBJEXT) \ sphinxquery.$(OBJEXT) sphinxsoundex.$(OBJEXT) \ sphinxmetaphone.$(OBJEXT) sphinxstemen.$(OBJEXT) \ sphinxstemru.$(OBJEXT) sphinxstemcz.$(OBJEXT) \ sphinxutils.$(OBJEXT) md5.$(OBJEXT) sphinxstd.$(OBJEXT) \ sphinxsort.$(OBJEXT) sphinxexpr.$(OBJEXT) \ sphinxfilter.$(OBJEXT) sphinxsearch.$(OBJEXT) \ sphinxrt.$(OBJEXT) am_libsphinx_a_OBJECTS = $(am__objects_1) libsphinx_a_OBJECTS = $(am_libsphinx_a_OBJECTS) am__installdirs = "$(DESTDIR)$(bindir)" PROGRAMS = $(bin_PROGRAMS) $(noinst_PROGRAMS) am_indexer_OBJECTS = indexer.$(OBJEXT) indexer_OBJECTS = $(am_indexer_OBJECTS) indexer_LDADD = $(LDADD) @USE_LIBSTEMMER_TRUE@am__DEPENDENCIES_1 = \ @USE_LIBSTEMMER_TRUE@ $(top_srcdir)/libstemmer_c/libstemmer.a am__DEPENDENCIES_2 = am__DEPENDENCIES_3 = libsphinx.a $(am__DEPENDENCIES_1) \ $(am__DEPENDENCIES_2) $(am__DEPENDENCIES_2) indexer_DEPENDENCIES = $(am__DEPENDENCIES_3) am_indextool_OBJECTS = indextool.$(OBJEXT) indextool_OBJECTS = $(am_indextool_OBJECTS) indextool_LDADD = $(LDADD) indextool_DEPENDENCIES = $(am__DEPENDENCIES_3) am_search_OBJECTS = search.$(OBJEXT) search_OBJECTS = $(am_search_OBJECTS) search_LDADD = $(LDADD) search_DEPENDENCIES = $(am__DEPENDENCIES_3) am_searchd_OBJECTS = searchd.$(OBJEXT) searchd_OBJECTS = $(am_searchd_OBJECTS) searchd_LDADD = $(LDADD) searchd_DEPENDENCIES = $(am__DEPENDENCIES_3) am_spelldump_OBJECTS = spelldump.$(OBJEXT) spelldump_OBJECTS = $(am_spelldump_OBJECTS) spelldump_LDADD = $(LDADD) spelldump_DEPENDENCIES = $(am__DEPENDENCIES_3) am_tests_OBJECTS = tests.$(OBJEXT) tests_OBJECTS = $(am_tests_OBJECTS) tests_LDADD = $(LDADD) tests_DEPENDENCIES = $(am__DEPENDENCIES_3) DEFAULT_INCLUDES = -I.@am__isrc@ -I$(top_builddir)/config depcomp = $(SHELL) $(top_srcdir)/config/depcomp am__depfiles_maybe = depfiles am__mv = mv -f CXXCOMPILE = $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \ $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) CXXLD = $(CXX) CXXLINK = $(CXXLD) $(AM_CXXFLAGS) $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) \ -o $@ SOURCES = $(libsphinx_a_SOURCES) $(indexer_SOURCES) \ $(indextool_SOURCES) $(search_SOURCES) $(searchd_SOURCES) \ $(spelldump_SOURCES) $(tests_SOURCES) DIST_SOURCES = $(libsphinx_a_SOURCES) $(indexer_SOURCES) \ $(indextool_SOURCES) $(search_SOURCES) $(searchd_SOURCES) \ $(spelldump_SOURCES) $(tests_SOURCES) ETAGS = etags CTAGS = 
ctags DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) ACLOCAL = @ACLOCAL@ AMTAR = @AMTAR@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CONFDIR = @CONFDIR@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CXX = @CXX@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ GREP = @GREP@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ LDFLAGS = @LDFLAGS@ LIBOBJS = @LIBOBJS@ LIBRT = @LIBRT@ LIBS = @LIBS@ LTLIBOBJS = @LTLIBOBJS@ MAINT = @MAINT@ MAKEINFO = @MAKEINFO@ MKDIR_P = @MKDIR_P@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ OBJEXT = @OBJEXT@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PGSQL_CFLAGS = @PGSQL_CFLAGS@ PGSQL_LIBS = @PGSQL_LIBS@ RANLIB = @RANLIB@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ STRIP = @STRIP@ VERSION = @VERSION@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ bindir = @bindir@ build_alias = @build_alias@ builddir = @builddir@ datadir = @datadir@ datarootdir = @datarootdir@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ host_alias = @host_alias@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pgconfig = @pgconfig@ prefix = @prefix@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ SRC_SPHINX = sphinx.cpp sphinxexcerpt.cpp sphinxquery.cpp \ sphinxsoundex.cpp sphinxmetaphone.cpp sphinxstemen.cpp sphinxstemru.cpp sphinxstemcz.cpp \ sphinxutils.cpp md5.cpp sphinxstd.cpp sphinxsort.cpp sphinxexpr.cpp sphinxfilter.cpp \ sphinxsearch.cpp sphinxrt.cpp noinst_LIBRARIES = libsphinx.a libsphinx_a_SOURCES = $(SRC_SPHINX) indexer_SOURCES = indexer.cpp searchd_SOURCES = searchd.cpp search_SOURCES = search.cpp spelldump_SOURCES = spelldump.cpp indextool_SOURCES = indextool.cpp tests_SOURCES = tests.cpp BUILT_SOURCES = extract-version @USE_LIBSTEMMER_FALSE@LIBSTEMMER_LIBS = @USE_LIBSTEMMER_TRUE@LIBSTEMMER_LIBS = $(top_srcdir)/libstemmer_c/libstemmer.a @USE_LIBSTEMMER_FALSE@AM_CPPFLAGS = -DSYSCONFDIR="\"$(sysconfdir)\"" -DDATADIR="\"$(localstatedir)/data\"" @USE_LIBSTEMMER_TRUE@AM_CPPFLAGS = -I$(top_srcdir)/libstemmer_c/include -DSYSCONFDIR="\"$(sysconfdir)\"" -DDATADIR="\"$(localstatedir)/data\"" COMMON_LIBS = libsphinx.a $(LIBSTEMMER_LIBS) $(MYSQL_LIBS) $(PGSQL_LIBS) LDADD = $(COMMON_LIBS) all: $(BUILT_SOURCES) $(MAKE) $(AM_MAKEFLAGS) all-am .SUFFIXES: .SUFFIXES: .cpp .o .obj 
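# Illustrative expansion (assuming a gcc/g++ toolchain; actual flags depend on
# configure results): the .cpp.o suffix rule defined further down boils down
# to roughly
#
#   g++ $(DEFS) -I. -I$(top_builddir)/config $(CPPFLAGS) $(CXXFLAGS) -c -o sphinx.o sphinx.cpp
#
# with -MT/-MD/-MP/-MF dependency tracking added when the compiler supports it.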
$(srcdir)/Makefile.in: @MAINTAINER_MODE_TRUE@ $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: @MAINTAINER_MODE_TRUE@ $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): @MAINTAINER_MODE_TRUE@ $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): clean-noinstLIBRARIES: -test -z "$(noinst_LIBRARIES)" || rm -f $(noinst_LIBRARIES) libsphinx.a: $(libsphinx_a_OBJECTS) $(libsphinx_a_DEPENDENCIES) -rm -f libsphinx.a $(libsphinx_a_AR) libsphinx.a $(libsphinx_a_OBJECTS) $(libsphinx_a_LIBADD) $(RANLIB) libsphinx.a install-binPROGRAMS: $(bin_PROGRAMS) @$(NORMAL_INSTALL) test -z "$(bindir)" || $(MKDIR_P) "$(DESTDIR)$(bindir)" @list='$(bin_PROGRAMS)'; test -n "$(bindir)" || list=; \ for p in $$list; do echo "$$p $$p"; done | \ sed 's/$(EXEEXT)$$//' | \ while read p p1; do if test -f $$p; \ then echo "$$p"; echo "$$p"; else :; fi; \ done | \ sed -e 'p;s,.*/,,;n;h' -e 's|.*|.|' \ -e 'p;x;s,.*/,,;s/$(EXEEXT)$$//;$(transform);s/$$/$(EXEEXT)/' | \ sed 'N;N;N;s,\n, ,g' | \ $(AWK) 'BEGIN { files["."] = ""; dirs["."] = 1 } \ { d=$$3; if (dirs[d] != 1) { print "d", d; dirs[d] = 1 } \ if ($$2 == $$4) files[d] = files[d] " " $$1; \ else { print "f", $$3 "/" $$4, $$1; } } \ END { for (d in files) print "f", d, files[d] }' | \ while read type dir files; do \ if test "$$dir" = .; then dir=; else dir=/$$dir; fi; \ test -z "$$files" || { \ echo " $(INSTALL_PROGRAM_ENV) $(INSTALL_PROGRAM) $$files '$(DESTDIR)$(bindir)$$dir'"; \ $(INSTALL_PROGRAM_ENV) $(INSTALL_PROGRAM) $$files "$(DESTDIR)$(bindir)$$dir" || exit $$?; \ } \ ; done uninstall-binPROGRAMS: @$(NORMAL_UNINSTALL) @list='$(bin_PROGRAMS)'; test -n "$(bindir)" || list=; \ files=`for p in $$list; do echo "$$p"; done | \ sed -e 'h;s,^.*/,,;s/$(EXEEXT)$$//;$(transform)' \ -e 's/$$/$(EXEEXT)/' `; \ test -n "$$list" || exit 0; \ echo " ( cd '$(DESTDIR)$(bindir)' && rm -f" $$files ")"; \ cd "$(DESTDIR)$(bindir)" && rm -f $$files clean-binPROGRAMS: -test -z "$(bin_PROGRAMS)" || rm -f $(bin_PROGRAMS) clean-noinstPROGRAMS: -test -z "$(noinst_PROGRAMS)" || rm -f $(noinst_PROGRAMS) indexer$(EXEEXT): $(indexer_OBJECTS) $(indexer_DEPENDENCIES) @rm -f indexer$(EXEEXT) $(CXXLINK) $(indexer_OBJECTS) $(indexer_LDADD) $(LIBS) indextool$(EXEEXT): $(indextool_OBJECTS) $(indextool_DEPENDENCIES) @rm -f indextool$(EXEEXT) $(CXXLINK) $(indextool_OBJECTS) $(indextool_LDADD) $(LIBS) search$(EXEEXT): $(search_OBJECTS) $(search_DEPENDENCIES) @rm -f search$(EXEEXT) $(CXXLINK) $(search_OBJECTS) $(search_LDADD) $(LIBS) searchd$(EXEEXT): $(searchd_OBJECTS) $(searchd_DEPENDENCIES) @rm -f searchd$(EXEEXT) $(CXXLINK) $(searchd_OBJECTS) 
$(searchd_LDADD) $(LIBS) spelldump$(EXEEXT): $(spelldump_OBJECTS) $(spelldump_DEPENDENCIES) @rm -f spelldump$(EXEEXT) $(CXXLINK) $(spelldump_OBJECTS) $(spelldump_LDADD) $(LIBS) tests$(EXEEXT): $(tests_OBJECTS) $(tests_DEPENDENCIES) @rm -f tests$(EXEEXT) $(CXXLINK) $(tests_OBJECTS) $(tests_LDADD) $(LIBS) mostlyclean-compile: -rm -f *.$(OBJEXT) distclean-compile: -rm -f *.tab.c @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/indexer.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/indextool.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/md5.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/search.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/searchd.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/spelldump.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/sphinx.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/sphinxexcerpt.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/sphinxexpr.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/sphinxfilter.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/sphinxmetaphone.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/sphinxquery.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/sphinxrt.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/sphinxsearch.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/sphinxsort.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/sphinxsoundex.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/sphinxstd.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/sphinxstemcz.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/sphinxstemen.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/sphinxstemru.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/sphinxutils.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/tests.Po@am__quote@ .cpp.o: @am__fastdepCXX_TRUE@ $(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCXX_TRUE@ $(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(CXXCOMPILE) -c -o $@ $< .cpp.obj: @am__fastdepCXX_TRUE@ $(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'` @am__fastdepCXX_TRUE@ $(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(CXXCOMPILE) -c -o $@ `$(CYGPATH_W) '$<'` ID: $(HEADERS) $(SOURCES) $(LISP) $(TAGS_FILES) list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in files) print i; }; }'`; \ mkid -fID $$unique tags: TAGS TAGS: $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \ $(TAGS_FILES) $(LISP) set x; \ here=`pwd`; \ list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in files) print i; }; }'`; \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then 
:; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: CTAGS CTAGS: $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \ $(TAGS_FILES) $(LISP) list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in files) print i; }; }'`; \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am check: $(BUILT_SOURCES) $(MAKE) $(AM_MAKEFLAGS) check-am all-am: Makefile $(LIBRARIES) $(PROGRAMS) installdirs: for dir in "$(DESTDIR)$(bindir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: $(BUILT_SOURCES) $(MAKE) $(AM_MAKEFLAGS) install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ `test -z '$(STRIP)' || \ echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
-test -z "$(BUILT_SOURCES)" || rm -f $(BUILT_SOURCES) clean: clean-am clean-am: clean-binPROGRAMS clean-generic clean-noinstLIBRARIES \ clean-noinstPROGRAMS mostlyclean-am distclean: distclean-am -rm -rf ./$(DEPDIR) -rm -f Makefile distclean-am: clean-am distclean-compile distclean-generic \ distclean-tags dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-binPROGRAMS install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -rf ./$(DEPDIR) -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-compile mostlyclean-generic pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: uninstall-binPROGRAMS .MAKE: all check install install-am install-strip .PHONY: CTAGS GTAGS all all-am check check-am clean clean-binPROGRAMS \ clean-generic clean-noinstLIBRARIES clean-noinstPROGRAMS ctags \ distclean distclean-compile distclean-generic distclean-tags \ distdir dvi dvi-am html html-am info info-am install \ install-am install-binPROGRAMS install-data install-data-am \ install-dvi install-dvi-am install-exec install-exec-am \ install-html install-html-am install-info install-info-am \ install-man install-pdf install-pdf-am install-ps \ install-ps-am install-strip installcheck installcheck-am \ installdirs maintainer-clean maintainer-clean-generic \ mostlyclean mostlyclean-compile mostlyclean-generic pdf pdf-am \ ps ps-am tags uninstall uninstall-am uninstall-binPROGRAMS .PHONY: extract-version extract-version: /bin/sh svnxrev.sh .. # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: sphinx-2.0.4-release/src/svnxrev.cmd0000644000176700017710000000060711356363542016745 0ustar deogardeogar@echo off setlocal enabledelayedexpansion set REVFILE=%1\src\sphinxversion.h if exist %1\.svn ( svn info --xml %1 | perl %1\src\svnxrev.pl %REVFILE% ) else if exist %1\.hg ( if exist %REVFILE% for /f "delims=" %%a in (%REVFILE%) do set oldfile=%%a for /f %%i in ('hg id') do set newfile=#define SPH_SVN_TAGREV "%%i" if "!newfile!" neq "!oldfile!" echo !newfile!> %REVFILE% ) sphinx-2.0.4-release/src/yysphinxquery.h0000644000176700017710000000426111611324334017665 0ustar deogardeogar/* A Bison parser, made by GNU Bison 1.875. */ /* Skeleton parser for Yacc-like parsing with Bison, Copyright (C) 1984, 1989, 1990, 2000, 2001, 2002 Free Software Foundation, Inc. This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */ /* As a special exception, when this file is copied by Bison into a Bison output file, you may use that output file without restriction. 
This special exception was added by the Free Software Foundation in version 1.24 of Bison. */ /* Tokens. */ #ifndef YYTOKENTYPE # define YYTOKENTYPE /* Put the tokens into the symbol table, so that GDB and other debuggers know about them. */ enum yytokentype { TOK_KEYWORD = 258, TOK_NEAR = 259, TOK_INT = 260, TOK_FIELDLIMIT = 261, TOK_ZONE = 262, TOK_BEFORE = 263, TOK_SENTENCE = 264, TOK_PARAGRAPH = 265 }; #endif #define TOK_KEYWORD 258 #define TOK_NEAR 259 #define TOK_INT 260 #define TOK_FIELDLIMIT 261 #define TOK_ZONE 262 #define TOK_BEFORE 263 #define TOK_SENTENCE 264 #define TOK_PARAGRAPH 265 #if ! defined (YYSTYPE) && ! defined (YYSTYPE_IS_DECLARED) typedef union YYSTYPE { XQNode_t * pNode; // tree node struct { int iValue; int iStrIndex; } tInt; struct // field spec { CSphSmallBitvec dMask; // acceptable fields mask int iMaxPos; // max allowed position within field } tFieldLimit; int iZoneVec; } YYSTYPE; /* Line 1204 of yacc.c. */ # define yystype YYSTYPE /* obsolescent; will be withdrawn */ # define YYSTYPE_IS_DECLARED 1 # define YYSTYPE_IS_TRIVIAL 1 #endif sphinx-2.0.4-release/src/sphinxcustomsort.inl0000644000176700017710000000144710641215207020716 0ustar deogardeogar// // $Id: sphinxcustomsort.inl 703 2007-06-29 14:43:19Z shodan $ // // in this file, one can declare a custom sorting function // which will be used when sorting by "@custom" in extended sorting mode // first, declare which attributes will be used in the function // // they will be then accessible in the function using MATCH_ATTR(number) // macro, where "number" is assigned in the order of declaration MATCH_DECLARE_ATTR ( "group_id" ) MATCH_DECLARE_ATTR ( "date_added" ) // second, define the function itself // // in this example, the matches will be sorted by // @weight + group_id*0.3 - log ( NOW() - date_added ) // in descending order MATCH_FUNCTION = MATCH_WEIGHT + MATCH_ATTR(0)*0.3f - log ( MATCH_NOW - MATCH_ATTR(1) ) // // $Id: sphinxcustomsort.inl 703 2007-06-29 14:43:19Z shodan $ // sphinx-2.0.4-release/src/sphinxexpr.h0000644000176700017710000001114711711621267017123 0ustar deogardeogar// // $Id: sphinxexpr.h 3087 2012-01-30 23:07:35Z shodan $ // // // Copyright (c) 2001-2012, Andrew Aksyonoff // Copyright (c) 2008-2012, Sphinx Technologies Inc // All rights reserved // // This program is free software; you can redistribute it and/or modify // it under the terms of the GNU General Public License. 
You should have // received a copy of the GPL license along with this program; if you // did not, you can find it at http://www.gnu.org/ // #ifndef _sphinxexpr_ #define _sphinxexpr_ #include "sphinxstd.h" /// forward decls class CSphMatch; struct CSphSchema; struct CSphString; /// known attribute types enum ESphAttr { SPH_ATTR_NONE = 0, ///< not an attribute at all SPH_ATTR_INTEGER = 1, ///< unsigned 32-bit integer SPH_ATTR_TIMESTAMP = 2, ///< this attr is a timestamp SPH_ATTR_ORDINAL = 3, ///< this attr is an ordinal string number (integer at search time, specially handled at indexing time) SPH_ATTR_BOOL = 4, ///< this attr is a boolean bit field SPH_ATTR_FLOAT = 5, ///< floating point number (IEEE 32-bit) SPH_ATTR_BIGINT = 6, ///< signed 64-bit integer SPH_ATTR_STRING = 7, ///< string (binary; in-memory) SPH_ATTR_WORDCOUNT = 8, ///< string word count (integer at search time,tokenized and counted at indexing time) SPH_ATTR_UINT32SET = 0x40000001UL, ///< MVA, set of unsigned 32-bit integers SPH_ATTR_UINT64SET = 0x40000002UL ///< MVA, set of unsigned 64-bit integers }; /// expression evaluator /// can always be evaluated in floats using Eval() /// can sometimes be evaluated in integers using IntEval(), depending on type as returned from sphExprParse() struct ISphExpr : public ISphRefcounted { public: /// evaluate this expression for that match virtual float Eval ( const CSphMatch & tMatch ) const = 0; /// evaluate this expression for that match, using int math virtual int IntEval ( const CSphMatch & tMatch ) const { assert ( 0 ); return (int) Eval ( tMatch ); } /// evaluate this expression for that match, using int64 math virtual int64_t Int64Eval ( const CSphMatch & tMatch ) const { assert ( 0 ); return (int64_t) Eval ( tMatch ); } /// evaluate string attr virtual int StringEval ( const CSphMatch &, const BYTE ** ppStr ) const { *ppStr = NULL; return 0; } /// evaluate MVA attr virtual const DWORD * MvaEval ( const CSphMatch & ) const { assert ( 0 ); return NULL; } /// check for arglist subtype virtual bool IsArglist () const { return false; } /// setup MVA pool virtual void SetMVAPool ( const DWORD * ) {} /// setup sting pool virtual void SetStringPool ( const BYTE * ) {} /// get schema columns index which affect expression virtual void GetDependencyColumns ( CSphVector & ) const {} }; /// hook to extend expressions /// lets one to add her own identifier and function handlers struct ISphExprHook { virtual ~ISphExprHook () {} /// checks for an identifier known to the hook /// returns -1 on failure, a non-negative OID on success virtual int IsKnownIdent ( const char * sIdent ) = 0; /// checks for a valid function call /// returns -1 on failure, a non-negative OID on success (possibly adjusted) virtual int IsKnownFunc ( const char * sFunc ) = 0; /// create node by OID virtual ISphExpr * CreateNode ( int iID, ISphExpr * pLeft ) = 0; /// get identifier return type by OID virtual ESphAttr GetIdentType ( int iID ) = 0; /// get function return type by OID and argument types vector /// must return SPH_ATTR_NONE and fill the message on failure virtual ESphAttr GetReturnType ( int iID, const CSphVector & dArgs, bool bAllConst, CSphString & sError ) = 0; /// recursive scope check virtual void CheckEnter ( int iID ) = 0; /// recursive scope check virtual void CheckExit ( int iID ) = 0; }; /// parses given expression, builds evaluator /// returns NULL and fills sError on failure /// returns pointer to evaluator on success /// fills pAttrType with result type (for now, can be SPH_ATTR_SINT or 
SPH_ATTR_FLOAT) /// fills pUsesWeight with a flag whether match relevance is referenced in expression AST ISphExpr * sphExprParse ( const char * sExpr, const CSphSchema & tSchema, ESphAttr * pAttrType, bool * pUsesWeight, CSphString & sError, CSphSchema * pExtra=NULL, ISphExprHook * pHook=NULL ); ////////////////////////////////////////////////////////////////////////// /// initialize UDF manager void sphUDFInit ( const char * sUdfDir ); /// load UDF function bool sphUDFCreate ( const char * szLib, const char * szFunc, ESphAttr eRetType, CSphString & sError ); /// unload UDF function bool sphUDFDrop ( const char * szFunc, CSphString & sError ); #endif // _sphinxexpr_ // // $Id: sphinxexpr.h 3087 2012-01-30 23:07:35Z shodan $ // sphinx-2.0.4-release/src/sphinxutils.h0000644000176700017710000001460311711621267017305 0ustar deogardeogar// // $Id: sphinxutils.h 3087 2012-01-30 23:07:35Z shodan $ // // // Copyright (c) 2001-2012, Andrew Aksyonoff // Copyright (c) 2008-2012, Sphinx Technologies Inc // All rights reserved // // This program is free software; you can redistribute it and/or modify // it under the terms of the GNU General Public License. You should have // received a copy of the GPL license along with this program; if you // did not, you can find it at http://www.gnu.org/ // /// @file sphinxutils.h /// Declarations for the stuff shared by all Sphinx utilities. #ifndef _sphinxutils_ #define _sphinxutils_ #include #include ///////////////////////////////////////////////////////////////////////////// /// my own isalpha (let's build our own theme park!) inline int sphIsAlpha ( int c ) { return ( c>='0' && c<='9' ) || ( c>='a' && c<='z' ) || ( c>='A' && c<='Z' ) || c=='-' || c=='_'; } /// my own isspace inline bool sphIsSpace ( int iCode ) { return iCode==' ' || iCode=='\t' || iCode=='\n' || iCode=='\r'; } /// string splitter, extracts sequences of alphas (as in sphIsAlpha) inline void sphSplit ( CSphVector & dOut, const char * sIn ) { if ( !sIn ) return; const char * p = (char*)sIn; while ( *p ) { // skip non-alphas while ( (*p) && !sphIsAlpha(*p) ) p++; if ( !(*p) ) break; // this is my next token assert ( sphIsAlpha(*p) ); const char * sNext = p; while ( sphIsAlpha(*p) ) p++; if ( sNext!=p ) dOut.Add().SetBinary ( sNext, p-sNext ); } } /// config section (hash of variant values) class CSphConfigSection : public SmallStringHash_T < CSphVariant > { public: /// get integer option value by key and default value int GetInt ( const char * sKey, int iDefault=0 ) const { CSphVariant * pEntry = (*this)( sKey ); return pEntry ? pEntry->intval() : iDefault; } /// get float option value by key and default value float GetFloat ( const char * sKey, float fDefault=0.0f ) const { CSphVariant * pEntry = (*this)( sKey ); return pEntry ? pEntry->floatval() : fDefault; } /// get string option value by key and default value const char * GetStr ( const char * sKey, const char * sDefault="" ) const { CSphVariant * pEntry = (*this)( sKey ); return pEntry ? 
pEntry->cstr() : sDefault; } /// get size option (plain int, or with K/M prefix) value by key and default value int GetSize ( const char * sKey, int iDefault ) const; }; /// config section type (hash of sections) typedef SmallStringHash_T < CSphConfigSection > CSphConfigType; /// config (hash of section types) typedef SmallStringHash_T < CSphConfigType > CSphConfig; /// simple config file class CSphConfigParser { public: CSphConfig m_tConf; public: CSphConfigParser (); bool Parse ( const char * sFileName, const char * pBuffer = NULL ); // fail-save loading new config over existing. bool ReParse ( const char * sFileName, const char * pBuffer = NULL ); protected: CSphString m_sFileName; int m_iLine; CSphString m_sSectionType; CSphString m_sSectionName; char m_sError [ 1024 ]; int m_iWarnings; static const int WARNS_THRESH = 5; protected: bool IsPlainSection ( const char * sKey ); bool IsNamedSection ( const char * sKey ); bool AddSection ( const char * sType, const char * sSection ); void AddKey ( const char * sKey, char * sValue ); bool ValidateKey ( const char * sKey ); #if !USE_WINDOWS bool TryToExec ( char * pBuffer, char * pEnd, const char * szFilename, CSphVector & dResult ); #endif char * GetBufferString ( char * szDest, int iMax, const char * & szSource ); }; ///////////////////////////////////////////////////////////////////////////// enum { TOKENIZER_SBCS = 1, TOKENIZER_UTF8 = 2, TOKENIZER_NGRAM = 3 }; /// load config file const char * sphLoadConfig ( const char * sOptConfig, bool bQuiet, CSphConfigParser & cp ); /// configure tokenizer from index definition section bool sphConfTokenizer ( const CSphConfigSection & hIndex, CSphTokenizerSettings & tSettings, CSphString & sError ); /// configure dictionary from index definition section void sphConfDictionary ( const CSphConfigSection & hIndex, CSphDictSettings & tSettings ); /// configure index from index definition section bool sphConfIndex ( const CSphConfigSection & hIndex, CSphIndexSettings & tSettings, CSphString & sError ); /// try to set dictionary, tokenizer and misc settings for an index (if not already set) bool sphFixupIndexSettings ( CSphIndex * pIndex, const CSphConfigSection & hIndex, CSphString & sError ); enum ESphLogLevel { SPH_LOG_FATAL = 0, SPH_LOG_WARNING = 1, SPH_LOG_INFO = 2, SPH_LOG_DEBUG = 3, SPH_LOG_VERBOSE_DEBUG = 4, SPH_LOG_VERY_VERBOSE_DEBUG = 5 }; typedef void ( *SphLogger_fn )( ESphLogLevel, const char *, va_list ); void sphWarning ( const char * sFmt, ... ) __attribute__((format(printf,1,2))); void sphInfo ( const char * sFmt, ... ) __attribute__((format(printf,1,2))); void sphLogFatal ( const char * sFmt, ... ) __attribute__((format(printf,1,2))); void sphLogDebug ( const char * sFmt, ... ) __attribute__((format(printf,1,2))); void sphLogDebugv ( const char * sFmt, ... ) __attribute__((format(printf,1,2))); void sphLogDebugvv ( const char * sFmt, ... ) __attribute__((format(printf,1,2))); void sphSetLogger ( SphLogger_fn fnLog ); ////////////////////////////////////////////////////////////////////////// /// how do we properly exit from the crash handler? 
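// (summary of the four cases below: UNIX debug builds restore the default
// handler and re-raise the signal so a core file gets dumped; UNIX release
// builds just exit(2); Windows debug builds return EXCEPTION_CONTINUE_SEARCH
// so a debugger can be attached; Windows release builds return
// EXCEPTION_EXECUTE_HANDLER and terminate)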
#if !USE_WINDOWS #ifndef NDEBUG // UNIX debug build, die and dump core #define CRASH_EXIT { signal ( sig, SIG_DFL ); kill ( getpid(), sig ); } #else // UNIX release build, just die #define CRASH_EXIT exit ( 2 ) #endif #else #ifndef NDEBUG // Windows debug build, show prompt to attach debugger #define CRASH_EXIT return EXCEPTION_CONTINUE_SEARCH #else // Windows release build, just die #define CRASH_EXIT return EXCEPTION_EXECUTE_HANDLER #endif #endif /// simple write wrapper /// simplifies partial write checks, and also supresses "fortified" glibc warnings bool sphWrite ( int iFD, const void * pBuf, size_t iSize ); /// async safe, BUT NOT THREAD SAFE, fprintf void sphSafeInfo ( int iFD, const char * sFmt, ... ); #if !USE_WINDOWS /// UNIX backtrace gets printed out to a stream void sphBacktrace ( int iFD, bool bSafe=false ); #else /// Windows minidump gets saved to a file void sphBacktrace ( EXCEPTION_POINTERS * pExc, const char * sFile ); #endif #endif // _sphinxutils_ // // $Id: sphinxutils.h 3087 2012-01-30 23:07:35Z shodan $ // sphinx-2.0.4-release/src/spelldump.cpp0000644000176700017710000006376211711621267017265 0ustar deogardeogar// // $Id: spelldump.cpp 3087 2012-01-30 23:07:35Z shodan $ // // // Copyright (c) 2001-2012, Andrew Aksyonoff // Copyright (c) 2008-2012, Sphinx Technologies Inc // All rights reserved // // This program is free software; you can redistribute it and/or modify // it under the terms of the GNU General Public License. You should have // received a copy of the GPL license along with this program; if you // did not, you can find it at http://www.gnu.org/ // #include "sphinx.h" #include "sphinxutils.h" #include const int MAX_STR_LENGTH = 512; ////////////////////////////////////////////////////////////////////////// BYTE GetWordchar ( const char * & szSet ) { if ( *szSet=='\\' ) { if ( !szSet[1] || !szSet[2] || !szSet[3] ) return 0; char szBuf[3]; memcpy ( szBuf, szSet+2, 2 ); szBuf[2] = 0; char * szStop = NULL; int iRes = strtol ( szBuf, &szStop, 16 ); if ( szStop!=szBuf+2 || iRes<0 || iRes>255 ) return 0; szSet += 4; return (BYTE) iRes; } return *szSet++; } bool IsInSet ( BYTE uLetter, const char * szSet ) { if ( !szSet ) return false; bool bInvert = ( *szSet=='^' ); if ( bInvert ) ++szSet; const char * szSep = strchr ( szSet, '-' ); bool bRange = ( szSep!=NULL ); if ( bRange ) { BYTE uRange1 = GetWordchar ( szSet ); szSep++; BYTE uRange2 = GetWordchar ( szSep ); if ( uLetter>=Min ( uRange1, uRange2 ) && uLetter<=Max ( uRange1, uRange2 ) ) return !bInvert; } else { BYTE uChar = 0; while ( ( uChar = GetWordchar ( szSet ) )!=0 ) if ( uChar==uLetter ) break; bool bEnd = !uChar; if ( bInvert && bEnd ) return true; if ( !bInvert && !bEnd ) return true; } return false; } bool GetSetMinMax ( const char * szSet, BYTE & uMin, BYTE & uMax ) { if ( !szSet || !*szSet ) return false; uMin = GetWordchar ( szSet ); uMax = uMin; BYTE uChar; while ( ( uChar = GetWordchar ( szSet ) )!=0 ) if ( uChar!='-' ) { uMin = Min ( uMin, uChar ); uMax = Max ( uMax, uChar ); } if ( !uMin || !uMax ) return false; return true; } ////////////////////////////////////////////////////////////////////////// class CISpellDict { public: struct CISpellDictWord { CSphString m_sWord; CSphString m_sFlags; }; bool Load ( const char * szFilename ); void IterateStart (); const CISpellDictWord * IterateNext (); private: CSphVector < CISpellDictWord > m_dEntries; int m_iIterator; }; bool CISpellDict::Load ( const char * szFilename ) { if ( !szFilename ) return false; m_dEntries.Reset (); m_dEntries.Reserve ( 
131072 ); FILE * pFile = fopen ( szFilename, "rt" ); if ( !pFile ) return false; char szWordBuffer [MAX_STR_LENGTH]; while ( !feof ( pFile ) ) { char * szResult = fgets ( szWordBuffer, MAX_STR_LENGTH, pFile ); if ( !szResult ) break; int iPos = strlen ( szWordBuffer ) - 1; while ( iPos>=0 && isspace ( (unsigned char)szWordBuffer[iPos] ) ) szWordBuffer [iPos--] = '\0'; CISpellDictWord Word; char * szPosition = strchr ( szWordBuffer, '/' ); if ( !szPosition ) { szPosition = szWordBuffer; while ( *szPosition && !isspace ( (unsigned char)*szPosition ) ) ++szPosition; *szPosition = '\0'; Word.m_sWord = szWordBuffer; } else { *szPosition = '\0'; Word.m_sWord = szWordBuffer; ++szPosition; char * szFlags = szPosition; while ( *szPosition && !isspace ( (unsigned char)*szPosition ) ) ++szPosition; *szPosition = '\0'; Word.m_sFlags = szFlags; } m_dEntries.Add ( Word ); } fclose ( pFile ); return true; } void CISpellDict::IterateStart () { m_iIterator = 0; } const CISpellDict::CISpellDictWord * CISpellDict::IterateNext () { if ( m_iIterator>=m_dEntries.GetLength() ) return NULL; return &m_dEntries [m_iIterator++]; } ////////////////////////////////////////////////////////////////////////// enum RuleType_e { RULE_NONE, RULE_PREFIXES, RULE_SUFFIXES }; class CISpellAffixRule { public: CISpellAffixRule () {} CISpellAffixRule ( RuleType_e eRule, char cFlag, bool bCrossProduct, char * szCondition, char * szStrip, char * szAppend ); bool Apply ( CSphString & sWord ); char Flag () const; bool IsCrossProduct () const; bool IsPrefix () const; private: RuleType_e m_eRule; char m_cFlag; bool m_bCrossProduct; CSphString m_sCondition; CSphString m_sStrip; CSphString m_sAppend; int m_iWordLen; int m_iCondLen; int m_iStripLen; int m_iAppendLen; bool CheckSuffix ( const CSphString & sWord ) const; bool CheckPrefix ( const CSphString & sWord ) const; bool StripAppendSuffix ( CSphString & sWord ) const; bool StripAppendPrefix ( CSphString & sWord ) const; }; CISpellAffixRule::CISpellAffixRule ( RuleType_e eRule, char cFlag, bool bCrossProduct, char * szCondition, char * szStrip, char * szAppend ) : m_eRule ( eRule ) , m_cFlag ( cFlag ) , m_bCrossProduct ( bCrossProduct ) , m_sCondition ( szCondition ) , m_sStrip ( szStrip ) , m_sAppend ( szAppend ) , m_iWordLen ( 0 ) { m_iCondLen = szCondition ? strlen ( szCondition ) : 0; m_iStripLen = szStrip ? strlen ( szStrip ) : 0; m_iAppendLen = szAppend ? strlen ( szAppend ) : 0; } bool CISpellAffixRule::Apply ( CSphString & sWord ) { if ( m_sCondition.IsEmpty () ) return true; if ( sWord.IsEmpty () ) return false; m_iWordLen = strlen ( sWord.cstr () ); bool bDotCond = ( m_sCondition=="." 
); if ( m_eRule==RULE_SUFFIXES ) { if ( !bDotCond && !CheckSuffix ( sWord ) ) return false; if ( !StripAppendSuffix ( sWord ) ) return false; } else { if ( !bDotCond && !CheckPrefix ( sWord ) ) return false; if ( !StripAppendPrefix ( sWord ) ) return false; } return true; } bool CISpellAffixRule::CheckSuffix ( const CSphString & sWord ) const { int iCondI = m_iCondLen-1; for ( int i=m_iWordLen-1; iCondI>=0 && i>=0; --i ) { if ( m_sCondition.cstr()[iCondI]!=']' ) { if ( m_sCondition.cstr()[iCondI]!=sWord.cstr()[i] ) return false; --iCondI; } else { int iRangeStart = -1; for ( int j=iCondI; j>=0 && iRangeStart==-1; --j ) if ( m_sCondition.cstr()[j]=='[' ) iRangeStart = j; if ( iRangeStart==-1 ) return false; else { if ( !IsInSet ( sWord.cstr () [i], m_sCondition.SubString ( iRangeStart + 1, iCondI - iRangeStart - 1 ).cstr () ) ) return false; iCondI = iRangeStart - 1; } } } return true; } bool CISpellAffixRule::StripAppendSuffix ( CSphString & sWord ) const { static char szTmp [ MAX_STR_LENGTH]; if ( !m_sStrip.IsEmpty () ) { if ( m_iWordLen < m_iStripLen ) return false; if ( strncmp ( sWord.cstr () + m_iWordLen - m_iStripLen, m_sStrip.cstr (), m_iStripLen ) ) return false; } strncpy ( szTmp, sWord.cstr (), m_iWordLen - m_iStripLen ); szTmp [m_iWordLen - m_iStripLen] = '\0'; if ( !m_sAppend.IsEmpty () ) strcat ( szTmp, m_sAppend.cstr () ); // NOLINT sWord = szTmp; return true; } bool CISpellAffixRule::CheckPrefix ( const CSphString & sWord ) const { int iCondI = 0; for ( int i = 0; iCondI < m_iCondLen && i < m_iWordLen; ++i ) { if ( m_sCondition.cstr()[iCondI]!='[' ) { if ( m_sCondition.cstr()[iCondI]!=sWord.cstr()[i] ) return false; ++iCondI; } else { int iRangeEnd = -1; for ( int j=iCondI; j m_dRules; char m_dCharset [256]; bool m_bFirstCaseConv; CSphString m_sLocale; CSphString m_sCharsetFile; bool m_bCheckCrosses; CSphLowercaser m_LowerCaser; bool m_bUseLowerCaser; bool m_bUseDictConversion; bool AddToCharset ( char * szRangeL, char * szRangeU ); void AddCharPair ( BYTE uCharL, BYTE uCharU ); void Strip ( char * szText ); char ToLowerCase ( char cChar ); void LoadLocale (); AffixFormat_e DetectFormat ( FILE * ); bool LoadISpell ( FILE * ); bool LoadMySpell ( FILE * ); }; CISpellAffix::CISpellAffix ( const char * szLocale, const char * szCharsetFile ) : m_bFirstCaseConv ( true ) , m_sLocale ( szLocale ) , m_sCharsetFile ( szCharsetFile ) , m_bCheckCrosses ( false ) , m_bUseLowerCaser ( false ) , m_bUseDictConversion ( false ) { } AffixFormat_e CISpellAffix::DetectFormat ( FILE * pFile ) { char sBuffer [MAX_STR_LENGTH]; while ( !feof ( pFile ) ) { char * sLine = fgets ( sBuffer, MAX_STR_LENGTH, pFile ); if ( !sLine ) break; if ( !strncmp ( sLine, "SFX", 3 ) ) return AFFIX_FORMAT_MYSPELL; if ( !strncmp ( sLine, "PFX", 3 ) ) return AFFIX_FORMAT_MYSPELL; if ( !strncmp ( sLine, "REP", 3 ) ) return AFFIX_FORMAT_MYSPELL; if ( !strncasecmp ( sLine, "prefixes", 8 ) ) return AFFIX_FORMAT_ISPELL; if ( !strncasecmp ( sLine, "suffixes", 8 ) ) return AFFIX_FORMAT_ISPELL; if ( !strncasecmp ( sLine, "flag", 4 ) ) return AFFIX_FORMAT_ISPELL; } return AFFIX_FORMAT_UNKNOWN; } bool CISpellAffix::Load ( const char * szFilename ) { if ( !szFilename ) return false; m_dRules.Reset (); memset ( m_dCharset, 0, sizeof ( m_dCharset ) ); m_bFirstCaseConv = true; m_bUseLowerCaser = false; m_bUseDictConversion = false; m_LowerCaser.Reset (); FILE * pFile = fopen ( szFilename, "rt" ); if ( !pFile ) return false; bool bResult = false; AffixFormat_e eFormat = DetectFormat ( pFile ); if ( eFormat==AFFIX_FORMAT_UNKNOWN ) 
printf ( "Failed to detect affix file format\n" ); else { fseek ( pFile, SEEK_SET, 0 ); printf ( "Using %s affix file format\n", AffixFormatName[eFormat] ); switch ( eFormat ) { case AFFIX_FORMAT_MYSPELL: bResult = LoadMySpell ( pFile ); break; case AFFIX_FORMAT_ISPELL: bResult = LoadISpell ( pFile ); break; case AFFIX_FORMAT_UNKNOWN: break; } } fclose ( pFile ); bool bHaveCrossPrefix = false; for ( int i = 0; i < m_dRules.GetLength () && !bHaveCrossPrefix; i++ ) if ( m_dRules[i].IsPrefix() && m_dRules[i].IsCrossProduct() ) bHaveCrossPrefix = true; bool bHaveCrossSuffix = false; for ( int i = 0; i < m_dRules.GetLength () && !bHaveCrossSuffix; i++ ) if ( !m_dRules[i].IsPrefix() && m_dRules[i].IsCrossProduct() ) bHaveCrossSuffix = true; m_bCheckCrosses = bHaveCrossPrefix && bHaveCrossSuffix; return bResult; } bool CISpellAffix::LoadISpell ( FILE * pFile ) { char szBuffer [ MAX_STR_LENGTH ]; char szCondition [ MAX_STR_LENGTH ]; char szStrip [ MAX_STR_LENGTH ]; char szAppend [ MAX_STR_LENGTH ]; RuleType_e eRule = RULE_NONE; char cFlag = '\0'; bool bCrossProduct = false; int iLine = 0; // TODO: parse all .aff character replacement commands while ( !feof ( pFile ) ) { char * szResult = fgets ( szBuffer, MAX_STR_LENGTH, pFile ); if ( !szResult ) break; iLine++; if ( !strncasecmp ( szBuffer, "prefixes", 8 ) ) { eRule = RULE_PREFIXES; continue; } if ( !strncasecmp ( szBuffer, "suffixes", 8 ) ) { eRule = RULE_SUFFIXES; continue; } if ( !strncasecmp ( szBuffer, "wordchars", 9 ) ) { char * szStart = szBuffer + 9; while ( *szStart && isspace ( (unsigned char) *szStart ) ) ++szStart; char * szRangeL = szStart; while ( *szStart && !isspace ( (unsigned char) *szStart ) ) ++szStart; if ( !*szStart ) { printf ( "WARNING: Line %d: invalid 'wordchars' statement\n", iLine ); continue; } *szStart = '\0'; ++szStart; while ( *szStart && isspace ( (unsigned char) *szStart ) ) ++szStart; char * szRangeU = szStart; while ( *szStart && !isspace ( (unsigned char) *szStart ) ) ++szStart; *szStart = '\0'; if ( !AddToCharset ( szRangeL, szRangeU ) ) printf ( "WARNING: Line %d: cannot add to charset: '%s' '%s'\n", iLine, szRangeL, szRangeU ); continue; } if ( !strncasecmp ( szBuffer, "flag", 4 ) ) { if ( eRule==RULE_NONE ) { printf ( "WARNING: Line %d: 'flag' appears before preffixes or suffixes\n", iLine ); continue; } char * szStart = szBuffer + 4; while ( *szStart && isspace ( (unsigned char) *szStart ) ) ++szStart; bCrossProduct = ( *szStart=='*' ); cFlag = bCrossProduct ? 
*(szStart + 1) : *(szStart); continue; } if ( eRule==RULE_NONE ) continue; char * szComment = strchr ( szBuffer, '#' ); if ( szComment ) *szComment = '\0'; if ( !* szBuffer ) continue; szCondition[0] = '\0'; szStrip[0] = '\0'; szAppend[0] = '\0'; int nFields = sscanf ( szBuffer, "%[^>\n]>%[^,\n],%[^\n]", szCondition, szStrip, szAppend ); // NOLINT Strip ( szCondition ); Strip ( szStrip ); Strip ( szAppend ); switch ( nFields ) { case 2: // no optional strip-string strcpy ( szAppend, szStrip ); // NOLINT szStrip[0] = '\0'; break; case 3: // all read break; default: // invalid repl continue; } CISpellAffixRule Rule ( eRule, cFlag, bCrossProduct, szCondition, szStrip, szAppend ); m_dRules.Add ( Rule ); } return true; } bool CISpellAffix::LoadMySpell ( FILE * pFile ) { char sBuffer [MAX_STR_LENGTH]; char sCondition [MAX_STR_LENGTH]; char sRemove [MAX_STR_LENGTH]; char sAppend [MAX_STR_LENGTH]; RuleType_e eRule = RULE_NONE; BYTE cFlag = 0; BYTE cCombine = 0; int iCount = 0, iLine = 0; const char * sMode = 0; while ( !feof ( pFile ) ) { char * sLine = fgets ( sBuffer, MAX_STR_LENGTH, pFile ); if ( !sLine ) break; ++iLine; // prefix and suffix rules RuleType_e eNewRule = RULE_NONE; if ( !strncmp ( sLine, "PFX", 3 ) ) { eNewRule = RULE_PREFIXES; sMode = "prefix"; } else if ( !strncmp ( sLine, "SFX", 3 ) ) { eNewRule = RULE_SUFFIXES; sMode = "suffix"; } if ( eNewRule!=RULE_NONE ) { sLine += 3; while ( *sLine && isspace ( (unsigned char) *sLine ) ) ++sLine; if ( eNewRule!=eRule ) // new rule header { if ( iCount ) printf ( "WARNING: Line %d: Premature end of entries.\n", iLine ); if ( sscanf ( sLine, "%c %c %d", &cFlag, &cCombine, &iCount )!=3 ) // NOLINT printf ( "WARNING; Line %d: Malformed %s header\n", iLine, sMode ); eRule = eNewRule; } else // current rule continued { *sRemove = *sAppend = 0; char cNewFlag; if ( sscanf ( sLine, "%c %s %s %s", &cNewFlag, sRemove, sAppend, sCondition )==4 ) // NOLINT { if ( cNewFlag!=cFlag ) printf ( "WARNING: Line %d: Flag character mismatch\n", iLine ); if ( *sRemove=='0' && *(sRemove + 1)==0 ) *sRemove = 0; if ( *sAppend=='0' && *(sAppend + 1)==0 ) *sAppend = 0; CISpellAffixRule Rule ( eRule, cFlag, cCombine=='Y', sCondition, sRemove, sAppend ); m_dRules.Add ( Rule ); } else printf ( "WARNING: Line %d: Malformed %s rule\n", iLine, sMode ); if ( !--iCount ) eRule = RULE_NONE; } continue; } } return true; } CISpellAffixRule * CISpellAffix::GetRule ( int iRule ) { return &m_dRules [iRule]; } int CISpellAffix::GetNumRules () const { return m_dRules.GetLength (); } bool CISpellAffix::CheckCrosses () const { return m_bCheckCrosses; } bool CISpellAffix::AddToCharset ( char * szRangeL, char * szRangeU ) { if ( !szRangeL || !szRangeU ) return false; int iLengthL = strlen ( szRangeL ); int iLengthU = strlen ( szRangeU ); bool bSetL = ( iLengthL>0 && szRangeL[0]=='[' && szRangeL[iLengthL-1]==']' ); bool bSetR = ( iLengthU>0 && szRangeU[0]=='[' && szRangeU[iLengthU-1]==']' ); if ( bSetL!=bSetR ) return false; if ( bSetL ) { szRangeL [iLengthL - 1] = '\0'; szRangeL = szRangeL + 1; szRangeU [iLengthU - 1] = '\0'; szRangeU = szRangeU + 1; BYTE uMinL, uMaxL; if ( !GetSetMinMax ( szRangeL, uMinL, uMaxL ) ) return false; BYTE uMinU, uMaxU; if ( !GetSetMinMax ( szRangeU, uMinU, uMaxU ) ) return false; if ( ( uMaxU - uMinU )!=( uMaxL - uMinL ) ) return false; for ( BYTE i=0; i<=( uMaxL - uMinL ); ++i ) if ( IsInSet ( uMinL + i, szRangeL ) && IsInSet ( uMinU + i, szRangeU ) ) AddCharPair ( uMinL + i, uMinU + i ); } else { if ( iLengthL > 4 || iLengthU > 4 ) return false; const 
char * szL = szRangeL; const char * szU = szRangeU; AddCharPair ( GetWordchar(szL), GetWordchar(szU) ); } m_bUseDictConversion = true; return true; } void CISpellAffix::AddCharPair ( BYTE uCharL, BYTE uCharU ) { m_dCharset [uCharU] = uCharL; } void CISpellAffix::Strip ( char * szText ) { char * szIterator1 = szText; char * szIterator2 = szText; while ( *szIterator1 ) { if ( !isspace ( (unsigned char) *szIterator1 ) && *szIterator1!='-' ) { *szIterator2 = *szIterator1; ++szIterator2; } ++szIterator1; } *szIterator2 = '\0'; while ( *szText ) { *szText = ToLowerCase ( *szText ); ++szText; } } char CISpellAffix::ToLowerCase ( char cChar ) { if ( m_bFirstCaseConv ) { LoadLocale (); m_bFirstCaseConv = false; } // dictionary conversion if ( m_bUseDictConversion ) return m_dCharset [(BYTE) cChar] ? m_dCharset [(BYTE) cChar] : cChar; // user-defined character mapping if ( m_bUseLowerCaser ) { char cResult = (char)m_LowerCaser.ToLower ( (BYTE) cChar ); return cResult ? cResult : cChar; } // user-specified code page conversion return (char)tolower ( (BYTE)cChar ); // workaround for systems (eg. FreeBSD) which default to signed char. marvelous! } void CISpellAffix::LoadLocale () { if ( m_bUseDictConversion ) printf ( "Using dictionary-defined character set\n" ); else if ( !m_sCharsetFile.IsEmpty () ) { FILE * pFile = fopen ( m_sCharsetFile.cstr (), "rt" ); if ( pFile ) { printf ( "Using charater set from '%s'\n", m_sCharsetFile.cstr () ); const int MAX_CHARSET_LENGTH = 4096; char szBuffer [MAX_CHARSET_LENGTH]; char * szResult = fgets ( szBuffer, MAX_CHARSET_LENGTH, pFile ); if ( szResult ) { CSphVector dRemaps; if ( sphParseCharset ( szBuffer, dRemaps ) ) { m_bUseLowerCaser = true; m_LowerCaser.AddRemaps ( dRemaps, 0 ); } else { printf ( "Failed to parse charset from '%s'\n", m_sCharsetFile.cstr() ); } } else { printf ( "Failed to read charset from '%s'\n", m_sCharsetFile.cstr() ); } fclose ( pFile ); } else { printf ( "Failed to open '%s'\n", m_sCharsetFile.cstr() ); } } else { if ( !m_sLocale.IsEmpty () ) { char dLocaleC[256], dLocaleUser[256]; setlocale ( LC_ALL, "C" ); for ( int i=0; i<256; i++ ) dLocaleC[i] = (char) tolower(i); char * szLocale = setlocale ( LC_CTYPE, m_sLocale.cstr() ); if ( szLocale ) { printf ( "Using user-defined locale (locale=%s)\n", m_sLocale.cstr() ); for ( int i=0; i<256; i++ ) dLocaleUser[i] = (char) tolower(i); if ( !memcmp ( dLocaleC, dLocaleUser, 256 ) ) printf ( "WARNING: user-defined locale provides the same case conversion as the default \"C\" locale\n" ); } else printf ( "WARNING: could not set user-defined locale for case conversions (locale=%s)\n", m_sLocale.cstr() ); } else printf ( "WARNING: no character set specified\n" ); } } ////////////////////////////////////////////////////////////////////////// enum OutputMode_e { M_DEBUG, M_DUPLICATES, M_LAST, M_EXACT_OR_LONGEST, M_DEFAULT = M_EXACT_OR_LONGEST }; const char * dModeName[] = { "debug", "duplicates", "last" }; struct MapInfo_t { CSphString m_sWord; char m_sRules[3]; }; struct WordLess { inline bool IsLess ( const char * a, const char * b ) const { return strcoll ( a, b ) < 0; } }; typedef CSphOrderedHash < CSphVector, CSphString, CSphStrHashFunc, 100000 > WordMap_t; static void EmitResult ( WordMap_t & tMap , const CSphString & sFrom, const CSphString & sTo, char cRuleA = 0, char cRuleB = 0 ) { if ( !tMap.Exists(sFrom) ) tMap.Add ( CSphVector(), sFrom ); MapInfo_t tInfo; tInfo.m_sWord = sTo; tInfo.m_sRules[0] = cRuleA; tInfo.m_sRules[1] = cRuleB; tInfo.m_sRules[2] = 0; tMap[sFrom].Add ( tInfo ); } int 
main ( int iArgs, char ** dArgs ) { OutputMode_e eMode = M_DEFAULT; bool bUseCustomCharset = false; CSphString sDict, sAffix, sLocale, sCharsetFile, sResult = "result.txt"; printf ( "spelldump, an ispell dictionary dumper\n\n" ); int i = 1; for ( ; i < iArgs; i++ ) { if ( !strcmp ( dArgs[i], "-c" ) ) { if ( ++i==iArgs ) break; bUseCustomCharset = true; sCharsetFile = dArgs[i]; } else if ( !strcmp ( dArgs[i], "-m" ) ) { if ( ++i==iArgs ) break; char * sMode = dArgs[i]; if ( !strcmp ( sMode, "debug" ) ) { eMode = M_DEBUG; continue; } if ( !strcmp ( sMode, "duplicates" ) ) { eMode = M_DUPLICATES; continue; } if ( !strcmp ( sMode, "last" ) ) { eMode = M_LAST; continue; } if ( !strcmp ( sMode, "default" ) ) { eMode = M_DEFAULT; continue; } printf ( "Unrecognized mode: %s\n", sMode ); return 1; } else break; } switch ( iArgs - i ) { case 4: sLocale = dArgs[i + 3]; case 3: sResult = dArgs[i + 2]; case 2: sAffix = dArgs[i + 1]; sDict = dArgs[i]; break; default: printf ( "Usage: spelldump [options] [result] [locale-name]\n\n" "Options:\n" "-c \tuse case conversion defined in \n" "-m \toutput (conflict resolution) mode:\n" "\t\tdefault - try to guess the best way to resolve a conflict\n" "\t\tlast - choose last entry\n" "\t\tdebug - dump all mappings (with rules)\n" "\t\tduplicates - dump duplicate mappings only (with rules)\n" ); if ( iArgs>1 ) { printf ( "\n" "Examples:\n" "spelldump en.dict en.aff\n" "spelldump ru.dict ru.aff ru.txt ru_RU.CP1251\n" "spelldump ru.dict ru.aff ru.txt .1251\n" ); } return 1; } printf ( "Loading dictionary...\n" ); CISpellDict Dict; if ( !Dict.Load ( sDict.cstr () ) ) sphDie ( "Error loading dictionary file '%s'\n", sDict.IsEmpty () ? "" : sDict.cstr () ); printf ( "Loading affix file...\n" ); CISpellAffix Affix ( sLocale.cstr (), bUseCustomCharset ? sCharsetFile.cstr () : NULL ); if ( !Affix.Load ( sAffix.cstr () ) ) sphDie ( "Error loading affix file '%s'\n", sAffix.IsEmpty () ?
"" : sAffix.cstr () ); if ( sResult.IsEmpty () ) sphDie ( "No result file specified\n" ); FILE * pFile = fopen ( sResult.cstr (), "wt" ); if ( !pFile ) sphDie ( "Unable to open '%s' for writing\n", sResult.cstr () ); if ( eMode!=M_DEFAULT ) printf ( "Output mode: %s\n", dModeName[eMode] ); Dict.IterateStart (); WordMap_t tWordMap; const CISpellDict::CISpellDictWord * pWord = NULL; int nDone = 0; while ( ( pWord = Dict.IterateNext () )!=NULL ) { EmitResult ( tWordMap, pWord->m_sWord, pWord->m_sWord ); if ( ( ++nDone % 10 )==0 ) { printf ( "\rDictionary words processed: %d", nDone ); fflush ( stdout ); } if ( pWord->m_sFlags.IsEmpty() ) continue; CSphString sWord, sWordForCross; int iFlagLen = strlen ( pWord->m_sFlags.cstr () ); for ( int iFlag1 = 0; iFlag1 < iFlagLen; ++iFlag1 ) for ( int iRule1 = 0; iRule1 < Affix.GetNumRules (); ++iRule1 ) { CISpellAffixRule * pRule1 = Affix.GetRule ( iRule1 ); if ( pRule1->Flag()!=pWord->m_sFlags.cstr()[iFlag1] ) continue; sWord = pWord->m_sWord; if ( !pRule1->Apply ( sWord ) ) continue; EmitResult ( tWordMap, sWord, pWord->m_sWord, pRule1->Flag() ); // apply other rules if ( !Affix.CheckCrosses() ) continue; if ( !pRule1->IsCrossProduct() ) continue; for ( int iFlag2 = iFlag1 + 1; iFlag2 < iFlagLen; ++iFlag2 ) for ( int iRule2 = 0; iRule2 < Affix.GetNumRules (); ++iRule2 ) { CISpellAffixRule * pRule2 = Affix.GetRule ( iRule2 ); if ( !pRule2->IsCrossProduct () || pRule2->Flag()!=pWord->m_sFlags.cstr()[iFlag2] || pRule2->IsPrefix()==pRule1->IsPrefix() ) continue; sWordForCross = sWord; if ( pRule2->Apply ( sWordForCross ) ) EmitResult ( tWordMap, sWordForCross, pWord->m_sWord, pRule1->Flag(), pRule2->Flag() ); } } } printf ( "\rDictionary words processed: %d\n", nDone ); // output CSphVector dKeys; tWordMap.IterateStart(); while ( tWordMap.IterateNext() ) dKeys.Add ( tWordMap.IterateGetKey().cstr() ); dKeys.Sort ( WordLess() ); ARRAY_FOREACH ( iKey, dKeys ) { const CSphVector & dWords = tWordMap[dKeys[iKey]]; const char * sKey = dKeys[iKey]; switch ( eMode ) { case M_LAST: fprintf ( pFile, "%s > %s\n", sKey, dWords.Last().m_sWord.cstr() ); break; case M_EXACT_OR_LONGEST: { int iMatch = 0; int iLength = 0; ARRAY_FOREACH ( i, dWords ) { if ( dWords[i].m_sWord==sKey ) { iMatch = i; break; } int iWordLength = strlen ( dWords[i].m_sWord.cstr() ); if ( iWordLength>iLength ) { iLength = iWordLength; iMatch = i; } } fprintf ( pFile, "%s > %s\n", sKey, dWords[iMatch].m_sWord.cstr() ); break; } case M_DUPLICATES: if ( dWords.GetLength()==1 ) break; case M_DEBUG: ARRAY_FOREACH ( i, dWords ) fprintf ( pFile, "%s > %s %s/%d\n", sKey, dWords[i].m_sWord.cstr(), dWords[i].m_sRules, dWords.GetLength() ); break; } } fclose ( pFile ); return 0; } // // $Id: spelldump.cpp 3087 2012-01-30 23:07:35Z shodan $ // sphinx-2.0.4-release/src/indextool.cpp0000644000176700017710000003613411711621267017256 0ustar deogardeogar// // $Id: indextool.cpp 3087 2012-01-30 23:07:35Z shodan $ // // // Copyright (c) 2001-2012, Andrew Aksyonoff // Copyright (c) 2008-2012, Sphinx Technologies Inc // All rights reserved // // This program is free software; you can redistribute it and/or modify // it under the terms of the GNU General Public License. 
You should have // received a copy of the GPL license along with this program; if you // did not, you can find it at http://www.gnu.org/ // #include "sphinx.h" #include "sphinxutils.h" #include "sphinxint.h" #include "sphinxrt.h" #include void StripStdin ( const char * sIndexAttrs, const char * sRemoveElements ) { CSphString sError; CSphHTMLStripper tStripper ( true ); if ( !tStripper.SetIndexedAttrs ( sIndexAttrs, sError ) || !tStripper.SetRemovedElements ( sRemoveElements, sError ) ) sphDie ( "failed to configure stripper: %s", sError.cstr() ); CSphVector dBuffer; while ( !feof(stdin) ) { char sBuffer[1024]; int iLen = fread ( sBuffer, 1, sizeof(sBuffer), stdin ); if ( !iLen ) break; int iPos = dBuffer.GetLength(); dBuffer.Resize ( iPos+iLen ); memcpy ( &dBuffer[iPos], sBuffer, iLen ); } dBuffer.Add ( 0 ); tStripper.Strip ( &dBuffer[0] ); fprintf ( stdout, "dumping stripped results...\n%s\n", &dBuffer[0] ); } void DoOptimization ( const CSphString & sIndex, const CSphConfig & hConfig ); int main ( int argc, char ** argv ) { fprintf ( stdout, SPHINX_BANNER ); if ( argc<=1 ) { fprintf ( stdout, "Usage: indextool [OPTIONS]\n" "\n" "Commands are:\n" "--dumpheader \tdump index header by file name\n" "--dumpconfig \tdump index header in config format by file name\n" "--dumpheader \tdump index header by index name\n" "--dumpdocids \tdump docids by index name\n" "--dumphitlist \n" "--dumphitlist --wordid \n" "\t\t\t\tdump hits for given keyword\n" "--check \t\tperform index consistency check\n" "--htmlstrip \t\tfilter stdin using HTML stripper settings\n" "\t\t\t\tfor a given index (taken from sphinx.conf)\n" "--optimize-rt-klists \n" "\t\t\t\tperform kill list optimization in rt's disk chunks\n" "\t\t\t\tfor a given index (taken from sphinx.conf) or --all\n" "\n" "Options are:\n" "-c, --config \t\tuse given config file instead of defaults\n" "--strip-path\t\t\tstrip path from filenames referenced by index\n" "\t\t\t\t(eg.
stopwords, exceptions, etc)\n" ); exit ( 0 ); } ////////////////////// // parse command line ////////////////////// #define OPT(_a1,_a2) else if ( !strcmp(argv[i],_a1) || !strcmp(argv[i],_a2) ) #define OPT1(_a1) else if ( !strcmp(argv[i],_a1) ) const char * sOptConfig = NULL; CSphString sDumpHeader, sIndex, sKeyword; bool bWordid = false; bool bStripPath = false; enum { CMD_NOTHING, CMD_DUMPHEADER, CMD_DUMPCONFIG, CMD_DUMPDOCIDS, CMD_DUMPHITLIST, CMD_CHECK, CMD_STRIP, CMD_OPTIMIZE } eCommand = CMD_NOTHING; int i; for ( i=1; i=argc ) break; OPT ( "-c", "--config" ) sOptConfig = argv[++i]; OPT1 ( "--dumpheader" ) { eCommand = CMD_DUMPHEADER; sDumpHeader = argv[++i]; } OPT1 ( "--dumpconfig" ) { eCommand = CMD_DUMPCONFIG; sDumpHeader = argv[++i]; } OPT1 ( "--dumpdocids" ) { eCommand = CMD_DUMPDOCIDS; sIndex = argv[++i]; } OPT1 ( "--check" ) { eCommand = CMD_CHECK; sIndex = argv[++i]; } OPT1 ( "--htmlstrip" ) { eCommand = CMD_STRIP; sIndex = argv[++i]; } OPT1 ( "--strip-path" ) { bStripPath = true; } OPT1 ( "--optimize-rt-klists" ) { eCommand = CMD_OPTIMIZE; sIndex = argv[++i]; if ( sIndex=="--all" ) sIndex = ""; } // options with 2 args else if ( (i+2)>=argc ) // NOLINT { // not enough args break; } else if ( !strcmp ( argv[i], "--dumphitlist" ) ) { eCommand = CMD_DUMPHITLIST; sIndex = argv[++i]; if ( !strcmp ( argv[i+1], "--wordid" ) ) { if ( (i+3)SetWordlistPreload ( false ); CSphString sWarn; if ( !pIndex->Prealloc ( false, bStripPath, sWarn ) ) sphDie ( "index '%s': prealloc failed: %s\n", sIndex.cstr(), pIndex->GetLastError().cstr() ); if ( !pIndex->Preread() ) sphDie ( "index '%s': preread failed: %s\n", sIndex.cstr(), pIndex->GetLastError().cstr() ); break; } // do the dew switch ( eCommand ) { case CMD_NOTHING: sphDie ( "nothing to do; specify a command (run indextool w/o switches for help)" ); case CMD_DUMPHEADER: case CMD_DUMPCONFIG: { if ( hConf("index") && hConf["index"](sDumpHeader) ) { fprintf ( stdout, "dumping header for index '%s'...\n", sDumpHeader.cstr() ); if ( !hConf["index"][sDumpHeader]("path") ) sphDie ( "missing 'path' for index '%s'\n", sDumpHeader.cstr() ); sDumpHeader.SetSprintf ( "%s.sph", hConf["index"][sDumpHeader]["path"].cstr() ); } fprintf ( stdout, "dumping header file '%s'...\n", sDumpHeader.cstr() ); CSphIndex * pIndex = sphCreateIndexPhrase ( NULL, "" ); pIndex->DebugDumpHeader ( stdout, sDumpHeader.cstr(), eCommand==CMD_DUMPCONFIG ); break; } case CMD_DUMPDOCIDS: fprintf ( stdout, "dumping docids for index '%s'...\n", sIndex.cstr() ); pIndex->DebugDumpDocids ( stdout ); break; case CMD_DUMPHITLIST: fprintf ( stdout, "dumping hitlist for index '%s' keyword '%s'...\n", sIndex.cstr(), sKeyword.cstr() ); pIndex->DebugDumpHitlist ( stdout, sKeyword.cstr(), bWordid ); break; case CMD_CHECK: fprintf ( stdout, "checking index '%s'...\n", sIndex.cstr() ); return pIndex->DebugCheck ( stdout ); case CMD_STRIP: { const CSphConfigSection & hIndex = hConf["index"][sIndex]; if ( hIndex.GetInt ( "html_strip" )==0 ) sphDie ( "HTML stripping is not enabled in index '%s'", sIndex.cstr() ); StripStdin ( hIndex.GetStr ( "html_index_attrs" ), hIndex.GetStr ( "html_remove_elements" ) ); } break; case CMD_OPTIMIZE: DoOptimization ( sIndex, hConf ); break; default: sphDie ( "INTERNAL ERROR: unhandled command (id=%d)", (int)eCommand ); } return 0; } #if USE_WINDOWS #include // for open() #define sphSeek _lseeki64 #else #define sphSeek lseek #endif bool FixupFiles ( const CSphVector & dFiles, CSphString & sError ) { ARRAY_FOREACH ( i, dFiles ) { const CSphString & sPath = 
dFiles[i]; CSphString sKlistOld, sKlistNew, sHeader; sKlistOld.SetSprintf ( "%s.spk", sPath.cstr() ); sKlistNew.SetSprintf ( "%s.new.spk", sPath.cstr() ); sHeader.SetSprintf ( "%s.sph", sPath.cstr() ); DWORD iCount = 0; { CSphAutoreader rdHeader, rdKlistNew, rdKlistOld; if ( !rdHeader.Open ( sHeader, sError ) || !rdKlistNew.Open ( sKlistNew, sError ) || !rdKlistOld.Open ( sKlistOld, sError ) ) return false; const SphOffset_t iSize = rdKlistNew.GetFilesize(); iCount = (DWORD)( iSize / sizeof(SphAttr_t) ); } if ( ::unlink ( sKlistOld.cstr() )!=0 ) { sError.SetSprintf ( "file: '%s', error: '%s'", sKlistOld.cstr(), strerror(errno) ); return false; } if ( ::rename ( sKlistNew.cstr(), sKlistOld.cstr() )!=0 ) { sError.SetSprintf ( "files: '%s'->'%s', error: '%s'", sKlistNew.cstr(), sKlistOld.cstr(), strerror(errno) ); return false; } int iFD = ::open ( sHeader.cstr(), SPH_O_BINARY | O_RDWR, 0644 ); if ( iFD<0 ) { sError.SetSprintf ( "file: '%s', error: '%s'", sHeader.cstr(), strerror(errno) ); return false; } if ( sphSeek ( iFD, -4, SEEK_END )==-1L ) { sError.SetSprintf ( "file: '%s', error: '%s'", sHeader.cstr(), strerror(errno) ); SafeClose ( iFD ); return false; } if ( ::write ( iFD, &iCount, 4 )==-1 ) { sError.SetSprintf ( "file: '%s', error: '%s'", sHeader.cstr(), strerror(errno) ); SafeClose ( iFD ); return false; } SafeClose ( iFD ); } return true; } bool DoKlistsOptimization ( int iRowSize, const char * sPath, int iChunkCount, CSphVector & dFiles ) { CSphTightVector dLiveID; CSphString sError; for ( int iChunk=0; iChunk dKlist; if ( dLiveID.GetLength()>0 ) { assert ( rdKList.GetFilesize()0 ) ARRAY_FOREACH ( i, dLiveID ) if ( dLiveID[i]==0 ) dLiveID.RemoveFast ( i-- ); assert ( dLiveID.GetLength()+dKlist.GetLength()==iWasLive ); dLiveID.Sort(); } // 3d step write new k-list if ( dKlist.GetLength()>0 ) wrNew.PutBytes ( dKlist.Begin(), dKlist.GetLength()*sizeof(SphAttr_t) ); dKlist.Reset(); wrNew.CloseFile(); // 4th step merge ID from this segment into live ids if ( iChunk!=iChunkCount-1 ) { const int iWasLive = Max ( dLiveID.GetLength()-1, 0 ); const int iRowCount = (int)( rdAttr.GetFilesize() / ( (DOCINFO_IDSIZE+iRowSize)*4 ) ); for ( int i=0; i dFiles; hConf["index"].IterateStart (); while ( hConf["index"].IterateNext () ) { CSphString sError; const CSphConfigSection & hIndex = hConf["index"].IterateGet (); const char * sIndexName = hConf["index"].IterateGetKey().cstr(); if ( !hIndex("type") || hIndex["type"]!="rt" ) continue; if ( !sIndex.IsEmpty() && sIndex!=sIndexName ) continue; if ( !hIndex.Exists ( "path" ) ) { fprintf ( stdout, "key 'path' not found in index '%s' - skiped\n", sIndexName ); continue; } const int64_t tmIndexStart = sphMicroTimer(); CSphSchema tSchema ( sIndexName ); CSphColumnInfo tCol; // fields for ( CSphVariant * v=hIndex("rt_field"); v; v=v->m_pNext ) { tCol.m_sName = v->cstr(); tSchema.m_dFields.Add ( tCol ); } if ( !tSchema.m_dFields.GetLength() ) { fprintf ( stdout, "index '%s': no fields configured (use rt_field directive) - skiped\n", sIndexName ); continue; } // attrs const int iNumTypes = 5; const char * sTypes[iNumTypes] = { "rt_attr_uint", "rt_attr_bigint", "rt_attr_float", "rt_attr_timestamp", "rt_attr_string" }; const ESphAttr iTypes[iNumTypes] = { SPH_ATTR_INTEGER, SPH_ATTR_BIGINT, SPH_ATTR_FLOAT, SPH_ATTR_TIMESTAMP, SPH_ATTR_STRING }; for ( int iType=0; iTypem_pNext ) { tCol.m_sName = v->cstr(); tCol.m_eAttrType = iTypes[iType]; tSchema.AddAttr ( tCol, false ); } } const char * sPath = hIndex["path"].cstr(); CSphString sMeta; sMeta.SetSprintf ( 
"%s.meta", sPath ); CSphAutoreader rdMeta; if ( !rdMeta.Open ( sMeta.cstr(), sError ) ) { fprintf ( stdout, "%s\n", sError.cstr() ); continue; } rdMeta.SeekTo ( 8, 4 ); const int iDiskCunkCount = rdMeta.GetDword(); if ( !DoKlistsOptimization ( tSchema.GetRowSize(), sPath, iDiskCunkCount, dFiles ) ) sphDie ( "can't cook k-list '%s'", sPath ); const int64_t tmIndexDone = sphMicroTimer(); fprintf ( stdout, "\nindex '%s' done in %.3f sec\n", sIndexName, float(tmIndexDone-tmIndexStart )/1000000.0f ); iDone++; } const int64_t tmIndexesDone = sphMicroTimer(); fprintf ( stdout, "\ntotal processed=%d in %.3f sec\n", iDone, float(tmIndexesDone-tmStart )/1000000.0f ); CSphString sError("none"); if ( !FixupFiles ( dFiles, sError ) ) fprintf ( stdout, "error during files fixup: %s\n", sError.cstr() ); const int64_t tmDone = sphMicroTimer(); fprintf ( stdout, "\nfinished in %.3f sec\n", float(tmDone-tmStart )/1000000.0f ); } // // $Id: indextool.cpp 3087 2012-01-30 23:07:35Z shodan $ // sphinx-2.0.4-release/src/md5.cpp0000644000176700017710000003022210454516162015726 0ustar deogardeogar/* Copyright (C) 1999, 2000, 2002 Aladdin Enterprises. All rights reserved. This software is provided 'as-is', without any express or implied warranty. In no event will the authors be held liable for any damages arising from the use of this software. Permission is granted to anyone to use this software for any purpose, including commercial applications, and to alter it and redistribute it freely, subject to the following restrictions: 1. The origin of this software must not be misrepresented; you must not claim that you wrote the original software. If you use this software in a product, an acknowledgment in the product documentation would be appreciated but is not required. 2. Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software. 3. This notice may not be removed or altered from any source distribution. L. Peter Deutsch ghost@aladdin.com */ /* $Id: md5.cpp 374 2006-07-10 18:29:06Z shodan $ */ /* Independent implementation of MD5 (RFC 1321). This code implements the MD5 Algorithm defined in RFC 1321, whose text is available at http://www.ietf.org/rfc/rfc1321.txt The code is derived from the text of the RFC, including the test suite (section A.5) but excluding the rest of Appendix A. It does not include any code or documentation that is identified in the RFC as being copyrighted. The original and principal author of md5.c is L. Peter Deutsch . Other authors are noted in the change history that follows (in reverse chronological order): 2002-04-13 lpd Clarified derivation from RFC 1321; now handles byte order either statically or dynamically; added missing #include in library. 2002-03-11 lpd Corrected argument list for main(), and added int return type, in test program and T value program. 2002-02-21 lpd Added missing #include in test program. 2000-07-03 lpd Patched to eliminate warnings about "constant is unsigned in ANSI C, signed in traditional"; made test program self-checking. 1999-11-04 lpd Edited comments slightly for automatic TOC extraction. 1999-10-18 lpd Fixed typo in header comment (ansi2knr rather than md5). 1999-05-03 lpd Original version. */ #include "md5.h" #include #undef BYTE_ORDER /* 1 = big-endian, -1 = little-endian, 0 = unknown */ #ifdef ARCH_IS_BIG_ENDIAN # define BYTE_ORDER (ARCH_IS_BIG_ENDIAN ? 
1 : -1) #else # define BYTE_ORDER 0 #endif #define T_MASK ((md5_word_t)~0) #define T1 /* 0xd76aa478 */ (T_MASK ^ 0x28955b87) #define T2 /* 0xe8c7b756 */ (T_MASK ^ 0x173848a9) #define T3 0x242070db #define T4 /* 0xc1bdceee */ (T_MASK ^ 0x3e423111) #define T5 /* 0xf57c0faf */ (T_MASK ^ 0x0a83f050) #define T6 0x4787c62a #define T7 /* 0xa8304613 */ (T_MASK ^ 0x57cfb9ec) #define T8 /* 0xfd469501 */ (T_MASK ^ 0x02b96afe) #define T9 0x698098d8 #define T10 /* 0x8b44f7af */ (T_MASK ^ 0x74bb0850) #define T11 /* 0xffff5bb1 */ (T_MASK ^ 0x0000a44e) #define T12 /* 0x895cd7be */ (T_MASK ^ 0x76a32841) #define T13 0x6b901122 #define T14 /* 0xfd987193 */ (T_MASK ^ 0x02678e6c) #define T15 /* 0xa679438e */ (T_MASK ^ 0x5986bc71) #define T16 0x49b40821 #define T17 /* 0xf61e2562 */ (T_MASK ^ 0x09e1da9d) #define T18 /* 0xc040b340 */ (T_MASK ^ 0x3fbf4cbf) #define T19 0x265e5a51 #define T20 /* 0xe9b6c7aa */ (T_MASK ^ 0x16493855) #define T21 /* 0xd62f105d */ (T_MASK ^ 0x29d0efa2) #define T22 0x02441453 #define T23 /* 0xd8a1e681 */ (T_MASK ^ 0x275e197e) #define T24 /* 0xe7d3fbc8 */ (T_MASK ^ 0x182c0437) #define T25 0x21e1cde6 #define T26 /* 0xc33707d6 */ (T_MASK ^ 0x3cc8f829) #define T27 /* 0xf4d50d87 */ (T_MASK ^ 0x0b2af278) #define T28 0x455a14ed #define T29 /* 0xa9e3e905 */ (T_MASK ^ 0x561c16fa) #define T30 /* 0xfcefa3f8 */ (T_MASK ^ 0x03105c07) #define T31 0x676f02d9 #define T32 /* 0x8d2a4c8a */ (T_MASK ^ 0x72d5b375) #define T33 /* 0xfffa3942 */ (T_MASK ^ 0x0005c6bd) #define T34 /* 0x8771f681 */ (T_MASK ^ 0x788e097e) #define T35 0x6d9d6122 #define T36 /* 0xfde5380c */ (T_MASK ^ 0x021ac7f3) #define T37 /* 0xa4beea44 */ (T_MASK ^ 0x5b4115bb) #define T38 0x4bdecfa9 #define T39 /* 0xf6bb4b60 */ (T_MASK ^ 0x0944b49f) #define T40 /* 0xbebfbc70 */ (T_MASK ^ 0x4140438f) #define T41 0x289b7ec6 #define T42 /* 0xeaa127fa */ (T_MASK ^ 0x155ed805) #define T43 /* 0xd4ef3085 */ (T_MASK ^ 0x2b10cf7a) #define T44 0x04881d05 #define T45 /* 0xd9d4d039 */ (T_MASK ^ 0x262b2fc6) #define T46 /* 0xe6db99e5 */ (T_MASK ^ 0x1924661a) #define T47 0x1fa27cf8 #define T48 /* 0xc4ac5665 */ (T_MASK ^ 0x3b53a99a) #define T49 /* 0xf4292244 */ (T_MASK ^ 0x0bd6ddbb) #define T50 0x432aff97 #define T51 /* 0xab9423a7 */ (T_MASK ^ 0x546bdc58) #define T52 /* 0xfc93a039 */ (T_MASK ^ 0x036c5fc6) #define T53 0x655b59c3 #define T54 /* 0x8f0ccc92 */ (T_MASK ^ 0x70f3336d) #define T55 /* 0xffeff47d */ (T_MASK ^ 0x00100b82) #define T56 /* 0x85845dd1 */ (T_MASK ^ 0x7a7ba22e) #define T57 0x6fa87e4f #define T58 /* 0xfe2ce6e0 */ (T_MASK ^ 0x01d3191f) #define T59 /* 0xa3014314 */ (T_MASK ^ 0x5cfebceb) #define T60 0x4e0811a1 #define T61 /* 0xf7537e82 */ (T_MASK ^ 0x08ac817d) #define T62 /* 0xbd3af235 */ (T_MASK ^ 0x42c50dca) #define T63 0x2ad7d2bb #define T64 /* 0xeb86d391 */ (T_MASK ^ 0x14792c6e) static void md5_process(md5_state_t *pms, const md5_byte_t *data /*[64]*/) { md5_word_t a = pms->abcd[0], b = pms->abcd[1], c = pms->abcd[2], d = pms->abcd[3]; md5_word_t t; #if BYTE_ORDER > 0 /* Define storage only for big-endian CPUs. */ md5_word_t X[16]; #else /* Define storage for little-endian or both types of CPUs. */ md5_word_t xbuf[16]; const md5_word_t *X; #endif { #if BYTE_ORDER == 0 /* * Determine dynamically whether this is a big-endian or * little-endian machine, since we can use a more efficient * algorithm on the latter. */ static const int w = 1; if (*((const md5_byte_t *)&w)) /* dynamic little-endian */ #endif #if BYTE_ORDER <= 0 /* little-endian */ { /* * On little-endian machines, we can process properly aligned * data without copying it. 
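* (Implementation note on the test just below: subtracting a null md5_byte_t
* pointer turns the data address into an integer offset, and masking its low
* two bits checks whether the 64-byte input block sits on a 4-byte
* md5_word_t boundary.)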
*/ if (!((data - (const md5_byte_t *)0) & 3)) { /* data are properly aligned */ X = (const md5_word_t *)data; } else { /* not aligned */ memcpy(xbuf, data, 64); X = xbuf; } } #endif #if BYTE_ORDER == 0 else /* dynamic big-endian */ #endif #if BYTE_ORDER >= 0 /* big-endian */ { /* * On big-endian machines, we must arrange the bytes in the * right order. */ const md5_byte_t *xp = data; int i; # if BYTE_ORDER == 0 X = xbuf; /* (dynamic only) */ # else # define xbuf X /* (static only) */ # endif for (i = 0; i < 16; ++i, xp += 4) xbuf[i] = xp[0] + (xp[1] << 8) + (xp[2] << 16) + (xp[3] << 24); } #endif } #define ROTATE_LEFT(x, n) (((x) << (n)) | ((x) >> (32 - (n)))) /* Round 1. */ /* Let [abcd k s i] denote the operation a = b + ((a + F(b,c,d) + X[k] + T[i]) <<< s). */ #define F(x, y, z) (((x) & (y)) | (~(x) & (z))) #define SET(a, b, c, d, k, s, Ti)\ t = a + F(b,c,d) + X[k] + Ti;\ a = ROTATE_LEFT(t, s) + b /* Do the following 16 operations. */ SET(a, b, c, d, 0, 7, T1); SET(d, a, b, c, 1, 12, T2); SET(c, d, a, b, 2, 17, T3); SET(b, c, d, a, 3, 22, T4); SET(a, b, c, d, 4, 7, T5); SET(d, a, b, c, 5, 12, T6); SET(c, d, a, b, 6, 17, T7); SET(b, c, d, a, 7, 22, T8); SET(a, b, c, d, 8, 7, T9); SET(d, a, b, c, 9, 12, T10); SET(c, d, a, b, 10, 17, T11); SET(b, c, d, a, 11, 22, T12); SET(a, b, c, d, 12, 7, T13); SET(d, a, b, c, 13, 12, T14); SET(c, d, a, b, 14, 17, T15); SET(b, c, d, a, 15, 22, T16); #undef SET /* Round 2. */ /* Let [abcd k s i] denote the operation a = b + ((a + G(b,c,d) + X[k] + T[i]) <<< s). */ #define G(x, y, z) (((x) & (z)) | ((y) & ~(z))) #define SET(a, b, c, d, k, s, Ti)\ t = a + G(b,c,d) + X[k] + Ti;\ a = ROTATE_LEFT(t, s) + b /* Do the following 16 operations. */ SET(a, b, c, d, 1, 5, T17); SET(d, a, b, c, 6, 9, T18); SET(c, d, a, b, 11, 14, T19); SET(b, c, d, a, 0, 20, T20); SET(a, b, c, d, 5, 5, T21); SET(d, a, b, c, 10, 9, T22); SET(c, d, a, b, 15, 14, T23); SET(b, c, d, a, 4, 20, T24); SET(a, b, c, d, 9, 5, T25); SET(d, a, b, c, 14, 9, T26); SET(c, d, a, b, 3, 14, T27); SET(b, c, d, a, 8, 20, T28); SET(a, b, c, d, 13, 5, T29); SET(d, a, b, c, 2, 9, T30); SET(c, d, a, b, 7, 14, T31); SET(b, c, d, a, 12, 20, T32); #undef SET /* Round 3. */ /* Let [abcd k s t] denote the operation a = b + ((a + H(b,c,d) + X[k] + T[i]) <<< s). */ #define H(x, y, z) ((x) ^ (y) ^ (z)) #define SET(a, b, c, d, k, s, Ti)\ t = a + H(b,c,d) + X[k] + Ti;\ a = ROTATE_LEFT(t, s) + b /* Do the following 16 operations. */ SET(a, b, c, d, 5, 4, T33); SET(d, a, b, c, 8, 11, T34); SET(c, d, a, b, 11, 16, T35); SET(b, c, d, a, 14, 23, T36); SET(a, b, c, d, 1, 4, T37); SET(d, a, b, c, 4, 11, T38); SET(c, d, a, b, 7, 16, T39); SET(b, c, d, a, 10, 23, T40); SET(a, b, c, d, 13, 4, T41); SET(d, a, b, c, 0, 11, T42); SET(c, d, a, b, 3, 16, T43); SET(b, c, d, a, 6, 23, T44); SET(a, b, c, d, 9, 4, T45); SET(d, a, b, c, 12, 11, T46); SET(c, d, a, b, 15, 16, T47); SET(b, c, d, a, 2, 23, T48); #undef SET /* Round 4. */ /* Let [abcd k s t] denote the operation a = b + ((a + I(b,c,d) + X[k] + T[i]) <<< s). */ #define I(x, y, z) ((y) ^ ((x) | ~(z))) #define SET(a, b, c, d, k, s, Ti)\ t = a + I(b,c,d) + X[k] + Ti;\ a = ROTATE_LEFT(t, s) + b /* Do the following 16 operations. 
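For example, with the Round 4 SET macro defined just above, the first
operation below expands to t = a + I(b,c,d) + X[0] + T49; a = ROTATE_LEFT(t, 6) + b;
that is, [abcd 0 6 49] in the bracket notation used in these comments.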
*/ SET(a, b, c, d, 0, 6, T49); SET(d, a, b, c, 7, 10, T50); SET(c, d, a, b, 14, 15, T51); SET(b, c, d, a, 5, 21, T52); SET(a, b, c, d, 12, 6, T53); SET(d, a, b, c, 3, 10, T54); SET(c, d, a, b, 10, 15, T55); SET(b, c, d, a, 1, 21, T56); SET(a, b, c, d, 8, 6, T57); SET(d, a, b, c, 15, 10, T58); SET(c, d, a, b, 6, 15, T59); SET(b, c, d, a, 13, 21, T60); SET(a, b, c, d, 4, 6, T61); SET(d, a, b, c, 11, 10, T62); SET(c, d, a, b, 2, 15, T63); SET(b, c, d, a, 9, 21, T64); #undef SET /* Then perform the following additions. (That is increment each of the four registers by the value it had before this block was started.) */ pms->abcd[0] += a; pms->abcd[1] += b; pms->abcd[2] += c; pms->abcd[3] += d; } void md5_init(md5_state_t *pms) { pms->count[0] = pms->count[1] = 0; pms->abcd[0] = 0x67452301; pms->abcd[1] = /*0xefcdab89*/ T_MASK ^ 0x10325476; pms->abcd[2] = /*0x98badcfe*/ T_MASK ^ 0x67452301; pms->abcd[3] = 0x10325476; } void md5_append(md5_state_t *pms, const md5_byte_t *data, int nbytes) { const md5_byte_t *p = data; int left = nbytes; int offset = (pms->count[0] >> 3) & 63; md5_word_t nbits = (md5_word_t)(nbytes << 3); if (nbytes <= 0) return; /* Update the message length. */ pms->count[1] += nbytes >> 29; pms->count[0] += nbits; if (pms->count[0] < nbits) pms->count[1]++; /* Process an initial partial block. */ if (offset) { int copy = (offset + nbytes > 64 ? 64 - offset : nbytes); memcpy(pms->buf + offset, p, copy); if (offset + copy < 64) return; p += copy; left -= copy; md5_process(pms, pms->buf); } /* Process full blocks. */ for (; left >= 64; p += 64, left -= 64) md5_process(pms, p); /* Process a final partial block. */ if (left) memcpy(pms->buf, p, left); } void md5_finish(md5_state_t *pms, md5_byte_t digest[16]) { static const md5_byte_t pad[64] = { 0x80, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }; md5_byte_t data[8]; int i; /* Save the length before padding. */ for (i = 0; i < 8; ++i) data[i] = (md5_byte_t)(pms->count[i >> 2] >> ((i & 3) << 3)); /* Pad to 56 bytes mod 64. */ md5_append(pms, pad, ((55 - (pms->count[0] >> 3)) & 63) + 1); /* Append the length. */ md5_append(pms, data, 8); for (i = 0; i < 16; ++i) digest[i] = (md5_byte_t)(pms->abcd[i >> 2] >> ((i & 3) << 3)); } sphinx-2.0.4-release/src/indexer.cpp0000644000176700017710000014573411711621267016716 0ustar deogardeogar// // $Id: indexer.cpp 3087 2012-01-30 23:07:35Z shodan $ // // // Copyright (c) 2001-2012, Andrew Aksyonoff // Copyright (c) 2008-2012, Sphinx Technologies Inc // All rights reserved // // This program is free software; you can redistribute it and/or modify // it under the terms of the GNU General Public License. 
You should have // received a copy of the GPL license along with this program; if you // did not, you can find it at http://www.gnu.org/ // #include "sphinx.h" #include "sphinxint.h" #include "sphinxutils.h" #include #include #include #include #include #if USE_WINDOWS #define snprintf _snprintf #include #include #else #include #endif ///////////////////////////////////////////////////////////////////////////// bool g_bQuiet = false; bool g_bProgress = true; bool g_bPrintQueries = false; const char * g_sBuildStops = NULL; int g_iTopStops = 100; bool g_bRotate = false; bool g_bRotateEach = false; bool g_bBuildFreqs = false; int g_iMemLimit = 0; int g_iMaxXmlpipe2Field = 0; int g_iWriteBuffer = 0; int g_iMaxFileFieldBuffer = 1024*1024; ESphOnFileFieldError g_eOnFileFieldError = FFE_IGNORE_FIELD; const int EXT_COUNT = 8; const char * g_dExt[EXT_COUNT] = { "sph", "spa", "spi", "spd", "spp", "spm", "spk", "sps" }; char g_sMinidump[256]; #define ROTATE_MIN_INTERVAL 100000 // rotate interval 100 ms ///////////////////////////////////////////////////////////////////////////// template < typename T > struct CSphMTFHashEntry { CSphString m_sKey; CSphMTFHashEntry * m_pNext; int m_iSlot; T m_tValue; }; template < typename T, int SIZE, class HASHFUNC > class CSphMTFHash { public: /// ctor CSphMTFHash () { m_pData = new CSphMTFHashEntry * [ SIZE ]; for ( int i=0; i * pHead = m_pData[i]; while ( pHead ) { CSphMTFHashEntry * pNext = pHead->m_pNext; SafeDelete ( pHead ); pHead = pNext; } } } /// add record to hash /// OPTIMIZE: should pass T not by reference for simple types T & Add ( const char * sKey, int iKeyLen, T & tValue ) { DWORD uHash = HASHFUNC::Hash ( sKey ) % SIZE; // find matching entry CSphMTFHashEntry * pEntry = m_pData [ uHash ]; CSphMTFHashEntry * pPrev = NULL; while ( pEntry && strcmp ( sKey, pEntry->m_sKey.cstr() ) ) { pPrev = pEntry; pEntry = pEntry->m_pNext; } if ( !pEntry ) { // not found, add it, but don't MTF pEntry = new CSphMTFHashEntry; if ( iKeyLen ) pEntry->m_sKey.SetBinary ( sKey, iKeyLen ); else pEntry->m_sKey = sKey; pEntry->m_pNext = NULL; pEntry->m_iSlot = (int)uHash; pEntry->m_tValue = tValue; if ( !pPrev ) m_pData [ uHash ] = pEntry; else pPrev->m_pNext = pEntry; } else { // MTF on access if ( pPrev ) { pPrev->m_pNext = pEntry->m_pNext; pEntry->m_pNext = m_pData [ uHash ]; m_pData [ uHash ] = pEntry; } } return pEntry->m_tValue; } /// find first non-empty entry const CSphMTFHashEntry * FindFirst () { for ( int i=0; i * FindNext ( const CSphMTFHashEntry * pEntry ) { assert ( pEntry ); if ( pEntry->m_pNext ) return pEntry->m_pNext; for ( int i=1+pEntry->m_iSlot; i ** m_pData; }; #define HASH_FOREACH(_it,_hash) \ for ( _it=_hash.FindFirst(); _it; _it=_hash.FindNext(_it) ) ///////////////////////////////////////////////////////////////////////////// struct Word_t { const char * m_sWord; int m_iCount; }; inline bool operator < ( const Word_t & a, const Word_t & b) { return a.m_iCount < b.m_iCount; }; class CSphStopwordBuilderDict : public CSphDict { public: CSphStopwordBuilderDict () {} void Save ( const char * sOutput, int iTop, bool bFreqs ); public: virtual SphWordID_t GetWordID ( BYTE * pWord ); virtual SphWordID_t GetWordID ( const BYTE * pWord, int iLen, bool ); virtual void LoadStopwords ( const char *, ISphTokenizer * ) {} virtual bool LoadWordforms ( const char *, ISphTokenizer *, const char * ) { return true; } virtual bool SetMorphology ( const char *, bool, CSphString & ) { return true; } virtual void Setup ( const CSphDictSettings & tSettings ) { m_tSettings = 
tSettings; } virtual const CSphDictSettings & GetSettings () const { return m_tSettings; } virtual const CSphVector & GetStopwordsFileInfos () { return m_dSWFileInfos; } virtual const CSphSavedFile & GetWordformsFileInfo () { return m_tWFFileInfo; } virtual const CSphMultiformContainer * GetMultiWordforms () const { return NULL; } virtual bool IsStopWord ( const BYTE * ) const { return false; } protected: struct HashFunc_t { static inline DWORD Hash ( const char * sKey ) { return sphCRC32 ( (const BYTE*)sKey ); } }; protected: CSphMTFHash < int, 1048576, HashFunc_t > m_hWords; // fake setttings CSphDictSettings m_tSettings; CSphVector m_dSWFileInfos; CSphSavedFile m_tWFFileInfo; }; void CSphStopwordBuilderDict::Save ( const char * sOutput, int iTop, bool bFreqs ) { FILE * fp = fopen ( sOutput, "w+" ); if ( !fp ) return; CSphVector dTop; dTop.Reserve ( 1024 ); const CSphMTFHashEntry * it; HASH_FOREACH ( it, m_hWords ) { Word_t t; t.m_sWord = it->m_sKey.cstr(); t.m_iCount = it->m_tValue; dTop.Add ( t ); } dTop.RSort (); ARRAY_FOREACH ( i, dTop ) { if ( i>=iTop ) break; if ( bFreqs ) fprintf ( fp, "%s %d\n", dTop[i].m_sWord, dTop[i].m_iCount ); else fprintf ( fp, "%s\n", dTop[i].m_sWord ); } fclose ( fp ); } SphWordID_t CSphStopwordBuilderDict::GetWordID ( BYTE * pWord ) { int iZero = 0; m_hWords.Add ( (const char *)pWord, 0, iZero )++; return 1; } SphWordID_t CSphStopwordBuilderDict::GetWordID ( const BYTE * pWord, int iLen, bool ) { int iZero = 0; m_hWords.Add ( (const char *)pWord, iLen, iZero )++; return 1; } ///////////////////////////////////////////////////////////////////////////// void ShowProgress ( const CSphIndexProgress * pProgress, bool bPhaseEnd ) { // if in quiet mode, do not show anything at all // if in no-progress mode, only show phase ends if ( g_bQuiet || ( !g_bProgress && !bPhaseEnd ) ) return; fprintf ( stdout, "%s%c", pProgress->BuildMessage(), bPhaseEnd ? '\n' : '\r' ); fflush ( stdout ); } static void Logger ( ESphLogLevel eLevel, const char * sFmt, va_list ap ) { if ( eLevel>=SPH_LOG_DEBUG ) return; switch ( eLevel ) { case SPH_LOG_FATAL: fprintf ( stdout, "FATAL: " ); break; case SPH_LOG_WARNING: fprintf ( stdout, "WARNING: " ); break; case SPH_LOG_INFO: fprintf ( stdout, "WARNING: " ); break; case SPH_LOG_DEBUG: // yes, I know that this branch will never execute because of the condition above. 
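// (these unreachable debug-level labels are presumably kept so the switch stays exhaustive over ESphLogLevel and compilers do not warn about unhandled enum values)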
case SPH_LOG_VERBOSE_DEBUG: case SPH_LOG_VERY_VERBOSE_DEBUG: fprintf ( stdout, "DEBUG: " ); break; } vfprintf ( stdout, sFmt, ap ); fprintf ( stdout, "\n" ); } ///////////////////////////////////////////////////////////////////////////// /// parse multi-valued attr definition bool ParseMultiAttr ( const char * sBuf, CSphColumnInfo & tAttr, const char * sSourceName ) { // format is as follows: // // multi-valued-attr := ATTR-TYPE ATTR-NAME 'from' SOURCE-TYPE [;QUERY] [;RANGE-QUERY] // ATTR-TYPE := 'uint' | 'timestamp' | 'bigint' // SOURCE-TYPE := 'field' | 'query' | 'ranged-query' const char * sTok = NULL; int iTokLen = -1; #define LOC_ERR(_arg,_pos) \ { \ if ( !*(_pos) ) \ fprintf ( stdout, "ERROR: source '%s': unexpected end of line in sql_attr_multi.\n", sSourceName ); \ else \ fprintf ( stdout, "ERROR: source '%s': expected " _arg " in sql_attr_multi, got '%s'.\n", sSourceName, _pos ); \ return false; \ } #define LOC_SPACE0() { while ( isspace(*sBuf) ) sBuf++; } #define LOC_SPACE1() { if ( !isspace(*sBuf) ) LOC_ERR ( "token", sBuf ) ; LOC_SPACE0(); } #define LOC_TOK() { sTok = sBuf; while ( sphIsAlpha(*sBuf) ) sBuf++; iTokLen = sBuf-sTok; } #define LOC_TOKEQ(_arg) ( iTokLen==(int)strlen(_arg) && strncasecmp ( sTok, _arg, iTokLen )==0 ) #define LOC_TEXT() { if ( *sBuf!=';') LOC_ERR ( "';'", sBuf ); sTok = ++sBuf; while ( *sBuf && *sBuf!=';' ) sBuf++; iTokLen = sBuf-sTok; } // handle ATTR-TYPE LOC_SPACE0(); LOC_TOK(); if ( LOC_TOKEQ("uint") ) tAttr.m_eAttrType = SPH_ATTR_UINT32SET; else if ( LOC_TOKEQ("timestamp") ) tAttr.m_eAttrType = SPH_ATTR_UINT32SET; else if ( LOC_TOKEQ("bigint") ) tAttr.m_eAttrType = SPH_ATTR_UINT64SET; else LOC_ERR ( "attr type ('uint' or 'timestamp' or 'bigint')", sTok ); // handle ATTR-NAME LOC_SPACE1(); LOC_TOK (); if ( iTokLen ) tAttr.m_sName.SetBinary ( sTok, iTokLen ); else LOC_ERR ( "attr name", sTok ); // handle 'from' LOC_SPACE1(); LOC_TOK(); if ( !LOC_TOKEQ("from") ) LOC_ERR ( "'from' keyword", sTok ); // handle SOURCE-TYPE LOC_SPACE1(); LOC_TOK(); LOC_SPACE0(); if ( LOC_TOKEQ("field") ) tAttr.m_eSrc = SPH_ATTRSRC_FIELD; else if ( LOC_TOKEQ("query") ) tAttr.m_eSrc = SPH_ATTRSRC_QUERY; else if ( LOC_TOKEQ("ranged-query") ) tAttr.m_eSrc = SPH_ATTRSRC_RANGEDQUERY; else LOC_ERR ( "value source type ('field', or 'query', or 'ranged-query')", sTok ); if ( tAttr.m_eSrc==SPH_ATTRSRC_FIELD ) return true; // handle QUERY LOC_TEXT(); if ( iTokLen ) tAttr.m_sQuery.SetBinary ( sTok, iTokLen ); else LOC_ERR ( "query", sTok ); if ( tAttr.m_eSrc==SPH_ATTRSRC_QUERY ) return true; // handle RANGE-QUERY LOC_TEXT(); if ( iTokLen ) tAttr.m_sQueryRange.SetBinary ( sTok, iTokLen ); else LOC_ERR ( "range query", sTok ); #undef LOC_ERR #undef LOC_SPACE0 #undef LOC_SPACE1 #undef LOC_TOK #undef LOC_TOKEQ #undef LOC_TEXT return true; } #define LOC_CHECK(_hash,_key,_msg,_add) \ if (!( _hash.Exists ( _key ) )) \ { \ fprintf ( stdout, "ERROR: key '%s' not found " _msg "\n", _key, _add ); \ return false; \ } // get string #define LOC_GETS(_arg,_key) \ if ( hSource.Exists(_key) ) \ _arg = hSource[_key]; // get int #define LOC_GETI(_arg,_key) \ if ( hSource.Exists(_key) && hSource[_key].intval() ) \ _arg = hSource[_key].intval(); // get bool #define LOC_GETB(_arg,_key) \ if ( hSource.Exists(_key) ) \ _arg = ( hSource[_key].intval()!=0 ); // get array of strings #define LOC_GETA(_arg,_key) \ for ( CSphVariant * pVal = hSource(_key); pVal; pVal = pVal->m_pNext ) \ _arg.Add ( pVal->cstr() ); void SqlAttrsConfigure ( CSphSourceParams_SQL & tParams, const CSphVariant * pHead, ESphAttr 
eAttrType, const char * sSourceName, bool bIndexedAttr=false ) { for ( const CSphVariant * pCur = pHead; pCur; pCur= pCur->m_pNext ) { CSphColumnInfo tCol ( pCur->cstr(), eAttrType ); char * pColon = strchr ( const_cast ( tCol.m_sName.cstr() ), ':' ); if ( pColon ) { *pColon = '\0'; if ( eAttrType==SPH_ATTR_INTEGER ) { int iBits = strtol ( pColon+1, NULL, 10 ); if ( iBits<=0 || iBits>ROWITEM_BITS ) { fprintf ( stdout, "WARNING: source '%s': attribute '%s': invalid bitcount=%d (bitcount ignored)\n", sSourceName, tCol.m_sName.cstr(), iBits ); iBits = -1; } tCol.m_tLocator.m_iBitCount = iBits; } else { fprintf ( stdout, "WARNING: source '%s': attribute '%s': bitcount is only supported for integer types\n", sSourceName, tCol.m_sName.cstr() ); } } tParams.m_dAttrs.Add ( tCol ); if ( bIndexedAttr ) tParams.m_dAttrs.Last().m_bIndexed = true; } } #if USE_ZLIB bool ConfigureUnpack ( CSphVariant * pHead, ESphUnpackFormat eFormat, CSphSourceParams_SQL & tParams, const char * sSourceName ) { for ( CSphVariant * pVal = pHead; pVal; pVal = pVal->m_pNext ) { CSphUnpackInfo & tUnpack = tParams.m_dUnpack.Add(); tUnpack.m_sName = CSphString ( pVal->cstr() ); tUnpack.m_eFormat = eFormat; } return true; } #else bool ConfigureUnpack ( CSphVariant * pHead, ESphUnpackFormat, CSphSourceParams_SQL &, const char * sSourceName ) { if ( pHead ) { fprintf ( stdout, "ERROR: source '%s': unpack is not supported, rebuild with zlib\n", sSourceName ); return false; } return true; } #endif // USE_ZLIB bool ParseJoinedField ( const char * sBuf, CSphJoinedField * pField, const char * sSourceName ) { // sanity checks assert ( pField ); if ( !sBuf || !sBuf[0] ) { fprintf ( stdout, "ERROR: source '%s': sql_joined_field must not be empty.\n", sSourceName ); return false; } #define LOC_ERR(_exp) \ { \ fprintf ( stdout, "ERROR: source '%s': expected " _exp " in sql_joined_field, got '%s'.\n", sSourceName, sBuf ); \ return false; \ } #define LOC_TEXT() { if ( *sBuf!=';') LOC_ERR ( "';'" ); sTmp = ++sBuf; while ( *sBuf && *sBuf!=';' ) sBuf++; iTokLen = sBuf-sTmp; } // parse field name while ( isspace(*sBuf) ) sBuf++; const char * sName = sBuf; while ( sphIsAlpha(*sBuf) ) sBuf++; if ( sBuf==sName ) LOC_ERR ( "field name" ); pField->m_sName.SetBinary ( sName, sBuf-sName ); if ( !isspace(*sBuf) ) LOC_ERR ( "space" ); while ( isspace(*sBuf) ) sBuf++; // parse 'from' if ( strncasecmp ( sBuf, "from", 4 ) ) LOC_ERR ( "'from'" ); sBuf += 4; if ( !isspace(*sBuf) ) LOC_ERR ( "space" ); while ( isspace(*sBuf) ) sBuf++; bool bGotRanged = false; pField->m_bPayload = false; // parse 'query' if ( strncasecmp ( sBuf, "payload-query", 13 )==0 ) { pField->m_bPayload = true; sBuf += 13; } else if ( strncasecmp ( sBuf, "query", 5 )==0 ) { sBuf += 5; } else if ( strncasecmp ( sBuf, "ranged-query", 12 )==0 ) { bGotRanged = true; sBuf += 12; } else LOC_ERR ( "'query'" ); // parse ';' while ( isspace(*sBuf) && *sBuf!=';' ) sBuf++; if ( *sBuf!=';' ) LOC_ERR ( "';'" ); // handle QUERY const char * sTmp = sBuf; int iTokLen = 0; LOC_TEXT(); if ( iTokLen ) pField->m_sQuery.SetBinary ( sTmp, iTokLen ); else LOC_ERR ( "query" ); if ( !bGotRanged ) return true; // handle RANGE-QUERY LOC_TEXT(); if ( iTokLen ) pField->m_sRanged.SetBinary ( sTmp, iTokLen ); else LOC_ERR ( "range query" ); #undef LOC_ERR #undef LOC_TEXT return true; } bool SqlParamsConfigure ( CSphSourceParams_SQL & tParams, const CSphConfigSection & hSource, const char * sSourceName ) { if ( !hSource.Exists("odbc_dsn") ) // in case of odbc source, the host, user, pass and db are not mandatory, since 
they may be already defined in dsn string. { LOC_CHECK ( hSource, "sql_host", "in source '%s'", sSourceName ); LOC_CHECK ( hSource, "sql_user", "in source '%s'", sSourceName ); LOC_CHECK ( hSource, "sql_pass", "in source '%s'", sSourceName ); LOC_CHECK ( hSource, "sql_db", "in source '%s'", sSourceName ); } LOC_CHECK ( hSource, "sql_query", "in source '%s'", sSourceName ); LOC_GETS ( tParams.m_sHost, "sql_host" ); LOC_GETS ( tParams.m_sUser, "sql_user" ); LOC_GETS ( tParams.m_sPass, "sql_pass" ); LOC_GETS ( tParams.m_sDB, "sql_db" ); LOC_GETI ( tParams.m_iPort, "sql_port" ); LOC_GETS ( tParams.m_sQuery, "sql_query" ); LOC_GETA ( tParams.m_dQueryPre, "sql_query_pre" ); LOC_GETA ( tParams.m_dQueryPost, "sql_query_post" ); LOC_GETS ( tParams.m_sQueryRange, "sql_query_range" ); LOC_GETA ( tParams.m_dQueryPostIndex, "sql_query_post_index" ); LOC_GETI ( tParams.m_iRangeStep, "sql_range_step" ); LOC_GETS ( tParams.m_sQueryKilllist, "sql_query_killlist" ); LOC_GETI ( tParams.m_iRangedThrottle, "sql_ranged_throttle" ); SqlAttrsConfigure ( tParams, hSource("sql_group_column"), SPH_ATTR_INTEGER, sSourceName ); SqlAttrsConfigure ( tParams, hSource("sql_date_column"), SPH_ATTR_TIMESTAMP, sSourceName ); SqlAttrsConfigure ( tParams, hSource("sql_str2ordinal_column"), SPH_ATTR_ORDINAL, sSourceName ); SqlAttrsConfigure ( tParams, hSource("sql_attr_uint"), SPH_ATTR_INTEGER, sSourceName ); SqlAttrsConfigure ( tParams, hSource("sql_attr_timestamp"), SPH_ATTR_TIMESTAMP, sSourceName ); SqlAttrsConfigure ( tParams, hSource("sql_attr_str2ordinal"), SPH_ATTR_ORDINAL, sSourceName ); SqlAttrsConfigure ( tParams, hSource("sql_attr_bool"), SPH_ATTR_BOOL, sSourceName ); SqlAttrsConfigure ( tParams, hSource("sql_attr_float"), SPH_ATTR_FLOAT, sSourceName ); SqlAttrsConfigure ( tParams, hSource("sql_attr_bigint"), SPH_ATTR_BIGINT, sSourceName ); SqlAttrsConfigure ( tParams, hSource("sql_attr_string"), SPH_ATTR_STRING, sSourceName ); SqlAttrsConfigure ( tParams, hSource("sql_attr_str2wordcount"), SPH_ATTR_WORDCOUNT, sSourceName ); SqlAttrsConfigure ( tParams, hSource("sql_field_string"), SPH_ATTR_STRING, sSourceName, true ); SqlAttrsConfigure ( tParams, hSource("sql_field_str2wordcount"), SPH_ATTR_STRING, sSourceName, true ); LOC_GETA ( tParams.m_dFileFields, "sql_file_field" ); tParams.m_iMaxFileBufferSize = g_iMaxFileFieldBuffer; tParams.m_iRefRangeStep = tParams.m_iRangeStep; tParams.m_eOnFileFieldError = g_eOnFileFieldError; // unpack if ( !ConfigureUnpack ( hSource("unpack_zlib"), SPH_UNPACK_ZLIB, tParams, sSourceName ) ) return false; if ( !ConfigureUnpack ( hSource("unpack_mysqlcompress"), SPH_UNPACK_MYSQL_COMPRESS, tParams, sSourceName ) ) return false; tParams.m_uUnpackMemoryLimit = hSource.GetSize ( "unpack_mysqlcompress_maxsize", 16777216 ); // parse multi-attrs for ( CSphVariant * pVal = hSource("sql_attr_multi"); pVal; pVal = pVal->m_pNext ) { CSphColumnInfo tAttr; if ( !ParseMultiAttr ( pVal->cstr(), tAttr, sSourceName ) ) return false; tParams.m_dAttrs.Add ( tAttr ); } // parse joined fields for ( CSphVariant * pVal = hSource("sql_joined_field"); pVal; pVal = pVal->m_pNext ) if ( !ParseJoinedField ( pVal->cstr(), &tParams.m_dJoinedFields.Add(), sSourceName ) ) return false; // make sure attr names are unique ARRAY_FOREACH ( i, tParams.m_dAttrs ) for ( int j = i + 1; j < tParams.m_dAttrs.GetLength(); j++ ) { const CSphString & sName = tParams.m_dAttrs[i].m_sName; if ( sName==tParams.m_dAttrs[j].m_sName ) { fprintf ( stdout, "ERROR: duplicate attribute name: %s\n", sName.cstr() ); return false; } } // 
additional checks if ( tParams.m_iRangedThrottle<0 ) { fprintf ( stdout, "WARNING: sql_ranged_throttle must not be negative; throttling disabled\n" ); tParams.m_iRangedThrottle = 0; } // debug printer if ( g_bPrintQueries ) tParams.m_bPrintQueries = true; return true; } #if USE_PGSQL CSphSource * SpawnSourcePgSQL ( const CSphConfigSection & hSource, const char * sSourceName ) { assert ( hSource["type"]=="pgsql" ); CSphSourceParams_PgSQL tParams; if ( !SqlParamsConfigure ( tParams, hSource, sSourceName ) ) return NULL; LOC_GETS ( tParams.m_sClientEncoding, "sql_client_encoding" ); CSphSource_PgSQL * pSrcPgSQL = new CSphSource_PgSQL ( sSourceName ); if ( !pSrcPgSQL->Setup ( tParams ) ) SafeDelete ( pSrcPgSQL ); return pSrcPgSQL; } #endif // USE_PGSQL #if USE_MYSQL CSphSource * SpawnSourceMySQL ( const CSphConfigSection & hSource, const char * sSourceName ) { assert ( hSource["type"]=="mysql" ); CSphSourceParams_MySQL tParams; if ( !SqlParamsConfigure ( tParams, hSource, sSourceName ) ) return NULL; LOC_GETS ( tParams.m_sUsock, "sql_sock" ); LOC_GETI ( tParams.m_iFlags, "mysql_connect_flags" ); LOC_GETS ( tParams.m_sSslKey, "mysql_ssl_key" ); LOC_GETS ( tParams.m_sSslCert, "mysql_ssl_cert" ); LOC_GETS ( tParams.m_sSslCA, "mysql_ssl_ca" ); CSphSource_MySQL * pSrcMySQL = new CSphSource_MySQL ( sSourceName ); if ( !pSrcMySQL->Setup ( tParams ) ) SafeDelete ( pSrcMySQL ); return pSrcMySQL; } #endif // USE_MYSQL #if USE_ODBC CSphSource * SpawnSourceODBC ( const CSphConfigSection & hSource, const char * sSourceName ) { assert ( hSource["type"]=="odbc" ); CSphSourceParams_ODBC tParams; if ( !SqlParamsConfigure ( tParams, hSource, sSourceName ) ) return NULL; LOC_GETS ( tParams.m_sOdbcDSN, "odbc_dsn" ); LOC_GETS ( tParams.m_sColBuffers, "sql_column_buffers" ); CSphSource_ODBC * pSrc = new CSphSource_ODBC ( sSourceName ); if ( !pSrc->Setup ( tParams ) ) SafeDelete ( pSrc ); return pSrc; } CSphSource * SpawnSourceMSSQL ( const CSphConfigSection & hSource, const char * sSourceName ) { assert ( hSource["type"]=="mssql" ); CSphSourceParams_ODBC tParams; if ( !SqlParamsConfigure ( tParams, hSource, sSourceName ) ) return NULL; LOC_GETB ( tParams.m_bWinAuth, "mssql_winauth" ); LOC_GETB ( tParams.m_bUnicode, "mssql_unicode" ); LOC_GETS ( tParams.m_sColBuffers, "sql_column_buffers" ); CSphSource_MSSQL * pSrc = new CSphSource_MSSQL ( sSourceName ); if ( !pSrc->Setup ( tParams ) ) SafeDelete ( pSrc ); return pSrc; } #endif // USE_ODBC CSphSource * SpawnSourceXMLPipe ( const CSphConfigSection & hSource, const char * sSourceName, bool bUTF8 ) { assert ( hSource["type"]=="xmlpipe" || hSource["type"]=="xmlpipe2" ); LOC_CHECK ( hSource, "xmlpipe_command", "in source '%s'.", sSourceName ); CSphSource * pSrcXML = NULL; CSphString sCommand = hSource["xmlpipe_command"]; const int MAX_BUF_SIZE = 1024; BYTE dBuffer [MAX_BUF_SIZE]; int iBufSize = 0; bool bUsePipe2 = true; FILE * pPipe = sphDetectXMLPipe ( sCommand.cstr (), dBuffer, iBufSize, MAX_BUF_SIZE, bUsePipe2 ); if ( !pPipe ) { fprintf ( stdout, "ERROR: xmlpipe: failed to popen '%s'", sCommand.cstr() ); return NULL; } if ( bUsePipe2 ) { #if USE_LIBEXPAT || USE_LIBXML pSrcXML = sphCreateSourceXmlpipe2 ( &hSource, pPipe, dBuffer, iBufSize, sSourceName, g_iMaxXmlpipe2Field ); if ( !bUTF8 ) { SafeDelete ( pSrcXML ); fprintf ( stdout, "ERROR: source '%s': xmlpipe2 should only be used with charset_type=utf-8\n", sSourceName ); } #else fprintf ( stdout, "WARNING: source '%s': xmlpipe2 support NOT compiled in. 
To use xmlpipe2, install missing XML libraries, reconfigure, and rebuild Sphinx\n", sSourceName ); #endif } else { CSphSource_XMLPipe * pXmlPipe = new CSphSource_XMLPipe ( dBuffer, iBufSize, sSourceName ); if ( !pXmlPipe->Setup ( pPipe, sCommand.cstr () ) ) SafeDelete ( pXmlPipe ); pSrcXML = pXmlPipe; } return pSrcXML; } CSphSource * SpawnSource ( const CSphConfigSection & hSource, const char * sSourceName, bool bUTF8, bool bWordDict ) { if ( !hSource.Exists ( "type" ) ) { fprintf ( stdout, "ERROR: source '%s': type not found; skipping.\n", sSourceName ); return NULL; } #if USE_PGSQL if ( hSource["type"]=="pgsql" ) return SpawnSourcePgSQL ( hSource, sSourceName ); #endif #if USE_MYSQL if ( hSource["type"]=="mysql" ) return SpawnSourceMySQL ( hSource, sSourceName ); #endif #if USE_ODBC if ( hSource["type"]=="odbc" ) return SpawnSourceODBC ( hSource, sSourceName ); if ( hSource["type"]=="mssql" ) return SpawnSourceMSSQL ( hSource, sSourceName ); #endif if ( hSource["type"]=="xmlpipe" && bWordDict ) { fprintf ( stdout, "ERROR: source '%s': type xmlpipe incompatible with dict=keywords option use xmlpipe2 instead; skipping.\n", sSourceName ); return NULL; } if ( hSource["type"]=="xmlpipe" || hSource["type"]=="xmlpipe2" ) return SpawnSourceXMLPipe ( hSource, sSourceName, bUTF8 ); fprintf ( stdout, "ERROR: source '%s': unknown type '%s'; skipping.\n", sSourceName, hSource["type"].cstr() ); return NULL; } #undef LOC_CHECK #undef LOC_GETS #undef LOC_GETI #undef LOC_GETA ////////////////////////////////////////////////////////////////////////// // INDEXING ////////////////////////////////////////////////////////////////////////// bool DoIndex ( const CSphConfigSection & hIndex, const char * sIndexName, const CSphConfigType & hSources, bool bVerbose, FILE * fpDumpRows ) { // check index type bool bPlain = true; if ( hIndex("type") ) { const CSphString & sType = hIndex["type"]; bPlain = ( sType=="plain" ); if ( sType!="plain" && sType!="distributed" && sType!="rt" ) { fprintf ( stdout, "ERROR: index '%s': unknown type '%s'; fix your config file.\n", sIndexName, sType.cstr() ); fflush ( stdout ); return false; } } if ( !bPlain ) { if ( !g_bQuiet ) { fprintf ( stdout, "skipping non-plain index '%s'...\n", sIndexName ); fflush ( stdout ); } return false; } // progress bar if ( !g_bQuiet ) { fprintf ( stdout, "indexing index '%s'...\n", sIndexName ); fflush ( stdout ); } // check config if ( !hIndex("path") ) { fprintf ( stdout, "ERROR: index '%s': key 'path' not found.\n", sIndexName ); return false; } bool bInfix = hIndex.GetInt ( "min_infix_len", 0 ) > 0; if ( ( hIndex.GetInt ( "min_prefix_len", 0 ) > 0 || bInfix ) && hIndex.GetInt ( "enable_star" )==0 ) { const char * szMorph = hIndex.GetStr ( "morphology", "" ); if ( szMorph && *szMorph && strcmp ( szMorph, "none" ) ) { fprintf ( stdout, "ERROR: index '%s': infixes and morphology are enabled, enable_star=0\n", sIndexName ); return false; } } /////////////////// // spawn tokenizer /////////////////// CSphString sError; CSphTokenizerSettings tTokSettings; if ( !sphConfTokenizer ( hIndex, tTokSettings, sError ) ) sphDie ( "index '%s': %s", sIndexName, sError.cstr() ); ISphTokenizer * pTokenizer = ISphTokenizer::Create ( tTokSettings, sError ); if ( !pTokenizer ) sphDie ( "index '%s': %s", sIndexName, sError.cstr() ); // enable sentence indexing on tokenizer // (not in Create() because search time tokenizer does not care) bool bIndexSP = ( hIndex.GetInt ( "index_sp" )!=0 ); if ( bIndexSP ) if ( !pTokenizer->EnableSentenceIndexing ( sError ) ) sphDie ( 
"index '%s': %s", sIndexName, sError.cstr() ); if ( hIndex("index_zones") ) if ( !pTokenizer->EnableZoneIndexing ( sError ) ) sphDie ( "index '%s': %s", sIndexName, sError.cstr() ); CSphDict * pDict = NULL; CSphDictSettings tDictSettings; if ( !g_sBuildStops ) { ISphTokenizer * pTokenFilter = NULL; sphConfDictionary ( hIndex, tDictSettings ); // FIXME! no support for infixes in keywords dict yet if ( tDictSettings.m_bWordDict && bInfix ) { tDictSettings.m_bWordDict = false; fprintf ( stdout, "WARNING: min_infix_len is not supported yet with dict=keywords; using dict=crc\n" ); } pDict = tDictSettings.m_bWordDict ? sphCreateDictionaryKeywords ( tDictSettings, pTokenizer, sError, sIndexName ) : sphCreateDictionaryCRC ( tDictSettings, pTokenizer, sError, sIndexName ); if ( !pDict ) sphDie ( "index '%s': %s", sIndexName, sError.cstr() ); if ( !sError.IsEmpty () ) fprintf ( stdout, "WARNING: index '%s': %s\n", sIndexName, sError.cstr() ); pTokenFilter = ISphTokenizer::CreateTokenFilter ( pTokenizer, pDict->GetMultiWordforms () ); pTokenizer = pTokenFilter ? pTokenFilter : pTokenizer; } // boundary bool bInplaceEnable = hIndex.GetInt ( "inplace_enable", 0 )!=0; int iHitGap = hIndex.GetSize ( "inplace_hit_gap", 0 ); int iDocinfoGap = hIndex.GetSize ( "inplace_docinfo_gap", 0 ); float fRelocFactor = hIndex.GetFloat ( "inplace_reloc_factor", 0.1f ); float fWriteFactor = hIndex.GetFloat ( "inplace_write_factor", 0.1f ); if ( bInplaceEnable ) { if ( fRelocFactor < 0.01f || fRelocFactor > 0.9f ) { fprintf ( stdout, "WARNING: inplace_reloc_factor must be 0.01 to 0.9, clamped\n" ); fRelocFactor = Min ( Max ( fRelocFactor, 0.01f ), 0.9f ); } if ( fWriteFactor < 0.01f || fWriteFactor > 0.9f ) { fprintf ( stdout, "WARNING: inplace_write_factor must be 0.01 to 0.9, clamped\n" ); fWriteFactor = Min ( Max ( fWriteFactor, 0.01f ), 0.9f ); } if ( fWriteFactor+fRelocFactor > 1.0f ) { fprintf ( stdout, "WARNING: inplace_write_factor+inplace_reloc_factor must be less than 0.9, scaled\n" ); float fScale = 0.9f/(fWriteFactor+fRelocFactor); fRelocFactor *= fScale; fWriteFactor *= fScale; } } ///////////////////// // spawn datasources ///////////////////// // check for per-index HTML stipping override bool bStripOverride = false; bool bHtmlStrip = false; CSphString sHtmlIndexAttrs, sHtmlRemoveElements; if ( hIndex("html_strip") ) { bStripOverride = true; bHtmlStrip = hIndex.GetInt ( "html_strip" )!=0; sHtmlIndexAttrs = hIndex.GetStr ( "html_index_attrs" ); sHtmlRemoveElements = hIndex.GetStr ( "html_remove_elements" ); } else { if ( bIndexSP ) sphWarning ( "index '%s': index_sp=1 requires html_strip=1 to index paragraphs", sIndexName ); if ( hIndex("index_zones") ) sphDie ( "index '%s': index_zones requires html_strip=1", sIndexName ); } // parse all sources CSphVector dSources; bool bGotAttrs = false; bool bGotJoinedFields = false; bool bSpawnFailed = false; for ( CSphVariant * pSourceName = hIndex("source"); pSourceName; pSourceName = pSourceName->m_pNext ) { if ( !hSources ( pSourceName->cstr() ) ) { fprintf ( stdout, "ERROR: index '%s': source '%s' not found.\n", sIndexName, pSourceName->cstr() ); continue; } const CSphConfigSection & hSource = hSources [ pSourceName->cstr() ]; CSphSource * pSource = SpawnSource ( hSource, pSourceName->cstr(), pTokenizer->IsUtf8 (), tDictSettings.m_bWordDict ); if ( !pSource ) { bSpawnFailed = true; continue; } if ( pSource->HasAttrsConfigured() ) bGotAttrs = true; if ( pSource->HasJoinedFields() ) bGotJoinedFields = true; // strip_html, index_html_attrs CSphString sError; if ( 
bStripOverride ) { // apply per-index overrides if ( bHtmlStrip ) { if ( !pSource->SetStripHTML ( sHtmlIndexAttrs.cstr(), sHtmlRemoveElements.cstr(), bIndexSP, hIndex.GetStr("index_zones"), sError ) ) { fprintf ( stdout, "ERROR: source '%s': %s.\n", pSourceName->cstr(), sError.cstr() ); return false; } } } else if ( hSource.GetInt ( "strip_html" ) ) { // apply deprecated per-source settings if there are no overrides if ( !pSource->SetStripHTML ( hSource.GetStr ( "index_html_attrs" ), "", false, NULL, sError ) ) { fprintf ( stdout, "ERROR: source '%s': %s.\n", pSourceName->cstr(), sError.cstr() ); return false; } } pSource->SetTokenizer ( pTokenizer ); pSource->SetDumpRows ( fpDumpRows ); dSources.Add ( pSource ); } if ( bSpawnFailed ) { fprintf ( stdout, "ERROR: index '%s': failed to configure some of the sources, will not index.\n", sIndexName ); return false; } if ( !dSources.GetLength() ) { fprintf ( stdout, "ERROR: index '%s': no valid sources configured; skipping.\n", sIndexName ); return false; } /////////// // do work /////////// int64_t tmTime = sphMicroTimer(); bool bOK = false; if ( g_sBuildStops ) { /////////////////// // build stopwords /////////////////// if ( !g_bQuiet ) { fprintf ( stdout, "building stopwords list...\n" ); fflush ( stdout ); } CSphStopwordBuilderDict tDict; ARRAY_FOREACH ( i, dSources ) { CSphString sError; dSources[i]->SetDict ( &tDict ); if ( !dSources[i]->Connect ( sError ) || !dSources[i]->IterateStart ( sError ) ) { if ( !sError.IsEmpty() ) fprintf ( stdout, "ERROR: index '%s': %s\n", sIndexName, sError.cstr() ); continue; } while ( dSources[i]->IterateDocument ( sError ) && dSources[i]->m_tDocInfo.m_iDocID ) while ( dSources[i]->IterateHits ( sError ) ) { } } tDict.Save ( g_sBuildStops, g_iTopStops, g_bBuildFreqs ); SafeDelete ( pTokenizer ); } else { ////////// // index! ////////// // if searchd is running, we want to reindex to .tmp files CSphString sIndexPath; sIndexPath.SetSprintf ( g_bRotate ? "%s.tmp" : "%s", hIndex["path"].cstr() ); // do index CSphIndex * pIndex = sphCreateIndexPhrase ( sIndexName, sIndexPath.cstr() ); assert ( pIndex ); // check lock file if ( !pIndex->Lock() ) { fprintf ( stdout, "FATAL: %s, will not index. 
Try --rotate option.\n", pIndex->GetLastError().cstr() ); exit ( 1 ); } CSphString sError; CSphIndexSettings tSettings; if ( !sphConfIndex ( hIndex, tSettings, sError ) ) sphDie ( "index '%s': %s.", sIndexName, sError.cstr() ); tSettings.m_bVerbose = bVerbose; if ( tSettings.m_bIndexExactWords && !pDict->HasMorphology () ) { tSettings.m_bIndexExactWords = false; fprintf ( stdout, "WARNING: index '%s': no morphology, index_exact_words=1 has no effect, ignoring\n", sIndexName ); } if ( tDictSettings.m_bWordDict && pDict->HasMorphology() && tSettings.m_iMinPrefixLen && !tSettings.m_bIndexExactWords ) { tSettings.m_bIndexExactWords = true; fprintf ( stdout, "WARNING: index '%s': dict=keywords and prefixes and morphology enabled, forcing index_exact_words=1\n", sIndexName ); } if ( bGotAttrs && tSettings.m_eDocinfo==SPH_DOCINFO_NONE ) { fprintf ( stdout, "FATAL: index '%s': got attributes, but docinfo is 'none' (fix your config file).\n", sIndexName ); exit ( 1 ); } if ( bGotJoinedFields && tSettings.m_eDocinfo==SPH_DOCINFO_INLINE ) { fprintf ( stdout, "FATAL: index '%s': got joined fields, but docinfo is 'inline' (fix your config file).\n", sIndexName ); exit ( 1 ); } pIndex->SetProgressCallback ( ShowProgress ); if ( bInplaceEnable ) pIndex->SetInplaceSettings ( iHitGap, iDocinfoGap, fRelocFactor, fWriteFactor ); pIndex->SetTokenizer ( pTokenizer ); pIndex->SetDictionary ( pDict ); pIndex->Setup ( tSettings ); bOK = pIndex->Build ( dSources, g_iMemLimit, g_iWriteBuffer )!=0; if ( bOK && g_bRotate ) { sIndexPath.SetSprintf ( "%s.new", hIndex["path"].cstr() ); bOK = pIndex->Rename ( sIndexPath.cstr() ); } if ( !bOK ) fprintf ( stdout, "ERROR: index '%s': %s.\n", sIndexName, pIndex->GetLastError().cstr() ); if ( !pIndex->GetLastWarning().IsEmpty() ) fprintf ( stdout, "WARNING: index '%s': %s.\n", sIndexName, pIndex->GetLastWarning().cstr() ); pIndex->Unlock (); SafeDelete ( pIndex ); } // trip report tmTime = sphMicroTimer() - tmTime; if ( !g_bQuiet ) { tmTime = Max ( tmTime, 1 ); int64_t iTotalDocs = 0; int64_t iTotalBytes = 0; ARRAY_FOREACH ( i, dSources ) { const CSphSourceStats & tSource = dSources[i]->GetStats(); iTotalDocs += tSource.m_iTotalDocuments; iTotalBytes += tSource.m_iTotalBytes; } fprintf ( stdout, "total %d docs, "INT64_FMT" bytes\n", (int)iTotalDocs, iTotalBytes ); fprintf ( stdout, "total %d.%03d sec, %d bytes/sec, %d.%02d docs/sec\n", (int)(tmTime/1000000), (int)(tmTime%1000000)/1000, // sec (int)(iTotalBytes*1000000/tmTime), // bytes/sec (int)(iTotalDocs*1000000/tmTime), (int)(iTotalDocs*1000000*100/tmTime)%100 ); // docs/sec } // cleanup and go on ARRAY_FOREACH ( i, dSources ) SafeDelete ( dSources[i] ); return bOK; } ////////////////////////////////////////////////////////////////////////// // MERGING ////////////////////////////////////////////////////////////////////////// bool DoMerge ( const CSphConfigSection & hDst, const char * sDst, const CSphConfigSection & hSrc, const char * sSrc, CSphVector & tPurge, bool bRotate, bool bMergeKillLists ) { // progress bar if ( !g_bQuiet ) { fprintf ( stdout, "merging index '%s' into index '%s'...\n", sSrc, sDst ); fflush ( stdout ); } // check config if ( !hDst("path") ) { fprintf ( stdout, "ERROR: index '%s': key 'path' not found.\n", sDst ); return false; } if ( !hSrc("path") ) { fprintf ( stdout, "ERROR: index '%s': key 'path' not found.\n", sSrc ); return false; } // do the merge CSphIndex * pSrc = sphCreateIndexPhrase ( NULL, hSrc["path"].cstr() ); CSphIndex * pDst = sphCreateIndexPhrase ( NULL, hDst["path"].cstr() ); assert 
( pSrc ); assert ( pDst ); CSphString sError; if ( !sphFixupIndexSettings ( pSrc, hSrc, sError ) ) { fprintf ( stdout, "ERROR: index '%s': %s\n", sSrc, sError.cstr () ); return false; } if ( !sphFixupIndexSettings ( pDst, hDst, sError ) ) { fprintf ( stdout, "ERROR: index '%s': %s\n", sDst, sError.cstr () ); return false; } pSrc->SetWordlistPreload ( hSrc.GetInt ( "ondisk_dict" )==0 ); pDst->SetWordlistPreload ( hDst.GetInt ( "ondisk_dict" )==0 ); if ( !pSrc->Lock() && !bRotate ) { fprintf ( stdout, "ERROR: index '%s' is already locked; lock: %s\n", sSrc, pSrc->GetLastError().cstr() ); return false; } if ( !pDst->Lock() && !bRotate ) { fprintf ( stdout, "ERROR: index '%s' is already locked; lock: %s\n", sDst, pDst->GetLastError().cstr() ); return false; } pDst->SetProgressCallback ( ShowProgress ); int64_t tmMergeTime = sphMicroTimer(); if ( !pDst->Merge ( pSrc, tPurge, bMergeKillLists ) ) sphDie ( "failed to merge index '%s' into index '%s': %s", sSrc, sDst, pDst->GetLastError().cstr() ); if ( !pDst->GetLastWarning().IsEmpty() ) fprintf ( stdout, "WARNING: index '%s': %s\n", sDst, pDst->GetLastWarning().cstr() ); tmMergeTime = sphMicroTimer() - tmMergeTime; if ( !g_bQuiet ) printf ( "merged in %d.%03d sec\n", (int)(tmMergeTime/1000000), (int)(tmMergeTime%1000000)/1000 ); // pick up merge result const char * sPath = hDst["path"].cstr(); char sFrom [ SPH_MAX_FILENAME_LEN ]; char sTo [ SPH_MAX_FILENAME_LEN ]; struct stat tFileInfo; int iExt; for ( iExt=0; iExtUnlock(); pDst->Unlock(); } SafeDelete ( pSrc ); SafeDelete ( pDst ); // all good? return ( iExt==EXT_COUNT ); } ////////////////////////////////////////////////////////////////////////// // ENTRY ////////////////////////////////////////////////////////////////////////// void ReportIOStats ( const char * sType, int iReads, int64_t iReadTime, int64_t iReadBytes ) { if ( iReads==0 ) { fprintf ( stdout, "total %d %s, %d.%03d sec, 0.0 kb/call avg, 0.0 msec/call avg\n", iReads, sType, (int)(iReadTime/1000000), (int)(iReadTime%1000000)/1000 ); } else { iReadBytes /= iReads; fprintf ( stdout, "total %d %s, %d.%03d sec, %d.%d kb/call avg, %d.%d msec/call avg\n", iReads, sType, (int)(iReadTime/1000000), (int)(iReadTime%1000000)/1000, (int)(iReadBytes/1024), (int)(iReadBytes%1024)*10/1024, (int)(iReadTime/iReads/1000), (int)(iReadTime/iReads/100)%10 ); } } extern int64_t g_iIndexerCurrentDocID; extern int64_t g_iIndexerCurrentHits; extern int64_t g_iIndexerCurrentRangeMin; extern int64_t g_iIndexerCurrentRangeMax; extern int64_t g_iIndexerPoolStartDocID; extern int64_t g_iIndexerPoolStartHit; #if !USE_WINDOWS void sigsegv ( int sig ) { sphSafeInfo ( STDERR_FILENO, "*** Oops, indexer crashed! Please send the following report to developers." 
); sphSafeInfo ( STDERR_FILENO, "Sphinx " SPHINX_VERSION ); sphSafeInfo ( STDERR_FILENO, "-------------- report begins here ---------------" ); sphSafeInfo ( STDERR_FILENO, "Current document: docid=%l, hits=%l", g_iIndexerCurrentDocID, g_iIndexerCurrentHits ); sphSafeInfo ( STDERR_FILENO, "Current batch: minid=%l, maxid=%l", g_iIndexerCurrentRangeMin, g_iIndexerCurrentRangeMax ); sphSafeInfo ( STDERR_FILENO, "Hit pool start: docid=%l, hit=%l", g_iIndexerPoolStartDocID, g_iIndexerPoolStartHit ); sphBacktrace ( STDERR_FILENO ); CRASH_EXIT; } void SetSignalHandlers () { struct sigaction sa; sigfillset ( &sa.sa_mask ); bool bSignalsSet = false; for ( ;; ) { sa.sa_flags = SA_NOCLDSTOP; sa.sa_handler = SIG_IGN; if ( sigaction ( SIGCHLD, &sa, NULL )!=0 ) break; sa.sa_flags |= SA_RESETHAND; sa.sa_handler = sigsegv; if ( sigaction ( SIGSEGV, &sa, NULL )!=0 ) break; sa.sa_handler = sigsegv; if ( sigaction ( SIGBUS, &sa, NULL )!=0 ) break; sa.sa_handler = sigsegv; if ( sigaction ( SIGABRT, &sa, NULL )!=0 ) break; sa.sa_handler = sigsegv; if ( sigaction ( SIGILL, &sa, NULL )!=0 ) break; sa.sa_handler = sigsegv; if ( sigaction ( SIGFPE, &sa, NULL )!=0 ) break; bSignalsSet = true; break; } if ( !bSignalsSet ) { fprintf ( stderr, "sigaction(): %s", strerror(errno) ); exit ( 1 ); } } #else // if USE_WINDOWS LONG WINAPI sigsegv ( EXCEPTION_POINTERS * pExc ) { const char * sFail1 = "*** Oops, indexer crashed! Please send "; const char * sFail2 = " minidump file to developers.\n"; const char * sFailVer = "Sphinx " SPHINX_VERSION "\n"; sphBacktrace ( pExc, g_sMinidump ); ::write ( STDERR_FILENO, sFail1, strlen(sFail1) ); ::write ( STDERR_FILENO, g_sMinidump, strlen(g_sMinidump) ); ::write ( STDERR_FILENO, sFail2, strlen(sFail2) ); ::write ( STDERR_FILENO, sFailVer, strlen(sFailVer) ); CRASH_EXIT; } void SetSignalHandlers () { snprintf ( g_sMinidump, sizeof(g_sMinidump), "indexer.%d.mdmp", GetCurrentProcessId() ); SetUnhandledExceptionFilter ( sigsegv ); } #endif // USE_WINDOWS bool SendRotate ( int iPID, bool bForce ) { if ( iPID<0 ) return false; if ( !( g_bRotate && ( g_bRotateEach || bForce ) ) ) return false; #if USE_WINDOWS char szPipeName[64]; snprintf ( szPipeName, sizeof(szPipeName), "\\\\.\\pipe\\searchd_%d", iPID ); HANDLE hPipe = INVALID_HANDLE_VALUE; while ( hPipe==INVALID_HANDLE_VALUE ) { hPipe = CreateFile ( szPipeName, GENERIC_WRITE, 0, NULL, OPEN_EXISTING, 0, NULL ); if ( hPipe==INVALID_HANDLE_VALUE ) { if ( GetLastError()!=ERROR_PIPE_BUSY ) { fprintf ( stdout, "WARNING: could not open pipe (GetLastError()=%d)\n", GetLastError () ); return false; } if ( !WaitNamedPipe ( szPipeName, 1000 ) ) { fprintf ( stdout, "WARNING: could not open pipe (GetLastError()=%d)\n", GetLastError () ); return false; } } } if ( hPipe!=INVALID_HANDLE_VALUE ) { DWORD uWritten = 0; BYTE uWrite = 0; BOOL bResult = WriteFile ( hPipe, &uWrite, 1, &uWritten, NULL ); if ( bResult ) fprintf ( stdout, "rotating indices: succesfully sent SIGHUP to searchd (pid=%d).\n", iPID ); else fprintf ( stdout, "WARNING: failed to send SIGHUP to searchd (pid=%d, GetLastError()=%d)\n", iPID, GetLastError () ); CloseHandle ( hPipe ); } #else // signal int iErr = kill ( iPID, SIGHUP ); if ( iErr==0 ) { if ( !g_bQuiet ) fprintf ( stdout, "rotating indices: succesfully sent SIGHUP to searchd (pid=%d).\n", iPID ); } else { switch ( errno ) { case ESRCH: fprintf ( stdout, "WARNING: no process found by PID %d.\n", iPID ); break; case EPERM: fprintf ( stdout, "WARNING: access denied to PID %d.\n", iPID ); break; default: fprintf ( stdout, 
"WARNING: kill() error: %s.\n", strerror(errno) ); break; } return false; } #endif // all ok return true; } int main ( int argc, char ** argv ) { sphSetLogger ( Logger ); const char * sOptConfig = NULL; bool bMerge = false; CSphVector dMergeDstFilters; CSphVector dIndexes; bool bIndexAll = false; bool bMergeKillLists = false; bool bVerbose = false; CSphString sDumpRows; int i; for ( i=1; i='a' && argv[i][0]<='z' ) || ( argv[i][0]>='A' && argv[i][0]<='Z' ) ) { dIndexes.Add ( argv[i] ); } else if ( strcasecmp ( argv[i], "--dump-rows" )==0 && (i+1)1 ) { fprintf ( stdout, "ERROR: malformed or unknown option near '%s'.\n", argv[i] ); } else { fprintf ( stdout, "Usage: indexer [OPTIONS] [indexname1 [indexname2 [...]]]\n" "\n" "Options are:\n" "--config \t\tread configuration from specified file\n" "\t\t\t(default is sphinx.conf)\n" "--all\t\t\treindex all configured indexes\n" "--quiet\t\t\tbe quiet, only print errors\n" "--verbose\t\tverbose indexing issues report\n" "--noprogress\t\tdo not display progress\n" "\t\t\t(automatically on if output is not to a tty)\n" "--rotate\t\tsend SIGHUP to searchd when indexing is over\n" "\t\t\tto rotate updated indexes automatically\n" "--sighup-each\t\tsend SIGHUP to searchd after each index\n" "\t\t\t(used with --rotate only)\n" "--buildstops \n" "\t\t\tbuild top N stopwords and write them to given file\n" "--buildfreqs\t\tstore words frequencies to output.txt\n" "\t\t\t(used with --buildstops only)\n" "--merge \n" "\t\t\tmerge 'src-index' into 'dst-index'\n" "\t\t\t'dst-index' will receive merge result\n" "\t\t\t'src-index' will not be modified\n" "--merge-dst-range \n" "\t\t\tfilter 'dst-index' on merge, keep only those documents\n" "\t\t\twhere 'attr' is between 'min' and 'max' (inclusive)\n" "--merge-klists\n" "--merge-killlists\tmerge src and dst kill-lists (default is to\n" "\t\t\tapply src kill-list to dst index)\n" "--dump-rows \tdump indexed rows into FILE\n" "--print-queries\t\tprint SQL queries (for debugging)\n" "\n" "Examples:\n" "indexer --quiet myidx1\treindex 'myidx1' defined in 'sphinx.conf'\n" "indexer --all\t\treindex all indexes defined in 'sphinx.conf'\n" ); } return 1; } if ( !bMerge && !bIndexAll && !dIndexes.GetLength() ) { fprintf ( stdout, "ERROR: nothing to do.\n" ); return 1; } SetSignalHandlers(); /////////////// // load config /////////////// CSphConfigParser cp; CSphConfig & hConf = cp.m_tConf; sOptConfig = sphLoadConfig ( sOptConfig, g_bQuiet, cp ); if ( !hConf ( "source" ) ) sphDie ( "no indexes found in config file '%s'", sOptConfig ); g_iMemLimit = 0; if ( hConf("indexer") && hConf["indexer"]("indexer") ) { CSphConfigSection & hIndexer = hConf["indexer"]["indexer"]; g_iMemLimit = hIndexer.GetSize ( "mem_limit", 0 ); g_iMaxXmlpipe2Field = hIndexer.GetSize ( "max_xmlpipe2_field", 2*1024*1024 ); g_iWriteBuffer = hIndexer.GetSize ( "write_buffer", 1024*1024 ); g_iMaxFileFieldBuffer = Max ( 1024*1024, hIndexer.GetSize ( "max_file_field_buffer", 8*1024*1024 ) ); if ( hIndexer("on_file_field_error") ) { const CSphString & sVal = hIndexer["on_file_field_error"]; if ( sVal=="ignore_field" ) g_eOnFileFieldError = FFE_IGNORE_FIELD; else if ( sVal=="skip_document" ) g_eOnFileFieldError = FFE_SKIP_DOCUMENT; else if ( sVal=="fail_index" ) g_eOnFileFieldError = FFE_FAIL_INDEX; else sphDie ( "unknown on_field_field_error value (must be one of ignore_field, skip_document, fail_index)" ); } sphSetThrottling ( hIndexer.GetInt ( "max_iops", 0 ), hIndexer.GetSize ( "max_iosize", 0 ) ); } int iPID = -1; while ( g_bRotate ) { // load config if 
( !hConf.Exists ( "searchd" ) ) { fprintf ( stdout, "WARNING: 'searchd' section not found in config file.\n" ); break; } const CSphConfigSection & hSearchd = hConf["searchd"]["searchd"]; if ( !hSearchd.Exists ( "pid_file" ) ) { fprintf ( stdout, "WARNING: 'pid_file' parameter not found in 'searchd' config section.\n" ); break; } // read in PID FILE * fp = fopen ( hSearchd["pid_file"].cstr(), "r" ); if ( !fp ) { fprintf ( stdout, "WARNING: failed to open pid_file '%s'.\n", hSearchd["pid_file"].cstr() ); break; } if ( fscanf ( fp, "%d", &iPID )!=1 || iPID<=0 ) { fprintf ( stdout, "WARNING: failed to scanf pid from pid_file '%s'.\n", hSearchd["pid_file"].cstr() ); break; } fclose ( fp ); break; } ///////////////////// // index each index //////////////////// FILE * fpDumpRows = NULL; if ( !bMerge && !sDumpRows.IsEmpty() ) { fpDumpRows = fopen ( sDumpRows.cstr(), "wb+" ); if ( !fpDumpRows ) sphDie ( "failed to open %s: %s", sDumpRows.cstr(), strerror(errno) ); } sphStartIOStats (); bool bIndexedOk = false; // if any of the indexes are ok if ( bMerge ) { if ( dIndexes.GetLength()!=2 ) sphDie ( "there must be 2 indexes to merge specified" ); if ( !hConf["index"](dIndexes[0]) ) sphDie ( "no merge destination index '%s'", dIndexes[0] ); if ( !hConf["index"](dIndexes[1]) ) sphDie ( "no merge source index '%s'", dIndexes[1] ); bIndexedOk = DoMerge ( hConf["index"][dIndexes[0]], dIndexes[0], hConf["index"][dIndexes[1]], dIndexes[1], dMergeDstFilters, g_bRotate, bMergeKillLists ); } else if ( bIndexAll ) { uint64_t tmRotated = sphMicroTimer(); hConf["index"].IterateStart (); while ( hConf["index"].IterateNext() ) { bool bLastOk = DoIndex ( hConf["index"].IterateGet (), hConf["index"].IterateGetKey().cstr(), hConf["source"], bVerbose, fpDumpRows ); bIndexedOk |= bLastOk; if ( bLastOk && ( sphMicroTimer() - tmRotated > ROTATE_MIN_INTERVAL ) && SendRotate ( iPID, false ) ) tmRotated = sphMicroTimer(); } } else { uint64_t tmRotated = sphMicroTimer(); ARRAY_FOREACH ( i, dIndexes ) { if ( !hConf["index"](dIndexes[i]) ) fprintf ( stdout, "WARNING: no such index '%s', skipping.\n", dIndexes[i] ); else { bool bLastOk = DoIndex ( hConf["index"][dIndexes[i]], dIndexes[i], hConf["source"], bVerbose, fpDumpRows ); bIndexedOk |= bLastOk; if ( bLastOk && ( sphMicroTimer() - tmRotated > ROTATE_MIN_INTERVAL ) && SendRotate ( iPID, false ) ) tmRotated = sphMicroTimer(); } } } sphShutdownWordforms (); const CSphIOStats & tStats = sphStopIOStats (); if ( !g_bQuiet ) { ReportIOStats ( "reads", tStats.m_iReadOps, tStats.m_iReadTime, tStats.m_iReadBytes ); ReportIOStats ( "writes", tStats.m_iWriteOps, tStats.m_iWriteTime, tStats.m_iWriteBytes ); } //////////////////////////// // rotating searchd indices //////////////////////////// if ( bIndexedOk && g_bRotate ) { if ( !SendRotate ( iPID, true ) ) fprintf ( stdout, "WARNING: indices NOT rotated.\n" ); } #if SPH_DEBUG_LEAKS sphAllocsStats (); #endif return bIndexedOk ? 
0 : 1;
}

//
// $Id: indexer.cpp 3087 2012-01-30 23:07:35Z shodan $
//

sphinx-2.0.4-release/src/sphinxselect.y

%{
#if USE_WINDOWS
#pragma warning(push,1)
#pragma warning(disable:4702) // unreachable code
#endif
%}

%lex-param		{ SelectParser_t * pParser }
%parse-param	{ SelectParser_t * pParser }
%pure-parser
%error-verbose

%token SEL_TOKEN
%token SEL_ID
%token SEL_AS
%token SEL_AVG
%token SEL_MAX
%token SEL_MIN
%token SEL_SUM
%token SEL_COUNT
%token SEL_WEIGHT
%token SEL_DISTINCT
%token TOK_NEG
%token TOK_LTE
%token TOK_GTE
%token TOK_EQ
%token TOK_NE
%token TOK_CONST_STRING

%left TOK_OR
%left TOK_AND
%left '|'
%left '&'
%left TOK_EQ TOK_NE
%left '<' '>' TOK_LTE TOK_GTE
%left '+' '-'
%left '*' '/'
%nonassoc TOK_NEG
%nonassoc TOK_NOT

%%

select_list:
	select_item
	| select_list ',' select_item
	;

select_item:
	'*'						{ pParser->AddItem ( &$1 ); }
	| select_expr opt_alias

opt_alias:
	// empty
	| SEL_TOKEN				{ pParser->AliasLastItem ( &$1 ); }
	| SEL_AS SEL_TOKEN		{ pParser->AliasLastItem ( &$2 ); }
	;

select_expr:
	expr					{ pParser->AddItem ( &$1 ); }
	| SEL_AVG '(' expr ')'	{ pParser->AddItem ( &$3, SPH_AGGR_AVG, &$1, &$4 ); }
	| SEL_MAX '(' expr ')'	{ pParser->AddItem ( &$3, SPH_AGGR_MAX, &$1, &$4 ); }
	| SEL_MIN '(' expr ')'	{ pParser->AddItem ( &$3, SPH_AGGR_MIN, &$1, &$4 ); }
	| SEL_SUM '(' expr ')'	{ pParser->AddItem ( &$3, SPH_AGGR_SUM, &$1, &$4 ); }
	| SEL_COUNT '(' '*' ')'	{ pParser->AddItem ( "count(*)", &$1, &$4 ); }
	| SEL_WEIGHT '(' ')'	{ pParser->AddItem ( "weight()", &$1, &$3 ); }
	| SEL_COUNT '(' SEL_DISTINCT SEL_TOKEN ')'
		// FIXME: may be check if $4 == this->m_sGroupDistinct and warn/error, if not?
							{ pParser->AddItem ( "@distinct", &$1, &$5 ); }
	;

expr:
	select_atom
	| '-' expr %prec TOK_NEG	{ $$ = $1; $$.m_iEnd = $2.m_iEnd; }
	| TOK_NOT expr				{ $$ = $1; $$.m_iEnd = $2.m_iEnd; }
	| expr '+' expr				{ $$ = $1; $$.m_iEnd = $3.m_iEnd; }
	| expr '-' expr				{ $$ = $1; $$.m_iEnd = $3.m_iEnd; }
	| expr '*' expr				{ $$ = $1; $$.m_iEnd = $3.m_iEnd; }
	| expr '/' expr				{ $$ = $1; $$.m_iEnd = $3.m_iEnd; }
	| expr '<' expr				{ $$ = $1; $$.m_iEnd = $3.m_iEnd; }
	| expr '>' expr				{ $$ = $1; $$.m_iEnd = $3.m_iEnd; }
	| expr '|' expr				{ $$ = $1; $$.m_iEnd = $3.m_iEnd; }
	| expr '&' expr				{ $$ = $1; $$.m_iEnd = $3.m_iEnd; }
	| expr TOK_LTE expr			{ $$ = $1; $$.m_iEnd = $3.m_iEnd; }
	| expr TOK_GTE expr			{ $$ = $1; $$.m_iEnd = $3.m_iEnd; }
	| expr TOK_EQ expr			{ $$ = $1; $$.m_iEnd = $3.m_iEnd; }
	| expr TOK_NE expr			{ $$ = $1; $$.m_iEnd = $3.m_iEnd; }
	| expr TOK_AND expr			{ $$ = $1; $$.m_iEnd = $3.m_iEnd; }
	| expr TOK_OR expr			{ $$ = $1; $$.m_iEnd = $3.m_iEnd; }
	| '(' expr ')'				{ $$ = $1; $$.m_iEnd = $3.m_iEnd; }
	| function
	;

select_atom : SEL_ID | SEL_TOKEN

function:
	SEL_TOKEN '(' arglist ')'		{ $$ = $1; $$.m_iEnd = $4.m_iEnd; }
	| SEL_TOKEN '(' ')'				{ $$ = $1; $$.m_iEnd = $3.m_iEnd; }
	| SEL_MIN '(' expr ',' expr ')'	{ $$ = $1; $$.m_iEnd = $6.m_iEnd; } // handle clash with aggregate functions
	| SEL_MAX '(' expr ',' expr ')'	{ $$ = $1; $$.m_iEnd = $6.m_iEnd; }
	;

arglist:
	arg
	| arglist ',' arg
	;

arg:
	expr
	| TOK_CONST_STRING
	;

%%

#if USE_WINDOWS
#pragma warning(pop)
#endif
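/*
 * Editor's note, not part of the original 2.0.4 sources: the grammar above
 * parses select-list strings into aliased items and aggregates. A
 * hypothetical input that these rules accept, for illustration only:
 *
 *   *, id, price*qty AS total, AVG(price) avg_price,
 *   COUNT(*), COUNT(DISTINCT vendor_id), WEIGHT(), MIN(a,b) AS lo
 *
 * The aggregates map to the select_expr alternatives (SPH_AGGR_AVG and
 * friends), both "expr AS alias" and the bare "expr alias" form come from
 * opt_alias, and MIN(a,b) with two arguments resolves via the function rule
 * rather than the single-argument MIN aggregate.
 */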
sphinx-2.0.4-release/src/sphinxrt.h

//
// $Id: sphinxrt.h 3125 2012-02-28 15:39:55Z shodan $
//

//
// Copyright (c) 2001-2012, Andrew Aksyonoff
// Copyright (c) 2008-2012, Sphinx Technologies Inc
// All rights reserved
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License. You should have
// received a copy of the GPL license along with this program; if you
// did not, you can find it at http://www.gnu.org/
//

#ifndef _sphinxrt_
#define _sphinxrt_

#include "sphinx.h"
#include "sphinxutils.h"

/// RAM based updateable backend interface
class ISphRtIndex : public CSphIndex
{
public:
	explicit ISphRtIndex ( const char * sIndexName, const char * sName )
		: CSphIndex ( sIndexName, sName )
	{}

	/// get internal schema (to use for Add calls)
	virtual const CSphSchema & GetInternalSchema () const { return m_tSchema; }

	/// insert/update document in current txn
	/// fails in case of two open txns to different indexes
	virtual bool AddDocument ( int iFields, const char ** ppFields, const CSphMatch & tDoc, bool bReplace, const char ** ppStr, const CSphVector<DWORD> & dMvas, CSphString & sError ) = 0;

	/// insert/update document in current txn
	/// fails in case of two open txns to different indexes
	virtual bool AddDocument ( ISphHits * pHits, const CSphMatch & tDoc, const char ** ppStr, const CSphVector<DWORD> & dMvas, CSphString & sError ) = 0;

	/// delete document in current txn
	/// fails in case of two open txns to different indexes
	virtual bool DeleteDocument ( const SphDocID_t * pDocs, int iDocs, CSphString & sError ) = 0;

	/// commit pending changes
	virtual void Commit () = 0;

	/// undo pending changes
	virtual void RollBack () = 0;

	/// check and periodically flush RAM chunk to disk
	virtual void CheckRamFlush () = 0;

	/// forcibly flush RAM chunk to disk
	virtual void ForceRamFlush ( bool bPeriodic=false ) = 0;

	/// forcibly save RAM chunk as a new disk chunk
	virtual void ForceDiskChunk () = 0;

	/// attach a disk chunk to current index
	virtual bool AttachDiskIndex ( CSphIndex * pIndex, CSphString & sError ) = 0;
};

/// initialize subsystem
class CSphConfigSection;
void sphRTInit ();
void sphRTConfigure ( const CSphConfigSection & hSearchd, bool bTestMode );
bool sphRTSchemaConfigure ( const CSphConfigSection & hIndex, CSphSchema * pSchema, CSphString * pError );

/// deinitialize subsystem
void sphRTDone ();

/// RT index factory
ISphRtIndex * sphCreateIndexRT ( const CSphSchema & tSchema, const char * sIndexName, DWORD uRamSize, const char * sPath, bool bKeywordDict );

/// Get current txn index
ISphRtIndex * sphGetCurrentIndexRT();

typedef void ProgressCallbackSimple_t ();

//////////////////////////////////////////////////////////////////////////

enum ESphBinlogReplayFlags
{
	SPH_REPLAY_ACCEPT_DESC_TIMESTAMP = 1
};

/// replay stored binlog
void sphReplayBinlog ( const SmallStringHash_T<CSphIndex*> & hIndexes, DWORD uReplayFlags, ProgressCallbackSimple_t * pfnProgressCallback=NULL );

#endif // _sphinxrt_

//
// $Id: sphinxrt.h 3125 2012-02-28 15:39:55Z shodan $
//
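// ---------------------------------------------------------------------------
// Editor's illustrative sketch, not part of the original 2.0.4 distribution:
// a minimal example of how a host application could drive the ISphRtIndex
// interface declared above. Index creation via sphCreateIndexRT(), schema
// setup, tokenizer/dictionary wiring and error handling are assumed to have
// happened elsewhere; InsertOneDoc() and its arguments are hypothetical, and
// the sketch assumes a schema with no string or MVA attributes.
// ---------------------------------------------------------------------------

#include "sphinxrt.h"

static bool InsertOneDoc ( ISphRtIndex * pRt, SphDocID_t uDocID, const char * sText, CSphString & sError )
{
	CSphMatch tDoc;
	tDoc.m_iDocID = uDocID;					// document id to insert (or replace)

	const char * dFields[1] = { sText };	// a single full-text field
	CSphVector<DWORD> dMvas;				// no MVA attribute values in this sketch

	// queue the document inside the current transaction;
	// bReplace=true overwrites an existing document with the same id,
	// and ppStr is NULL because no string attributes are assumed
	if ( !pRt->AddDocument ( 1, dFields, tDoc, true, NULL, dMvas, sError ) )
		return false;

	// commit the pending changes so the document becomes searchable;
	// RollBack() would discard them instead
	pRt->Commit ();
	return true;
}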
sphinx-2.0.4-release/src/sphinxstem.h

//
// $Id: sphinxstem.h 3087 2012-01-30 23:07:35Z shodan $
//

//
// Copyright (c) 2001-2012, Andrew Aksyonoff
// Copyright (c) 2008-2012, Sphinx Technologies Inc
// All rights reserved
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License. You should have
// received a copy of the GPL license along with this program; if you
// did not, you can find it at http://www.gnu.org/
//

#ifndef _sphinxstem_
#define _sphinxstem_

#include "sphinx.h"

/// initialize English stemmer
void stem_en_init ();

/// initialize Russian stemmer
void stem_ru_init ();

/// stem lowercase English word
void stem_en ( BYTE * pWord, int iLen );

/// stem lowercase Russian word in Windows-1251 encoding
void stem_ru_cp1251 ( BYTE * pWord );

/// stem lowercase Russian word in UTF-8 encoding
void stem_ru_utf8 ( WORD * pWord );

/// initialize Czech stemmer
void stem_cz_init ();

/// stem lowercase Czech word
void stem_cz ( BYTE * pWord );

/// calculate soundex in-place if the word is lowercase English letters only;
/// do nothing if it's not
void stem_soundex ( BYTE * pWord );

/// double metaphone stemmer
void stem_dmetaphone ( BYTE * pWord, bool bUTF8 );

#endif // _sphinxstem_

//
// $Id: sphinxstem.h 3087 2012-01-30 23:07:35Z shodan $
//

sphinx-2.0.4-release/src/search.cpp

//
// $Id: search.cpp 3087 2012-01-30 23:07:35Z shodan $
//

//
// Copyright (c) 2001-2012, Andrew Aksyonoff
// Copyright (c) 2008-2012, Sphinx Technologies Inc
// All rights reserved
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License. You should have
// received a copy of the GPL license along with this program; if you
// did not, you can find it at http://www.gnu.org/
//

#include "sphinx.h"
#include "sphinxutils.h"
#include "sphinxint.h"
#include <time.h>

#define CONF_CHECK(_hash,_key,_msg,_add) \
	if (!( _hash.Exists ( _key ) )) \
	{ \
		fprintf ( stdout, "ERROR: key '%s' not found " _msg, _key, _add ); \
		continue; \
	}

const char * myctime ( DWORD uStamp )
{
	static char sBuf[256];
	time_t tStamp = uStamp; // for 64-bit
	strncpy ( sBuf, ctime ( &tStamp ), sizeof(sBuf) );

	char * p = sBuf;
	while ( (*p) && (*p)!='\n' && (*p)!='\r' )
		p++;
	*p = '\0';

	return sBuf;
}

int main ( int argc, char ** argv )
{
	fprintf ( stdout, SPHINX_BANNER );

	if ( argc<=1 )
	{
		fprintf ( stdout,
			"Usage: search [OPTIONS] \n"
			"\n"
			"Options are:\n"
			"-c, --config \tuse given config file instead of defaults\n"
			"-i, --index \tsearch given index only (default: all indexes)\n"
			"-a, --any\t\tmatch any query word (default: match all words)\n"
			"-b, --boolean\t\tmatch in boolean mode\n"
			"-p, --phrase\t\tmatch exact phrase\n"
			"-e, --extended\t\tmatch in extended mode\n"
			"-f, --filter \tonly match if attribute attr value is v\n"
			"-s, --sortby \tsort matches by 'CLAUSE' in sort_extended mode\n"
			"-S, --sortexpr \tsort matches by 'EXPR' DESC in sort_expr mode\n"
			"-o, --offset \tprint matches starting from this offset (default: 0)\n"
			"-l, --limit \tprint this many matches (default: 20)\n"
			"-q, --noinfo\t\tdon't print document info from SQL database\n"
			"-g, --group \tgroup by attribute named attr\n"
			"-gs,--groupsort \tsort groups by \n"
			"--sort=date\t\tsort by date, descending\n"
			"--rsort=date\t\tsort by date, ascending\n"
			"--sort=ts\t\tsort by time segments\n"
			"--stdin\t\t\tread query from stdin\n"
			"\n"
			"This program (CLI search) is for testing and debugging purposes only;\n"
			"it is NOT intended for production use.\n" );
		exit ( 0 );
	}

	///////////////////////////////////////////
	// get query and other commandline options
	///////////////////////////////////////////

	CSphQuery tQuery;
	char sQuery [ 1024 ];
	sQuery[0] = '\0';

	const char * sOptConfig = NULL;
	const char * sIndex = NULL;
	bool bNoInfo = false;
	bool
bStdin = false; int iStart = 0; int iLimit = 20; #define OPT(_a1,_a2) else if ( !strcmp(argv[i],_a1) || !strcmp(argv[i],_a2) ) #define OPT1(_a1) else if ( !strcmp(argv[i],_a1) ) int i; for ( i=1; i=argc ) break; OPT ( "-o", "--offset" ) iStart = atoi ( argv[++i] ); OPT ( "-l", "--limit" ) iLimit = atoi ( argv[++i] ); OPT ( "-c", "--config" ) sOptConfig = argv[++i]; OPT ( "-i", "--index" ) sIndex = argv[++i]; OPT ( "-g", "--group" ) { tQuery.m_eGroupFunc = SPH_GROUPBY_ATTR; tQuery.m_sGroupBy = argv[++i]; } OPT ( "-gs","--groupsort" ) { tQuery.m_sGroupSortBy = argv[++i]; } // NOLINT OPT ( "-s", "--sortby" ) { tQuery.m_eSort = SPH_SORT_EXTENDED; tQuery.m_sSortBy = argv[++i]; } OPT ( "-S", "--sortexpr" ) { tQuery.m_eSort = SPH_SORT_EXPR; tQuery.m_sSortBy = argv[++i]; } else if ( (i+2)>=argc ) break; OPT ( "-f", "--filter" ) { DWORD uVal = strtoul ( argv[i+2], NULL, 10 ); tQuery.m_dFilters.Reset (); tQuery.m_dFilters.Resize ( 1 ); tQuery.m_dFilters[0].m_eType = SPH_FILTER_VALUES; tQuery.m_dFilters[0].m_dValues.Reset (); tQuery.m_dFilters[0].m_dValues.Add ( uVal ); tQuery.m_dFilters[0].m_sAttrName = argv[i+1]; i += 2; } else break; // unknown option } else if ( strlen(sQuery) + strlen(argv[i]) + 1 < sizeof(sQuery) ) { // this is a search term strcat ( sQuery, argv[i] ); // NOLINT strcat ( sQuery, " " ); // NOLINT } } iStart = Max ( iStart, 0 ); iLimit = Max ( iLimit, 0 ); if ( i!=argc ) { fprintf ( stdout, "ERROR: malformed or unknown option near '%s'.\n", argv[i] ); return 1; } #undef OPT if ( bStdin ) { int iPos = 0, iLeft = sizeof(sQuery)-1; char sThrowaway [ 256 ]; while ( !feof(stdin) ) { if ( iLeft>0 ) { int iLen = fread ( sQuery, 1, iLeft, stdin ); iPos += iLen; iLeft -= iLen; } else { int iDummy; // to avoid gcc unused result warning iDummy = fread ( sThrowaway, 1, sizeof(sThrowaway), stdin ); } } assert ( iPos<(int)sizeof(sQuery) ); sQuery[iPos] = '\0'; } ///////////// // configure ///////////// tQuery.m_iMaxMatches = Max ( 1000, iStart + iLimit ); CSphConfigParser cp; CSphConfig & hConf = cp.m_tConf; sphLoadConfig ( sOptConfig, false, cp ); ///////////////////// // search each index ///////////////////// hConf["index"].IterateStart (); while ( hConf["index"].IterateNext () ) { const CSphConfigSection & hIndex = hConf["index"].IterateGet (); const char * sIndexName = hConf["index"].IterateGetKey().cstr(); if ( sIndex && strcmp ( sIndex, sIndexName ) ) continue; if ( hIndex("type") && hIndex["type"]=="distributed" ) continue; if ( !hIndex.Exists ( "path" ) ) sphDie ( "key 'path' not found in index '%s'", sIndexName ); CSphString sError; // do we want to show document info from database? #if USE_MYSQL MYSQL tSqlDriver; const char * sQueryInfo = NULL; while ( !bNoInfo ) { if ( !hIndex("source") || !hConf("source") || !hConf["source"]( hIndex["source"] ) ) break; const CSphConfigSection & hSource = hConf["source"][ hIndex["source"] ]; if ( !hSource("type") || hSource["type"]!="mysql" || !hSource("sql_host") || !hSource("sql_user") || !hSource("sql_db") || !hSource("sql_pass") || !hSource("sql_query_info") ) { break; } sQueryInfo = hSource["sql_query_info"].cstr(); if ( !strstr ( sQueryInfo, "$id" ) ) sphDie ( "'sql_query_info' value must contain '$id'" ); int iPort = 3306; if ( hSource.Exists ( "sql_port" ) && hSource["sql_port"].intval() ) iPort = hSource["sql_port"].intval(); mysql_init ( &tSqlDriver ); if ( !mysql_real_connect ( &tSqlDriver, hSource["sql_host"].cstr(), hSource["sql_user"].cstr(), hSource["sql_pass"].cstr(), hSource["sql_db"].cstr(), iPort, hSource.Exists ( "sql_sock" ) ? 
hSource["sql_sock"].cstr() : NULL, 0 ) ) { sphDie ( "failed to connect to MySQL (error=%s)", mysql_error ( &tSqlDriver ) ); } // all good break; } #endif ////////// // search ////////// tQuery.m_sQuery = sQuery; CSphQueryResult * pResult = NULL; CSphIndex * pIndex = sphCreateIndexPhrase ( NULL, hIndex["path"].cstr() ); pIndex->SetEnableStar ( hIndex.GetInt("enable_star")!=0 ); pIndex->SetWordlistPreload ( hIndex.GetInt("ondisk_dict")==0 ); CSphString sWarning; sError = "could not create index (check that files exist)"; for ( ; pIndex; ) { if ( !pIndex->Prealloc ( false, false, sWarning ) || !pIndex->Preread() ) { sError = pIndex->GetLastError (); break; } const CSphSchema * pSchema = &pIndex->GetMatchSchema(); if ( !sWarning.IsEmpty () ) fprintf ( stdout, "WARNING: index '%s': %s\n", sIndexName, sWarning.cstr () ); // handle older index versions (<9) if ( !sphFixupIndexSettings ( pIndex, hIndex, sError ) ) sphDie ( "index '%s': %s", sIndexName, sError.cstr() ); // lookup first timestamp if needed // FIXME! remove this? if ( tQuery.m_eSort!=SPH_SORT_RELEVANCE && tQuery.m_eSort!=SPH_SORT_EXTENDED && tQuery.m_eSort!=SPH_SORT_EXPR ) { int iTS = -1; for ( int i=0; iGetAttrsCount(); i++ ) if ( pSchema->GetAttr(i).m_eAttrType==SPH_ATTR_TIMESTAMP ) { tQuery.m_sSortBy = pSchema->GetAttr(i).m_sName; iTS = i; break; } if ( iTS<0 ) { fprintf ( stdout, "index '%s': no timestamp attributes found, sorting by relevance.\n", sIndexName ); tQuery.m_eSort = SPH_SORT_RELEVANCE; } } // do querying ISphMatchSorter * pTop = sphCreateQueue ( &tQuery, pIndex->GetMatchSchema(), sError ); if ( !pTop ) { sError.SetSprintf ( "failed to create sorting queue: %s", sError.cstr() ); break; } pResult = new CSphQueryResult(); if ( !pIndex->MultiQuery ( &tQuery, pResult, 1, &pTop, NULL ) ) { // failure; pull that error message sError = pIndex->GetLastError(); SafeDelete ( pResult ); } else { // success; fold them matches pResult->m_dMatches.Reset (); pResult->m_iTotalMatches += pTop->GetTotalCount(); pResult->m_tSchema = pTop->GetSchema(); sphFlattenQueue ( pTop, pResult, 0 ); } SafeDelete ( pTop ); break; } ///////// // print ///////// if ( !pResult ) { fprintf ( stdout, "index '%s': search error: %s.\n", sIndexName, sError.cstr() ); return 1; } fprintf ( stdout, "index '%s': query '%s': returned %d matches of "INT64_FMT" total in %d.%03d sec\n", sIndexName, sQuery, pResult->m_dMatches.GetLength(), pResult->m_iTotalMatches, pResult->m_iQueryTime/1000, pResult->m_iQueryTime%1000 ); if ( !pResult->m_sWarning.IsEmpty() ) fprintf ( stdout, "WARNING: %s\n", pResult->m_sWarning.cstr() ); if ( pResult->m_dMatches.GetLength() ) { fprintf ( stdout, "\ndisplaying matches:\n" ); int iMaxIndex = Min ( iStart+iLimit, pResult->m_dMatches.GetLength() ); for ( int i=iStart; im_dMatches[i]; fprintf ( stdout, "%d. 
document=" DOCID_FMT ", weight=%d", 1+i, tMatch.m_iDocID, tMatch.m_iWeight ); for ( int j=0; jm_tSchema.GetAttrsCount(); j++ ) { const CSphColumnInfo & tAttr = pResult->m_tSchema.GetAttr(j); fprintf ( stdout, ", %s=", tAttr.m_sName.cstr() ); if ( tAttr.m_eAttrType==SPH_ATTR_UINT32SET || tAttr.m_eAttrType==SPH_ATTR_UINT64SET ) { fprintf ( stdout, "(" ); SphAttr_t iIndex = tMatch.GetAttr ( tAttr.m_tLocator ); if ( iIndex ) { const DWORD * pValues = pResult->m_pMva + iIndex; int iValues = *pValues++; if ( tAttr.m_eAttrType==SPH_ATTR_UINT64SET ) { assert ( ( iValues%2 )==0 ); for ( int k=0; km_pStrings + tMatch.GetAttr ( tAttr.m_tLocator ), &pStr ); fwrite ( pStr, 1, iLen, stdout ); break; } default: fprintf ( stdout, "(unknown-type-%d)", tAttr.m_eAttrType ); } } fprintf ( stdout, "\n" ); #if USE_MYSQL if ( sQueryInfo ) { char * sQuery = sphStrMacro ( sQueryInfo, "$id", tMatch.m_iDocID ); const char * sError = NULL; #define LOC_MYSQL_ERROR(_arg) { sError = _arg; break; } for ( ;; ) { if ( mysql_query ( &tSqlDriver, sQuery ) ) LOC_MYSQL_ERROR ( "mysql_query" ); MYSQL_RES * pSqlResult = mysql_use_result ( &tSqlDriver ); if ( !pSqlResult ) LOC_MYSQL_ERROR ( "mysql_use_result" ); MYSQL_ROW tRow = mysql_fetch_row ( pSqlResult ); if ( !tRow ) { fprintf ( stdout, "\t(document not found in db)\n" ); break; } for ( int iField=0; iField<(int)pSqlResult->field_count; iField++ ) fprintf ( stdout, "\t%s=%s\n", ( pSqlResult->fields && pSqlResult->fields[iField].name ) ? pSqlResult->fields[iField].name : "(NULL)", tRow[iField] ? tRow[iField] : "(NULL)" ); mysql_free_result ( pSqlResult ); break; } if ( sError ) sphDie ( "sql_query_info: %s: %s", sError, mysql_error ( &tSqlDriver ) ); delete [] sQuery; } #endif } } fprintf ( stdout, "\nwords:\n" ); pResult->m_hWordStats.IterateStart(); int iWord = 1; while ( pResult->m_hWordStats.IterateNext() ) { const CSphQueryResultMeta::WordStat_t & tStat = pResult->m_hWordStats.IterateGet(); fprintf ( stdout, "%d. '%s': "INT64_FMT" documents, "INT64_FMT" hits\n", iWord, pResult->m_hWordStats.IterateGetKey().cstr(), tStat.m_iDocs, tStat.m_iHits ); iWord++; } fprintf ( stdout, "\n" ); /////////// // cleanup /////////// SafeDelete ( pIndex ); } sphShutdownWordforms (); } // // $Id: search.cpp 3087 2012-01-30 23:07:35Z shodan $ // sphinx-2.0.4-release/src/yysphinxexpr.c0000644000176700017710000013426511712074354017507 0ustar deogardeogar/* A Bison parser, made by GNU Bison 1.875. */ /* Skeleton parser for Yacc-like parsing with Bison, Copyright (C) 1984, 1989, 1990, 2000, 2001, 2002 Free Software Foundation, Inc. This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */ /* As a special exception, when this file is copied by Bison into a Bison output file, you may use that output file without restriction. This special exception was added by the Free Software Foundation in version 1.24 of Bison. 
*/ /* Written by Richard Stallman by simplifying the original so called ``semantic'' parser. */ /* All symbols defined below should begin with yy or YY, to avoid infringing on user name space. This should be done even for local variables, as they might otherwise be expanded by user macros. There are some unavoidable exceptions within include files to define necessary library symbols; they are noted "INFRINGES ON USER NAME SPACE" below. */ /* Identify Bison output. */ #define YYBISON 1 /* Skeleton name. */ #define YYSKELETON_NAME "yacc.c" /* Pure parsers. */ #define YYPURE 1 /* Using locations. */ #define YYLSP_NEEDED 0 /* Tokens. */ #ifndef YYTOKENTYPE # define YYTOKENTYPE /* Put the tokens into the symbol table, so that GDB and other debuggers know about them. */ enum yytokentype { TOK_CONST_INT = 258, TOK_CONST_FLOAT = 259, TOK_CONST_STRING = 260, TOK_ATTR_INT = 261, TOK_ATTR_BITS = 262, TOK_ATTR_FLOAT = 263, TOK_ATTR_MVA32 = 264, TOK_ATTR_MVA64 = 265, TOK_ATTR_STRING = 266, TOK_FUNC = 267, TOK_FUNC_IN = 268, TOK_USERVAR = 269, TOK_UDF = 270, TOK_HOOK_IDENT = 271, TOK_HOOK_FUNC = 272, TOK_ATID = 273, TOK_ATWEIGHT = 274, TOK_ID = 275, TOK_WEIGHT = 276, TOK_COUNT = 277, TOK_DISTINCT = 278, TOK_CONST_LIST = 279, TOK_ATTR_SINT = 280, TOK_OR = 281, TOK_AND = 282, TOK_NE = 283, TOK_EQ = 284, TOK_GTE = 285, TOK_LTE = 286, TOK_MOD = 287, TOK_DIV = 288, TOK_NOT = 289, TOK_NEG = 290 }; #endif #define TOK_CONST_INT 258 #define TOK_CONST_FLOAT 259 #define TOK_CONST_STRING 260 #define TOK_ATTR_INT 261 #define TOK_ATTR_BITS 262 #define TOK_ATTR_FLOAT 263 #define TOK_ATTR_MVA32 264 #define TOK_ATTR_MVA64 265 #define TOK_ATTR_STRING 266 #define TOK_FUNC 267 #define TOK_FUNC_IN 268 #define TOK_USERVAR 269 #define TOK_UDF 270 #define TOK_HOOK_IDENT 271 #define TOK_HOOK_FUNC 272 #define TOK_ATID 273 #define TOK_ATWEIGHT 274 #define TOK_ID 275 #define TOK_WEIGHT 276 #define TOK_COUNT 277 #define TOK_DISTINCT 278 #define TOK_CONST_LIST 279 #define TOK_ATTR_SINT 280 #define TOK_OR 281 #define TOK_AND 282 #define TOK_NE 283 #define TOK_EQ 284 #define TOK_GTE 285 #define TOK_LTE 286 #define TOK_MOD 287 #define TOK_DIV 288 #define TOK_NOT 289 #define TOK_NEG 290 /* Copy the first part of user declarations. */ /* Enabling traces. */ #ifndef YYDEBUG # define YYDEBUG 0 #endif /* Enabling verbose error messages. */ #ifdef YYERROR_VERBOSE # undef YYERROR_VERBOSE # define YYERROR_VERBOSE 1 #else # define YYERROR_VERBOSE 1 #endif #if ! defined (YYSTYPE) && ! defined (YYSTYPE_IS_DECLARED) typedef union YYSTYPE { int64_t iConst; // constant value float fConst; // constant value uint64_t iAttrLocator; // attribute locator (rowitem for int/float; offset+size for bits) int iFunc; // function id int iNode; // node, or uservar, or udf index } YYSTYPE; /* Line 186 of yacc.c. */ # define yystype YYSTYPE /* obsolescent; will be withdrawn */ # define YYSTYPE_IS_DECLARED 1 # define YYSTYPE_IS_TRIVIAL 1 #endif /* Copy the second part of user declarations. */ /* Line 214 of yacc.c. */ #if ! defined (yyoverflow) || YYERROR_VERBOSE /* The parser invokes alloca or malloc; define the necessary symbols. */ # if YYSTACK_USE_ALLOCA # define YYSTACK_ALLOC alloca # else # ifndef YYSTACK_USE_ALLOCA # if defined (alloca) || defined (_ALLOCA_H) # define YYSTACK_ALLOC alloca # else # ifdef __GNUC__ # define YYSTACK_ALLOC __builtin_alloca # endif # endif # endif # endif # ifdef YYSTACK_ALLOC /* Pacify GCC's `empty if-body' warning. 
*/ # define YYSTACK_FREE(Ptr) do { /* empty */; } while (0) # else # if defined (__STDC__) || defined (__cplusplus) # include /* INFRINGES ON USER NAME SPACE */ # define YYSIZE_T size_t # endif # define YYSTACK_ALLOC malloc # define YYSTACK_FREE free # endif #endif /* ! defined (yyoverflow) || YYERROR_VERBOSE */ #if (! defined (yyoverflow) \ && (! defined (__cplusplus) \ || (YYSTYPE_IS_TRIVIAL))) /* A type that is properly aligned for any stack member. */ union yyalloc { short yyss; YYSTYPE yyvs; }; /* The size of the maximum gap between one aligned stack and the next. */ # define YYSTACK_GAP_MAXIMUM (sizeof (union yyalloc) - 1) /* The size of an array large to enough to hold all stacks, each with N elements. */ # define YYSTACK_BYTES(N) \ ((N) * (sizeof (short) + sizeof (YYSTYPE)) \ + YYSTACK_GAP_MAXIMUM) /* Copy COUNT objects from FROM to TO. The source and destination do not overlap. */ # ifndef YYCOPY # if 1 < __GNUC__ # define YYCOPY(To, From, Count) \ __builtin_memcpy (To, From, (Count) * sizeof (*(From))) # else # define YYCOPY(To, From, Count) \ do \ { \ register YYSIZE_T yyi; \ for (yyi = 0; yyi < (Count); yyi++) \ (To)[yyi] = (From)[yyi]; \ } \ while (0) # endif # endif /* Relocate STACK from its old location to the new one. The local variables YYSIZE and YYSTACKSIZE give the old and new number of elements in the stack, and YYPTR gives the new location of the stack. Advance YYPTR to a properly aligned location for the next stack. */ # define YYSTACK_RELOCATE(Stack) \ do \ { \ YYSIZE_T yynewbytes; \ YYCOPY (&yyptr->Stack, Stack, yysize); \ Stack = &yyptr->Stack; \ yynewbytes = yystacksize * sizeof (*Stack) + YYSTACK_GAP_MAXIMUM; \ yyptr += yynewbytes / sizeof (*yyptr); \ } \ while (0) #endif #if defined (__STDC__) || defined (__cplusplus) typedef signed char yysigned_char; #else typedef short yysigned_char; #endif /* YYFINAL -- State number of the termination state. */ #define YYFINAL 30 /* YYLAST -- Last index in YYTABLE. */ #define YYLAST 280 /* YYNTOKENS -- Number of terminals. */ #define YYNTOKENS 48 /* YYNNTS -- Number of nonterminals. */ #define YYNNTS 10 /* YYNRULES -- Number of rules. */ #define YYNRULES 58 /* YYNRULES -- Number of states. */ #define YYNSTATES 105 /* YYTRANSLATE(YYLEX) -- Bison symbol number corresponding to YYLEX. */ #define YYUNDEFTOK 2 #define YYMAXUTOK 290 #define YYTRANSLATE(YYX) \ ((unsigned int) (YYX) <= YYMAXUTOK ? yytranslate[YYX] : YYUNDEFTOK) /* YYTRANSLATE[YYLEX] -- Bison symbol number corresponding to YYLEX. */ static const unsigned char yytranslate[] = { 0, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 40, 29, 2, 45, 46, 38, 36, 47, 37, 2, 39, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 32, 2, 33, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 28, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 30, 31, 34, 35, 41, 42, 43, 44 }; #if YYDEBUG /* YYPRHS[YYN] -- Index of the first RHS symbol of rule number YYN in YYRHS. 
*/ static const unsigned char yyprhs[] = { 0, 0, 3, 5, 7, 9, 11, 13, 15, 17, 19, 21, 23, 25, 27, 29, 31, 35, 37, 40, 43, 47, 51, 55, 59, 63, 67, 71, 75, 79, 83, 87, 91, 95, 99, 103, 107, 111, 115, 117, 119, 121, 123, 125, 127, 131, 133, 135, 139, 143, 145, 147, 152, 156, 161, 165, 172, 179, 186 }; /* YYRHS -- A `-1'-separated list of the rules' RHS. */ static const yysigned_char yyrhs[] = { 49, 0, -1, 52, -1, 6, -1, 7, -1, 8, -1, 50, -1, 9, -1, 10, -1, 50, -1, 57, -1, 3, -1, 4, -1, 18, -1, 19, -1, 20, -1, 21, 45, 46, -1, 16, -1, 37, 52, -1, 43, 52, -1, 52, 36, 52, -1, 52, 37, 52, -1, 52, 38, 52, -1, 52, 39, 52, -1, 52, 32, 52, -1, 52, 33, 52, -1, 52, 29, 52, -1, 52, 28, 52, -1, 52, 40, 52, -1, 52, 42, 52, -1, 52, 41, 52, -1, 52, 35, 52, -1, 52, 34, 52, -1, 52, 31, 52, -1, 52, 30, 52, -1, 52, 27, 52, -1, 52, 26, 52, -1, 45, 52, 46, -1, 52, -1, 11, -1, 9, -1, 10, -1, 5, -1, 53, -1, 54, 47, 53, -1, 3, -1, 4, -1, 55, 47, 3, -1, 55, 47, 4, -1, 55, -1, 14, -1, 12, 45, 54, 46, -1, 12, 45, 46, -1, 15, 45, 54, 46, -1, 15, 45, 46, -1, 13, 45, 51, 47, 56, 46, -1, 13, 45, 20, 47, 55, 46, -1, 13, 45, 18, 47, 55, 46, -1, 17, 45, 54, 46, -1 }; /* YYRLINE[YYN] -- source line where rule number YYN was defined. */ static const unsigned char yyrline[] = { 0, 62, 62, 66, 67, 68, 72, 73, 74, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 110, 111, 112, 113, 114, 118, 119, 123, 124, 125, 126, 130, 131, 135, 136, 137, 138, 139, 143, 147, 151 }; #endif #if YYDEBUG || YYERROR_VERBOSE /* YYTNME[SYMBOL-NUM] -- String name of the symbol SYMBOL-NUM. First, the terminals, then, starting at YYNTOKENS, nonterminals. */ static const char *const yytname[] = { "$end", "error", "$undefined", "TOK_CONST_INT", "TOK_CONST_FLOAT", "TOK_CONST_STRING", "TOK_ATTR_INT", "TOK_ATTR_BITS", "TOK_ATTR_FLOAT", "TOK_ATTR_MVA32", "TOK_ATTR_MVA64", "TOK_ATTR_STRING", "TOK_FUNC", "TOK_FUNC_IN", "TOK_USERVAR", "TOK_UDF", "TOK_HOOK_IDENT", "TOK_HOOK_FUNC", "TOK_ATID", "TOK_ATWEIGHT", "TOK_ID", "TOK_WEIGHT", "TOK_COUNT", "TOK_DISTINCT", "TOK_CONST_LIST", "TOK_ATTR_SINT", "TOK_OR", "TOK_AND", "'|'", "'&'", "TOK_NE", "TOK_EQ", "'<'", "'>'", "TOK_GTE", "TOK_LTE", "'+'", "'-'", "'*'", "'/'", "'%'", "TOK_MOD", "TOK_DIV", "TOK_NOT", "TOK_NEG", "'('", "')'", "','", "$accept", "exprline", "attr", "attr_mva", "expr", "arg", "arglist", "constlist", "constlist_or_uservar", "function", 0 }; #endif # ifdef YYPRINT /* YYTOKNUM[YYLEX-NUM] -- Internal token number corresponding to token YYLEX-NUM. */ static const unsigned short yytoknum[] = { 0, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 124, 38, 283, 284, 60, 62, 285, 286, 43, 45, 42, 47, 37, 287, 288, 289, 290, 40, 41, 44 }; # endif /* YYR1[YYN] -- Symbol number of symbol that rule YYN derives. */ static const unsigned char yyr1[] = { 0, 48, 49, 50, 50, 50, 51, 51, 51, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 53, 53, 53, 53, 53, 54, 54, 55, 55, 55, 55, 56, 56, 57, 57, 57, 57, 57, 57, 57, 57 }; /* YYR2[YYN] -- Number of symbols composing right hand side of rule YYN. 
*/ static const unsigned char yyr2[] = { 0, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 1, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 1, 1, 1, 1, 1, 1, 3, 1, 1, 3, 3, 1, 1, 4, 3, 4, 3, 6, 6, 6, 4 }; /* YYDEFACT[STATE-NAME] -- Default rule to reduce with in state STATE-NUM when YYTABLE doesn't specify something else to do. Zero means the default is an error. */ static const unsigned char yydefact[] = { 0, 11, 12, 3, 4, 5, 0, 0, 0, 17, 0, 13, 14, 15, 0, 0, 0, 0, 0, 9, 2, 10, 0, 0, 0, 0, 0, 18, 19, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 42, 40, 41, 39, 52, 38, 43, 0, 7, 8, 0, 0, 6, 0, 54, 0, 0, 16, 37, 36, 35, 27, 26, 34, 33, 24, 25, 32, 31, 20, 21, 22, 23, 28, 30, 29, 51, 0, 0, 0, 0, 53, 58, 44, 45, 46, 0, 0, 50, 49, 0, 57, 0, 56, 55, 47, 48 }; /* YYDEFGOTO[NTERM-NUM]. */ static const yysigned_char yydefgoto[] = { -1, 18, 19, 61, 53, 54, 55, 94, 98, 21 }; /* YYPACT[STATE-NUM] -- Index in YYTABLE of the portion describing STATE-NUM. */ #define YYPACT_NINF -44 static const short yypact[] = { 176, -44, -44, -44, -44, -44, -27, -25, 14, -44, 44, -44, -44, -44, 58, 176, 176, 176, 85, -44, 217, -44, 45, 1, 89, 133, 79, -44, -44, 196, -44, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, -44, -44, -44, -44, -44, 217, -44, -43, -44, -44, 84, 86, -44, 100, -44, -41, -33, -44, -44, 233, 39, 82, 125, 166, 166, -14, -14, -14, -14, 238, 238, -44, -44, -44, -44, -44, -44, 133, 26, 26, -2, -44, -44, -44, -44, -44, 37, 40, -44, 126, 128, -44, 124, -44, -44, -44, -44 }; /* YYPGOTO[NTERM-NUM]. */ static const short yypgoto[] = { -44, -44, 148, -44, 0, 87, 105, 81, -44, -44 }; /* YYTABLE[YYPACT[STATE-NUM]]. What to do in state STATE-NUM. If positive, shift that token. If negative, reduce the rule which number is the opposite. If zero, do what YYDEFACT says. If YYTABLE_NINF, syntax error. 
*/ #define YYTABLE_NINF -1 static const unsigned char yytable[] = { 20, 92, 93, 84, 85, 89, 85, 3, 4, 5, 56, 57, 96, 90, 85, 27, 28, 29, 22, 58, 23, 59, 41, 42, 43, 44, 45, 46, 47, 92, 93, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 1, 2, 48, 3, 4, 5, 49, 50, 51, 6, 7, 24, 8, 9, 10, 11, 12, 13, 14, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 15, 99, 100, 30, 101, 100, 16, 25, 17, 52, 1, 2, 48, 3, 4, 5, 49, 50, 51, 6, 7, 26, 8, 9, 10, 11, 12, 13, 14, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 65, 15, 103, 104, 63, 64, 86, 16, 87, 17, 62, 1, 2, 48, 3, 4, 5, 49, 50, 51, 6, 7, 88, 8, 9, 10, 11, 12, 13, 14, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 95, 97, 15, 60, 91, 100, 102, 0, 16, 0, 17, 1, 2, 0, 3, 4, 5, 0, 0, 0, 6, 7, 0, 8, 9, 10, 11, 12, 13, 14, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 0, 0, 0, 0, 15, 0, 0, 0, 0, 0, 16, 0, 17, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 0, 0, 0, 66, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 43, 44, 45, 46, 47 }; static const yysigned_char yycheck[] = { 0, 3, 4, 46, 47, 46, 47, 6, 7, 8, 9, 10, 14, 46, 47, 15, 16, 17, 45, 18, 45, 20, 36, 37, 38, 39, 40, 41, 42, 3, 4, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 45, 15, 16, 17, 18, 19, 20, 21, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 37, 46, 47, 0, 46, 47, 43, 45, 45, 46, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 45, 15, 16, 17, 18, 19, 20, 21, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 46, 37, 3, 4, 24, 25, 47, 43, 47, 45, 46, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 47, 15, 16, 17, 18, 19, 20, 21, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 87, 88, 37, 23, 85, 47, 46, -1, 43, -1, 45, 3, 4, -1, 6, 7, 8, -1, -1, -1, 12, 13, -1, 15, 16, 17, 18, 19, 20, 21, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, -1, -1, -1, -1, 37, -1, -1, -1, -1, -1, 43, -1, 45, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, -1, -1, -1, 46, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 38, 39, 40, 41, 42 }; /* YYSTOS[STATE-NUM] -- The (internal number of the) accessing symbol of state STATE-NUM. */ static const unsigned char yystos[] = { 0, 3, 4, 6, 7, 8, 12, 13, 15, 16, 17, 18, 19, 20, 21, 37, 43, 45, 49, 50, 52, 57, 45, 45, 45, 45, 45, 52, 52, 52, 0, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 5, 9, 10, 11, 46, 52, 53, 54, 9, 10, 18, 20, 50, 51, 46, 54, 54, 46, 46, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 46, 47, 47, 47, 47, 46, 46, 53, 3, 4, 55, 55, 14, 55, 56, 46, 47, 46, 46, 3, 4 }; #if ! defined (YYSIZE_T) && defined (__SIZE_TYPE__) # define YYSIZE_T __SIZE_TYPE__ #endif #if ! defined (YYSIZE_T) && defined (size_t) # define YYSIZE_T size_t #endif #if ! defined (YYSIZE_T) # if defined (__STDC__) || defined (__cplusplus) # include /* INFRINGES ON USER NAME SPACE */ # define YYSIZE_T size_t # endif #endif #if ! defined (YYSIZE_T) # define YYSIZE_T unsigned int #endif #define yyerrok (yyerrstatus = 0) #define yyclearin (yychar = YYEMPTY) #define YYEMPTY (-2) #define YYEOF 0 #define YYACCEPT goto yyacceptlab #define YYABORT goto yyabortlab #define YYERROR goto yyerrlab1 /* Like YYERROR except do call yyerror. This remains here temporarily to ease the transition to the new meaning of YYERROR, for GCC. 
Once GCC version 2 has supplanted version 1, this can go. */ #define YYFAIL goto yyerrlab #define YYRECOVERING() (!!yyerrstatus) #define YYBACKUP(Token, Value) \ do \ if (yychar == YYEMPTY && yylen == 1) \ { \ yychar = (Token); \ yylval = (Value); \ yytoken = YYTRANSLATE (yychar); \ YYPOPSTACK; \ goto yybackup; \ } \ else \ { \ yyerror (pParser, "syntax error: cannot back up");\ YYERROR; \ } \ while (0) #define YYTERROR 1 #define YYERRCODE 256 /* YYLLOC_DEFAULT -- Compute the default location (before the actions are run). */ #ifndef YYLLOC_DEFAULT # define YYLLOC_DEFAULT(Current, Rhs, N) \ Current.first_line = Rhs[1].first_line; \ Current.first_column = Rhs[1].first_column; \ Current.last_line = Rhs[N].last_line; \ Current.last_column = Rhs[N].last_column; #endif /* YYLEX -- calling `yylex' with the right arguments. */ #ifdef YYLEX_PARAM # define YYLEX yylex (&yylval, YYLEX_PARAM) #else # define YYLEX yylex (&yylval, pParser) #endif /* Enable debugging if requested. */ #if YYDEBUG # ifndef YYFPRINTF # include /* INFRINGES ON USER NAME SPACE */ # define YYFPRINTF fprintf # endif # define YYDPRINTF(Args) \ do { \ if (yydebug) \ YYFPRINTF Args; \ } while (0) # define YYDSYMPRINT(Args) \ do { \ if (yydebug) \ yysymprint Args; \ } while (0) # define YYDSYMPRINTF(Title, Token, Value, Location) \ do { \ if (yydebug) \ { \ YYFPRINTF (stderr, "%s ", Title); \ yysymprint (stderr, \ Token, Value); \ YYFPRINTF (stderr, "\n"); \ } \ } while (0) /*------------------------------------------------------------------. | yy_stack_print -- Print the state stack from its BOTTOM up to its | | TOP (cinluded). | `------------------------------------------------------------------*/ #if defined (__STDC__) || defined (__cplusplus) static void yy_stack_print (short *bottom, short *top) #else static void yy_stack_print (bottom, top) short *bottom; short *top; #endif { YYFPRINTF (stderr, "Stack now"); for (/* Nothing. */; bottom <= top; ++bottom) YYFPRINTF (stderr, " %d", *bottom); YYFPRINTF (stderr, "\n"); } # define YY_STACK_PRINT(Bottom, Top) \ do { \ if (yydebug) \ yy_stack_print ((Bottom), (Top)); \ } while (0) /*------------------------------------------------. | Report that the YYRULE is going to be reduced. | `------------------------------------------------*/ #if defined (__STDC__) || defined (__cplusplus) static void yy_reduce_print (int yyrule) #else static void yy_reduce_print (yyrule) int yyrule; #endif { int yyi; unsigned int yylineno = yyrline[yyrule]; YYFPRINTF (stderr, "Reducing stack by rule %d (line %u), ", yyrule - 1, yylineno); /* Print the symbols being reduced, and their result. */ for (yyi = yyprhs[yyrule]; 0 <= yyrhs[yyi]; yyi++) YYFPRINTF (stderr, "%s ", yytname [yyrhs[yyi]]); YYFPRINTF (stderr, "-> %s\n", yytname [yyr1[yyrule]]); } # define YY_REDUCE_PRINT(Rule) \ do { \ if (yydebug) \ yy_reduce_print (Rule); \ } while (0) /* Nonzero means print parse trace. It is left uninitialized so that multiple parsers can coexist. */ int yydebug; #else /* !YYDEBUG */ # define YYDPRINTF(Args) # define YYDSYMPRINT(Args) # define YYDSYMPRINTF(Title, Token, Value, Location) # define YY_STACK_PRINT(Bottom, Top) # define YY_REDUCE_PRINT(Rule) #endif /* !YYDEBUG */ /* YYINITDEPTH -- initial size of the parser's stacks. */ #ifndef YYINITDEPTH # define YYINITDEPTH 200 #endif /* YYMAXDEPTH -- maximum size the stacks can grow to (effective only if the built-in stack extension method is used). 
Do not make this value too large; the results are undefined if SIZE_MAX < YYSTACK_BYTES (YYMAXDEPTH) evaluated with infinite-precision integer arithmetic. */ #if YYMAXDEPTH == 0 # undef YYMAXDEPTH #endif #ifndef YYMAXDEPTH # define YYMAXDEPTH 10000 #endif #if YYERROR_VERBOSE # ifndef yystrlen # if defined (__GLIBC__) && defined (_STRING_H) # define yystrlen strlen # else /* Return the length of YYSTR. */ static YYSIZE_T # if defined (__STDC__) || defined (__cplusplus) yystrlen (const char *yystr) # else yystrlen (yystr) const char *yystr; # endif { register const char *yys = yystr; while (*yys++ != '\0') continue; return yys - yystr - 1; } # endif # endif # ifndef yystpcpy # if defined (__GLIBC__) && defined (_STRING_H) && defined (_GNU_SOURCE) # define yystpcpy stpcpy # else /* Copy YYSRC to YYDEST, returning the address of the terminating '\0' in YYDEST. */ static char * # if defined (__STDC__) || defined (__cplusplus) yystpcpy (char *yydest, const char *yysrc) # else yystpcpy (yydest, yysrc) char *yydest; const char *yysrc; # endif { register char *yyd = yydest; register const char *yys = yysrc; while ((*yyd++ = *yys++) != '\0') continue; return yyd - 1; } # endif # endif #endif /* !YYERROR_VERBOSE */ #if YYDEBUG /*--------------------------------. | Print this symbol on YYOUTPUT. | `--------------------------------*/ #if defined (__STDC__) || defined (__cplusplus) static void yysymprint (FILE *yyoutput, int yytype, YYSTYPE *yyvaluep) #else static void yysymprint (yyoutput, yytype, yyvaluep) FILE *yyoutput; int yytype; YYSTYPE *yyvaluep; #endif { /* Pacify ``unused variable'' warnings. */ (void) yyvaluep; if (yytype < YYNTOKENS) { YYFPRINTF (yyoutput, "token %s (", yytname[yytype]); # ifdef YYPRINT YYPRINT (yyoutput, yytoknum[yytype], *yyvaluep); # endif } else YYFPRINTF (yyoutput, "nterm %s (", yytname[yytype]); switch (yytype) { default: break; } YYFPRINTF (yyoutput, ")"); } #endif /* ! YYDEBUG */ /*-----------------------------------------------. | Release the memory associated to this symbol. | `-----------------------------------------------*/ #if defined (__STDC__) || defined (__cplusplus) static void yydestruct (int yytype, YYSTYPE *yyvaluep) #else static void yydestruct (yytype, yyvaluep) int yytype; YYSTYPE *yyvaluep; #endif { /* Pacify ``unused variable'' warnings. */ (void) yyvaluep; switch (yytype) { default: break; } } /* Prevent warnings from -Wmissing-prototypes. */ #ifdef YYPARSE_PARAM # if defined (__STDC__) || defined (__cplusplus) int yyparse (void *YYPARSE_PARAM); # else int yyparse (); # endif #else /* ! YYPARSE_PARAM */ #if defined (__STDC__) || defined (__cplusplus) int yyparse ( ExprParser_t * pParser ); #else int yyparse (); #endif #endif /* ! YYPARSE_PARAM */ /*----------. | yyparse. | `----------*/ #ifdef YYPARSE_PARAM # if defined (__STDC__) || defined (__cplusplus) int yyparse (void *YYPARSE_PARAM) # else int yyparse (YYPARSE_PARAM) void *YYPARSE_PARAM; # endif #else /* ! YYPARSE_PARAM */ #if defined (__STDC__) || defined (__cplusplus) int yyparse ( ExprParser_t * pParser ) #else int yyparse (pParser) ExprParser_t * pParser ; #endif #endif { /* The lookahead symbol. */ int yychar; /* The semantic value of the lookahead symbol. */ YYSTYPE yylval; /* Number of syntax errors so far. */ int yynerrs; register int yystate; register int yyn; int yyresult; /* Number of tokens to shift before error messages enabled. */ int yyerrstatus; /* Lookahead token as an internal (translated) token number. 
*/ int yytoken = 0; /* Three stacks and their tools: `yyss': related to states, `yyvs': related to semantic values, `yyls': related to locations. Refer to the stacks thru separate pointers, to allow yyoverflow to reallocate them elsewhere. */ /* The state stack. */ short yyssa[YYINITDEPTH]; short *yyss = yyssa; register short *yyssp; /* The semantic value stack. */ YYSTYPE yyvsa[YYINITDEPTH]; YYSTYPE *yyvs = yyvsa; register YYSTYPE *yyvsp; #define YYPOPSTACK (yyvsp--, yyssp--) YYSIZE_T yystacksize = YYINITDEPTH; /* The variables used to return semantic value and location from the action routines. */ YYSTYPE yyval; /* When reducing, the number of symbols on the RHS of the reduced rule. */ int yylen; YYDPRINTF ((stderr, "Starting parse\n")); yystate = 0; yyerrstatus = 0; yynerrs = 0; yychar = YYEMPTY; /* Cause a token to be read. */ /* Initialize stack pointers. Waste one element of value and location stack so that they stay on the same level as the state stack. The wasted elements are never initialized. */ yyssp = yyss; yyvsp = yyvs; goto yysetstate; /*------------------------------------------------------------. | yynewstate -- Push a new state, which is found in yystate. | `------------------------------------------------------------*/ yynewstate: /* In all cases, when you get here, the value and location stacks have just been pushed. so pushing a state here evens the stacks. */ yyssp++; yysetstate: *yyssp = yystate; if (yyss + yystacksize - 1 <= yyssp) { /* Get the current used size of the three stacks, in elements. */ YYSIZE_T yysize = yyssp - yyss + 1; #ifdef yyoverflow { /* Give user a chance to reallocate the stack. Use copies of these so that the &'s don't force the real ones into memory. */ YYSTYPE *yyvs1 = yyvs; short *yyss1 = yyss; /* Each stack pointer address is followed by the size of the data in use in that stack, in bytes. This used to be a conditional around just the two extra args, but that might be undefined if yyoverflow is a macro. */ yyoverflow ("parser stack overflow", &yyss1, yysize * sizeof (*yyssp), &yyvs1, yysize * sizeof (*yyvsp), &yystacksize); yyss = yyss1; yyvs = yyvs1; } #else /* no yyoverflow */ # ifndef YYSTACK_RELOCATE goto yyoverflowlab; # else /* Extend the stack our own way. */ if (YYMAXDEPTH <= yystacksize) goto yyoverflowlab; yystacksize *= 2; if (YYMAXDEPTH < yystacksize) yystacksize = YYMAXDEPTH; { short *yyss1 = yyss; union yyalloc *yyptr = (union yyalloc *) YYSTACK_ALLOC (YYSTACK_BYTES (yystacksize)); if (! yyptr) goto yyoverflowlab; YYSTACK_RELOCATE (yyss); YYSTACK_RELOCATE (yyvs); # undef YYSTACK_RELOCATE if (yyss1 != yyssa) YYSTACK_FREE (yyss1); } # endif #endif /* no yyoverflow */ yyssp = yyss + yysize - 1; yyvsp = yyvs + yysize - 1; YYDPRINTF ((stderr, "Stack size increased to %lu\n", (unsigned long int) yystacksize)); if (yyss + yystacksize - 1 <= yyssp) YYABORT; } YYDPRINTF ((stderr, "Entering state %d\n", yystate)); goto yybackup; /*-----------. | yybackup. | `-----------*/ yybackup: /* Do appropriate processing given the current state. */ /* Read a lookahead token if we need one and don't already have one. */ /* yyresume: */ /* First try to decide what to do without reference to lookahead token. */ yyn = yypact[yystate]; if (yyn == YYPACT_NINF) goto yydefault; /* Not known => get a lookahead token if don't already have one. */ /* YYCHAR is either YYEMPTY or YYEOF or a valid lookahead symbol. 
*/ if (yychar == YYEMPTY) { YYDPRINTF ((stderr, "Reading a token: ")); yychar = YYLEX; } if (yychar <= YYEOF) { yychar = yytoken = YYEOF; YYDPRINTF ((stderr, "Now at end of input.\n")); } else { yytoken = YYTRANSLATE (yychar); YYDSYMPRINTF ("Next token is", yytoken, &yylval, &yylloc); } /* If the proper action on seeing token YYTOKEN is to reduce or to detect an error, take that action. */ yyn += yytoken; if (yyn < 0 || YYLAST < yyn || yycheck[yyn] != yytoken) goto yydefault; yyn = yytable[yyn]; if (yyn <= 0) { if (yyn == 0 || yyn == YYTABLE_NINF) goto yyerrlab; yyn = -yyn; goto yyreduce; } if (yyn == YYFINAL) YYACCEPT; /* Shift the lookahead token. */ YYDPRINTF ((stderr, "Shifting token %s, ", yytname[yytoken])); /* Discard the token being shifted unless it is eof. */ if (yychar != YYEOF) yychar = YYEMPTY; *++yyvsp = yylval; /* Count tokens shifted since error; after three, turn off error status. */ if (yyerrstatus) yyerrstatus--; yystate = yyn; goto yynewstate; /*-----------------------------------------------------------. | yydefault -- do the default action for the current state. | `-----------------------------------------------------------*/ yydefault: yyn = yydefact[yystate]; if (yyn == 0) goto yyerrlab; goto yyreduce; /*-----------------------------. | yyreduce -- Do a reduction. | `-----------------------------*/ yyreduce: /* yyn is the number of a rule to reduce with. */ yylen = yyr2[yyn]; /* If YYLEN is nonzero, implement the default value of the action: `$$ = $1'. Otherwise, the following line sets YYVAL to garbage. This behavior is undocumented and Bison users should not rely upon it. Assigning to YYVAL unconditionally makes the parser a bit smaller, and it avoids a GCC warning that YYVAL may be used uninitialized. */ yyval = yyvsp[1-yylen]; YY_REDUCE_PRINT (yyn); switch (yyn) { case 2: { pParser->m_iParsed = yyvsp[0].iNode; ;} break; case 3: { yyval.iNode = pParser->AddNodeAttr ( TOK_ATTR_INT, yyvsp[0].iAttrLocator ); ;} break; case 4: { yyval.iNode = pParser->AddNodeAttr ( TOK_ATTR_BITS, yyvsp[0].iAttrLocator ); ;} break; case 5: { yyval.iNode = pParser->AddNodeAttr ( TOK_ATTR_FLOAT, yyvsp[0].iAttrLocator ); ;} break; case 7: { yyval.iNode = pParser->AddNodeAttr ( TOK_ATTR_MVA32, yyvsp[0].iAttrLocator ) ;} break; case 8: { yyval.iNode = pParser->AddNodeAttr ( TOK_ATTR_MVA64, yyvsp[0].iAttrLocator ) ;} break; case 11: { yyval.iNode = pParser->AddNodeInt ( yyvsp[0].iConst ); ;} break; case 12: { yyval.iNode = pParser->AddNodeFloat ( yyvsp[0].fConst ); ;} break; case 13: { yyval.iNode = pParser->AddNodeID(); ;} break; case 14: { yyval.iNode = pParser->AddNodeWeight(); ;} break; case 15: { yyval.iNode = pParser->AddNodeID(); ;} break; case 16: { yyval.iNode = pParser->AddNodeWeight(); ;} break; case 17: { yyval.iNode = pParser->AddNodeHookIdent ( yyvsp[0].iNode ); ;} break; case 18: { yyval.iNode = pParser->AddNodeOp ( TOK_NEG, yyvsp[0].iNode, -1 ); ;} break; case 19: { yyval.iNode = pParser->AddNodeOp ( TOK_NOT, yyvsp[0].iNode, -1 ); if ( yyval.iNode<0 ) YYERROR; ;} break; case 20: { yyval.iNode = pParser->AddNodeOp ( '+', yyvsp[-2].iNode, yyvsp[0].iNode ); ;} break; case 21: { yyval.iNode = pParser->AddNodeOp ( '-', yyvsp[-2].iNode, yyvsp[0].iNode ); ;} break; case 22: { yyval.iNode = pParser->AddNodeOp ( '*', yyvsp[-2].iNode, yyvsp[0].iNode ); ;} break; case 23: { yyval.iNode = pParser->AddNodeOp ( '/', yyvsp[-2].iNode, yyvsp[0].iNode ); ;} break; case 24: { yyval.iNode = pParser->AddNodeOp ( '<', yyvsp[-2].iNode, yyvsp[0].iNode ); ;} break; case 25: { yyval.iNode = 
pParser->AddNodeOp ( '>', yyvsp[-2].iNode, yyvsp[0].iNode ); ;} break; case 26: { yyval.iNode = pParser->AddNodeOp ( '&', yyvsp[-2].iNode, yyvsp[0].iNode ); ;} break; case 27: { yyval.iNode = pParser->AddNodeOp ( '|', yyvsp[-2].iNode, yyvsp[0].iNode ); ;} break; case 28: { yyval.iNode = pParser->AddNodeOp ( '%', yyvsp[-2].iNode, yyvsp[0].iNode ); ;} break; case 29: { yyval.iNode = pParser->AddNodeFunc ( FUNC_IDIV, pParser->AddNodeOp ( ',', yyvsp[-2].iNode, yyvsp[0].iNode ) ); ;} break; case 30: { yyval.iNode = pParser->AddNodeOp ( '%', yyvsp[-2].iNode, yyvsp[0].iNode ); ;} break; case 31: { yyval.iNode = pParser->AddNodeOp ( TOK_LTE, yyvsp[-2].iNode, yyvsp[0].iNode ); ;} break; case 32: { yyval.iNode = pParser->AddNodeOp ( TOK_GTE, yyvsp[-2].iNode, yyvsp[0].iNode ); ;} break; case 33: { yyval.iNode = pParser->AddNodeOp ( TOK_EQ, yyvsp[-2].iNode, yyvsp[0].iNode ); ;} break; case 34: { yyval.iNode = pParser->AddNodeOp ( TOK_NE, yyvsp[-2].iNode, yyvsp[0].iNode ); ;} break; case 35: { yyval.iNode = pParser->AddNodeOp ( TOK_AND, yyvsp[-2].iNode, yyvsp[0].iNode ); if ( yyval.iNode<0 ) YYERROR; ;} break; case 36: { yyval.iNode = pParser->AddNodeOp ( TOK_OR, yyvsp[-2].iNode, yyvsp[0].iNode ); if ( yyval.iNode<0 ) YYERROR; ;} break; case 37: { yyval.iNode = yyvsp[-1].iNode; ;} break; case 39: { yyval.iNode = pParser->AddNodeAttr ( TOK_ATTR_STRING, yyvsp[0].iAttrLocator ); ;} break; case 40: { yyval.iNode = pParser->AddNodeAttr ( TOK_ATTR_MVA32, yyvsp[0].iAttrLocator ); ;} break; case 41: { yyval.iNode = pParser->AddNodeAttr ( TOK_ATTR_MVA64, yyvsp[0].iAttrLocator ); ;} break; case 42: { yyval.iNode = pParser->AddNodeString ( yyvsp[0].iConst ); ;} break; case 43: { yyval.iNode = yyvsp[0].iNode; ;} break; case 44: { yyval.iNode = pParser->AddNodeOp ( ',', yyvsp[-2].iNode, yyvsp[0].iNode ); ;} break; case 45: { yyval.iNode = pParser->AddNodeConstlist ( yyvsp[0].iConst ); ;} break; case 46: { yyval.iNode = pParser->AddNodeConstlist ( yyvsp[0].fConst ); ;} break; case 47: { pParser->AppendToConstlist ( yyval.iNode, yyvsp[0].iConst ); ;} break; case 48: { pParser->AppendToConstlist ( yyval.iNode, yyvsp[0].fConst ); ;} break; case 50: { yyval.iNode = pParser->AddNodeUservar ( yyvsp[0].iNode ); ;} break; case 51: { yyval.iNode = pParser->AddNodeFunc ( yyvsp[-3].iFunc, yyvsp[-1].iNode ); if ( yyval.iNode<0 ) YYERROR; ;} break; case 52: { yyval.iNode = pParser->AddNodeFunc ( yyvsp[-2].iFunc, -1 ); if ( yyval.iNode<0 ) YYERROR; ;} break; case 53: { yyval.iNode = pParser->AddNodeUdf ( yyvsp[-3].iNode, yyvsp[-1].iNode ); if ( yyval.iNode<0 ) YYERROR; ;} break; case 54: { yyval.iNode = pParser->AddNodeUdf ( yyvsp[-2].iNode, -1 ); if ( yyval.iNode<0 ) YYERROR; ;} break; case 55: { yyval.iNode = pParser->AddNodeFunc ( yyvsp[-5].iFunc, yyvsp[-3].iNode, yyvsp[-1].iNode ); ;} break; case 56: { yyval.iNode = pParser->AddNodeFunc ( yyvsp[-5].iFunc, pParser->AddNodeID(), yyvsp[-1].iNode ); ;} break; case 57: { yyval.iNode = pParser->AddNodeFunc ( yyvsp[-5].iFunc, pParser->AddNodeID(), yyvsp[-1].iNode ); ;} break; case 58: { yyval.iNode = pParser->AddNodeHookFunc ( yyvsp[-3].iNode, yyvsp[-1].iNode ); if ( yyval.iNode<0 ) YYERROR; ;} break; } /* Line 991 of yacc.c. */ yyvsp -= yylen; yyssp -= yylen; YY_STACK_PRINT (yyss, yyssp); *++yyvsp = yyval; /* Now `shift' the result of the reduction. Determine what state that goes to, based on the state we popped back to and the rule number reduced by. 
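   (As an illustration of the reduce actions above, not something emitted by Bison: for an
   input like "a+b*2>10 AND IN(tag,1,2,3)", the reductions fire bottom-up roughly as case 22
   ('*'), case 20 ('+'), case 25 ('>'), case 55 for the IN() call (AddNodeFunc over an
   attribute and a constant list built by cases 45/47), then case 35 (TOK_AND), and finally
   case 2, which stores the root node in pParser->m_iParsed; the attribute and constant
   leaves come from cases 3-12.)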
*/ yyn = yyr1[yyn]; yystate = yypgoto[yyn - YYNTOKENS] + *yyssp; if (0 <= yystate && yystate <= YYLAST && yycheck[yystate] == *yyssp) yystate = yytable[yystate]; else yystate = yydefgoto[yyn - YYNTOKENS]; goto yynewstate; /*------------------------------------. | yyerrlab -- here on detecting error | `------------------------------------*/ yyerrlab: /* If not already recovering from an error, report this error. */ if (!yyerrstatus) { ++yynerrs; #if YYERROR_VERBOSE yyn = yypact[yystate]; if (YYPACT_NINF < yyn && yyn < YYLAST) { YYSIZE_T yysize = 0; int yytype = YYTRANSLATE (yychar); char *yymsg; int yyx, yycount; yycount = 0; /* Start YYX at -YYN if negative to avoid negative indexes in YYCHECK. */ for (yyx = yyn < 0 ? -yyn : 0; yyx < (int) (sizeof (yytname) / sizeof (char *)); yyx++) if (yycheck[yyx + yyn] == yyx && yyx != YYTERROR) yysize += yystrlen (yytname[yyx]) + 15, yycount++; yysize += yystrlen ("syntax error, unexpected ") + 1; yysize += yystrlen (yytname[yytype]); yymsg = (char *) YYSTACK_ALLOC (yysize); if (yymsg != 0) { char *yyp = yystpcpy (yymsg, "syntax error, unexpected "); yyp = yystpcpy (yyp, yytname[yytype]); if (yycount < 5) { yycount = 0; for (yyx = yyn < 0 ? -yyn : 0; yyx < (int) (sizeof (yytname) / sizeof (char *)); yyx++) if (yycheck[yyx + yyn] == yyx && yyx != YYTERROR) { const char *yyq = ! yycount ? ", expecting " : " or "; yyp = yystpcpy (yyp, yyq); yyp = yystpcpy (yyp, yytname[yyx]); yycount++; } } yyerror (pParser, yymsg); YYSTACK_FREE (yymsg); } else yyerror (pParser, "syntax error; also virtual memory exhausted"); } else #endif /* YYERROR_VERBOSE */ yyerror (pParser, "syntax error"); } if (yyerrstatus == 3) { /* If just tried and failed to reuse lookahead token after an error, discard it. */ /* Return failure if at end of input. */ if (yychar == YYEOF) { /* Pop the error token. */ YYPOPSTACK; /* Pop the rest of the stack. */ while (yyss < yyssp) { YYDSYMPRINTF ("Error: popping", yystos[*yyssp], yyvsp, yylsp); yydestruct (yystos[*yyssp], yyvsp); YYPOPSTACK; } YYABORT; } YYDSYMPRINTF ("Error: discarding", yytoken, &yylval, &yylloc); yydestruct (yytoken, &yylval); yychar = YYEMPTY; } /* Else will try to reuse lookahead token after shifting the error token. */ goto yyerrlab2; /*----------------------------------------------------. | yyerrlab1 -- error raised explicitly by an action. | `----------------------------------------------------*/ yyerrlab1: /* Suppress GCC warning that yyerrlab1 is unused when no action invokes YYERROR. */ #if defined (__GNUC_MINOR__) && 2093 <= (__GNUC__ * 1000 + __GNUC_MINOR__) // __attribute__ ((__unused__)) #endif goto yyerrlab2; /*---------------------------------------------------------------. | yyerrlab2 -- pop states until the error token can be shifted. | `---------------------------------------------------------------*/ yyerrlab2: yyerrstatus = 3; /* Each real token shifted decrements this. */ for (;;) { yyn = yypact[yystate]; if (yyn != YYPACT_NINF) { yyn += YYTERROR; if (0 <= yyn && yyn <= YYLAST && yycheck[yyn] == YYTERROR) { yyn = yytable[yyn]; if (0 < yyn) break; } } /* Pop the current state because it cannot handle the error token. */ if (yyssp == yyss) YYABORT; YYDSYMPRINTF ("Error: popping", yystos[*yyssp], yyvsp, yylsp); yydestruct (yystos[yystate], yyvsp); yyvsp--; yystate = *--yyssp; YY_STACK_PRINT (yyss, yyssp); } if (yyn == YYFINAL) YYACCEPT; YYDPRINTF ((stderr, "Shifting error token, ")); *++yyvsp = yylval; yystate = yyn; goto yynewstate; /*-------------------------------------. 
| yyacceptlab -- YYACCEPT comes here. | `-------------------------------------*/ yyacceptlab: yyresult = 0; goto yyreturn; /*-----------------------------------. | yyabortlab -- YYABORT comes here. | `-----------------------------------*/ yyabortlab: yyresult = 1; goto yyreturn; #ifndef yyoverflow /*----------------------------------------------. | yyoverflowlab -- parser overflow comes here. | `----------------------------------------------*/ yyoverflowlab: yyerror (pParser, "parser stack overflow"); yyresult = 2; /* Fall through. */ #endif yyreturn: #ifndef yyoverflow if (yyss != yyssa) YYSTACK_FREE (yyss); #endif return yyresult; } sphinx-2.0.4-release/src/sphinxstemcz.cpp0000644000176700017710000001271411711621267020006 0ustar deogardeogar// // $Id: sphinxstemcz.cpp 3087 2012-01-30 23:07:35Z shodan $ // // // Copyright (c) 2001-2012, Andrew Aksyonoff // Copyright (c) 2008-2012, Sphinx Technologies Inc // All rights reserved // // This program is free software; you can redistribute it and/or modify // it under the terms of the GNU General Public License. You should have // received a copy of the GPL license along with this program; if you // did not, you can find it at http://www.gnu.org/ // #include "sphinx.h" #include struct ClampRule_t { int m_iMinLength; BYTE m_szSuffix[10]; int m_iCheckLength; int m_nRemove; bool m_bPalatalize; }; static ClampRule_t g_dCaseRules [] = { { 7, "atech", 5, 5, false }, { 6, "\xECtem", 4, 3, true }, // \u011b { 6, "at\xF9m", 4, 4, false }, // \u016f { 5, "ech", 3, 2, true }, { 5, "ich", 3, 2, true }, { 5, "\xED!ch", 3, 2, true }, // \u00ed { 5, "\xE9ho", 3, 2, true }, // \u00e9 { 5, "\xECmi", 3, 2, true }, // \u011b { 5, "emi", 3, 2, true }, { 5, "\xE9mu", 3, 2, true }, // \u00e9 { 5, "\xECte", 3, 2, true }, // \u011b { 5, "\xECti", 3, 2, true }, // \u011b { 5, "iho", 3, 2, true }, { 5, "\xEDho", 3, 2, true }, // \u00ed { 5, "\xEDmi", 3, 2, true }, // \u00ed { 5, "imu", 3, 2, true }, { 5, "\xE1!ch", 3, 3, false }, // \u00e1 { 5, "ata", 3, 3, false }, { 5, "aty", 3, 3, false }, { 5, "\xFD!ch", 3, 3, false }, // \u00fd { 5, "ama", 3, 3, false }, { 5, "ami", 3, 3, false }, { 5, "ov\xE9", 3, 3, false }, // \u00e9 { 5, "ovi", 3, 3, false }, { 5, "\xFDmi", 3, 3, false }, // \u00fd { 4, "em", 2, 1, true }, { 4, "es", 2, 2, true }, { 4, "\xE9m", 2, 2, true }, // \u00e9 { 4, "\xEDm", 2, 2, true }, // \u00ed { 4, "\xF9!fm", 2, 2, false }, // \u016f { 4, "at", 2, 2, false }, { 4, "\xE1m", 2, 2, false }, // \u00e1 { 4, "os", 2, 2, false }, { 4, "us", 2, 2, false }, { 4, "\xFDm", 2, 2, false }, // \u00fd { 4, "mi", 2, 2, false }, { 4, "ou", 2, 2, false }, { 3, "e", 1, 0, true }, { 3, "i", 1, 0, true }, { 3, "\xED", 1, 0, true }, // \u00ed { 3, "\xEC", 1, 0, true }, // \u011b { 3, "u", 1, 1, false }, { 3, "y", 1, 1, false }, { 3, "\xF9", 1, 1, false }, // \u016f { 3, "a", 1, 1, false }, { 3, "o", 1, 1, false }, { 3, "\xE1", 1, 1, false }, // \u00e1 { 3, "\xE9", 1, 1, false }, // \u00e9 { 3, "\xFD", 1, 1, false } // \u00fd }; static ClampRule_t g_dPosessiveRules [] = { { 5, "ov", 2, 2, false }, { 5, "\xF9v", 2, 2, false }, { 5, "in", 2, 1, true }, }; struct ReplaceRule_t { BYTE m_szSuffix[4]; int m_iRemoveLength; BYTE m_szAppend[4]; }; static ReplaceRule_t g_dPalatalizeRules [] = { { "ci", 2, "k" }, { "ce", 2, "k" }, { "\xE8i", 2, "k" }, // \u010d { "\xE8!e", 2, "k" }, // \u010d { "zi", 2, "h" }, { "ze", 2, "h" }, { "\x9Ei", 2, "h" }, // \u017e { "\x9E!e", 2, "h" }, // \u017e { "\xE8t\xEC", 3, "ck" }, // \u010d \u011b { "\xE8ti", 3, "ck" }, { "\xE8t\xED", 3, "ck" }, // 
\u010d \u00ed { "\x9At\xEC", 3, "sk" }, // \u0161 \u011b // was: check 2, remove 2 { "\x9Ati", 3, "sk" }, // \u0161 // was: check 2, remove 2 { "\x9At\xED", 3, "sk" }, // \u0161 \u00ed // was: check 2, remove 2 }; static void Palatalize ( BYTE * word ) { if ( !word ) return; int nRules = sizeof ( g_dPalatalizeRules ) / sizeof ( g_dPalatalizeRules[0] ); int iWordLength = strlen ( (char*)word ); for ( int i = 0; i < nRules; ++i ) { const ReplaceRule_t & Rule = g_dPalatalizeRules[i]; if ( iWordLength>=Rule.m_iRemoveLength && !strncmp ( (char*)word + iWordLength - Rule.m_iRemoveLength, (char*)Rule.m_szSuffix, Rule.m_iRemoveLength ) ) { word [iWordLength - Rule.m_iRemoveLength] = '\0'; strcat ( (char*)word, (char*)Rule.m_szAppend ); // NOLINT strcat return; } } if ( iWordLength > 0 ) word [iWordLength - 1] = '\0'; } static void ApplyRules ( BYTE * word, const ClampRule_t * pRules, int nRules ) { if ( !word || !pRules ) return; int iWordLength = strlen ( (char *)word ); for ( int i = 0; i < nRules; ++i ) { const ClampRule_t & Rule = pRules[i]; if ( iWordLength > Rule.m_iMinLength && !strncmp ( (char*)word + iWordLength - Rule.m_iCheckLength, (char*)Rule.m_szSuffix, Rule.m_iCheckLength )) { word [iWordLength - Rule.m_nRemove] = '\0'; Palatalize ( word ); return; } } } static void RemoveChars ( char * szString, char cChar ) { char * szPos; int iLength = strlen ( szString ); while ( ( szPos = strchr ( szString, cChar ) )!=NULL ) memmove ( szPos, szPos + 1, iLength - ( szPos - szString ) ); } static void PreprocessRules ( ClampRule_t * pRules, int nRules ) { if ( !pRules ) return; for ( int i = 0; i < nRules; ++i ) RemoveChars ( (char *) pRules[i].m_szSuffix, '!' ); } static void PreprocessReplace () { int nRules = sizeof ( g_dPalatalizeRules ) / sizeof ( g_dPalatalizeRules[0] ); for ( int i = 0; i < nRules; ++i ) { RemoveChars ( (char *) g_dPalatalizeRules[i].m_szSuffix, '!' ); RemoveChars ( (char *) g_dPalatalizeRules[i].m_szAppend, '!' 
); } } void stem_cz_init () { PreprocessRules ( g_dCaseRules, sizeof ( g_dCaseRules ) / sizeof ( g_dCaseRules[0] ) ); PreprocessRules ( g_dPosessiveRules, sizeof ( g_dPosessiveRules ) / sizeof ( g_dPosessiveRules[0] ) ); PreprocessReplace (); } void stem_cz ( BYTE * word ) { ApplyRules ( word, g_dCaseRules, sizeof ( g_dCaseRules ) / sizeof ( g_dCaseRules[0] ) ); ApplyRules ( word, g_dPosessiveRules, sizeof ( g_dPosessiveRules ) / sizeof ( g_dPosessiveRules[0] ) ); } // // $Id: sphinxstemcz.cpp 3087 2012-01-30 23:07:35Z shodan $ // sphinx-2.0.4-release/src/sphinxexpr.y0000644000176700017710000001232711712074354017145 0ustar deogardeogar%lex-param { ExprParser_t * pParser } %parse-param { ExprParser_t * pParser } %pure-parser %error-verbose %union { int64_t iConst; // constant value float fConst; // constant value uint64_t iAttrLocator; // attribute locator (rowitem for int/float; offset+size for bits) int iFunc; // function id int iNode; // node, or uservar, or udf index }; %token TOK_CONST_INT %token TOK_CONST_FLOAT %token TOK_CONST_STRING %token TOK_ATTR_INT %token TOK_ATTR_BITS %token TOK_ATTR_FLOAT %token TOK_ATTR_MVA32 %token TOK_ATTR_MVA64 %token TOK_ATTR_STRING %token TOK_FUNC %token TOK_FUNC_IN %token TOK_USERVAR %token TOK_UDF %token TOK_HOOK_IDENT %token TOK_HOOK_FUNC %token TOK_ATID %token TOK_ATWEIGHT %token TOK_ID %token TOK_WEIGHT %token TOK_COUNT %token TOK_DISTINCT %token TOK_CONST_LIST %token TOK_ATTR_SINT %type attr %type attr_mva %type expr %type arg %type arglist %type constlist %type constlist_or_uservar %type function %left TOK_OR %left TOK_AND %left '|' %left '&' %left TOK_EQ TOK_NE %left '<' '>' TOK_LTE TOK_GTE %left '+' '-' %left '*' '/' '%' TOK_DIV TOK_MOD %nonassoc TOK_NOT %nonassoc TOK_NEG %% exprline: expr { pParser->m_iParsed = $1; } ; attr: TOK_ATTR_INT { $$ = pParser->AddNodeAttr ( TOK_ATTR_INT, $1 ); } | TOK_ATTR_BITS { $$ = pParser->AddNodeAttr ( TOK_ATTR_BITS, $1 ); } | TOK_ATTR_FLOAT { $$ = pParser->AddNodeAttr ( TOK_ATTR_FLOAT, $1 ); } ; attr_mva: attr | TOK_ATTR_MVA32 { $$ = pParser->AddNodeAttr ( TOK_ATTR_MVA32, $1 ) } | TOK_ATTR_MVA64 { $$ = pParser->AddNodeAttr ( TOK_ATTR_MVA64, $1 ) } ; expr: attr | function | TOK_CONST_INT { $$ = pParser->AddNodeInt ( $1 ); } | TOK_CONST_FLOAT { $$ = pParser->AddNodeFloat ( $1 ); } | TOK_ATID { $$ = pParser->AddNodeID(); } | TOK_ATWEIGHT { $$ = pParser->AddNodeWeight(); } | TOK_ID { $$ = pParser->AddNodeID(); } | TOK_WEIGHT '(' ')' { $$ = pParser->AddNodeWeight(); } | TOK_HOOK_IDENT { $$ = pParser->AddNodeHookIdent ( $1 ); } | '-' expr %prec TOK_NEG { $$ = pParser->AddNodeOp ( TOK_NEG, $2, -1 ); } | TOK_NOT expr { $$ = pParser->AddNodeOp ( TOK_NOT, $2, -1 ); if ( $$<0 ) YYERROR; } | expr '+' expr { $$ = pParser->AddNodeOp ( '+', $1, $3 ); } | expr '-' expr { $$ = pParser->AddNodeOp ( '-', $1, $3 ); } | expr '*' expr { $$ = pParser->AddNodeOp ( '*', $1, $3 ); } | expr '/' expr { $$ = pParser->AddNodeOp ( '/', $1, $3 ); } | expr '<' expr { $$ = pParser->AddNodeOp ( '<', $1, $3 ); } | expr '>' expr { $$ = pParser->AddNodeOp ( '>', $1, $3 ); } | expr '&' expr { $$ = pParser->AddNodeOp ( '&', $1, $3 ); } | expr '|' expr { $$ = pParser->AddNodeOp ( '|', $1, $3 ); } | expr '%' expr { $$ = pParser->AddNodeOp ( '%', $1, $3 ); } | expr TOK_DIV expr { $$ = pParser->AddNodeFunc ( FUNC_IDIV, pParser->AddNodeOp ( ',', $1, $3 ) ); } | expr TOK_MOD expr { $$ = pParser->AddNodeOp ( '%', $1, $3 ); } | expr TOK_LTE expr { $$ = pParser->AddNodeOp ( TOK_LTE, $1, $3 ); } | expr TOK_GTE expr { $$ = pParser->AddNodeOp ( TOK_GTE, $1, $3 ); 
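/* (A hedged illustration of the precedence declarations above, not part of the grammar
   proper: an input such as "IN(group_id,1,2,3) AND price*2>=tax+10" groups as
   IN(...) AND ((price*2) >= (tax+10)), since '*' binds tighter than '+', '+' tighter
   than TOK_GTE, and TOK_GTE tighter than TOK_AND; unary '-' and TOK_NOT bind tightest
   via the trailing %nonassoc declarations and the %prec TOK_NEG annotation.) */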
} | expr TOK_EQ expr { $$ = pParser->AddNodeOp ( TOK_EQ, $1, $3 ); } | expr TOK_NE expr { $$ = pParser->AddNodeOp ( TOK_NE, $1, $3 ); } | expr TOK_AND expr { $$ = pParser->AddNodeOp ( TOK_AND, $1, $3 ); if ( $$<0 ) YYERROR; } | expr TOK_OR expr { $$ = pParser->AddNodeOp ( TOK_OR, $1, $3 ); if ( $$<0 ) YYERROR; } | '(' expr ')' { $$ = $2; } ; arg: expr | TOK_ATTR_STRING { $$ = pParser->AddNodeAttr ( TOK_ATTR_STRING, $1 ); } | TOK_ATTR_MVA32 { $$ = pParser->AddNodeAttr ( TOK_ATTR_MVA32, $1 ); } | TOK_ATTR_MVA64 { $$ = pParser->AddNodeAttr ( TOK_ATTR_MVA64, $1 ); } | TOK_CONST_STRING { $$ = pParser->AddNodeString ( $1 ); } ; arglist: arg { $$ = $1; } | arglist ',' arg { $$ = pParser->AddNodeOp ( ',', $1, $3 ); } ; constlist: TOK_CONST_INT { $$ = pParser->AddNodeConstlist ( $1 ); } | TOK_CONST_FLOAT { $$ = pParser->AddNodeConstlist ( $1 ); } | constlist ',' TOK_CONST_INT { pParser->AppendToConstlist ( $$, $3 ); } | constlist ',' TOK_CONST_FLOAT { pParser->AppendToConstlist ( $$, $3 ); } ; constlist_or_uservar: constlist | TOK_USERVAR { $$ = pParser->AddNodeUservar ( $1 ); } ; function: TOK_FUNC '(' arglist ')' { $$ = pParser->AddNodeFunc ( $1, $3 ); if ( $$<0 ) YYERROR; } | TOK_FUNC '(' ')' { $$ = pParser->AddNodeFunc ( $1, -1 ); if ( $$<0 ) YYERROR; } | TOK_UDF '(' arglist ')' { $$ = pParser->AddNodeUdf ( $1, $3 ); if ( $$<0 ) YYERROR; } | TOK_UDF '(' ')' { $$ = pParser->AddNodeUdf ( $1, -1 ); if ( $$<0 ) YYERROR; } | TOK_FUNC_IN '(' attr_mva ',' constlist_or_uservar ')' { $$ = pParser->AddNodeFunc ( $1, $3, $5 ); } | TOK_FUNC_IN '(' TOK_ID ',' constlist ')' { $$ = pParser->AddNodeFunc ( $1, pParser->AddNodeID(), $5 ); } | TOK_FUNC_IN '(' TOK_ATID ',' constlist ')' { $$ = pParser->AddNodeFunc ( $1, pParser->AddNodeID(), $5 ); } | TOK_HOOK_FUNC '(' arglist ')' { $$ = pParser->AddNodeHookFunc ( $1, $3 ); if ( $$<0 ) YYERROR; } ; %% sphinx-2.0.4-release/src/yysphinxql.patch0000644000176700017710000000211111405303332017770 0ustar deogardeogar--- yysphinxql.c.orig Mon Jun 14 05:20:39 2010 +++ yysphinxql.c Mon Jun 14 05:13:37 2010 @@ -1953,7 +1953,7 @@ char *yyp = yystpcpy (yymsg, "syntax error, unexpected "); yyp = yystpcpy (yyp, yytname[yytype]); - if (yycount < 5) + if (yycount < 4) { yycount = 0; for (yyx = yyn < 0 ? -yyn : 0; @@ -1967,6 +1967,19 @@ yycount++; } } + else + { + for (yyx = yyn < 0 ? -yyn : 0; + yyx < (int) (sizeof (yytname) / sizeof (char *)); + yyx++) + if (yycheck[yyx + yyn] == yyx && yyx != YYTERROR) + { + snprintf (yyp, (int)(yysize - (yyp - yymsg)), ", expecting %s (or %d other tokens)", yytname[yyx], yycount - 1); + while (*yyp++); + break; + } + } + yyerror (pParser, yymsg); YYSTACK_FREE (yymsg); } @@ -2019,7 +2032,7 @@ /* Suppress GCC warning that yyerrlab1 is unused when no action invokes YYERROR. */ #if defined (__GNUC_MINOR__) && 2093 <= (__GNUC__ * 1000 + __GNUC_MINOR__) - __attribute__ ((__unused__)) +// __attribute__ ((__unused__)) #endif sphinx-2.0.4-release/src/yysphinxql.c0000644000176700017710000025447411674105325017152 0ustar deogardeogar/* A Bison parser, made by GNU Bison 1.875. */ /* Skeleton parser for Yacc-like parsing with Bison, Copyright (C) 1984, 1989, 1990, 2000, 2001, 2002 Free Software Foundation, Inc. This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2, or (at your option) any later version. 
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */ /* As a special exception, when this file is copied by Bison into a Bison output file, you may use that output file without restriction. This special exception was added by the Free Software Foundation in version 1.24 of Bison. */ /* Written by Richard Stallman by simplifying the original so called ``semantic'' parser. */ /* All symbols defined below should begin with yy or YY, to avoid infringing on user name space. This should be done even for local variables, as they might otherwise be expanded by user macros. There are some unavoidable exceptions within include files to define necessary library symbols; they are noted "INFRINGES ON USER NAME SPACE" below. */ /* Identify Bison output. */ #define YYBISON 1 /* Skeleton name. */ #define YYSKELETON_NAME "yacc.c" /* Pure parsers. */ #define YYPURE 1 /* Using locations. */ #define YYLSP_NEEDED 0 /* Tokens. */ #ifndef YYTOKENTYPE # define YYTOKENTYPE /* Put the tokens into the symbol table, so that GDB and other debuggers know about them. */ enum yytokentype { TOK_IDENT = 258, TOK_ATIDENT = 259, TOK_CONST_INT = 260, TOK_CONST_FLOAT = 261, TOK_CONST_MVA = 262, TOK_QUOTED_STRING = 263, TOK_USERVAR = 264, TOK_SYSVAR = 265, TOK_CONST_STRINGS = 266, TOK_AS = 267, TOK_ASC = 268, TOK_ATTACH = 269, TOK_AVG = 270, TOK_BEGIN = 271, TOK_BETWEEN = 272, TOK_BY = 273, TOK_CALL = 274, TOK_COLLATION = 275, TOK_COMMIT = 276, TOK_COMMITTED = 277, TOK_COUNT = 278, TOK_CREATE = 279, TOK_DELETE = 280, TOK_DESC = 281, TOK_DESCRIBE = 282, TOK_DISTINCT = 283, TOK_DIV = 284, TOK_DROP = 285, TOK_FALSE = 286, TOK_FLOAT = 287, TOK_FLUSH = 288, TOK_FROM = 289, TOK_FUNCTION = 290, TOK_GLOBAL = 291, TOK_GROUP = 292, TOK_ID = 293, TOK_IN = 294, TOK_INDEX = 295, TOK_INSERT = 296, TOK_INT = 297, TOK_INTO = 298, TOK_ISOLATION = 299, TOK_LEVEL = 300, TOK_LIMIT = 301, TOK_MATCH = 302, TOK_MAX = 303, TOK_META = 304, TOK_MIN = 305, TOK_MOD = 306, TOK_NAMES = 307, TOK_NULL = 308, TOK_OPTION = 309, TOK_ORDER = 310, TOK_RAND = 311, TOK_READ = 312, TOK_REPEATABLE = 313, TOK_REPLACE = 314, TOK_RETURNS = 315, TOK_ROLLBACK = 316, TOK_RTINDEX = 317, TOK_SELECT = 318, TOK_SERIALIZABLE = 319, TOK_SET = 320, TOK_SESSION = 321, TOK_SHOW = 322, TOK_SONAME = 323, TOK_START = 324, TOK_STATUS = 325, TOK_SUM = 326, TOK_TABLES = 327, TOK_TO = 328, TOK_TRANSACTION = 329, TOK_TRUE = 330, TOK_UNCOMMITTED = 331, TOK_UPDATE = 332, TOK_VALUES = 333, TOK_VARIABLES = 334, TOK_WARNINGS = 335, TOK_WEIGHT = 336, TOK_WHERE = 337, TOK_WITHIN = 338, TOK_OR = 339, TOK_AND = 340, TOK_NE = 341, TOK_GTE = 342, TOK_LTE = 343, TOK_NOT = 344, TOK_NEG = 345 }; #endif #define TOK_IDENT 258 #define TOK_ATIDENT 259 #define TOK_CONST_INT 260 #define TOK_CONST_FLOAT 261 #define TOK_CONST_MVA 262 #define TOK_QUOTED_STRING 263 #define TOK_USERVAR 264 #define TOK_SYSVAR 265 #define TOK_CONST_STRINGS 266 #define TOK_AS 267 #define TOK_ASC 268 #define TOK_ATTACH 269 #define TOK_AVG 270 #define TOK_BEGIN 271 #define TOK_BETWEEN 272 #define TOK_BY 273 #define TOK_CALL 274 #define TOK_COLLATION 275 #define TOK_COMMIT 276 #define TOK_COMMITTED 277 #define TOK_COUNT 278 #define TOK_CREATE 279 #define 
TOK_DELETE 280 #define TOK_DESC 281 #define TOK_DESCRIBE 282 #define TOK_DISTINCT 283 #define TOK_DIV 284 #define TOK_DROP 285 #define TOK_FALSE 286 #define TOK_FLOAT 287 #define TOK_FLUSH 288 #define TOK_FROM 289 #define TOK_FUNCTION 290 #define TOK_GLOBAL 291 #define TOK_GROUP 292 #define TOK_ID 293 #define TOK_IN 294 #define TOK_INDEX 295 #define TOK_INSERT 296 #define TOK_INT 297 #define TOK_INTO 298 #define TOK_ISOLATION 299 #define TOK_LEVEL 300 #define TOK_LIMIT 301 #define TOK_MATCH 302 #define TOK_MAX 303 #define TOK_META 304 #define TOK_MIN 305 #define TOK_MOD 306 #define TOK_NAMES 307 #define TOK_NULL 308 #define TOK_OPTION 309 #define TOK_ORDER 310 #define TOK_RAND 311 #define TOK_READ 312 #define TOK_REPEATABLE 313 #define TOK_REPLACE 314 #define TOK_RETURNS 315 #define TOK_ROLLBACK 316 #define TOK_RTINDEX 317 #define TOK_SELECT 318 #define TOK_SERIALIZABLE 319 #define TOK_SET 320 #define TOK_SESSION 321 #define TOK_SHOW 322 #define TOK_SONAME 323 #define TOK_START 324 #define TOK_STATUS 325 #define TOK_SUM 326 #define TOK_TABLES 327 #define TOK_TO 328 #define TOK_TRANSACTION 329 #define TOK_TRUE 330 #define TOK_UNCOMMITTED 331 #define TOK_UPDATE 332 #define TOK_VALUES 333 #define TOK_VARIABLES 334 #define TOK_WARNINGS 335 #define TOK_WEIGHT 336 #define TOK_WHERE 337 #define TOK_WITHIN 338 #define TOK_OR 339 #define TOK_AND 340 #define TOK_NE 341 #define TOK_GTE 342 #define TOK_LTE 343 #define TOK_NOT 344 #define TOK_NEG 345 /* Copy the first part of user declarations. */ #if USE_WINDOWS #pragma warning(push,1) #pragma warning(disable:4702) // unreachable code #endif // some helpers #include // for FLT_MAX /* Enabling traces. */ #ifndef YYDEBUG # define YYDEBUG 0 #endif /* Enabling verbose error messages. */ #ifdef YYERROR_VERBOSE # undef YYERROR_VERBOSE # define YYERROR_VERBOSE 1 #else # define YYERROR_VERBOSE 1 #endif #if ! defined (YYSTYPE) && ! defined (YYSTYPE_IS_DECLARED) typedef int YYSTYPE; # define yystype YYSTYPE /* obsolescent; will be withdrawn */ # define YYSTYPE_IS_DECLARED 1 # define YYSTYPE_IS_TRIVIAL 1 #endif /* Copy the second part of user declarations. */ /* Line 214 of yacc.c. */ #if ! defined (yyoverflow) || YYERROR_VERBOSE /* The parser invokes alloca or malloc; define the necessary symbols. */ # if YYSTACK_USE_ALLOCA # define YYSTACK_ALLOC alloca # else # ifndef YYSTACK_USE_ALLOCA # if defined (alloca) || defined (_ALLOCA_H) # define YYSTACK_ALLOC alloca # else # ifdef __GNUC__ # define YYSTACK_ALLOC __builtin_alloca # endif # endif # endif # endif # ifdef YYSTACK_ALLOC /* Pacify GCC's `empty if-body' warning. */ # define YYSTACK_FREE(Ptr) do { /* empty */; } while (0) # else # if defined (__STDC__) || defined (__cplusplus) # include /* INFRINGES ON USER NAME SPACE */ # define YYSIZE_T size_t # endif # define YYSTACK_ALLOC malloc # define YYSTACK_FREE free # endif #endif /* ! defined (yyoverflow) || YYERROR_VERBOSE */ #if (! defined (yyoverflow) \ && (! defined (__cplusplus) \ || (YYSTYPE_IS_TRIVIAL))) /* A type that is properly aligned for any stack member. */ union yyalloc { short yyss; YYSTYPE yyvs; }; /* The size of the maximum gap between one aligned stack and the next. */ # define YYSTACK_GAP_MAXIMUM (sizeof (union yyalloc) - 1) /* The size of an array large to enough to hold all stacks, each with N elements. */ # define YYSTACK_BYTES(N) \ ((N) * (sizeof (short) + sizeof (YYSTYPE)) \ + YYSTACK_GAP_MAXIMUM) /* Copy COUNT objects from FROM to TO. The source and destination do not overlap. 
*/ # ifndef YYCOPY # if 1 < __GNUC__ # define YYCOPY(To, From, Count) \ __builtin_memcpy (To, From, (Count) * sizeof (*(From))) # else # define YYCOPY(To, From, Count) \ do \ { \ register YYSIZE_T yyi; \ for (yyi = 0; yyi < (Count); yyi++) \ (To)[yyi] = (From)[yyi]; \ } \ while (0) # endif # endif /* Relocate STACK from its old location to the new one. The local variables YYSIZE and YYSTACKSIZE give the old and new number of elements in the stack, and YYPTR gives the new location of the stack. Advance YYPTR to a properly aligned location for the next stack. */ # define YYSTACK_RELOCATE(Stack) \ do \ { \ YYSIZE_T yynewbytes; \ YYCOPY (&yyptr->Stack, Stack, yysize); \ Stack = &yyptr->Stack; \ yynewbytes = yystacksize * sizeof (*Stack) + YYSTACK_GAP_MAXIMUM; \ yyptr += yynewbytes / sizeof (*yyptr); \ } \ while (0) #endif #if defined (__STDC__) || defined (__cplusplus) typedef signed char yysigned_char; #else typedef short yysigned_char; #endif /* YYFINAL -- State number of the termination state. */ #define YYFINAL 91 /* YYLAST -- Last index in YYTABLE. */ #define YYLAST 778 /* YYNTOKENS -- Number of terminals. */ #define YYNTOKENS 105 /* YYNNTS -- Number of nonterminals. */ #define YYNNTS 83 /* YYNRULES -- Number of rules. */ #define YYNRULES 235 /* YYNRULES -- Number of states. */ #define YYNSTATES 438 /* YYTRANSLATE(YYLEX) -- Bison symbol number corresponding to YYLEX. */ #define YYUNDEFTOK 2 #define YYMAXUTOK 345 #define YYTRANSLATE(YYX) \ ((unsigned int) (YYX) <= YYMAXUTOK ? yytranslate[YYX] : YYUNDEFTOK) /* YYTRANSLATE[YYLEX] -- Bison symbol number corresponding to YYLEX. */ static const unsigned char yytranslate[] = { 0, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 98, 87, 2, 103, 104, 96, 94, 102, 95, 2, 97, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 101, 90, 88, 91, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 86, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 89, 92, 93, 99, 100 }; #if YYDEBUG /* YYPRHS[YYN] -- Index of the first RHS symbol of rule number YYN in YYRHS. 
*/ static const unsigned short yyprhs[] = { 0, 0, 3, 5, 7, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 50, 52, 54, 65, 67, 71, 73, 76, 77, 79, 82, 84, 89, 94, 99, 104, 109, 113, 119, 121, 125, 126, 128, 131, 133, 137, 142, 146, 150, 156, 163, 167, 172, 178, 182, 186, 190, 194, 198, 202, 206, 210, 216, 220, 224, 226, 228, 233, 237, 239, 241, 244, 246, 249, 251, 255, 256, 258, 262, 263, 265, 271, 272, 274, 278, 284, 286, 290, 292, 295, 298, 299, 301, 304, 309, 310, 312, 315, 317, 321, 325, 329, 335, 342, 346, 348, 352, 356, 358, 360, 362, 364, 366, 368, 371, 374, 378, 382, 386, 390, 394, 398, 402, 406, 410, 414, 418, 422, 426, 430, 434, 438, 442, 446, 448, 453, 458, 462, 469, 476, 478, 482, 484, 486, 489, 491, 493, 495, 497, 499, 501, 503, 505, 510, 515, 520, 524, 529, 537, 543, 545, 547, 549, 551, 553, 555, 557, 559, 561, 564, 571, 573, 575, 576, 580, 582, 586, 588, 592, 596, 598, 602, 604, 606, 608, 612, 615, 623, 633, 640, 642, 646, 648, 652, 654, 658, 659, 662, 664, 668, 672, 673, 675, 677, 679, 682, 684, 686, 689, 695, 697, 701, 705, 709, 715, 720, 724, 727, 734, 735, 737, 739, 742, 745, 748, 750, 758, 760, 762, 766, 773, 777 }; /* YYRHS -- A `-1'-separated list of the rules' RHS. */ static const short yyrhs[] = { 106, 0, -1, 107, -1, 108, -1, 108, 101, -1, 153, -1, 161, -1, 147, -1, 148, -1, 151, -1, 162, -1, 171, -1, 173, -1, 174, -1, 177, -1, 178, -1, 182, -1, 184, -1, 185, -1, 186, -1, 179, -1, 187, -1, 109, -1, 108, 101, 109, -1, 110, -1, 144, -1, 63, 111, 34, 115, 116, 124, 126, 128, 132, 134, -1, 112, -1, 111, 102, 112, -1, 96, -1, 114, 113, -1, -1, 3, -1, 12, 3, -1, 140, -1, 15, 103, 140, 104, -1, 48, 103, 140, 104, -1, 50, 103, 140, 104, -1, 71, 103, 140, 104, -1, 23, 103, 96, 104, -1, 81, 103, 104, -1, 23, 103, 28, 3, 104, -1, 3, -1, 115, 102, 3, -1, -1, 117, -1, 82, 118, -1, 119, -1, 118, 85, 119, -1, 47, 103, 8, 104, -1, 120, 88, 121, -1, 120, 89, 121, -1, 120, 39, 103, 123, 104, -1, 120, 99, 39, 103, 123, 104, -1, 120, 39, 9, -1, 120, 99, 39, 9, -1, 120, 17, 121, 85, 121, -1, 120, 91, 121, -1, 120, 90, 121, -1, 120, 92, 121, -1, 120, 93, 121, -1, 120, 88, 122, -1, 120, 89, 122, -1, 120, 91, 122, -1, 120, 90, 122, -1, 120, 17, 122, 85, 122, -1, 120, 92, 122, -1, 120, 93, 122, -1, 3, -1, 4, -1, 23, 103, 96, 104, -1, 81, 103, 104, -1, 38, -1, 5, -1, 95, 5, -1, 6, -1, 95, 6, -1, 121, -1, 123, 102, 121, -1, -1, 125, -1, 37, 18, 120, -1, -1, 127, -1, 83, 37, 55, 18, 130, -1, -1, 129, -1, 55, 18, 130, -1, 55, 18, 56, 103, 104, -1, 131, -1, 130, 102, 131, -1, 120, -1, 120, 13, -1, 120, 26, -1, -1, 133, -1, 46, 5, -1, 46, 5, 102, 5, -1, -1, 135, -1, 54, 136, -1, 137, -1, 136, 102, 137, -1, 3, 88, 3, -1, 3, 88, 5, -1, 3, 88, 103, 138, 104, -1, 3, 88, 3, 103, 8, 104, -1, 3, 88, 8, -1, 139, -1, 138, 102, 139, -1, 3, 88, 121, -1, 3, -1, 4, -1, 38, -1, 5, -1, 6, -1, 9, -1, 95, 140, -1, 99, 140, -1, 140, 94, 140, -1, 140, 95, 140, -1, 140, 96, 140, -1, 140, 97, 140, -1, 140, 90, 140, -1, 140, 91, 140, -1, 140, 87, 140, -1, 140, 86, 140, -1, 140, 98, 140, -1, 140, 29, 140, -1, 140, 51, 140, -1, 140, 93, 140, -1, 140, 92, 140, -1, 140, 88, 140, -1, 140, 89, 140, -1, 140, 85, 140, -1, 140, 84, 140, -1, 103, 140, 104, -1, 141, -1, 3, 103, 142, 104, -1, 39, 103, 142, 104, -1, 3, 103, 104, -1, 50, 103, 140, 102, 140, 104, -1, 48, 103, 140, 102, 140, 104, -1, 143, -1, 142, 102, 143, -1, 140, -1, 8, -1, 67, 145, -1, 80, -1, 70, -1, 49, -1, 3, -1, 53, -1, 8, -1, 5, -1, 6, -1, 65, 3, 88, 150, -1, 65, 3, 88, 149, -1, 65, 3, 88, 53, -1, 65, 52, 146, -1, 65, 10, 88, 
146, -1, 65, 36, 9, 88, 103, 123, 104, -1, 65, 36, 3, 88, 149, -1, 3, -1, 8, -1, 75, -1, 31, -1, 121, -1, 21, -1, 61, -1, 152, -1, 16, -1, 69, 74, -1, 154, 43, 3, 155, 78, 157, -1, 41, -1, 59, -1, -1, 103, 156, 104, -1, 120, -1, 156, 102, 120, -1, 158, -1, 157, 102, 158, -1, 103, 159, 104, -1, 160, -1, 159, 102, 160, -1, 121, -1, 122, -1, 8, -1, 103, 123, 104, -1, 103, 104, -1, 25, 34, 3, 82, 38, 88, 121, -1, 25, 34, 3, 82, 38, 39, 103, 123, 104, -1, 19, 3, 103, 163, 166, 104, -1, 164, -1, 163, 102, 164, -1, 160, -1, 103, 165, 104, -1, 8, -1, 165, 102, 8, -1, -1, 102, 167, -1, 168, -1, 167, 102, 168, -1, 160, 169, 170, -1, -1, 12, -1, 3, -1, 46, -1, 172, 3, -1, 27, -1, 26, -1, 67, 72, -1, 77, 115, 65, 175, 117, -1, 176, -1, 175, 102, 176, -1, 3, 88, 121, -1, 3, 88, 122, -1, 3, 88, 103, 123, 104, -1, 3, 88, 103, 104, -1, 67, 180, 79, -1, 67, 20, -1, 65, 180, 74, 44, 45, 181, -1, -1, 36, -1, 66, -1, 57, 76, -1, 57, 22, -1, 58, 57, -1, 64, -1, 24, 35, 3, 60, 183, 68, 8, -1, 42, -1, 32, -1, 30, 35, 3, -1, 14, 40, 3, 73, 62, 3, -1, 33, 62, 3, -1, 63, 10, 132, -1 }; /* YYRLINE[YYN] -- source line where rule number YYN was defined. */ static const unsigned short yyrline[] = { 0, 119, 119, 120, 121, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 147, 148, 152, 153, 157, 172, 173, 177, 178, 181, 183, 184, 188, 189, 190, 191, 192, 193, 194, 195, 199, 200, 203, 205, 209, 213, 214, 218, 223, 230, 238, 246, 255, 260, 265, 270, 275, 280, 285, 290, 291, 292, 293, 298, 303, 308, 316, 317, 322, 328, 334, 343, 344, 348, 349, 353, 359, 365, 367, 371, 378, 380, 384, 390, 392, 396, 400, 407, 408, 412, 413, 414, 417, 419, 423, 428, 435, 437, 441, 445, 446, 450, 455, 460, 466, 471, 479, 484, 491, 501, 502, 503, 504, 505, 506, 507, 508, 509, 510, 511, 512, 513, 514, 515, 516, 517, 518, 519, 520, 521, 522, 523, 524, 525, 526, 527, 531, 532, 533, 534, 535, 539, 540, 544, 545, 551, 555, 556, 557, 563, 564, 565, 566, 567, 571, 576, 581, 586, 587, 591, 596, 604, 605, 609, 610, 611, 625, 626, 627, 631, 632, 638, 646, 647, 650, 652, 656, 657, 661, 662, 666, 670, 671, 675, 676, 677, 678, 679, 685, 691, 703, 711, 715, 722, 723, 730, 740, 746, 748, 752, 757, 761, 768, 770, 774, 775, 781, 789, 790, 796, 802, 810, 811, 815, 819, 823, 827, 837, 844, 851, 857, 858, 859, 863, 864, 865, 866, 872, 883, 884, 888, 899, 911, 922 }; #endif #if YYDEBUG || YYERROR_VERBOSE /* YYTNME[SYMBOL-NUM] -- String name of the symbol SYMBOL-NUM. First, the terminals, then, starting at YYNTOKENS, nonterminals. 
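   (Concretely for this parser: YYNTOKENS is 105, so entries 0 through 104 of the table
   below name the terminal symbols, ending with the ';', ',', '(' and ')' literals, while
   the entries from index 105 onwards name the nonterminals, beginning with "$accept" and
   "request".)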
*/ static const char *const yytname[] = { "$end", "error", "$undefined", "TOK_IDENT", "TOK_ATIDENT", "TOK_CONST_INT", "TOK_CONST_FLOAT", "TOK_CONST_MVA", "TOK_QUOTED_STRING", "TOK_USERVAR", "TOK_SYSVAR", "TOK_CONST_STRINGS", "TOK_AS", "TOK_ASC", "TOK_ATTACH", "TOK_AVG", "TOK_BEGIN", "TOK_BETWEEN", "TOK_BY", "TOK_CALL", "TOK_COLLATION", "TOK_COMMIT", "TOK_COMMITTED", "TOK_COUNT", "TOK_CREATE", "TOK_DELETE", "TOK_DESC", "TOK_DESCRIBE", "TOK_DISTINCT", "TOK_DIV", "TOK_DROP", "TOK_FALSE", "TOK_FLOAT", "TOK_FLUSH", "TOK_FROM", "TOK_FUNCTION", "TOK_GLOBAL", "TOK_GROUP", "TOK_ID", "TOK_IN", "TOK_INDEX", "TOK_INSERT", "TOK_INT", "TOK_INTO", "TOK_ISOLATION", "TOK_LEVEL", "TOK_LIMIT", "TOK_MATCH", "TOK_MAX", "TOK_META", "TOK_MIN", "TOK_MOD", "TOK_NAMES", "TOK_NULL", "TOK_OPTION", "TOK_ORDER", "TOK_RAND", "TOK_READ", "TOK_REPEATABLE", "TOK_REPLACE", "TOK_RETURNS", "TOK_ROLLBACK", "TOK_RTINDEX", "TOK_SELECT", "TOK_SERIALIZABLE", "TOK_SET", "TOK_SESSION", "TOK_SHOW", "TOK_SONAME", "TOK_START", "TOK_STATUS", "TOK_SUM", "TOK_TABLES", "TOK_TO", "TOK_TRANSACTION", "TOK_TRUE", "TOK_UNCOMMITTED", "TOK_UPDATE", "TOK_VALUES", "TOK_VARIABLES", "TOK_WARNINGS", "TOK_WEIGHT", "TOK_WHERE", "TOK_WITHIN", "TOK_OR", "TOK_AND", "'|'", "'&'", "'='", "TOK_NE", "'<'", "'>'", "TOK_GTE", "TOK_LTE", "'+'", "'-'", "'*'", "'/'", "'%'", "TOK_NOT", "TOK_NEG", "';'", "','", "'('", "')'", "$accept", "request", "statement", "multi_stmt_list", "multi_stmt", "select_from", "select_items_list", "select_item", "opt_alias", "select_expr", "ident_list", "opt_where_clause", "where_clause", "where_expr", "where_item", "expr_ident", "const_int", "const_float", "const_list", "opt_group_clause", "group_clause", "opt_group_order_clause", "group_order_clause", "opt_order_clause", "order_clause", "order_items_list", "order_item", "opt_limit_clause", "limit_clause", "opt_option_clause", "option_clause", "option_list", "option_item", "named_const_list", "named_const", "expr", "function", "arglist", "arg", "show_stmt", "show_variable", "set_value", "set_stmt", "set_global_stmt", "set_string_value", "boolean_value", "transact_op", "start_transaction", "insert_into", "insert_or_replace", "opt_column_list", "column_list", "insert_rows_list", "insert_row", "insert_vals_list", "insert_val", "delete_from", "call_proc", "call_args_list", "call_arg", "const_string_list", "opt_call_opts_list", "call_opts_list", "call_opt", "opt_as", "call_opt_name", "describe", "describe_tok", "show_tables", "update", "update_items_list", "update_item", "show_variables", "show_collation", "set_transaction", "opt_scope", "isolation_level", "create_function", "udf_type", "drop_function", "attach_index", "flush_rtindex", "select_sysvar", 0 }; #endif # ifdef YYPRINT /* YYTOKNUM[YYLEX-NUM] -- Internal token number corresponding to token YYLEX-NUM. */ static const unsigned short yytoknum[] = { 0, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308, 309, 310, 311, 312, 313, 314, 315, 316, 317, 318, 319, 320, 321, 322, 323, 324, 325, 326, 327, 328, 329, 330, 331, 332, 333, 334, 335, 336, 337, 338, 339, 340, 124, 38, 61, 341, 60, 62, 342, 343, 43, 45, 42, 47, 37, 344, 345, 59, 44, 40, 41 }; # endif /* YYR1[YYN] -- Symbol number of symbol that rule YYN derives. 
*/ static const unsigned char yyr1[] = { 0, 105, 106, 106, 106, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 108, 108, 109, 109, 110, 111, 111, 112, 112, 113, 113, 113, 114, 114, 114, 114, 114, 114, 114, 114, 115, 115, 116, 116, 117, 118, 118, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 120, 120, 120, 120, 120, 121, 121, 122, 122, 123, 123, 124, 124, 125, 126, 126, 127, 128, 128, 129, 129, 130, 130, 131, 131, 131, 132, 132, 133, 133, 134, 134, 135, 136, 136, 137, 137, 137, 137, 137, 138, 138, 139, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 141, 141, 141, 141, 141, 142, 142, 143, 143, 144, 145, 145, 145, 146, 146, 146, 146, 146, 147, 147, 147, 147, 147, 148, 148, 149, 149, 150, 150, 150, 151, 151, 151, 152, 152, 153, 154, 154, 155, 155, 156, 156, 157, 157, 158, 159, 159, 160, 160, 160, 160, 160, 161, 161, 162, 163, 163, 164, 164, 165, 165, 166, 166, 167, 167, 168, 169, 169, 170, 170, 171, 172, 172, 173, 174, 175, 175, 176, 176, 176, 176, 177, 178, 179, 180, 180, 180, 181, 181, 181, 181, 182, 183, 183, 184, 185, 186, 187 }; /* YYR2[YYN] -- Number of symbols composing right hand side of rule YYN. */ static const unsigned char yyr2[] = { 0, 2, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 1, 1, 10, 1, 3, 1, 2, 0, 1, 2, 1, 4, 4, 4, 4, 4, 3, 5, 1, 3, 0, 1, 2, 1, 3, 4, 3, 3, 5, 6, 3, 4, 5, 3, 3, 3, 3, 3, 3, 3, 3, 5, 3, 3, 1, 1, 4, 3, 1, 1, 2, 1, 2, 1, 3, 0, 1, 3, 0, 1, 5, 0, 1, 3, 5, 1, 3, 1, 2, 2, 0, 1, 2, 4, 0, 1, 2, 1, 3, 3, 3, 5, 6, 3, 1, 3, 3, 1, 1, 1, 1, 1, 1, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 1, 4, 4, 3, 6, 6, 1, 3, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 4, 4, 4, 3, 4, 7, 5, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 6, 1, 1, 0, 3, 1, 3, 1, 3, 3, 1, 3, 1, 1, 1, 3, 2, 7, 9, 6, 1, 3, 1, 3, 1, 3, 0, 2, 1, 3, 3, 0, 1, 1, 1, 2, 1, 1, 2, 5, 1, 3, 3, 3, 5, 4, 3, 2, 6, 0, 1, 1, 2, 2, 2, 1, 7, 1, 1, 3, 6, 3, 3 }; /* YYDEFACT[STATE-NAME] -- Default rule to reduce with in state STATE-NUM when YYTABLE doesn't specify something else to do. Zero means the default is an error. 
*/ static const unsigned char yydefact[] = { 0, 0, 171, 0, 168, 0, 0, 210, 209, 0, 0, 174, 175, 169, 0, 222, 222, 0, 0, 0, 2, 3, 22, 24, 25, 7, 8, 9, 170, 5, 0, 6, 10, 11, 0, 12, 13, 14, 15, 20, 16, 17, 18, 19, 21, 0, 0, 0, 0, 0, 0, 111, 112, 114, 115, 116, 94, 0, 0, 113, 0, 0, 0, 0, 0, 0, 29, 0, 0, 0, 27, 31, 34, 137, 0, 0, 223, 0, 224, 0, 220, 223, 150, 149, 211, 148, 147, 0, 172, 42, 0, 1, 4, 0, 208, 0, 0, 0, 0, 232, 234, 0, 0, 235, 95, 0, 0, 0, 0, 0, 0, 0, 0, 0, 117, 118, 0, 0, 0, 32, 0, 30, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 151, 154, 155, 153, 152, 159, 0, 219, 0, 0, 0, 0, 23, 176, 0, 73, 75, 187, 0, 0, 185, 186, 195, 199, 193, 0, 0, 146, 140, 145, 0, 143, 96, 0, 0, 0, 0, 0, 0, 0, 40, 0, 0, 136, 44, 28, 33, 128, 129, 135, 134, 126, 125, 132, 133, 123, 124, 131, 130, 119, 120, 121, 122, 127, 163, 164, 166, 158, 165, 0, 167, 157, 156, 160, 0, 0, 0, 0, 0, 213, 43, 0, 0, 0, 74, 76, 197, 189, 77, 0, 0, 0, 0, 231, 230, 0, 0, 0, 138, 0, 35, 0, 39, 139, 0, 36, 0, 37, 38, 0, 0, 0, 79, 45, 162, 0, 0, 0, 0, 212, 68, 69, 0, 72, 0, 178, 0, 0, 233, 0, 188, 0, 196, 195, 194, 200, 201, 192, 0, 0, 0, 144, 97, 41, 0, 0, 0, 46, 47, 0, 0, 82, 80, 0, 0, 0, 228, 221, 0, 215, 216, 214, 0, 0, 0, 177, 0, 173, 180, 78, 198, 205, 0, 0, 229, 0, 190, 142, 141, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 85, 83, 161, 226, 225, 227, 218, 0, 0, 71, 179, 0, 0, 183, 0, 206, 207, 203, 204, 202, 0, 0, 48, 0, 0, 54, 0, 50, 61, 51, 62, 58, 64, 57, 63, 59, 66, 60, 67, 0, 81, 0, 0, 94, 86, 217, 70, 0, 182, 181, 191, 49, 0, 0, 0, 55, 0, 0, 0, 98, 184, 56, 0, 65, 52, 0, 0, 0, 91, 87, 89, 0, 26, 99, 53, 84, 0, 92, 93, 0, 0, 100, 101, 88, 90, 0, 0, 103, 104, 107, 0, 102, 0, 0, 0, 108, 0, 0, 0, 105, 106, 110, 109 }; /* YYDEFGOTO[NTERM-NUM]. */ static const short yydefgoto[] = { -1, 19, 20, 21, 22, 23, 69, 70, 121, 71, 90, 255, 256, 290, 291, 403, 231, 164, 232, 294, 295, 335, 336, 378, 379, 404, 405, 103, 104, 407, 408, 416, 417, 429, 430, 72, 73, 173, 174, 24, 86, 148, 25, 26, 214, 215, 27, 28, 29, 30, 225, 269, 310, 311, 347, 165, 31, 32, 166, 167, 233, 235, 278, 279, 315, 352, 33, 34, 35, 36, 221, 222, 37, 38, 39, 79, 300, 40, 238, 41, 42, 43, 44 }; /* YYPACT[STATE-NUM] -- Index in YYTABLE of the portion describing STATE-NUM. 
*/ #define YYPACT_NINF -243 static const short yypact[] = { 701, -11, -243, 52, -243, 19, 83, -243, -243, 116, 107, -243, -243, -243, 156, 137, 683, 98, 182, 190, -243, 74, -243, -243, -243, -243, -243, -243, -243, -243, 155, -243, -243, -243, 205, -243, -243, -243, -243, -243, -243, -243, -243, -243, -243, 215, 120, 216, 221, 222, 236, 138, -243, -243, -243, -243, 194, 143, 154, -243, 177, 178, 181, 186, 187, 244, -243, 244, 244, -7, -243, 133, 494, -243, 170, 197, 49, 121, -243, 201, -243, -243, -243, -243, -243, -243, -243, 220, -243, -243, -49, -243, 91, 297, -243, 231, 16, 251, 238, -243, -243, 93, 317, -243, -243, 244, 11, 258, 244, 244, 244, 219, 223, 224, -243, -243, 326, 182, 206, -243, 321, -243, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 147, 121, 242, 243, -243, -243, -243, -243, -243, -243, 288, -243, 330, 331, 206, -29, -243, 232, 276, -243, -243, -243, 175, 9, -243, -243, -243, 235, -243, 8, 302, -243, -243, 494, 40, -243, 239, 349, 339, 240, 66, 278, 299, 375, -243, 244, 244, -243, -70, -243, -243, -243, -243, 520, 543, 568, 591, 616, 616, 574, 574, 574, 574, -18, -18, -243, -243, -243, -243, -243, -243, -243, -243, 340, -243, -243, -243, -243, 131, 245, 304, 263, 51, -243, -243, 275, 274, 351, -243, -243, -243, -243, -243, 89, 97, 16, 254, -243, -243, 291, 42, 258, -243, 355, -243, 277, -243, -243, 244, -243, 244, -243, -243, 446, 471, 179, 342, -243, -243, -2, 99, 25, 330, -243, -243, -243, 295, -243, 296, -243, 103, 303, -243, -2, -243, 394, -243, 45, -243, 305, -243, -243, 397, 306, -2, -243, -243, -243, 398, 423, 322, 323, -243, 226, 410, 346, -243, 112, 34, 374, -243, -243, 10, -243, -243, -243, 336, 344, 275, -243, 20, 348, -243, -243, -243, -243, 46, 20, -243, -2, -243, -243, -243, 443, 179, 30, -3, 30, 30, 30, 30, 30, 30, 415, 275, 418, 401, -243, -243, -243, -243, -243, -243, 134, 353, -243, -243, 14, 168, -243, 303, -243, -243, -243, 464, -243, 169, 354, -243, 392, 393, -243, -2, -243, -243, -243, -243, -243, -243, -243, -243, -243, -243, -243, -243, 0, -243, 425, 463, 194, -243, -243, -243, 20, -243, -243, -243, -243, -2, 17, 172, -243, -2, 480, 265, 445, -243, -243, 495, -243, -243, 191, 275, 400, 122, 402, -243, 502, -243, -243, -243, 402, 420, -243, -243, 275, 437, 404, -243, -243, -243, 5, 502, 426, -243, -243, 523, -243, 538, 440, 208, -243, 447, -2, 523, -243, -243, -243, -243 }; /* YYPGOTO[NTERM-NUM]. */ static const short yypgoto[] = { -243, -243, -243, -243, 455, -243, -243, 432, -243, -243, 435, -243, 332, -243, 247, -217, -96, -242, -224, -243, -243, -243, -243, -243, -243, 153, 160, 198, -243, -243, -243, -243, 174, -243, 142, -63, -243, 470, 356, -243, -243, 453, -243, -243, 381, -243, -243, -243, -243, -243, -243, -243, -243, 250, -243, -233, -243, -243, -243, 366, -243, -243, -243, 285, -243, -243, -243, -243, -243, -243, -243, 341, -243, -243, -243, 588, -243, -243, -243, -243, -243, -243, -243 }; /* YYTABLE[YYPACT[STATE-NUM]]. What to do in state STATE-NUM. If positive, shift that token. If negative, reduce the rule which number is the opposite. If zero, do what YYDEFACT says. If YYTABLE_NINF, syntax error. 
*/ #define YYTABLE_NINF -205 static const short yytable[] = { 163, 276, 114, 158, 115, 116, 360, 268, 422, 390, 423, 122, 254, 424, 158, 158, 151, 229, 303, 158, 82, 158, 159, 159, 160, 158, 159, 117, 160, 45, 158, 159, 152, 123, 296, 158, 159, 292, 172, 177, 236, 83, 176, 213, 172, 180, 181, 182, -204, 350, 237, 85, 141, 152, 47, 46, 338, 314, 142, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 348, 342, 136, 137, 138, 282, 359, 353, 363, 365, 367, 369, 371, 373, 345, -204, 351, 212, 355, 118, 51, 52, 53, 54, 361, 170, 55, 391, 212, 212, 292, 178, 425, 212, 339, 161, 397, 230, 341, 161, 375, 48, 230, 162, 161, 252, 253, 346, 143, 161, 144, 145, 301, 146, 283, 59, 60, 254, 207, 412, 119, 389, 163, 208, 74, 112, 240, 113, 241, 120, 398, 75, 413, 395, 207, 49, 158, 261, 153, 208, 297, 298, 154, 51, 52, 53, 54, 299, 302, 55, 56, 400, 240, 50, 246, 57, 88, 76, 147, 92, 312, 172, 209, 58, 227, 228, 263, 264, 287, 89, 288, 319, 65, 77, 91, 272, 67, 273, 59, 60, 68, 171, 93, 274, 210, 275, 265, 78, 61, 307, 62, 308, 94, 51, 52, 53, 54, 163, 272, 55, 337, 266, 95, 97, 163, 57, 211, 96, 98, 99, 289, 63, 358, 58, 362, 364, 366, 368, 370, 372, 272, 64, 380, 100, 102, 101, 212, 324, 59, 60, 105, 51, 52, 53, 54, 65, 66, 55, 61, 67, 62, 106, 139, 68, 267, 51, 52, 53, 54, 325, 170, 55, 263, 264, 382, 272, 383, 385, 272, 149, 399, 63, 263, 264, 107, 108, 59, 60, 109, 140, 163, 64, 265, 110, 111, 396, 112, 272, 113, 409, 59, 60, 265, 150, 156, 65, 66, 266, 157, 67, 112, 122, 113, 68, 433, 168, 434, 266, 326, 327, 328, 329, 330, 331, 169, 402, 175, 183, 189, 332, 184, 185, 122, 123, 217, 218, 219, 220, 223, 224, 436, 234, 226, 65, 239, 242, 244, 67, 245, 227, 267, 68, 258, 259, 123, 260, 270, 65, 271, 122, 267, 67, 280, 281, 285, 68, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 123, 122, 293, 247, 286, 248, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 305, 306, 123, 249, 313, 250, 122, 317, 309, 316, 323, 318, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 322, 123, 122, 333, 334, 186, 340, 343, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 344, 123, 349, 356, 122, 243, 374, 376, 377, 381, 386, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 123, 122, 314, 387, 388, 251, 392, 393, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 123, 401, 406, 122, 228, 320, 411, 414, 415, 421, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 123, 122, 418, 420, 428, 321, 432, 427, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 123, 431, 155, 247, 122, 188, 435, 187, 262, 410, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 357, 123, 122, 249, 419, 437, 394, 179, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 216, 123, 426, 284, 122, 257, 384, 277, 354, 304, 122, 87, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 123, 122, 0, 0, 0, 0, 123, 0, 0, 0, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 123, 0, 0, 122, 0, 0, 0, 0, 0, 0, 0, 0, 0, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 123, 134, 135, 136, 137, 138, 0, 0, 0, 0, 0, 0, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 80, 0, 0, 130, 131, 132, 133, 134, 135, 136, 137, 138, 1, 0, 2, 0, 81, 3, 0, 4, 0, 0, 5, 6, 7, 8, 0, 0, 9, 82, 0, 10, 0, 0, 0, 0, 0, 0, 0, 
11, 0, 0, 0, 0, 0, 0, 78, 0, 0, 0, 83, 0, 84, 0, 0, 0, 0, 12, 0, 13, 85, 14, 0, 15, 0, 16, 0, 17, 0, 0, 0, 0, 0, 0, 0, 18 }; static const short yycheck[] = { 96, 234, 65, 5, 67, 68, 9, 224, 3, 9, 5, 29, 82, 8, 5, 5, 65, 8, 260, 5, 49, 5, 6, 6, 8, 5, 6, 34, 8, 40, 5, 6, 102, 51, 258, 5, 6, 254, 101, 28, 32, 70, 105, 139, 107, 108, 109, 110, 3, 3, 42, 80, 3, 102, 35, 3, 22, 12, 9, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 309, 301, 96, 97, 98, 39, 324, 316, 326, 327, 328, 329, 330, 331, 307, 46, 46, 95, 318, 102, 3, 4, 5, 6, 103, 8, 9, 103, 95, 95, 323, 96, 103, 95, 76, 95, 95, 104, 104, 95, 333, 34, 104, 103, 95, 184, 185, 103, 3, 95, 5, 6, 103, 8, 88, 38, 39, 82, 3, 13, 3, 361, 234, 8, 3, 48, 102, 50, 104, 12, 388, 10, 26, 382, 3, 35, 5, 102, 63, 8, 57, 58, 67, 3, 4, 5, 6, 64, 260, 9, 10, 391, 102, 62, 104, 15, 74, 36, 53, 101, 272, 240, 31, 23, 5, 6, 3, 4, 247, 3, 249, 283, 95, 52, 0, 102, 99, 104, 38, 39, 103, 104, 43, 102, 53, 104, 23, 66, 48, 102, 50, 104, 3, 3, 4, 5, 6, 309, 102, 9, 104, 38, 3, 3, 316, 15, 75, 103, 3, 3, 47, 71, 324, 23, 326, 327, 328, 329, 330, 331, 102, 81, 104, 3, 46, 103, 95, 17, 38, 39, 103, 3, 4, 5, 6, 95, 96, 9, 48, 99, 50, 103, 88, 103, 81, 3, 4, 5, 6, 39, 8, 9, 3, 4, 102, 102, 104, 104, 102, 74, 104, 71, 3, 4, 103, 103, 38, 39, 103, 88, 382, 81, 23, 103, 103, 387, 48, 102, 50, 104, 38, 39, 23, 79, 3, 95, 96, 38, 73, 99, 48, 29, 50, 103, 102, 60, 104, 38, 88, 89, 90, 91, 92, 93, 82, 56, 5, 104, 3, 99, 103, 103, 29, 51, 88, 88, 44, 3, 3, 103, 432, 102, 62, 95, 38, 102, 3, 99, 104, 5, 81, 103, 103, 45, 51, 88, 78, 95, 3, 29, 81, 99, 104, 68, 5, 103, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 51, 29, 37, 102, 104, 104, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 103, 103, 51, 102, 8, 104, 29, 8, 103, 102, 85, 103, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 103, 51, 29, 18, 83, 104, 57, 96, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 104, 51, 102, 8, 29, 104, 39, 37, 55, 104, 104, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 51, 29, 12, 85, 85, 104, 55, 18, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 51, 18, 54, 29, 6, 104, 103, 102, 3, 102, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 51, 29, 104, 88, 3, 104, 88, 103, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 51, 8, 92, 102, 29, 118, 104, 117, 221, 401, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 323, 51, 29, 102, 414, 433, 378, 107, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 140, 51, 421, 240, 29, 217, 349, 234, 316, 261, 29, 16, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 51, 29, -1, -1, -1, -1, 51, -1, -1, -1, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 51, -1, -1, 29, -1, -1, -1, -1, -1, -1, -1, -1, -1, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 51, 94, 95, 96, 97, 98, -1, -1, -1, -1, -1, -1, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 20, -1, -1, 90, 91, 92, 93, 94, 95, 96, 97, 98, 14, -1, 16, -1, 36, 19, -1, 21, -1, -1, 24, 25, 26, 27, -1, -1, 30, 49, -1, 33, -1, -1, -1, -1, -1, -1, -1, 41, -1, -1, -1, -1, -1, -1, 66, -1, -1, -1, 70, -1, 72, -1, -1, -1, -1, 59, -1, 61, 80, 63, -1, 65, -1, 67, -1, 69, -1, -1, -1, -1, -1, -1, -1, 77 }; /* YYSTOS[STATE-NUM] -- The (internal number of the) accessing symbol of state STATE-NUM. 
*/ static const unsigned char yystos[] = { 0, 14, 16, 19, 21, 24, 25, 26, 27, 30, 33, 41, 59, 61, 63, 65, 67, 69, 77, 106, 107, 108, 109, 110, 144, 147, 148, 151, 152, 153, 154, 161, 162, 171, 172, 173, 174, 177, 178, 179, 182, 184, 185, 186, 187, 40, 3, 35, 34, 35, 62, 3, 4, 5, 6, 9, 10, 15, 23, 38, 39, 48, 50, 71, 81, 95, 96, 99, 103, 111, 112, 114, 140, 141, 3, 10, 36, 52, 66, 180, 20, 36, 49, 70, 72, 80, 145, 180, 74, 3, 115, 0, 101, 43, 3, 3, 103, 3, 3, 3, 3, 103, 46, 132, 133, 103, 103, 103, 103, 103, 103, 103, 48, 50, 140, 140, 140, 34, 102, 3, 12, 113, 29, 51, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 88, 88, 3, 9, 3, 5, 6, 8, 53, 146, 74, 79, 65, 102, 63, 67, 109, 3, 73, 5, 6, 8, 95, 103, 121, 122, 160, 163, 164, 60, 82, 8, 104, 140, 142, 143, 5, 140, 28, 96, 142, 140, 140, 140, 104, 103, 103, 104, 115, 112, 3, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 3, 8, 31, 53, 75, 95, 121, 149, 150, 146, 88, 88, 44, 3, 175, 176, 3, 103, 155, 62, 5, 6, 8, 104, 121, 123, 165, 102, 166, 32, 42, 183, 38, 102, 104, 102, 104, 3, 104, 104, 102, 104, 102, 104, 104, 140, 140, 82, 116, 117, 149, 103, 45, 88, 102, 117, 3, 4, 23, 38, 81, 120, 156, 78, 3, 102, 104, 102, 104, 160, 164, 167, 168, 104, 68, 39, 88, 143, 5, 104, 140, 140, 47, 118, 119, 120, 37, 124, 125, 123, 57, 58, 64, 181, 103, 121, 122, 176, 103, 103, 102, 104, 103, 157, 158, 121, 8, 12, 169, 102, 8, 103, 121, 104, 104, 103, 85, 17, 39, 88, 89, 90, 91, 92, 93, 99, 18, 83, 126, 127, 104, 22, 76, 57, 104, 123, 96, 104, 120, 103, 159, 160, 102, 3, 46, 170, 160, 168, 123, 8, 119, 121, 122, 9, 103, 121, 122, 121, 122, 121, 122, 121, 122, 121, 122, 121, 122, 39, 120, 37, 55, 128, 129, 104, 104, 102, 104, 158, 104, 104, 85, 85, 123, 9, 103, 55, 18, 132, 160, 121, 95, 122, 104, 123, 18, 56, 120, 130, 131, 54, 134, 135, 104, 130, 103, 13, 26, 102, 3, 136, 137, 104, 131, 88, 102, 3, 5, 8, 103, 137, 103, 3, 138, 139, 8, 88, 102, 104, 104, 121, 139 }; #if ! defined (YYSIZE_T) && defined (__SIZE_TYPE__) # define YYSIZE_T __SIZE_TYPE__ #endif #if ! defined (YYSIZE_T) && defined (size_t) # define YYSIZE_T size_t #endif #if ! defined (YYSIZE_T) # if defined (__STDC__) || defined (__cplusplus) # include /* INFRINGES ON USER NAME SPACE */ # define YYSIZE_T size_t # endif #endif #if ! defined (YYSIZE_T) # define YYSIZE_T unsigned int #endif #define yyerrok (yyerrstatus = 0) #define yyclearin (yychar = YYEMPTY) #define YYEMPTY (-2) #define YYEOF 0 #define YYACCEPT goto yyacceptlab #define YYABORT goto yyabortlab #define YYERROR goto yyerrlab1 /* Like YYERROR except do call yyerror. This remains here temporarily to ease the transition to the new meaning of YYERROR, for GCC. Once GCC version 2 has supplanted version 1, this can go. */ #define YYFAIL goto yyerrlab #define YYRECOVERING() (!!yyerrstatus) #define YYBACKUP(Token, Value) \ do \ if (yychar == YYEMPTY && yylen == 1) \ { \ yychar = (Token); \ yylval = (Value); \ yytoken = YYTRANSLATE (yychar); \ YYPOPSTACK; \ goto yybackup; \ } \ else \ { \ yyerror (pParser, "syntax error: cannot back up");\ YYERROR; \ } \ while (0) #define YYTERROR 1 #define YYERRCODE 256 /* YYLLOC_DEFAULT -- Compute the default location (before the actions are run). */ #ifndef YYLLOC_DEFAULT # define YYLLOC_DEFAULT(Current, Rhs, N) \ Current.first_line = Rhs[1].first_line; \ Current.first_column = Rhs[1].first_column; \ Current.last_line = Rhs[N].last_line; \ Current.last_column = Rhs[N].last_column; #endif /* YYLEX -- calling `yylex' with the right arguments. 
*/ #ifdef YYLEX_PARAM # define YYLEX yylex (&yylval, YYLEX_PARAM) #else # define YYLEX yylex (&yylval, pParser) #endif /* Enable debugging if requested. */ #if YYDEBUG # ifndef YYFPRINTF # include /* INFRINGES ON USER NAME SPACE */ # define YYFPRINTF fprintf # endif # define YYDPRINTF(Args) \ do { \ if (yydebug) \ YYFPRINTF Args; \ } while (0) # define YYDSYMPRINT(Args) \ do { \ if (yydebug) \ yysymprint Args; \ } while (0) # define YYDSYMPRINTF(Title, Token, Value, Location) \ do { \ if (yydebug) \ { \ YYFPRINTF (stderr, "%s ", Title); \ yysymprint (stderr, \ Token, Value); \ YYFPRINTF (stderr, "\n"); \ } \ } while (0) /*------------------------------------------------------------------. | yy_stack_print -- Print the state stack from its BOTTOM up to its | | TOP (cinluded). | `------------------------------------------------------------------*/ #if defined (__STDC__) || defined (__cplusplus) static void yy_stack_print (short *bottom, short *top) #else static void yy_stack_print (bottom, top) short *bottom; short *top; #endif { YYFPRINTF (stderr, "Stack now"); for (/* Nothing. */; bottom <= top; ++bottom) YYFPRINTF (stderr, " %d", *bottom); YYFPRINTF (stderr, "\n"); } # define YY_STACK_PRINT(Bottom, Top) \ do { \ if (yydebug) \ yy_stack_print ((Bottom), (Top)); \ } while (0) /*------------------------------------------------. | Report that the YYRULE is going to be reduced. | `------------------------------------------------*/ #if defined (__STDC__) || defined (__cplusplus) static void yy_reduce_print (int yyrule) #else static void yy_reduce_print (yyrule) int yyrule; #endif { int yyi; unsigned int yylineno = yyrline[yyrule]; YYFPRINTF (stderr, "Reducing stack by rule %d (line %u), ", yyrule - 1, yylineno); /* Print the symbols being reduced, and their result. */ for (yyi = yyprhs[yyrule]; 0 <= yyrhs[yyi]; yyi++) YYFPRINTF (stderr, "%s ", yytname [yyrhs[yyi]]); YYFPRINTF (stderr, "-> %s\n", yytname [yyr1[yyrule]]); } # define YY_REDUCE_PRINT(Rule) \ do { \ if (yydebug) \ yy_reduce_print (Rule); \ } while (0) /* Nonzero means print parse trace. It is left uninitialized so that multiple parsers can coexist. */ int yydebug; #else /* !YYDEBUG */ # define YYDPRINTF(Args) # define YYDSYMPRINT(Args) # define YYDSYMPRINTF(Title, Token, Value, Location) # define YY_STACK_PRINT(Bottom, Top) # define YY_REDUCE_PRINT(Rule) #endif /* !YYDEBUG */ /* YYINITDEPTH -- initial size of the parser's stacks. */ #ifndef YYINITDEPTH # define YYINITDEPTH 200 #endif /* YYMAXDEPTH -- maximum size the stacks can grow to (effective only if the built-in stack extension method is used). Do not make this value too large; the results are undefined if SIZE_MAX < YYSTACK_BYTES (YYMAXDEPTH) evaluated with infinite-precision integer arithmetic. */ #if YYMAXDEPTH == 0 # undef YYMAXDEPTH #endif #ifndef YYMAXDEPTH # define YYMAXDEPTH 10000 #endif #if YYERROR_VERBOSE # ifndef yystrlen # if defined (__GLIBC__) && defined (_STRING_H) # define yystrlen strlen # else /* Return the length of YYSTR. */ static YYSIZE_T # if defined (__STDC__) || defined (__cplusplus) yystrlen (const char *yystr) # else yystrlen (yystr) const char *yystr; # endif { register const char *yys = yystr; while (*yys++ != '\0') continue; return yys - yystr - 1; } # endif # endif # ifndef yystpcpy # if defined (__GLIBC__) && defined (_STRING_H) && defined (_GNU_SOURCE) # define yystpcpy stpcpy # else /* Copy YYSRC to YYDEST, returning the address of the terminating '\0' in YYDEST. 
*/ static char * # if defined (__STDC__) || defined (__cplusplus) yystpcpy (char *yydest, const char *yysrc) # else yystpcpy (yydest, yysrc) char *yydest; const char *yysrc; # endif { register char *yyd = yydest; register const char *yys = yysrc; while ((*yyd++ = *yys++) != '\0') continue; return yyd - 1; } # endif # endif #endif /* !YYERROR_VERBOSE */ #if YYDEBUG /*--------------------------------. | Print this symbol on YYOUTPUT. | `--------------------------------*/ #if defined (__STDC__) || defined (__cplusplus) static void yysymprint (FILE *yyoutput, int yytype, YYSTYPE *yyvaluep) #else static void yysymprint (yyoutput, yytype, yyvaluep) FILE *yyoutput; int yytype; YYSTYPE *yyvaluep; #endif { /* Pacify ``unused variable'' warnings. */ (void) yyvaluep; if (yytype < YYNTOKENS) { YYFPRINTF (yyoutput, "token %s (", yytname[yytype]); # ifdef YYPRINT YYPRINT (yyoutput, yytoknum[yytype], *yyvaluep); # endif } else YYFPRINTF (yyoutput, "nterm %s (", yytname[yytype]); switch (yytype) { default: break; } YYFPRINTF (yyoutput, ")"); } #endif /* ! YYDEBUG */ /*-----------------------------------------------. | Release the memory associated to this symbol. | `-----------------------------------------------*/ #if defined (__STDC__) || defined (__cplusplus) static void yydestruct (int yytype, YYSTYPE *yyvaluep) #else static void yydestruct (yytype, yyvaluep) int yytype; YYSTYPE *yyvaluep; #endif { /* Pacify ``unused variable'' warnings. */ (void) yyvaluep; switch (yytype) { default: break; } } /* Prevent warnings from -Wmissing-prototypes. */ #ifdef YYPARSE_PARAM # if defined (__STDC__) || defined (__cplusplus) int yyparse (void *YYPARSE_PARAM); # else int yyparse (); # endif #else /* ! YYPARSE_PARAM */ #if defined (__STDC__) || defined (__cplusplus) int yyparse ( SqlParser_c * pParser ); #else int yyparse (); #endif #endif /* ! YYPARSE_PARAM */ /*----------. | yyparse. | `----------*/ #ifdef YYPARSE_PARAM # if defined (__STDC__) || defined (__cplusplus) int yyparse (void *YYPARSE_PARAM) # else int yyparse (YYPARSE_PARAM) void *YYPARSE_PARAM; # endif #else /* ! YYPARSE_PARAM */ #if defined (__STDC__) || defined (__cplusplus) int yyparse ( SqlParser_c * pParser ) #else int yyparse (pParser) SqlParser_c * pParser ; #endif #endif { /* The lookahead symbol. */ int yychar; /* The semantic value of the lookahead symbol. */ YYSTYPE yylval; /* Number of syntax errors so far. */ int yynerrs; register int yystate; register int yyn; int yyresult; /* Number of tokens to shift before error messages enabled. */ int yyerrstatus; /* Lookahead token as an internal (translated) token number. */ int yytoken = 0; /* Three stacks and their tools: `yyss': related to states, `yyvs': related to semantic values, `yyls': related to locations. Refer to the stacks thru separate pointers, to allow yyoverflow to reallocate them elsewhere. */ /* The state stack. */ short yyssa[YYINITDEPTH]; short *yyss = yyssa; register short *yyssp; /* The semantic value stack. */ YYSTYPE yyvsa[YYINITDEPTH]; YYSTYPE *yyvs = yyvsa; register YYSTYPE *yyvsp; #define YYPOPSTACK (yyvsp--, yyssp--) YYSIZE_T yystacksize = YYINITDEPTH; /* The variables used to return semantic value and location from the action routines. */ YYSTYPE yyval; /* When reducing, the number of symbols on the RHS of the reduced rule. */ int yylen; YYDPRINTF ((stderr, "Starting parse\n")); yystate = 0; yyerrstatus = 0; yynerrs = 0; yychar = YYEMPTY; /* Cause a token to be read. */ /* Initialize stack pointers. 
Waste one element of value and location stack so that they stay on the same level as the state stack. The wasted elements are never initialized. */ yyssp = yyss; yyvsp = yyvs; goto yysetstate; /*------------------------------------------------------------. | yynewstate -- Push a new state, which is found in yystate. | `------------------------------------------------------------*/ yynewstate: /* In all cases, when you get here, the value and location stacks have just been pushed. so pushing a state here evens the stacks. */ yyssp++; yysetstate: *yyssp = yystate; if (yyss + yystacksize - 1 <= yyssp) { /* Get the current used size of the three stacks, in elements. */ YYSIZE_T yysize = yyssp - yyss + 1; #ifdef yyoverflow { /* Give user a chance to reallocate the stack. Use copies of these so that the &'s don't force the real ones into memory. */ YYSTYPE *yyvs1 = yyvs; short *yyss1 = yyss; /* Each stack pointer address is followed by the size of the data in use in that stack, in bytes. This used to be a conditional around just the two extra args, but that might be undefined if yyoverflow is a macro. */ yyoverflow ("parser stack overflow", &yyss1, yysize * sizeof (*yyssp), &yyvs1, yysize * sizeof (*yyvsp), &yystacksize); yyss = yyss1; yyvs = yyvs1; } #else /* no yyoverflow */ # ifndef YYSTACK_RELOCATE goto yyoverflowlab; # else /* Extend the stack our own way. */ if (YYMAXDEPTH <= yystacksize) goto yyoverflowlab; yystacksize *= 2; if (YYMAXDEPTH < yystacksize) yystacksize = YYMAXDEPTH; { short *yyss1 = yyss; union yyalloc *yyptr = (union yyalloc *) YYSTACK_ALLOC (YYSTACK_BYTES (yystacksize)); if (! yyptr) goto yyoverflowlab; YYSTACK_RELOCATE (yyss); YYSTACK_RELOCATE (yyvs); # undef YYSTACK_RELOCATE if (yyss1 != yyssa) YYSTACK_FREE (yyss1); } # endif #endif /* no yyoverflow */ yyssp = yyss + yysize - 1; yyvsp = yyvs + yysize - 1; YYDPRINTF ((stderr, "Stack size increased to %lu\n", (unsigned long int) yystacksize)); if (yyss + yystacksize - 1 <= yyssp) YYABORT; } YYDPRINTF ((stderr, "Entering state %d\n", yystate)); goto yybackup; /*-----------. | yybackup. | `-----------*/ yybackup: /* Do appropriate processing given the current state. */ /* Read a lookahead token if we need one and don't already have one. */ /* yyresume: */ /* First try to decide what to do without reference to lookahead token. */ yyn = yypact[yystate]; if (yyn == YYPACT_NINF) goto yydefault; /* Not known => get a lookahead token if don't already have one. */ /* YYCHAR is either YYEMPTY or YYEOF or a valid lookahead symbol. */ if (yychar == YYEMPTY) { YYDPRINTF ((stderr, "Reading a token: ")); yychar = YYLEX; } if (yychar <= YYEOF) { yychar = yytoken = YYEOF; YYDPRINTF ((stderr, "Now at end of input.\n")); } else { yytoken = YYTRANSLATE (yychar); YYDSYMPRINTF ("Next token is", yytoken, &yylval, &yylloc); } /* If the proper action on seeing token YYTOKEN is to reduce or to detect an error, take that action. */ yyn += yytoken; if (yyn < 0 || YYLAST < yyn || yycheck[yyn] != yytoken) goto yydefault; yyn = yytable[yyn]; if (yyn <= 0) { if (yyn == 0 || yyn == YYTABLE_NINF) goto yyerrlab; yyn = -yyn; goto yyreduce; } if (yyn == YYFINAL) YYACCEPT; /* Shift the lookahead token. */ YYDPRINTF ((stderr, "Shifting token %s, ", yytname[yytoken])); /* Discard the token being shifted unless it is eof. */ if (yychar != YYEOF) yychar = YYEMPTY; *++yyvsp = yylval; /* Count tokens shifted since error; after three, turn off error status. 
*/ if (yyerrstatus) yyerrstatus--; yystate = yyn; goto yynewstate; /*-----------------------------------------------------------. | yydefault -- do the default action for the current state. | `-----------------------------------------------------------*/ yydefault: yyn = yydefact[yystate]; if (yyn == 0) goto yyerrlab; goto yyreduce; /*-----------------------------. | yyreduce -- Do a reduction. | `-----------------------------*/ yyreduce: /* yyn is the number of a rule to reduce with. */ yylen = yyr2[yyn]; /* If YYLEN is nonzero, implement the default value of the action: `$$ = $1'. Otherwise, the following line sets YYVAL to garbage. This behavior is undocumented and Bison users should not rely upon it. Assigning to YYVAL unconditionally makes the parser a bit smaller, and it avoids a GCC warning that YYVAL may be used uninitialized. */ yyval = yyvsp[1-yylen]; YY_REDUCE_PRINT (yyn); switch (yyn) { case 2: { pParser->PushQuery(); ;} break; case 22: { pParser->PushQuery(); ;} break; case 23: { pParser->PushQuery(); ;} break; case 26: { pParser->m_pStmt->m_eStmt = STMT_SELECT; pParser->m_pQuery->m_sIndexes.SetBinary ( pParser->m_pBuf+yyvsp[-6].m_iStart, yyvsp[-6].m_iEnd-yyvsp[-6].m_iStart ); ;} break; case 29: { pParser->AddItem ( &yyvsp[0] ); ;} break; case 32: { pParser->AliasLastItem ( &yyvsp[0] ); ;} break; case 33: { pParser->AliasLastItem ( &yyvsp[0] ); ;} break; case 34: { pParser->AddItem ( &yyvsp[0] ); ;} break; case 35: { pParser->AddItem ( &yyvsp[-1], SPH_AGGR_AVG, &yyvsp[-3], &yyvsp[0] ); ;} break; case 36: { pParser->AddItem ( &yyvsp[-1], SPH_AGGR_MAX, &yyvsp[-3], &yyvsp[0] ); ;} break; case 37: { pParser->AddItem ( &yyvsp[-1], SPH_AGGR_MIN, &yyvsp[-3], &yyvsp[0] ); ;} break; case 38: { pParser->AddItem ( &yyvsp[-1], SPH_AGGR_SUM, &yyvsp[-3], &yyvsp[0] ); ;} break; case 39: { if ( !pParser->AddItem ( "count(*)", &yyvsp[-3], &yyvsp[0] ) ) YYERROR; ;} break; case 40: { if ( !pParser->AddItem ( "weight()", &yyvsp[-2], &yyvsp[0] ) ) YYERROR; ;} break; case 41: { if ( !pParser->AddDistinct ( &yyvsp[-1], &yyvsp[-4], &yyvsp[0] ) ) YYERROR; ;} break; case 43: { yyval = yyvsp[-2]; yyval.m_iEnd = yyvsp[0].m_iEnd; ;} break; case 49: { if ( !pParser->SetMatch(yyvsp[-1]) ) YYERROR; ;} break; case 50: { CSphFilterSettings * pFilter = pParser->AddValuesFilter ( yyvsp[-2] ); if ( !pFilter ) YYERROR; pFilter->m_dValues.Add ( yyvsp[0].m_iValue ); ;} break; case 51: { CSphFilterSettings * pFilter = pParser->AddValuesFilter ( yyvsp[-2] ); if ( !pFilter ) YYERROR; pFilter->m_dValues.Add ( yyvsp[0].m_iValue ); pFilter->m_bExclude = true; ;} break; case 52: { CSphFilterSettings * pFilter = pParser->AddValuesFilter ( yyvsp[-4] ); if ( !pFilter ) YYERROR; pFilter->m_dValues = *yyvsp[-1].m_pValues.Ptr(); pFilter->m_dValues.Sort(); ;} break; case 53: { CSphFilterSettings * pFilter = pParser->AddValuesFilter ( yyvsp[-5] ); if ( !pFilter ) YYERROR; pFilter->m_dValues = *yyvsp[-1].m_pValues.Ptr(); pFilter->m_bExclude = true; pFilter->m_dValues.Sort(); ;} break; case 54: { if ( !pParser->AddUservarFilter ( yyvsp[-2].m_sValue, yyvsp[0].m_sValue, false ) ) YYERROR; ;} break; case 55: { if ( !pParser->AddUservarFilter ( yyvsp[-3].m_sValue, yyvsp[0].m_sValue, true ) ) YYERROR; ;} break; case 56: { if ( !pParser->AddUintRangeFilter ( yyvsp[-4].m_sValue, yyvsp[-2].m_iValue, yyvsp[0].m_iValue ) ) YYERROR; ;} break; case 57: { if ( !pParser->AddUintRangeFilter ( yyvsp[-2].m_sValue, yyvsp[0].m_iValue+1, UINT_MAX ) ) YYERROR; ;} break; case 58: { if ( !pParser->AddUintRangeFilter ( yyvsp[-2].m_sValue, 0, 
yyvsp[0].m_iValue-1 ) ) YYERROR; ;} break; case 59: { if ( !pParser->AddUintRangeFilter ( yyvsp[-2].m_sValue, yyvsp[0].m_iValue, UINT_MAX ) ) YYERROR; ;} break; case 60: { if ( !pParser->AddUintRangeFilter ( yyvsp[-2].m_sValue, 0, yyvsp[0].m_iValue ) ) YYERROR; ;} break; case 64: { yyerror ( pParser, "only >=, <=, and BETWEEN floating-point filter types are supported in this version" ); YYERROR; ;} break; case 65: { if ( !pParser->AddFloatRangeFilter ( yyvsp[-4].m_sValue, yyvsp[-2].m_fValue, yyvsp[0].m_fValue ) ) YYERROR; ;} break; case 66: { if ( !pParser->AddFloatRangeFilter ( yyvsp[-2].m_sValue, yyvsp[0].m_fValue, FLT_MAX ) ) YYERROR; ;} break; case 67: { if ( !pParser->AddFloatRangeFilter ( yyvsp[-2].m_sValue, -FLT_MAX, yyvsp[0].m_fValue ) ) YYERROR; ;} break; case 69: { if ( !pParser->SetOldSyntax() ) YYERROR; ;} break; case 70: { yyval.m_sValue = "@count"; if ( !pParser->SetNewSyntax() ) YYERROR; ;} break; case 71: { yyval.m_sValue = "@weight"; if ( !pParser->SetNewSyntax() ) YYERROR; ;} break; case 72: { yyval.m_sValue = "@id"; if ( !pParser->SetNewSyntax() ) YYERROR; ;} break; case 73: { yyval.m_iInstype = TOK_CONST_INT; yyval.m_iValue = yyvsp[0].m_iValue; ;} break; case 74: { yyval.m_iInstype = TOK_CONST_INT; yyval.m_iValue = -yyvsp[0].m_iValue; ;} break; case 75: { yyval.m_iInstype = TOK_CONST_FLOAT; yyval.m_fValue = yyvsp[0].m_fValue; ;} break; case 76: { yyval.m_iInstype = TOK_CONST_FLOAT; yyval.m_fValue = -yyvsp[0].m_fValue; ;} break; case 77: { assert ( !yyval.m_pValues.Ptr() ); yyval.m_pValues = new RefcountedVector_c (); yyval.m_pValues->Add ( yyvsp[0].m_iValue ); ;} break; case 78: { yyval.m_pValues->Add ( yyvsp[0].m_iValue ); ;} break; case 81: { pParser->m_pQuery->m_eGroupFunc = SPH_GROUPBY_ATTR; pParser->m_pQuery->m_sGroupBy = yyvsp[0].m_sValue; ;} break; case 84: { pParser->m_pQuery->m_sSortBy.SetBinary ( pParser->m_pBuf+yyvsp[0].m_iStart, yyvsp[0].m_iEnd-yyvsp[0].m_iStart ); ;} break; case 87: { pParser->m_pQuery->m_sOrderBy.SetBinary ( pParser->m_pBuf+yyvsp[0].m_iStart, yyvsp[0].m_iEnd-yyvsp[0].m_iStart ); ;} break; case 88: { pParser->m_pQuery->m_sOrderBy = "@random"; ;} break; case 90: { yyval = yyvsp[-2]; yyval.m_iEnd = yyvsp[0].m_iEnd; ;} break; case 92: { yyval = yyvsp[-1]; yyval.m_iEnd = yyvsp[0].m_iEnd; ;} break; case 93: { yyval = yyvsp[-1]; yyval.m_iEnd = yyvsp[0].m_iEnd; ;} break; case 96: { pParser->m_pQuery->m_iOffset = 0; pParser->m_pQuery->m_iLimit = yyvsp[0].m_iValue; ;} break; case 97: { pParser->m_pQuery->m_iOffset = yyvsp[-2].m_iValue; pParser->m_pQuery->m_iLimit = yyvsp[0].m_iValue; ;} break; case 103: { if ( !pParser->AddOption ( yyvsp[-2], yyvsp[0] ) ) YYERROR; ;} break; case 104: { if ( !pParser->AddOption ( yyvsp[-2], yyvsp[0] ) ) YYERROR; ;} break; case 105: { if ( !pParser->AddOption ( yyvsp[-4], pParser->GetNamedVec ( yyvsp[-1].m_iValue ) ) ) YYERROR; pParser->FreeNamedVec ( yyvsp[-1].m_iValue ); ;} break; case 106: { if ( !pParser->AddOption ( yyvsp[-5], yyvsp[-2], yyvsp[-1].m_sValue ) ) YYERROR; ;} break; case 107: { if ( !pParser->AddOption ( yyvsp[-2], yyvsp[0] ) ) YYERROR; ;} break; case 108: { yyval.m_iValue = pParser->AllocNamedVec (); pParser->AddConst ( yyval.m_iValue, yyvsp[0] ); ;} break; case 109: { pParser->AddConst( yyval.m_iValue, yyvsp[0] ); ;} break; case 110: { yyval.m_sValue = yyvsp[-2].m_sValue; yyval.m_iValue = yyvsp[0].m_iValue; ;} break; case 112: { if ( !pParser->SetOldSyntax() ) YYERROR; ;} break; case 113: { if ( !pParser->SetNewSyntax() ) YYERROR; ;} break; case 117: { yyval = yyvsp[-1]; yyval.m_iEnd = 
yyvsp[0].m_iEnd; ;} break; case 118: { yyval = yyvsp[-1]; yyval.m_iEnd = yyvsp[0].m_iEnd; ;} break; case 119: { yyval = yyvsp[-2]; yyval.m_iEnd = yyvsp[0].m_iEnd; ;} break; case 120: { yyval = yyvsp[-2]; yyval.m_iEnd = yyvsp[0].m_iEnd; ;} break; case 121: { yyval = yyvsp[-2]; yyval.m_iEnd = yyvsp[0].m_iEnd; ;} break; case 122: { yyval = yyvsp[-2]; yyval.m_iEnd = yyvsp[0].m_iEnd; ;} break; case 123: { yyval = yyvsp[-2]; yyval.m_iEnd = yyvsp[0].m_iEnd; ;} break; case 124: { yyval = yyvsp[-2]; yyval.m_iEnd = yyvsp[0].m_iEnd; ;} break; case 125: { yyval = yyvsp[-2]; yyval.m_iEnd = yyvsp[0].m_iEnd; ;} break; case 126: { yyval = yyvsp[-2]; yyval.m_iEnd = yyvsp[0].m_iEnd; ;} break; case 127: { yyval = yyvsp[-2]; yyval.m_iEnd = yyvsp[0].m_iEnd; ;} break; case 128: { yyval = yyvsp[-2]; yyval.m_iEnd = yyvsp[0].m_iEnd; ;} break; case 129: { yyval = yyvsp[-2]; yyval.m_iEnd = yyvsp[0].m_iEnd; ;} break; case 130: { yyval = yyvsp[-2]; yyval.m_iEnd = yyvsp[0].m_iEnd; ;} break; case 131: { yyval = yyvsp[-2]; yyval.m_iEnd = yyvsp[0].m_iEnd; ;} break; case 132: { yyval = yyvsp[-2]; yyval.m_iEnd = yyvsp[0].m_iEnd; ;} break; case 133: { yyval = yyvsp[-2]; yyval.m_iEnd = yyvsp[0].m_iEnd; ;} break; case 134: { yyval = yyvsp[-2]; yyval.m_iEnd = yyvsp[0].m_iEnd; ;} break; case 135: { yyval = yyvsp[-2]; yyval.m_iEnd = yyvsp[0].m_iEnd; ;} break; case 136: { yyval = yyvsp[-2]; yyval.m_iEnd = yyvsp[0].m_iEnd; ;} break; case 138: { yyval = yyvsp[-3]; yyval.m_iEnd = yyvsp[0].m_iEnd; ;} break; case 139: { yyval = yyvsp[-3]; yyval.m_iEnd = yyvsp[0].m_iEnd; ;} break; case 140: { yyval = yyvsp[-2]; yyval.m_iEnd = yyvsp[0].m_iEnd ;} break; case 141: { yyval = yyvsp[-5]; yyval.m_iEnd = yyvsp[0].m_iEnd ;} break; case 142: { yyval = yyvsp[-5]; yyval.m_iEnd = yyvsp[0].m_iEnd ;} break; case 148: { pParser->m_pStmt->m_eStmt = STMT_SHOW_WARNINGS; ;} break; case 149: { pParser->m_pStmt->m_eStmt = STMT_SHOW_STATUS; ;} break; case 150: { pParser->m_pStmt->m_eStmt = STMT_SHOW_META; ;} break; case 156: { pParser->SetStatement ( yyvsp[-2], SET_LOCAL ); pParser->m_pStmt->m_iSetValue = yyvsp[0].m_iValue; ;} break; case 157: { pParser->SetStatement ( yyvsp[-2], SET_LOCAL ); pParser->m_pStmt->m_sSetValue = yyvsp[0].m_sValue; ;} break; case 158: { pParser->SetStatement ( yyvsp[-2], SET_LOCAL ); pParser->m_pStmt->m_bSetNull = true; ;} break; case 159: { pParser->m_pStmt->m_eStmt = STMT_DUMMY; ;} break; case 160: { pParser->m_pStmt->m_eStmt = STMT_DUMMY; ;} break; case 161: { pParser->SetStatement ( yyvsp[-4], SET_GLOBAL_UVAR ); pParser->m_pStmt->m_dSetValues = *yyvsp[-1].m_pValues.Ptr(); ;} break; case 162: { pParser->SetStatement ( yyvsp[-2], SET_GLOBAL_SVAR ); pParser->m_pStmt->m_sSetValue = yyvsp[0].m_sValue; ;} break; case 165: { yyval.m_iValue = 1; ;} break; case 166: { yyval.m_iValue = 0; ;} break; case 167: { yyval.m_iValue = yyvsp[0].m_iValue; if ( yyval.m_iValue!=0 && yyval.m_iValue!=1 ) { yyerror ( pParser, "only 0 and 1 could be used as boolean values" ); YYERROR; } ;} break; case 168: { pParser->m_pStmt->m_eStmt = STMT_COMMIT; ;} break; case 169: { pParser->m_pStmt->m_eStmt = STMT_ROLLBACK; ;} break; case 170: { pParser->m_pStmt->m_eStmt = STMT_BEGIN; ;} break; case 173: { // everything else is pushed directly into parser within the rules pParser->m_pStmt->m_sIndex = yyvsp[-3].m_sValue; ;} break; case 174: { pParser->m_pStmt->m_eStmt = STMT_INSERT; ;} break; case 175: { pParser->m_pStmt->m_eStmt = STMT_REPLACE; ;} break; case 178: { if ( !pParser->AddSchemaItem ( &yyvsp[0] ) ) { yyerror ( pParser, "unknown field" ); YYERROR; } ;} 
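/* Cases 178-179 cover the optional INSERT/REPLACE column list: each column
   identifier is passed to the parser's AddSchemaItem(), and a rejected name
   aborts the parse with an "unknown field" error. */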
break; case 179: { if ( !pParser->AddSchemaItem ( &yyvsp[0] ) ) { yyerror ( pParser, "unknown field" ); YYERROR; } ;} break; case 182: { if ( !pParser->m_pStmt->CheckInsertIntegrity() ) { yyerror ( pParser, "wrong number of values here" ); YYERROR; } ;} break; case 183: { AddInsval ( pParser->m_pStmt->m_dInsertValues, yyvsp[0] ); ;} break; case 184: { AddInsval ( pParser->m_pStmt->m_dInsertValues, yyvsp[0] ); ;} break; case 185: { yyval.m_iInstype = TOK_CONST_INT; yyval.m_iValue = yyvsp[0].m_iValue; ;} break; case 186: { yyval.m_iInstype = TOK_CONST_FLOAT; yyval.m_fValue = yyvsp[0].m_fValue; ;} break; case 187: { yyval.m_iInstype = TOK_QUOTED_STRING; yyval.m_sValue = yyvsp[0].m_sValue; ;} break; case 188: { yyval.m_iInstype = TOK_CONST_MVA; yyval.m_pValues = yyvsp[-1].m_pValues; ;} break; case 189: { yyval.m_iInstype = TOK_CONST_MVA; ;} break; case 190: { pParser->m_pStmt->m_eStmt = STMT_DELETE; pParser->m_pStmt->m_sIndex = yyvsp[-4].m_sValue; pParser->m_pStmt->m_dDeleteIds.Add ( yyvsp[0].m_iValue ); ;} break; case 191: { pParser->m_pStmt->m_eStmt = STMT_DELETE; pParser->m_pStmt->m_sIndex = yyvsp[-6].m_sValue; for ( int i=0; iGetLength(); i++ ) pParser->m_pStmt->m_dDeleteIds.Add ( (*yyvsp[-1].m_pValues.Ptr())[i] ); ;} break; case 192: { pParser->m_pStmt->m_eStmt = STMT_CALL; pParser->m_pStmt->m_sCallProc = yyvsp[-4].m_sValue; ;} break; case 193: { AddInsval ( pParser->m_pStmt->m_dInsertValues, yyvsp[0] ); ;} break; case 194: { AddInsval ( pParser->m_pStmt->m_dInsertValues, yyvsp[0] ); ;} break; case 196: { yyval.m_iInstype = TOK_CONST_STRINGS; ;} break; case 197: { // FIXME? for now, one such array per CALL statement, tops if ( pParser->m_pStmt->m_dCallStrings.GetLength() ) { yyerror ( pParser, "unexpected constant string list" ); YYERROR; } pParser->m_pStmt->m_dCallStrings.Add ( yyvsp[0].m_sValue ); ;} break; case 198: { pParser->m_pStmt->m_dCallStrings.Add ( yyvsp[0].m_sValue ); ;} break; case 201: { assert ( pParser->m_pStmt->m_dCallOptNames.GetLength()==1 ); assert ( pParser->m_pStmt->m_dCallOptValues.GetLength()==1 ); ;} break; case 203: { pParser->m_pStmt->m_dCallOptNames.Add ( yyvsp[0].m_sValue ); AddInsval ( pParser->m_pStmt->m_dCallOptValues, yyvsp[-2] ); ;} break; case 207: { yyval.m_sValue = "limit"; ;} break; case 208: { pParser->m_pStmt->m_eStmt = STMT_DESC; pParser->m_pStmt->m_sIndex = yyvsp[0].m_sValue; ;} break; case 211: { pParser->m_pStmt->m_eStmt = STMT_SHOW_TABLES; ;} break; case 212: { if ( !pParser->UpdateStatement ( &yyvsp[-3] ) ) YYERROR; ;} break; case 215: { pParser->UpdateAttr ( yyvsp[-2].m_sValue, &yyvsp[0] ); ;} break; case 216: { pParser->UpdateAttr ( yyvsp[-2].m_sValue, &yyvsp[0], SPH_ATTR_FLOAT); ;} break; case 217: { pParser->UpdateMVAAttr ( yyvsp[-4].m_sValue, yyvsp[-1] ); ;} break; case 218: { SqlNode_t tNoValues; pParser->UpdateMVAAttr ( yyvsp[-3].m_sValue, tNoValues ); ;} break; case 219: { pParser->m_pStmt->m_eStmt = STMT_SHOW_VARIABLES; ;} break; case 220: { pParser->m_pStmt->m_eStmt = STMT_DUMMY; ;} break; case 221: { pParser->m_pStmt->m_eStmt = STMT_DUMMY; ;} break; case 229: { SqlStmt_t & tStmt = *pParser->m_pStmt; tStmt.m_eStmt = STMT_CREATE_FUNC; tStmt.m_sUdfName = yyvsp[-4].m_sValue; tStmt.m_sUdfLib = yyvsp[0].m_sValue; tStmt.m_eUdfType = (ESphAttr) yyvsp[-2].m_iValue; ;} break; case 230: { yyval.m_iValue = SPH_ATTR_INTEGER; ;} break; case 231: { yyval.m_iValue = SPH_ATTR_FLOAT; ;} break; case 232: { SqlStmt_t & tStmt = *pParser->m_pStmt; tStmt.m_eStmt = STMT_DROP_FUNC; tStmt.m_sUdfName = yyvsp[0].m_sValue; ;} break; case 233: { SqlStmt_t & 
tStmt = *pParser->m_pStmt; tStmt.m_eStmt = STMT_ATTACH_INDEX; tStmt.m_sIndex = yyvsp[-3].m_sValue; tStmt.m_sSetName = yyvsp[0].m_sValue; ;} break; case 234: { SqlStmt_t & tStmt = *pParser->m_pStmt; tStmt.m_eStmt = STMT_FLUSH_RTINDEX; tStmt.m_sIndex = yyvsp[0].m_sValue; ;} break; case 235: { pParser->m_pStmt->m_eStmt = STMT_DUMMY; ;} break; } /* Line 991 of yacc.c. */ yyvsp -= yylen; yyssp -= yylen; YY_STACK_PRINT (yyss, yyssp); *++yyvsp = yyval; /* Now `shift' the result of the reduction. Determine what state that goes to, based on the state we popped back to and the rule number reduced by. */ yyn = yyr1[yyn]; yystate = yypgoto[yyn - YYNTOKENS] + *yyssp; if (0 <= yystate && yystate <= YYLAST && yycheck[yystate] == *yyssp) yystate = yytable[yystate]; else yystate = yydefgoto[yyn - YYNTOKENS]; goto yynewstate; /*------------------------------------. | yyerrlab -- here on detecting error | `------------------------------------*/ yyerrlab: /* If not already recovering from an error, report this error. */ if (!yyerrstatus) { ++yynerrs; #if YYERROR_VERBOSE yyn = yypact[yystate]; if (YYPACT_NINF < yyn && yyn < YYLAST) { YYSIZE_T yysize = 0; int yytype = YYTRANSLATE (yychar); char *yymsg; int yyx, yycount; yycount = 0; /* Start YYX at -YYN if negative to avoid negative indexes in YYCHECK. */ for (yyx = yyn < 0 ? -yyn : 0; yyx < (int) (sizeof (yytname) / sizeof (char *)); yyx++) if (yycheck[yyx + yyn] == yyx && yyx != YYTERROR) yysize += yystrlen (yytname[yyx]) + 15, yycount++; yysize += yystrlen ("syntax error, unexpected ") + 1; yysize += yystrlen (yytname[yytype]); yymsg = (char *) YYSTACK_ALLOC (yysize); if (yymsg != 0) { char *yyp = yystpcpy (yymsg, "syntax error, unexpected "); yyp = yystpcpy (yyp, yytname[yytype]); if (yycount < 4) { yycount = 0; for (yyx = yyn < 0 ? -yyn : 0; yyx < (int) (sizeof (yytname) / sizeof (char *)); yyx++) if (yycheck[yyx + yyn] == yyx && yyx != YYTERROR) { const char *yyq = ! yycount ? ", expecting " : " or "; yyp = yystpcpy (yyp, yyq); yyp = yystpcpy (yyp, yytname[yyx]); yycount++; } } else { for (yyx = yyn < 0 ? -yyn : 0; yyx < (int) (sizeof (yytname) / sizeof (char *)); yyx++) if (yycheck[yyx + yyn] == yyx && yyx != YYTERROR) { snprintf (yyp, (int)(yysize - (yyp - yymsg)), ", expecting %s (or %d other tokens)", yytname[yyx], yycount - 1); while (*yyp++); break; } } yyerror (pParser, yymsg); YYSTACK_FREE (yymsg); } else yyerror (pParser, "syntax error; also virtual memory exhausted"); } else #endif /* YYERROR_VERBOSE */ yyerror (pParser, "syntax error"); } if (yyerrstatus == 3) { /* If just tried and failed to reuse lookahead token after an error, discard it. */ /* Return failure if at end of input. */ if (yychar == YYEOF) { /* Pop the error token. */ YYPOPSTACK; /* Pop the rest of the stack. */ while (yyss < yyssp) { YYDSYMPRINTF ("Error: popping", yystos[*yyssp], yyvsp, yylsp); yydestruct (yystos[*yyssp], yyvsp); YYPOPSTACK; } YYABORT; } YYDSYMPRINTF ("Error: discarding", yytoken, &yylval, &yylloc); yydestruct (yytoken, &yylval); yychar = YYEMPTY; } /* Else will try to reuse lookahead token after shifting the error token. */ goto yyerrlab2; /*----------------------------------------------------. | yyerrlab1 -- error raised explicitly by an action. | `----------------------------------------------------*/ yyerrlab1: /* Suppress GCC warning that yyerrlab1 is unused when no action invokes YYERROR. 
*/ #if defined (__GNUC_MINOR__) && 2093 <= (__GNUC__ * 1000 + __GNUC_MINOR__) // __attribute__ ((__unused__)) #endif goto yyerrlab2; /*---------------------------------------------------------------. | yyerrlab2 -- pop states until the error token can be shifted. | `---------------------------------------------------------------*/ yyerrlab2: yyerrstatus = 3; /* Each real token shifted decrements this. */ for (;;) { yyn = yypact[yystate]; if (yyn != YYPACT_NINF) { yyn += YYTERROR; if (0 <= yyn && yyn <= YYLAST && yycheck[yyn] == YYTERROR) { yyn = yytable[yyn]; if (0 < yyn) break; } } /* Pop the current state because it cannot handle the error token. */ if (yyssp == yyss) YYABORT; YYDSYMPRINTF ("Error: popping", yystos[*yyssp], yyvsp, yylsp); yydestruct (yystos[yystate], yyvsp); yyvsp--; yystate = *--yyssp; YY_STACK_PRINT (yyss, yyssp); } if (yyn == YYFINAL) YYACCEPT; YYDPRINTF ((stderr, "Shifting error token, ")); *++yyvsp = yylval; yystate = yyn; goto yynewstate; /*-------------------------------------. | yyacceptlab -- YYACCEPT comes here. | `-------------------------------------*/ yyacceptlab: yyresult = 0; goto yyreturn; /*-----------------------------------. | yyabortlab -- YYABORT comes here. | `-----------------------------------*/ yyabortlab: yyresult = 1; goto yyreturn; #ifndef yyoverflow /*----------------------------------------------. | yyoverflowlab -- parser overflow comes here. | `----------------------------------------------*/ yyoverflowlab: yyerror (pParser, "parser stack overflow"); yyresult = 2; /* Fall through. */ #endif yyreturn: #ifndef yyoverflow if (yyss != yyssa) YYSTACK_FREE (yyss); #endif return yyresult; } #if USE_WINDOWS #pragma warning(pop) #endif sphinx-2.0.4-release/src/sphinxstemen.cpp0000644000176700017710000003563511711621267020003 0ustar deogardeogar// // $Id: sphinxstemen.cpp 3087 2012-01-30 23:07:35Z shodan $ // // // Copyright (c) 2001-2012, Andrew Aksyonoff // Copyright (c) 2008-2012, Sphinx Technologies Inc // All rights reserved // // This program is free software; you can redistribute it and/or modify // it under the terms of the GNU General Public License. You should have // received a copy of the GPL license along with this program; if you // did not, you can find it at http://www.gnu.org/ // #include "sphinx.h" // for UNALIGNED_RAM_ACCESS #if defined(_MSC_VER) && !defined(__cplusplus) #define inline #endif // #define SNOWBALL2011 static unsigned char stem_en_doubles[] = "bdfgmnprt"; static unsigned char vowel_map[] = "\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0" // 0 "\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0" // 1 "\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0" // 2 "\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0" // 3 "\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0" // 4 "\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0" // 5 //` a b c d e f g h i j k l m n o - NOLINT "\0\1\0\0\0\1\0\0\0\1\0\0\0\0\0\1" // 6 //p q r s t u v w x y z - NOLINT "\0\0\0\0\0\1\0\0\0\1\0\0\0\0\0\0" // 7 "\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0" // 8 "\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0" // 9 "\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0" // a "\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0" // b "\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0" // c "\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0" // d "\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0" // e "\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0"; // f #define is_vowel(idx) vowel_map[word[idx]] static inline int stem_en_id ( unsigned char l ) { register unsigned char * v = stem_en_doubles; while ( *v && *v!=l ) v++; return ( *v==l ) ? 
1 : 0; } static inline int stem_en_ivwxy ( unsigned char l ) { return vowel_map[l] || l=='w' || l=='x' || l=='Y'; } void stem_en_init () { } #define EXCBASE(b) ( iword==( ( (int)b[3]<<24 ) + ( (int)b[2]<<16 ) + ( (int)b[1]<<8 ) + (int)b[0] ) ) #define EXC4(a,b) ( len==4 && EXCBASE(b) ) #define EXC5(a,b) ( len==5 && EXCBASE(b) ) #define EXC6(a,b) ( len==6 && EXCBASE(b) && a[4]==b[4] ) #define EXC7(a,b) ( len==7 && EXCBASE(b) && a[4]==b[4] && a[5]==b[5] ) #define EXC8(a,b) ( len==8 && EXCBASE(b) && a[4]==b[4] && a[5]==b[5] && a[6]==b[6] ) void stem_en ( unsigned char * word, int len ) { int i, first_vowel, r1, r2, iword; unsigned char has_Y = 0; if ( len<=2 ) return; #if UNALIGNED_RAM_ACCESS iword = *(int*)word; #else iword = ( (int)word[3]<<24 ) + ( (int)word[2]<<16 ) + ( (int)word[1]<<8 ) + (int)word[0]; #endif // check for 3-letter exceptions (currently just one, "sky") and shortcuts if ( len==3 ) { #define CHECK3(c1,c2,c3) if ( iword==( (c1<<0)+(c2<<8)+(c3<<16) ) ) return; #ifdef SNOWBALL2011 #define CHECK3A CHECK3 #else #define CHECK3A(c1,c2,c3) if ( iword==( (c1<<0)+(c2<<8)+(c3<<16) ) ) { word[2] = '\0'; return; } #endif CHECK3 ( 't', 'h', 'e' ); CHECK3 ( 'a', 'n', 'd' ); CHECK3 ( 'y', 'o', 'u' ); CHECK3A ( 'w', 'a', 's' ); CHECK3A ( 'h', 'i', 's' ); CHECK3 ( 'f', 'o', 'r' ); CHECK3 ( 'h', 'e', 'r' ); CHECK3 ( 's', 'h', 'e' ); CHECK3 ( 'b', 'u', 't' ); CHECK3 ( 'h', 'a', 'd' ); CHECK3 ( 's', 'k', 'y' ); } // check for 4..8-letter exceptions if ( len>=4 && len<=8 ) { // check for 4-letter exceptions and shortcuts if ( len==4 ) { // shortcuts if ( iword==0x74616874 ) return; // that if ( iword==0x68746977 ) return; // with if ( iword==0x64696173 ) return; // said if ( iword==0x6d6f7266 ) return; // from // exceptions if ( iword==0x7377656e ) return; // news if ( iword==0x65776f68 ) return; // howe } // all those exceptions only have a few valid endings; early check switch ( word[len-1] ) { case 'd': if ( EXC7 ( word, "proceed" ) ) return; if ( EXC6 ( word, "exceed" ) ) return; if ( EXC7 ( word, "succeed" ) ) return; break; case 'g': if ( EXC5 ( word, "dying" ) ) { word[1] = 'i'; word[2] = 'e'; word[3] = '\0'; return; } if ( EXC5 ( word, "lying" ) ) { word[1] = 'i'; word[2] = 'e'; word[3] = '\0'; return; } if ( EXC5 ( word, "tying" ) ) { word[1] = 'i'; word[2] = 'e'; word[3] = '\0'; return; } if ( EXC6 ( word, "inning" ) ) return; if ( EXC6 ( word, "outing" ) ) return; if ( EXC7 ( word, "canning" ) ) return; #ifdef SNOWBALL2011 if ( EXC7 ( word, "herring" ) ) return; if ( EXC7 ( word, "earring" ) ) return; #endif break; case 's': if ( EXC5 ( word, "skies" ) ) { word[2] = 'y'; word[3] = '\0'; return; } if ( EXC7 ( word, "innings" ) ) { word[6] = '\0'; return; } if ( EXC7 ( word, "outings" ) ) { word[6] = '\0';return; } if ( EXC8 ( word, "cannings" ) ) { word[7] = '\0';return; } #ifdef SNOWBALL2011 if ( EXC4 ( word, "skis" ) ) { word[3] = '\0'; return; } if ( EXC5 ( word, "atlas" ) ) return; if ( EXC6 ( word, "cosmos" ) ) return; if ( EXC4 ( word, "bias" ) ) return; if ( EXC5 ( word, "andes" ) ) return; if ( EXC8 ( word, "herrings" ) ) { word[7] = '\0'; return; } if ( EXC8 ( word, "earrings" ) ) { word[7] = '\0'; return; } if ( EXC8 ( word, "proceeds" ) ) { word[7] = '\0'; return; } if ( EXC7 ( word, "exceeds" ) ) { word[6] = '\0'; return; } if ( EXC8 ( word, "succeeds" ) ) { word[7] = '\0'; return; } #endif break; case 'y': if ( EXC4 ( word, "idly" ) ) { word[3] = '\0';return; } if ( EXC6 ( word, "gently" ) ) { word[5] = '\0';return; } if ( EXC4 ( word, "ugly" ) ) { word[3] = 'i'; word[4] 
= '\0'; return; } if ( EXC5 ( word, "early" ) ) { word[4] = 'i'; word[5] = '\0'; return; } if ( EXC4 ( word, "only" ) ) { word[3] = 'i'; word[4] = '\0'; return; } if ( EXC6 ( word, "singly" ) ) { word[5] = '\0'; return; } break; } } // hide consonant-style y's if ( word[0]=='y' ) word[0] = has_Y = 'Y'; for ( i=1; i=5 && EXCBASE("gene") && word[4]=='r' ) { r1 = 5; // gener- first_vowel = 1; } #ifdef SNOWBALL2011 else if ( len>=6 && EXCBASE("comm") && word[4]=='u' && word[5]=='n' ) { r1 = 6; // commun- first_vowel = 1; } else if ( len>=5 && EXCBASE("arse") && word[4]=='n' ) { r1 = 5; // arsen- first_vowel = 0; } #endif else { for ( i=0; i=2 && W(1,c1) && W(2,c2) ) #define SUFF3(c3,c2,c1) ( len>=3 && W(1,c1) && W(2,c2) && W(3,c3) ) #define SUFF4(c4,c3,c2,c1) ( len>=4 && W(1,c1) && W(2,c2) && W(3,c3) && W(4,c4) ) #define SUFF5(c5,c4,c3,c2,c1) ( len>=5 && W(1,c1) && W(2,c2) && W(3,c3) && W(4,c4) && W(5,c5) ) #define SUFF6(c6,c5,c4,c3,c2,c1) ( len>=6 && W(1,c1) && W(2,c2) && W(3,c3) && W(4,c4) && W(5,c5) && W(6,c6) ) #define SUFF7(c7,c6,c5,c4,c3,c2,c1) ( len>=7 && W(1,c1) && W(2,c2) && W(3,c3) && W(4,c4) && W(5,c5) && W(6,c6) && W(7,c7) ) #define SUFF3A(c3,c2) ( len>=3 && W(2,c2) && W(3,c3) ) #define SUFF4A(c4,c3,c2) ( len>=4 && W(2,c2) && W(3,c3) && W(4,c4) ) #define SUFF5A(c5,c4,c3,c2) ( len>=5 && W(2,c2) && W(3,c3) && W(4,c4) && W(5,c5) ) #define SUFF6A(c6,c5,c4,c3,c2) ( len>=6 && W(2,c2) && W(3,c3) && W(4,c4) && W(5,c5) && W(6,c6) ) #define SUFF7A(c7,c6,c5,c4,c3,c2) ( len>=6 && W(2,c2) && W(3,c3) && W(4,c4) && W(5,c5) && W(6,c6) && W(7,c7) ) /////////// // STEP 1A /////////// #ifdef SNOWBALL2011 #define IED_ACTION { if ( len-->4 ) len--; } #else #define IED_ACTION { if ( len--!=4 ) len--; } #endif switch ( word[len-1] ) { case 'd': if ( word[len-3]=='i' && word[len-2]=='e' ) IED_ACTION break; case 's': if ( SUFF4 ( 's', 's', 'e', 's' ) ) // faster that suff4a for some reason! 
len -= 2; else if ( word[len-3]=='i' && word[len-2]=='e' ) IED_ACTION else if ( word[len-2]!='u' && word[len-2]!='s' ) { #ifdef SNOWBALL2011 if ( first_vowel<=len-3 ) #endif len--; } break; } /////////// // STEP 1B /////////// i = 0; switch ( word[len-1] ) { case 'd': if ( SUFF3A ( 'e', 'e' ) ) { if ( len-3>=r1 ) len--; break; } if ( word[len-2]=='e' ) i = 2; break; case 'y': if ( word[len-2]=='l' ) { if ( SUFF5A ( 'e', 'e', 'd', 'l' ) ) { if ( len-5>=r1 ) len -= 3; break; } if ( SUFF4A ( 'e', 'd', 'l' ) ) { i = 4; break; } if ( SUFF5A ( 'i', 'n', 'g', 'l' ) ) { i = 5; break; } } break; case 'g': if ( SUFF3A ( 'i', 'n' ) ) i = 3; break; } if ( i && first_vowel=2 && word[len-1]==word[len-2] && stem_en_id ( word[len-1] ) ) len--; else if ( ( len==2 && is_vowel(0) && !is_vowel(1) ) || ( len==r1 && !is_vowel ( len-3 ) && is_vowel ( len-2 ) && !stem_en_ivwxy ( word[len-1] ) ) ) { word[len++] = 'e'; } } /////////// // STEP 1C /////////// if ( len>2 && ( word[len-1]=='y' || word[len-1]=='Y' ) && !is_vowel ( len-2 ) ) { word[len-1] = 'i'; } ////////// // STEP 2 ////////// if ( len-2>=r1 ) switch ( word[len-1] ) { case 'i': if ( len>=3 && ( W ( 2, 'c' ) || W ( 2, 'l' ) || W ( 2, 't' ) ) ) { if ( SUFF4A ( 'e', 'n', 'c' ) ) { if ( len-4>=r1 ) word[len-1] = 'e'; break; } if ( SUFF4A ( 'a', 'n', 'c' ) ) { if ( len-4>=r1 ) word[len-1] = 'e'; break; } if ( SUFF4A ( 'a', 'b', 'l' ) ) { if ( len-4>=r1 ) word[len-1] = 'e'; break; } if ( SUFF3A ( 'b', 'l' ) ) { if ( len-3>=r1 ) word[len-1] = 'e'; break; } if ( SUFF5A ( 'e', 'n', 't', 'l' ) ) { if ( len-5>=r1 ) len -= 2; break; } if ( SUFF5A ( 'a', 'l', 'i', 't' ) ) { if ( len-5>=r1 ) len -= 3; break; } if ( SUFF5A ( 'o', 'u', 's', 'l' ) ) { if ( len-5>=r1 ) len -= 2; break; } if ( SUFF5A ( 'i', 'v', 'i', 't' ) ) { if ( len-5>=r1 ) { word[len-3] = 'e'; len -= 2; } break; } if ( SUFF6A ( 'b', 'i', 'l', 'i', 't' ) ) { if ( len-6>=r1 ) { word[len-5] = 'l'; word[len-4] = 'e'; len -= 3; } break; } if ( SUFF5A ( 'f', 'u', 'l', 'l' ) ) { if ( len-5>=r1 ) len -= 2; break; } if ( SUFF6A ( 'l', 'e', 's', 's', 'l' ) ) { if ( len-6>=r1 ) len -= 2; break; } } #ifdef SNOWBALL2011 if ( len-3>=r1 && SUFF3A ( 'o', 'g' ) && word[len-4]=='l' ) { len -= 1; break; } #else if ( len-3>=r1 && SUFF3A ( 'o', 'g' ) ) { len -= 1; break; } #endif if ( len-2>=r1 && word[len-2]=='l' ) len -= 2; else break; if ( len-2>=r1 && SUFF2 ( 'a', 'l' ) ) { len -= 2; if ( len-5>=r1 && SUFF5 ( 'a', 't', 'i', 'o', 'n' ) ) { len -= 3; word[len++] = 'e'; break; } if ( SUFF4 ( 't', 'i', 'o', 'n' ) ) break; len += 2; } else { switch ( word[len-1] ) { case 'b': case 'c': case 'd': case 'e': case 'g': case 'h': case 'k': case 'm': case 'n': case 'r': case 't': break; default: len += 2; break; } } break; case 'l': if ( SUFF7A ( 'a', 't', 'i', 'o', 'n', 'a' ) ) { if ( len-7>=r1 ) { word[len-5] = 'e'; len -= 4; } break; } if ( SUFF6A ( 't', 'i', 'o', 'n', 'a' ) ) { if ( len-6>=r1 ) len -= 2; break; } break; case 'm': if ( SUFF5A ( 'a', 'l', 'i', 's' ) ) { if ( len-5>=r1 ) len -= 3; break; } break; case 'n': if ( SUFF7A ( 'i', 'z', 'a', 't', 'i', 'o' ) ) { if ( len-7>=r1 ) { word[len-5] = 'e'; len -= 4; } break; } if ( SUFF5A ( 'a', 't', 'i', 'o' ) ) { if ( len-5>=r1 ) { word[len-3] = 'e'; len -= 2; } break; } break; case 'r': if ( SUFF4A ( 'i', 'z', 'e' ) ) { if ( len-4>=r1 ) len -= 1; break; } if ( SUFF4A ( 'a', 't', 'o' ) ) { if ( len-4>=r1 ) { word[len-2] = 'e'; len -= 1; } break; } break; case 's': if ( len-7>=r1 && ( SUFF7A ( 'f', 'u', 'l', 'n', 'e', 's' ) || SUFF7A ( 'o', 'u', 's', 'n', 'e', 's' ) || 
SUFF7A ( 'i', 'v', 'e', 'n', 'e', 's' ) ) ) { len -= 4; } break; } ////////// // STEP 3 ////////// if ( len-3>=r1 ) switch ( word[len-1] ) { case 'e': if ( SUFF5A ( 'a', 'l', 'i', 'z' ) ) { if ( len-5>=r1 ) len -= 3; break; } if ( SUFF5A ( 'i', 'c', 'a', 't' ) ) { if ( len-5>=r1 ) len -= 3; break; } #ifdef SNOWBALL2011 if ( SUFF5A ( 'a', 't', 'i', 'v' ) ) { if ( len-5>=r2 ) len -= 5; break; } #else if ( SUFF5A ( 'a', 't', 'i', 'v' ) ) { if ( len-5>=r1 ) len -= 5; break; } #endif break; case 'i': if ( SUFF5A ( 'i', 'c', 'i', 't' ) ) { if ( len-5>=r1 ) len -= 3; break; } break; case 'l': if ( SUFF4A ( 'i', 'c', 'a' ) ) { if ( len-4>=r1 ) len -= 2; break; } if ( SUFF3A ( 'f', 'u' ) ) { len -= 3; break; } break; case 's': if ( SUFF4A ( 'n', 'e', 's' ) ) { if ( len-4>=r1 ) len -= 4; break; } break; } ////////// // STEP 4 ////////// if ( len-2>=r2 ) switch ( word[len-1] ) { case 'c': if ( word[len-2]=='i' ) len -= 2; // -ic break; case 'e': if ( len-3>=r2 ) { if ( SUFF4A ( 'a', 'n', 'c' ) ) { if ( len-4>=r2 ) len -= 4; break; } if ( SUFF4A ( 'e', 'n', 'c' ) ) { if ( len-4>=r2 ) len -= 4; break; } if ( SUFF4A ( 'a', 'b', 'l' ) ) { if ( len-4>=r2 ) len -= 4; break; } if ( SUFF4A ( 'i', 'b', 'l' ) ) { if ( len-4>=r2 ) len -= 4; break; } if ( SUFF3A ( 'a', 't' ) ) { len -= 3; break; } if ( SUFF3A ( 'i', 'v' ) ) { len -= 3; break; } if ( SUFF3A ( 'i', 'z' ) ) { len -= 3; break; } } break; case 'i': if ( SUFF3A ( 'i', 't' ) ) { if ( len-3>=r2 ) len -= 3; break; } break; case 'l': if ( word[len-2]=='a' ) len -= 2; // -al break; case 'm': if ( SUFF3A ( 'i', 's' ) ) { if ( len-3>=r2 ) len -= 3; break; } break; case 'n': if ( len-3>=r2 && SUFF3 ( 'i', 'o', 'n' ) && ( word[len-4]=='t' || word[len-4]=='s' ) ) len -= 3; break; case 'r': if ( word[len-2]=='e' ) len -= 2; // -er break; case 's': if ( SUFF3A ( 'o', 'u' ) ) { if ( len-3>=r2 ) len -= 3; break; } break; case 't': if ( word[len-2]=='n' ) { if ( SUFF5A ( 'e', 'm', 'e', 'n' ) ) { if ( len-5>=r2 ) len -= 5; break; } if ( SUFF4A ( 'm', 'e', 'n' ) ) { if ( len-4>=r2 ) len -= 4; break; } if ( SUFF3A ( 'a', 'n' ) ) { if ( len-3>=r2 ) len -= 3; break; } if ( SUFF3A ( 'e', 'n' ) ) { if ( len-3>=r2 ) len -= 3; break; } } break; } ////////// // STEP 5 ////////// #ifdef SNOWBALL2011 if ( len>r2 && word[len-1]=='l' && word[len-2]=='l' ) len--; else #endif while ( word[len-1]=='e' ) { if ( len>r2 ) { len--; break; } if ( len<=r1 ) break; if ( len>3 && !is_vowel ( len-4 ) && is_vowel ( len-3 ) && !stem_en_ivwxy ( word[len-2] ) ) break; if ( len==3 && is_vowel(0) && !is_vowel(1) ) break; len--; break; } #ifndef SNOWBALL2011 if ( len>r2 && word[len-1]=='l' && word[len-2]=='l' ) len--; #endif //////////// // FINALIZE //////////// word[len] = 0; if ( has_Y ) for ( i=0; i) { next if ( !/\$Id: \S+ (\d+)/ ); $maxrev = $1 if ( $1>$maxrev ); } print < #include #include #ifdef _MSC_VER #define snprintf _snprintf #define DLLEXPORT __declspec(dllexport) #else #define DLLEXPORT #endif /// UDF initialization /// gets called on every query, when query begins /// args are filled with values for a particular query DLLEXPORT int sequence_init ( SPH_UDF_INIT * init, SPH_UDF_ARGS * args, char * error_message ) { // check argument count if ( args->arg_count > 1 ) { snprintf ( error_message, SPH_UDF_ERROR_LEN, "SEQUENCE() takes either 0 or 1 arguments" ); return 1; } // check argument type if ( args->arg_count && args->arg_types[0]!=SPH_UDF_TYPE_UINT32 ) { snprintf ( error_message, SPH_UDF_ERROR_LEN, "SEQUENCE() requires 1st argument to be uint" ); return 1; } // allocate and init 
counter storage init->func_data = (void*) malloc ( sizeof(int) ); if ( !init->func_data ) { snprintf ( error_message, SPH_UDF_ERROR_LEN, "malloc() failed" ); return 1; } *(int*)init->func_data = 1; // all done return 0; } /// UDF deinitialization /// gets called on every query, when query ends DLLEXPORT void sequence_deinit ( SPH_UDF_INIT * init ) { // deallocate storage if ( init->func_data ) { free ( init->func_data ); init->func_data = NULL; } } /// UDF implementation /// gets called for every row, unless optimized away DLLEXPORT sphinx_int64_t sequence ( SPH_UDF_INIT * init, SPH_UDF_ARGS * args, char * error_flag ) { int res = (*(int*)init->func_data)++; if ( args->arg_count ) res += *(int*)args->arg_values[0]; return res; } ////////////////////////////////////////////////////////////////////////// DLLEXPORT int strtoint_init ( SPH_UDF_INIT * init, SPH_UDF_ARGS * args, char * error_message ) { if ( args->arg_count!=1 || args->arg_types[0]!=SPH_UDF_TYPE_STRING ) { snprintf ( error_message, SPH_UDF_ERROR_LEN, "STRTOINT() requires 1 string argument" ); return 1; } return 0; } DLLEXPORT sphinx_int64_t strtoint ( SPH_UDF_INIT * init, SPH_UDF_ARGS * args, char * error_flag ) { const char * s = args->arg_values[0]; int len = args->str_lengths[0], res = 0; while ( len>0 && *s>='0' && *s<='9' ) { res += *s - '0'; len--; } return res; } ////////////////////////////////////////////////////////////////////////// DLLEXPORT int avgmva_init ( SPH_UDF_INIT * init, SPH_UDF_ARGS * args, char * error_message ) { if ( args->arg_count!=1 || ( args->arg_types[0]!=SPH_UDF_TYPE_UINT32SET && args->arg_types[0]!=SPH_UDF_TYPE_UINT64SET ) ) { snprintf ( error_message, SPH_UDF_ERROR_LEN, "AVGMVA() requires 1 MVA argument" ); return 1; } // store our mva vs mva64 flag to func_data init->func_data = (void*)(int)( args->arg_types[0]==SPH_UDF_TYPE_UINT64SET ? 1 : 0 ); return 0; } DLLEXPORT double avgmva ( SPH_UDF_INIT * init, SPH_UDF_ARGS * args, char * error_flag ) { unsigned int * mva = (unsigned int *) args->arg_values[0]; double res = 0; int i, n, is64; if ( !mva ) return res; // Both MVA32 and MVA64 are stored as dword (unsigned 32-bit) arrays. // The first dword stores the array length (always in dwords too), and // the next ones store the values. In pseudocode: // // unsigned int num_dwords // unsigned int data [ num_dwords ] // // With MVA32, this lets you access the values pretty naturally. // // With MVA64, however, we have to do a few tricks: // a) divide num_dwords by 2 to get the number of 64-bit elements, // b) assemble those 64-bit values from dword pairs. // // The latter is required for architectures where non-aligned // 64-bit access crashes. 
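// A minimal sketch of the dword-pair assembly described above, kept as a comment
// because it is illustrative only (avgmva64_sketch is not part of this file, and
// the low-dword-first order is an assumption to be checked against the engine's
// own MVA helpers):
//
//	static double avgmva64_sketch ( const unsigned int * mva )
//	{
//		int i, n = mva[0] / 2;							// first dword = total length in dwords
//		double sum = 0.0;
//		for ( i=0; i<n; i++ )
//		{
//			sphinx_uint64_t v = (sphinx_uint64_t)mva[1+2*i]		// low dword (assumed order)
//				| ( (sphinx_uint64_t)mva[2+2*i]<<32 );			// high dword
//			sum += (double)v;
//		}
//		return n ? sum/n : 0.0;
//	}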
On Intel, we could have also done it // like this: // // int * raw_ptr = (int*) args->arg_values[0]; // int mva64_count = (*raw_ptr) / 2; // sphinx_uint64_t * mva64_values = (sphinx_uint64_t*)(raw_ptr + 1); // pull "mva32 or mva64" flag (that we stored in _init) from func_data is64 = (int)(init->func_data) != 0; if ( is64 ) { // handle mva64 n = *mva++ / 2; for ( i=0; i #include #include #include #include #include #include #include #include #include #include #define SEARCHD_BACKLOG 5 #define SPHINXAPI_PORT 9312 #define SPHINXQL_PORT 9306 #define SPH_ADDRESS_SIZE sizeof("000.000.000.000") #define SPH_ADDRPORT_SIZE sizeof("000.000.000.000:00000") #define MVA_UPDATES_POOL 1048576 #define NETOUTBUF 8192 // don't shutdown on SIGKILL (debug purposes) // 1 - SIGKILL will shut down the whole daemon; 0 - watchdog will reincarnate the daemon #define WATCHDOG_SIGKILL 1 ///////////////////////////////////////////////////////////////////////////// #if USE_WINDOWS // Win-specific headers and calls #include #include #include #define sphSockRecv(_sock,_buf,_len) ::recv(_sock,_buf,_len,0) #define sphSockSend(_sock,_buf,_len) ::send(_sock,_buf,_len,0) #define sphSockClose(_sock) ::closesocket(_sock) #define stat _stat #else // UNIX-specific headers and calls #include #include #include #include #include #include #include #include #include // there's no MSG_NOSIGNAL on OS X #ifndef MSG_NOSIGNAL #define MSG_NOSIGNAL 0 #endif #define sphSockRecv(_sock,_buf,_len) ::recv(_sock,_buf,_len,MSG_NOSIGNAL) #define sphSockSend(_sock,_buf,_len) ::send(_sock,_buf,_len,MSG_NOSIGNAL) #define sphSockClose(_sock) ::close(_sock) #endif #if USE_SYSLOG #include #endif ///////////////////////////////////////////////////////////////////////////// // MISC GLOBALS ///////////////////////////////////////////////////////////////////////////// struct ServedIndex_t { CSphIndex * m_pIndex; CSphString m_sIndexPath; bool m_bEnabled; ///< to disable index in cases when rotation fails bool m_bMlock; bool m_bPreopen; bool m_bOnDiskDict; bool m_bStar; bool m_bExpand; bool m_bToDelete; bool m_bOnlyNew; bool m_bRT; public: ServedIndex_t (); ~ServedIndex_t (); void Reset (); void ReadLock () const; void WriteLock () const; void Unlock () const; private: mutable CSphRwlock m_tLock; }; ///////////////////////////////////////////////////////////////////////////// enum ESphAddIndex { ADD_ERROR = 0, ADD_LOCAL = 1, ADD_DISTR = 2, ADD_RT = 3 }; enum ProtocolType_e { PROTO_SPHINX = 0, PROTO_MYSQL41, PROTO_TOTAL }; const char * g_dProtoNames[PROTO_TOTAL] = { "sphinxapi", "sphinxql" }; static bool g_bService = false; #if USE_WINDOWS static bool g_bServiceStop = false; static const char * g_sServiceName = "searchd"; HANDLE g_hPipe = INVALID_HANDLE_VALUE; #endif static CSphVector g_dArgs; static bool g_bHeadDaemon = false; static bool g_bLogStdout = true; struct CrashQuery_t { const BYTE * m_pQuery; // last query int m_iSize; // last query size WORD m_uCMD; // last command (header) WORD m_uVer; // last command's version (header) bool m_bMySQL; // is query from MySQL or API CrashQuery_t () : m_pQuery ( NULL ) , m_iSize ( 0 ) , m_uCMD ( 0 ) , m_uVer ( 0 ) , m_bMySQL ( false ) { } }; class SphCrashLogger_c { public: SphCrashLogger_c () {} static void Init (); static void Done (); #if !USE_WINDOWS static void HandleCrash ( int ); #else static LONG WINAPI HandleCrash ( EXCEPTION_POINTERS * pExc ); #endif static void SetLastQuery ( const CrashQuery_t & tQuery ); static void SetupTimePID (); static CrashQuery_t GetQuery (); void SetupTLS (); private: CrashQuery_t 
m_tQuery; // per thread copy of last query for thread mode static CrashQuery_t m_tForkQuery; // copy of last query for fork / prefork modes static SphThreadKey_t m_tLastQueryTLS; // last query ( non threaded workers could use dist_threads too ) }; enum LogFormat_e { LOG_FORMAT_PLAIN, LOG_FORMAT_SPHINXQL }; static ESphLogLevel g_eLogLevel = SPH_LOG_INFO; static int g_iLogFile = STDOUT_FILENO; // log file descriptor static bool g_bLogSyslog = false; static bool g_bQuerySyslog = false; static CSphString g_sLogFile; // log file name static bool g_bLogTty = false; // cached isatty(g_iLogFile) static LogFormat_e g_eLogFormat = LOG_FORMAT_PLAIN; static int g_iReadTimeout = 5; // sec static int g_iWriteTimeout = 5; static int g_iClientTimeout = 300; static int g_iMaxChildren = 0; #if !USE_WINDOWS static bool g_bPreopenIndexes = true; #else static bool g_bPreopenIndexes = false; #endif static bool g_bOnDiskDicts = false; static bool g_bUnlinkOld = true; static bool g_bWatchdog = true; static int g_iExpansionLimit = 0; static bool g_bCompatResults = true; struct Listener_t { int m_iSock; ProtocolType_e m_eProto; }; static CSphVector g_dListeners; static int g_iQueryLogFile = -1; static CSphString g_sQueryLogFile; static const char * g_sPidFile = NULL; static int g_iPidFD = -1; static int g_iMaxMatches = 1000; static int g_iMaxCachedDocs = 0; // in bytes static int g_iMaxCachedHits = 0; // in bytes static int g_iAttrFlushPeriod = 0; // in seconds; 0 means "do not flush" static int g_iMaxPacketSize = 8*1024*1024; // in bytes; for both query packets from clients and response packets from agents static int g_iMaxFilters = 256; static int g_iMaxFilterValues = 4096; static int g_iMaxBatchQueries = 32; static ESphCollation g_eCollation = SPH_COLLATION_DEFAULT; #if !USE_WINDOWS static CSphProcessSharedVariable g_tHaveTTY ( true ); #endif enum Mpm_e { MPM_NONE, ///< process queries in a loop one by one (eg. in --console) MPM_FORK, ///< fork a worker process for each query MPM_PREFORK, ///< keep a number of pre-forked processes MPM_THREADS ///< create a worker thread for each query }; static Mpm_e g_eWorkers = USE_WINDOWS ? 
MPM_THREADS : MPM_FORK; static int g_iPreforkChildren = 10; // how much workers to keep static CSphVector g_dChildren; static volatile bool g_bAcceptUnlocked = true; // whether this preforked child is guaranteed to be *not* holding a lock around accept static int g_iClientFD = -1; static int g_iDistThreads = 0; enum ThdState_e { THD_HANDSHAKE, THD_NET_READ, THD_NET_WRITE, THD_QUERY, THD_STATE_TOTAL }; const char * g_dThdStates[THD_STATE_TOTAL] = { "handshake", "net_read", "net_write", "query" }; struct ThdDesc_t { SphThread_t m_tThd; ProtocolType_e m_eProto; int m_iClientSock; CSphString m_sClientName; ThdState_e m_eThdState; const char * m_sCommand; int m_iConnID; ///< current conn-id for this thread ThdDesc_t () : m_iClientSock ( 0 ) , m_sCommand ( NULL ) , m_iConnID ( -1 ) {} }; static CSphStaticMutex g_tThdMutex; static CSphVector g_dThd; ///< existing threads table static int g_iConnID = 0; ///< global conn-id in none/fork/threads; current conn-id in prefork static SphThreadKey_t g_tConnKey; ///< current conn-id TLS in threads static int * g_pConnID = NULL; ///< global conn-id ptr in prefork static CSphSharedBuffer g_dConnID; ///< global conn-id storage in prefork (protected by accept mutex) // handshake static char g_sMysqlHandshake[128]; static int g_iMysqlHandshake = 0; ////////////////////////////////////////////////////////////////////////// static CSphString g_sConfigFile; static DWORD g_uCfgCRC32 = 0; static struct stat g_tCfgStat; static CSphConfigParser * g_pCfg = NULL; #if USE_WINDOWS static bool g_bSeamlessRotate = false; #else static bool g_bSeamlessRotate = true; #endif static bool g_bIOStats = false; static bool g_bCpuStats = false; static bool g_bOptNoDetach = false; static bool g_bOptNoLock = false; static bool g_bSafeTrace = false; static bool g_bStripPath = false; static volatile bool g_bDoDelete = false; // do we need to delete any indexes? static volatile int g_iRotateCount = 0; // flag that we are rotating now; set from SIGHUP; cleared on rotation success static volatile sig_atomic_t g_bGotSighup = 0; // we just received SIGHUP; need to log static volatile sig_atomic_t g_bGotSigterm = 0; // we just received SIGTERM; need to shutdown static volatile sig_atomic_t g_bGotSigchld = 0; // we just received SIGCHLD; need to count dead children static volatile sig_atomic_t g_bGotSigusr1 = 0; // we just received SIGUSR1; need to reopen logs // pipe to watchdog to inform that daemon is going to close, so no need to restart it in case of crash static CSphSharedBuffer g_bDaemonAtShutdown; static CSphVector g_dTermChildren; // children to send term signal on rotation is done static int64_t g_tmRotateChildren = 0; // pause to next children term signal after rotation is done static int g_iRotationThrottle = 0; // pause between children term signals after rotation is done /// global index hash /// used in both non-threaded and multi-threaded modes /// /// hash entry is a CSphIndex pointer, rwlock, and a few flags (see ServedIndex_t) /// rlock on entry guarantees it won't change, eg. 
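// Typical read-path usage implied by the API declared below (sIndexName here is a
// placeholder; this snippet is an illustration, not code from the original file):
//
//	const ServedIndex_t * pServed = g_pIndexes->GetRlockedEntry ( sIndexName );
//	if ( pServed )
//	{
//		if ( pServed->m_bEnabled )
//		{
//			// ... run the query against pServed->m_pIndex ...
//		}
//		pServed->Unlock(); // releases the entry rlock taken by GetRlockedEntry()
//	}
//
// Writers use GetWlockedEntry() the same way, and Delete() acquires the entry
// wlock first, so it waits for any reader still holding the rlock before the
// entry is actually removed.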
that index pointer will stay alive /// wlock on entry allows to change (delete/replace) the index pointer /// /// note that entry locks are held outside the hash /// and Delete() honours that by acquiring wlock on an entry first class IndexHash_c : protected SmallStringHash_T { friend class IndexHashIterator_c; typedef SmallStringHash_T BASE; public: explicit IndexHash_c (); virtual ~IndexHash_c (); int GetLength () const { return BASE::GetLength(); } void Reset () { BASE::Reset(); } bool Add ( const ServedIndex_t & tValue, const CSphString & tKey ); bool Delete ( const CSphString & tKey ); const ServedIndex_t * GetRlockedEntry ( const CSphString & tKey ) const; ServedIndex_t * GetWlockedEntry ( const CSphString & tKey ) const; ServedIndex_t & GetUnlockedEntry ( const CSphString & tKey ) const; bool Exists ( const CSphString & tKey ) const; protected: void Rlock () const; void Wlock () const; void Unlock () const; private: mutable CSphRwlock m_tLock; }; /// multi-threaded hash iterator class IndexHashIterator_c : public ISphNoncopyable { public: explicit IndexHashIterator_c ( const IndexHash_c * pHash, bool bWrite=false ); ~IndexHashIterator_c (); bool Next (); ServedIndex_t & Get (); const CSphString & GetKey (); private: const IndexHash_c * m_pHash; IndexHash_c::HashEntry_t * m_pIterator; }; static IndexHash_c * g_pIndexes = NULL; // served indexes hash static CSphVector g_dRotating; // names of indexes to be rotated this time static const char * g_sPrereading = NULL; // name of index currently being preread static CSphIndex * g_pPrereading = NULL; // rotation "buffer" static CSphMutex g_tRotateQueueMutex; static CSphVector g_dRotateQueue; // FIXME? maybe replace it with lockless ring buffer static CSphMutex g_tRotateConfigMutex; static SphThread_t g_tRotateThread; static volatile bool g_bRotateShutdown = false; /// flush parameters of rt indexes static SphThread_t g_tRtFlushThread; static volatile bool g_bRtFlushShutdown = false; struct DistributedMutex_t { void Init (); void Done (); void Lock (); void Unlock (); private: CSphMutex m_tLock; }; static DistributedMutex_t g_tDistLock; enum { SPH_PIPE_PREREAD }; struct PipeInfo_t { int m_iFD; ///< read-pipe to child int m_iHandler; ///< who's my handler (SPH_PIPE_xxx) PipeInfo_t () : m_iFD ( -1 ), m_iHandler ( -1 ) {} }; static CSphVector g_dPipes; ///< currently open read-pipes to children processes struct PoolPtrs_t { const DWORD * m_pMva; const BYTE * m_pStrings; PoolPtrs_t () : m_pMva ( NULL ) , m_pStrings ( NULL ) {} }; ///////////////////////////////////////////////////////////////////////////// /// known commands enum SearchdCommand_e { SEARCHD_COMMAND_SEARCH = 0, SEARCHD_COMMAND_EXCERPT = 1, SEARCHD_COMMAND_UPDATE = 2, SEARCHD_COMMAND_KEYWORDS = 3, SEARCHD_COMMAND_PERSIST = 4, SEARCHD_COMMAND_STATUS = 5, SEARCHD_COMMAND_FLUSHATTRS = 7, SEARCHD_COMMAND_SPHINXQL = 8, SEARCHD_COMMAND_TOTAL }; /// known command versions enum { VER_COMMAND_SEARCH = 0x119, VER_COMMAND_EXCERPT = 0x104, VER_COMMAND_UPDATE = 0x102, VER_COMMAND_KEYWORDS = 0x100, VER_COMMAND_STATUS = 0x100, VER_COMMAND_FLUSHATTRS = 0x100, VER_COMMAND_SPHINXQL = 0x100 }; /// known status return codes enum SearchdStatus_e { SEARCHD_OK = 0, ///< general success, command-specific reply follows SEARCHD_ERROR = 1, ///< general failure, error message follows SEARCHD_RETRY = 2, ///< temporary failure, error message follows, client should retry later SEARCHD_WARNING = 3 ///< general success, warning message and command-specific reply follow }; enum { VER_MASTER = 1 }; /// command 
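// A client request on the SphinxAPI protocol starts, after the initial handshake,
// with an 8-byte header: command word, command version word, and body length
// dword, all in network byte order, filled from the SEARCHD_COMMAND_xxx and
// VER_COMMAND_xxx values above. The helper below is an illustrative sketch of that
// layout only (SketchApiHeader is not part of the original source); the crash-dump
// code later in this file rebuilds the same header when logging the last query.
static void SketchApiHeader ( BYTE * pOut, WORD uCommand, WORD uVersion, DWORD uBodyLen )
{
	pOut[0] = (BYTE)( uCommand>>8 );
	pOut[1] = (BYTE)( uCommand & 0xff );
	pOut[2] = (BYTE)( uVersion>>8 );
	pOut[3] = (BYTE)( uVersion & 0xff );
	pOut[4] = (BYTE)( uBodyLen>>24 );
	pOut[5] = (BYTE)( ( uBodyLen>>16 ) & 0xff );
	pOut[6] = (BYTE)( ( uBodyLen>>8 ) & 0xff );
	pOut[7] = (BYTE)( uBodyLen & 0xff );
}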
names const char * g_dApiCommands[SEARCHD_COMMAND_TOTAL] = { "search", "excerpt", "update", "keywords", "persist", "status", "query", "flushattrs" }; const int MAX_RETRY_COUNT = 8; const int MAX_RETRY_DELAY = 1000; ////////////////////////////////////////////////////////////////////////// const int STATS_MAX_AGENTS = 1024; ///< we'll track stats for this much remote agents /// per-agent query stats struct AgentStats_t { int64_t m_iTimeoutsQuery; ///< number of time-outed queries int64_t m_iTimeoutsConnect; ///< number of time-outed connections int64_t m_iConnectFailures; ///< failed to connect int64_t m_iNetworkErrors; ///< network error int64_t m_iWrongReplies; ///< incomplete reply int64_t m_iUnexpectedClose; ///< agent closed the connection }; struct SearchdStats_t { DWORD m_uStarted; int64_t m_iConnections; int64_t m_iMaxedOut; int64_t m_iCommandCount[SEARCHD_COMMAND_TOTAL]; int64_t m_iAgentConnect; int64_t m_iAgentRetry; int64_t m_iQueries; ///< search queries count (differs from search commands count because of multi-queries) int64_t m_iQueryTime; ///< wall time spent (including network wait time) int64_t m_iQueryCpuTime; ///< CPU time spent int64_t m_iDistQueries; ///< distributed queries count int64_t m_iDistWallTime; ///< wall time spent on distributed queries int64_t m_iDistLocalTime; ///< wall time spent searching local indexes in distributed queries int64_t m_iDistWaitTime; ///< time spent waiting for remote agents in distributed queries int64_t m_iDiskReads; ///< total read IO calls (fired by search queries) int64_t m_iDiskReadBytes; ///< total read IO traffic int64_t m_iDiskReadTime; ///< total read IO time DWORD m_bmAgentStats[STATS_MAX_AGENTS/32]; ///< per-agent storage usage bitmap AgentStats_t m_dAgentStats[STATS_MAX_AGENTS]; ///< per-agent storage }; static SearchdStats_t * g_pStats = NULL; static CSphSharedBuffer g_tStatsBuffer; static CSphProcessSharedMutex g_tStatsMutex; ////////////////////////////////////////////////////////////////////////// struct FlushState_t { int m_bFlushing; ///< update flushing in progress int m_iFlushTag; ///< last flushed tag bool m_bForceCheck; ///< forced check/flush flag }; static volatile FlushState_t * g_pFlush = NULL; static CSphSharedBuffer g_tFlushBuffer; static CSphMutex g_tFlushMutex; ////////////////////////////////////////////////////////////////////////// /// available uservar types enum Uservar_e { USERVAR_INT_SET }; /// uservar name to value binding struct Uservar_t { Uservar_e m_eType; UservarIntSet_c * m_pVal; Uservar_t () : m_eType ( USERVAR_INT_SET ) , m_pVal ( NULL ) {} }; static CSphStaticMutex g_tUservarsMutex; static SmallStringHash_T g_hUservars; ///////////////////////////////////////////////////////////////////////////// // MACHINE-DEPENDENT STUFF ///////////////////////////////////////////////////////////////////////////// #if USE_WINDOWS // Windows hacks #undef EINTR #define LOCK_EX 0 #define LOCK_UN 1 #define STDIN_FILENO fileno(stdin) #define STDOUT_FILENO fileno(stdout) #define STDERR_FILENO fileno(stderr) #define ETIMEDOUT WSAETIMEDOUT #define EWOULDBLOCK WSAEWOULDBLOCK #define EINPROGRESS WSAEINPROGRESS #define EINTR WSAEINTR #define ECONNRESET WSAECONNRESET #define ECONNABORTED WSAECONNABORTED #define socklen_t int #define ftruncate _chsize #define getpid GetCurrentProcessId #endif // USE_WINDOWS const int EXT_COUNT = 8; const int EXT_MVP = 8; const char * g_dNewExts[EXT_COUNT] = { ".new.sph", ".new.spa", ".new.spi", ".new.spd", ".new.spp", ".new.spm", ".new.spk", ".new.sps" }; const char * g_dOldExts[] = { 
".old.sph", ".old.spa", ".old.spi", ".old.spd", ".old.spp", ".old.spm", ".old.spk", ".old.sps", ".old.mvp" }; const char * g_dCurExts[] = { ".sph", ".spa", ".spi", ".spd", ".spp", ".spm", ".spk", ".sps", ".mvp" }; ///////////////////////////////////////////////////////////////////////////// // MISC ///////////////////////////////////////////////////////////////////////////// void ReleaseTTYFlag() { #if !USE_WINDOWS g_tHaveTTY.WriteValue(false); #endif } ServedIndex_t::ServedIndex_t () { Reset (); } void ServedIndex_t::Reset () { m_pIndex = NULL; m_bEnabled = true; m_bMlock = false; m_bPreopen = false; m_bOnDiskDict = false; m_bStar = false; m_bExpand = false; m_bToDelete = false; m_bOnlyNew = false; m_bRT = false; m_tLock = CSphRwlock(); if ( g_eWorkers==MPM_THREADS ) m_tLock.Init(); } ServedIndex_t::~ServedIndex_t () { SafeDelete ( m_pIndex ); if ( g_eWorkers==MPM_THREADS ) Verify ( m_tLock.Done() ); } void ServedIndex_t::ReadLock () const { if ( g_eWorkers==MPM_THREADS ) { if ( m_tLock.ReadLock() ) sphLogDebugvv ( "ReadLock %p", this ); else { sphLogDebug ( "ReadLock %p failed", this ); assert ( false ); } } } void ServedIndex_t::WriteLock () const { if ( g_eWorkers==MPM_THREADS ) { if ( m_tLock.WriteLock() ) sphLogDebugvv ( "WriteLock %p", this ); else { sphLogDebug ( "WriteLock %p failed", this ); assert ( false ); } } } void ServedIndex_t::Unlock () const { if ( g_eWorkers==MPM_THREADS ) { if ( m_tLock.Unlock() ) sphLogDebugvv ( "Unlock %p", this ); else { sphLogDebug ( "Unlock %p failed", this ); assert ( false ); } } } ////////////////////////////////////////////////////////////////////////// IndexHashIterator_c::IndexHashIterator_c ( const IndexHash_c * pHash, bool bWrite ) : m_pHash ( pHash ) , m_pIterator ( NULL ) { if ( !bWrite ) m_pHash->Rlock(); else m_pHash->Wlock(); } IndexHashIterator_c::~IndexHashIterator_c () { m_pHash->Unlock(); } bool IndexHashIterator_c::Next () { m_pIterator = m_pIterator ? m_pIterator->m_pNextByOrder : m_pHash->m_pFirstByOrder; return m_pIterator!=NULL; } ServedIndex_t & IndexHashIterator_c::Get () { assert ( m_pIterator ); return m_pIterator->m_tValue; } const CSphString & IndexHashIterator_c::GetKey () { assert ( m_pIterator ); return m_pIterator->m_tKey; } ////////////////////////////////////////////////////////////////////////// IndexHash_c::IndexHash_c () { if ( g_eWorkers==MPM_THREADS ) if ( !m_tLock.Init() ) sphDie ( "failed to init hash indexes rwlock" ); } IndexHash_c::~IndexHash_c() { if ( g_eWorkers==MPM_THREADS ) Verify ( m_tLock.Done() ); } void IndexHash_c::Rlock () const { if ( g_eWorkers==MPM_THREADS ) Verify ( m_tLock.ReadLock() ); } void IndexHash_c::Wlock () const { if ( g_eWorkers==MPM_THREADS ) Verify ( m_tLock.WriteLock() ); } void IndexHash_c::Unlock () const { if ( g_eWorkers==MPM_THREADS ) Verify ( m_tLock.Unlock() ); } bool IndexHash_c::Add ( const ServedIndex_t & tValue, const CSphString & tKey ) { Wlock(); bool bRes = BASE::Add ( tValue, tKey ); Unlock(); return bRes; } bool IndexHash_c::Delete ( const CSphString & tKey ) { // tricky part // hash itself might be unlocked, but entry (!) 
might still be locked // hence, we also need to acquire a lock on entry, and an exclusive one Wlock(); bool bRes = false; ServedIndex_t * pEntry = BASE::operator() ( tKey ); if ( pEntry ) { pEntry->WriteLock(); pEntry->Unlock(); bRes = BASE::Delete ( tKey ); } Unlock(); return bRes; } const ServedIndex_t * IndexHash_c::GetRlockedEntry ( const CSphString & tKey ) const { Rlock(); ServedIndex_t * pEntry = BASE::operator() ( tKey ); if ( pEntry ) pEntry->ReadLock(); Unlock(); return pEntry; } ServedIndex_t * IndexHash_c::GetWlockedEntry ( const CSphString & tKey ) const { Rlock(); ServedIndex_t * pEntry = BASE::operator() ( tKey ); if ( pEntry ) pEntry->WriteLock(); Unlock(); return pEntry; } ServedIndex_t & IndexHash_c::GetUnlockedEntry ( const CSphString & tKey ) const { return BASE::operator[] ( tKey ); } bool IndexHash_c::Exists ( const CSphString & tKey ) const { Rlock(); bool bRes = BASE::Exists ( tKey ); Unlock(); return bRes; } ////////////////////////////////////////////////////////////////////////// void DistributedMutex_t::Init () { if ( g_eWorkers==MPM_THREADS ) m_tLock.Init(); } void DistributedMutex_t::Done () { if ( g_eWorkers==MPM_THREADS ) m_tLock.Done(); } void DistributedMutex_t::Lock () { if ( g_eWorkers==MPM_THREADS ) m_tLock.Lock(); } void DistributedMutex_t::Unlock() { if ( g_eWorkers==MPM_THREADS ) m_tLock.Unlock(); } ///////////////////////////////////////////////////////////////////////////// // LOGGING ///////////////////////////////////////////////////////////////////////////// void Shutdown (); // forward ref for sphFatal() /// format current timestamp for logging int sphFormatCurrentTime ( char * sTimeBuf, int iBufLen ) { int64_t iNow = sphMicroTimer (); time_t ts = (time_t) ( iNow/1000000 ); // on some systems (eg. FreeBSD 6.2), tv.tv_sec has another type and we can't just pass it #if !USE_WINDOWS struct tm tmp; localtime_r ( &ts, &tmp ); #else struct tm tmp; tmp = *localtime ( &ts ); #endif static const char * sWeekday[7] = { "Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat" }; static const char * sMonth[12] = { "Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec" }; return snprintf ( sTimeBuf, iBufLen, "%.3s %.3s%3d %.2d:%.2d:%.2d.%.3d %d", sWeekday [ tmp.tm_wday ], sMonth [ tmp.tm_mon ], tmp.tm_mday, tmp.tm_hour, tmp.tm_min, tmp.tm_sec, (int)((iNow%1000000)/1000), 1900+tmp.tm_year ); } /// physically emit log entry /// buffer must have 1 extra byte for linefeed void sphLogEntry ( ESphLogLevel eLevel, char * sBuf, char * sTtyBuf ) { #if USE_WINDOWS if ( g_bService && g_iLogFile==STDOUT_FILENO ) { HANDLE hEventSource; LPCTSTR lpszStrings[2]; hEventSource = RegisterEventSource ( NULL, g_sServiceName ); if ( hEventSource ) { lpszStrings[0] = g_sServiceName; lpszStrings[1] = sBuf; WORD eType = EVENTLOG_INFORMATION_TYPE; switch ( eLevel ) { case SPH_LOG_FATAL: eType = EVENTLOG_ERROR_TYPE; break; case SPH_LOG_WARNING: eType = EVENTLOG_WARNING_TYPE; break; case SPH_LOG_INFO: eType = EVENTLOG_INFORMATION_TYPE; break; } ReportEvent ( hEventSource, // event log handle eType, // event type 0, // event category 0, // event identifier NULL, // no security identifier 2, // size of lpszStrings array 0, // no binary data lpszStrings, // array of strings NULL ); // no binary data DeregisterEventSource ( hEventSource ); } } else #endif { strcat ( sBuf, "\n" ); // NOLINT lseek ( g_iLogFile, 0, SEEK_END ); if ( g_bLogTty ) sphWrite ( g_iLogFile, sTtyBuf, strlen(sTtyBuf) ); else sphWrite ( g_iLogFile, sBuf, strlen(sBuf) ); if ( g_bLogStdout && 
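// For reference, sphFormatCurrentTime() above yields stamps such as
// "Sat Mar 10 14:05:09.123 2012" (abbreviated weekday and month, space-padded day,
// time with milliseconds, year), so a plain log line comes out roughly as
// "[Sat Mar 10 14:05:09.123 2012] [ 1234] WARNING: ..." once sphLog() below adds
// its "[timestamp] [pid]" banner.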
g_iLogFile!=STDOUT_FILENO ) sphWrite ( STDOUT_FILENO, sTtyBuf, strlen(sTtyBuf) ); } } /// log entry (with log levels, dupe catching, etc) /// call with NULL format for dupe flushing void sphLog ( ESphLogLevel eLevel, const char * sFmt, va_list ap ) { // dupe catcher state static const int FLUSH_THRESH_TIME = 1000000; // in microseconds static const int FLUSH_THRESH_COUNT = 100; static ESphLogLevel eLastLevel = SPH_LOG_INFO; static DWORD uLastEntry = 0; static int64_t tmLastStamp = -1000000-FLUSH_THRESH_TIME; static int iLastRepeats = 0; // only if we can if ( sFmt && eLevel>g_eLogLevel ) return; #if USE_SYSLOG if ( g_bLogSyslog && sFmt ) { const int levels[] = { LOG_EMERG, LOG_WARNING, LOG_INFO, LOG_DEBUG, LOG_DEBUG, LOG_DEBUG }; vsyslog ( levels[eLevel], sFmt, ap ); } #endif if ( g_iLogFile<0 && !g_bService ) return; // format the banner char sTimeBuf[128]; sphFormatCurrentTime ( sTimeBuf, sizeof(sTimeBuf) ); const char * sBanner = ""; if ( sFmt==NULL ) eLevel = eLastLevel; if ( eLevel==SPH_LOG_FATAL ) sBanner = "FATAL: "; if ( eLevel==SPH_LOG_WARNING ) sBanner = "WARNING: "; if ( eLevel>=SPH_LOG_DEBUG ) sBanner = "DEBUG: "; char sBuf [ 1024 ]; snprintf ( sBuf, sizeof(sBuf)-1, "[%s] [%5d] ", sTimeBuf, (int)getpid() ); char * sTtyBuf = sBuf + strlen(sBuf); strncpy ( sTtyBuf, sBanner, 32 ); // 32 is arbitrary; just something that is enough and keeps lint happy int iLen = strlen(sBuf); // format the message if ( sFmt ) vsnprintf ( sBuf+iLen, sizeof(sBuf)-iLen-1, sFmt, ap ); // catch dupes DWORD uEntry = sFmt ? sphCRC32 ( (const BYTE*)( sBuf+iLen ) ) : 0; int64_t tmNow = sphMicroTimer(); // accumulate while possible if ( sFmt && eLevel==eLastLevel && uEntry==uLastEntry && iLastRepeats=tmLastStamp+FLUSH_THRESH_TIME ) ) { // flush if we actually have something to flush, and // case 1: got a message we can't accumulate // case 2: got a periodic flush and been otherwise idle for a thresh period char sLast[256]; strncpy ( sLast, sBuf, iLen ); snprintf ( sLast+iLen, sizeof(sLast)-iLen, "last message repeated %d times", iLastRepeats ); sphLogEntry ( eLastLevel, sLast, sLast + ( sTtyBuf-sBuf ) ); tmLastStamp = tmNow; iLastRepeats = 0; eLastLevel = SPH_LOG_INFO; uLastEntry = 0; } // was that a flush-only call? if ( !sFmt ) return; tmLastStamp = tmNow; iLastRepeats = 0; eLastLevel = eLevel; uLastEntry = uEntry; // do the logging sphLogEntry ( eLevel, sBuf, sTtyBuf ); } void sphFatal ( const char * sFmt, ... ) __attribute__ ( ( format ( printf, 1, 2 ) ) ); void sphFatal ( const char * sFmt, ... 
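// Note on the dupe catcher in sphLog() above: consecutive messages with the same
// level and identical formatted text are not written one by one; they are counted,
// and the counter is flushed as a single "last message repeated N times" line when
// a different message arrives, when FLUSH_THRESH_COUNT repeats have accumulated,
// or when a flush-only call (sFmt==NULL) comes in after roughly FLUSH_THRESH_TIME
// microseconds of quiet.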
) { va_list ap; va_start ( ap, sFmt ); sphLog ( SPH_LOG_FATAL, sFmt, ap ); va_end ( ap ); Shutdown (); exit ( 1 ); } #if !USE_WINDOWS static CSphString GetNamedPipeName ( int iPid ) { CSphString sRes; sRes.SetSprintf ( "/tmp/searchd_%d", iPid ); return sRes; } #endif void LogWarning ( const char * sWarning ) { sphWarning ( "%s", sWarning ); } ///////////////////////////////////////////////////////////////////////////// struct StrBuf_t { protected: char m_sBuf [ 2048 ]; char * m_pBuf; int m_iLeft; public: StrBuf_t () { memset ( m_sBuf, 0, sizeof(m_sBuf) ); m_iLeft = sizeof(m_sBuf)-1; m_pBuf = m_sBuf; } const char * cstr () { return m_sBuf; } int GetLength () { return sizeof(m_sBuf)-1-m_iLeft; } bool Append ( const char * s, bool bWhole ) { if ( !s ) return false; int iLen = strlen(s); if ( bWhole && m_iLeft m_dLog; public: void Submit ( const char * sIndex, const char * sError ) { SearchFailure_t & tEntry = m_dLog.Add (); tEntry.m_sIndex = sIndex; tEntry.m_sError = sError; } void SubmitEx ( const char * sIndex, const char * sTemplate, ... ) __attribute__ ( ( format ( printf, 3, 4 ) ) ) { SearchFailure_t & tEntry = m_dLog.Add (); va_list ap; va_start ( ap, sTemplate ); tEntry.m_sIndex = sIndex; tEntry.m_sError.SetSprintfVa ( sTemplate, ap ); va_end ( ap ); } public: bool IsEmpty () { return m_dLog.GetLength()==0; } void BuildReport ( StrBuf_t & sReport ) { if ( IsEmpty() ) return; // collapse same messages m_dLog.Uniq (); int iSpanStart = 0; for ( int i=1; i<=m_dLog.GetLength(); i++ ) { // keep scanning while error text is the same if ( i!=m_dLog.GetLength() ) if ( m_dLog[i].m_sError==m_dLog[i-1].m_sError ) continue; // build current span StrBuf_t sSpan; if ( iSpanStart ) sSpan += "; "; sSpan += "index "; for ( int j=iSpanStart; j & dChildren ) { ARRAY_FOREACH ( i, dChildren ) { int iPID = dChildren[i]; int iStatus = 0; if ( iPID>0 && waitpid ( iPID, &iStatus, WNOHANG )==iPID && ( WIFEXITED ( iStatus ) || WIFSIGNALED ( iStatus ) ) ) iPID = 0; if ( iPID<=0 ) dChildren.RemoveFast ( i-- ); } } #endif void Shutdown () { bool bAttrsSaveOk = true; // some head-only shutdown procedures if ( g_bHeadDaemon ) { if ( !g_bDaemonAtShutdown.IsEmpty() ) { *g_bDaemonAtShutdown.GetWritePtr() = 1; } const int iShutWaitPeriod = 3000000; if ( g_eWorkers==MPM_THREADS ) { // tell flush-rt thread to shutdown, and wait until it does g_bRtFlushShutdown = true; sphThreadJoin ( &g_tRtFlushThread ); // tell rotation thread to shutdown, and wait until it does g_bRotateShutdown = true; if ( g_bSeamlessRotate ) { sphThreadJoin ( &g_tRotateThread ); } g_tRotateQueueMutex.Done(); g_tRotateConfigMutex.Done(); int64_t tmShutStarted = sphMicroTimer(); // stop search threads; up to 3 seconds long while ( g_dThd.GetLength() > 0 && ( sphMicroTimer()-tmShutStarted )0 && ( sphMicroTimer()-tmShutStarted )SaveAttributes() ) { sphWarning ( "index %s: attrs save failed: %s", it.GetKey().cstr(), tServed.m_pIndex->GetLastError().cstr() ); bAttrsSaveOk = false; } } // unlock indexes and release locks if needed for ( IndexHashIterator_c it ( g_pIndexes ); it.Next(); ) if ( it.Get().m_pIndex ) it.Get().m_pIndex->Unlock(); g_pIndexes->Reset(); // clear shut down of rt indexes + binlog g_tDistLock.Done(); SafeDelete ( g_pIndexes ); sphRTDone(); sphShutdownWordforms (); } ARRAY_FOREACH ( i, g_dListeners ) if ( g_dListeners[i].m_iSock>=0 ) sphSockClose ( g_dListeners[i].m_iSock ); #if USE_WINDOWS CloseHandle ( g_hPipe ); #else if ( g_bHeadDaemon ) { const CSphString sPipeName = GetNamedPipeName ( getpid() ); const int hFile = ::open ( 
sPipeName.cstr(), O_WRONLY | O_NONBLOCK ); if ( hFile!=-1 ) { DWORD uStatus = bAttrsSaveOk; int iDummy; // to avoid gcc unused result warning iDummy = ::write ( hFile, &uStatus, sizeof(DWORD) ); ::close ( hFile ); } } #endif // remove pid if ( g_bHeadDaemon && g_sPidFile ) { ::close ( g_iPidFD ); ::unlink ( g_sPidFile ); } if ( g_bHeadDaemon ) sphInfo ( "shutdown complete" ); if ( g_bHeadDaemon ) { SphCrashLogger_c::Done(); sphThreadDone ( g_iLogFile ); } } #if !USE_WINDOWS void sighup ( int ) { g_bGotSighup = 1; } void sigterm ( int ) { // tricky bit // we can't call exit() here because malloc()/free() are not re-entrant // we could call _exit() but let's try to die gracefully on TERM // and let signal sender wait and send KILL as needed g_bGotSigterm = 1; sphInterruptNow(); } void sigchld ( int ) { g_bGotSigchld = 1; } void sigusr1 ( int ) { g_bGotSigusr1 = 1; } #endif // !USE_WINDOWS struct QueryCopyState_t { BYTE * m_pDst; BYTE * m_pDstEnd; const BYTE * m_pSrc; const BYTE * m_pSrcEnd; }; // crash query handler static const int g_iQueryLineLen = 80; static const char g_dEncodeBase64[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"; bool sphCopyEncodedBase64 ( QueryCopyState_t & tEnc ) { BYTE * pDst = tEnc.m_pDst; const BYTE * pDstBase = tEnc.m_pDst; const BYTE * pSrc = tEnc.m_pSrc; const BYTE * pDstEnd = tEnc.m_pDstEnd-5; const BYTE * pSrcEnd = tEnc.m_pSrcEnd-3; while ( pDst<=pDstEnd && pSrc<=pSrcEnd ) { // put line delimiter at max line length if ( ( ( pDst-pDstBase ) % g_iQueryLineLen )>( ( pDst-pDstBase+4 ) % g_iQueryLineLen ) ) *pDst++ = '\n'; // Convert to big endian DWORD uSrc = ( pSrc[0] << 16 ) | ( pSrc[1] << 8 ) | ( pSrc[2] ); pSrc += 3; *pDst++ = g_dEncodeBase64 [ ( uSrc & 0x00FC0000 ) >> 18 ]; *pDst++ = g_dEncodeBase64 [ ( uSrc & 0x0003F000 ) >> 12 ]; *pDst++ = g_dEncodeBase64 [ ( uSrc & 0x00000FC0 ) >> 6 ]; *pDst++ = g_dEncodeBase64 [ ( uSrc & 0x0000003F ) ]; } // there is a tail in source data and a room for it at destination buffer if ( pSrc> 18 ]; *pDst++ = g_dEncodeBase64 [ ( uSrc & 0x0003F000 ) >> 12 ]; *pDst++ = '='; *pDst++ = '='; } else if ( iLeft==2 ) { DWORD uSrc = ( pSrc[0]<<16 ) | ( pSrc[1] << 8 ); pSrc += 2; *pDst++ = g_dEncodeBase64 [ ( uSrc & 0x00FC0000 ) >> 18 ]; *pDst++ = g_dEncodeBase64 [ ( uSrc & 0x0003F000 ) >> 12 ]; *pDst++ = g_dEncodeBase64 [ ( uSrc & 0x00000FC0 ) >> 6 ]; *pDst++ = '='; } } tEnc.m_pDst = pDst; tEnc.m_pSrc = pSrc; return ( tEnc.m_pSrcpNextLine && pDst+1>8 ) & 0xff ), (BYTE)( tQuery.m_uCMD & 0xff ), (BYTE)( ( tQuery.m_uVer>>8 ) & 0xff ), (BYTE)( tQuery.m_uVer & 0xff ), (BYTE)( ( tQuery.m_iSize>>24 ) & 0xff ), (BYTE)( ( tQuery.m_iSize>>16 ) & 0xff ), (BYTE)( ( tQuery.m_iSize>>8 ) & 0xff ), (BYTE)( tQuery.m_iSize & 0xff ), *tQuery.m_pQuery }; QueryCopyState_t tHeaderState; tHeaderState.m_pDst = g_dCrashQueryBuff; tHeaderState.m_pDstEnd = g_dCrashQueryBuff + sizeof(g_dCrashQueryBuff); tHeaderState.m_pSrc = dHeader; tHeaderState.m_pSrcEnd = dHeader + sizeof(dHeader); pfnCopy ( tHeaderState ); assert ( tHeaderState.m_pSrc==tHeaderState.m_pSrcEnd ); tCopyState.m_pDst = tHeaderState.m_pDst; tCopyState.m_pSrc++; } else { pfnCopy = &sphCopySphinxQL; } while ( pfnCopy ( tCopyState ) ) { sphWrite ( g_iLogFile, g_dCrashQueryBuff, tCopyState.m_pDst-g_dCrashQueryBuff ); tCopyState.m_pDst = g_dCrashQueryBuff; // reset the destination buffer } assert ( tCopyState.m_pSrc==tCopyState.m_pSrcEnd ); int iLeft = tCopyState.m_pDst-g_dCrashQueryBuff; if ( iLeft>0 ) { sphWrite ( g_iLogFile, g_dCrashQueryBuff, iLeft ); } } // tail sphWrite ( 
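// Worked example for sphCopyEncodedBase64() above: every three query bytes are
// packed into one 24-bit group and emitted as four characters from g_dEncodeBase64,
// with a newline inserted every g_iQueryLineLen output characters. For the bytes
// 'M','a','n' (0x4D 0x61 0x6E) the group is 0x4D616E, the 6-bit indexes are
// 19, 22, 5 and 46, and the output is "TWFu"; a trailing group of one or two bytes
// is padded with "==" or "=" respectively, as the tail branch above handles.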
g_iLogFile, g_sCrashedBannerTail, sizeof(g_sCrashedBannerTail)-1 ); sphSafeInfo ( g_iLogFile, "Sphinx " SPHINX_VERSION ); #if USE_WINDOWS // mini-dump reference int iMiniDumpLen = snprintf ( (char *)g_dCrashQueryBuff, sizeof(g_dCrashQueryBuff), "%s %s.%p.mdmp\n", g_sMinidumpBanner, g_sMinidump, tQuery.m_pQuery ); sphWrite ( g_iLogFile, g_dCrashQueryBuff, iMiniDumpLen ); snprintf ( (char *)g_dCrashQueryBuff, sizeof(g_dCrashQueryBuff), "%s.%p.mdmp", g_sMinidump, tQuery.m_pQuery ); #endif // log trace #if !USE_WINDOWS sphBacktrace ( g_iLogFile, g_bSafeTrace ); #else sphBacktrace ( pExc, (char *)g_dCrashQueryBuff ); #endif // threads table if ( g_eWorkers==MPM_THREADS ) { // FIXME? should we try to lock threads table somehow? sphSafeInfo ( g_iLogFile, "--- %d active threads ---", g_dThd.GetLength() ); ARRAY_FOREACH ( iThd, g_dThd ) { ThdDesc_t * pThd = g_dThd[iThd]; sphSafeInfo ( g_iLogFile, "thd %d, proto %s, state %s, command %s", iThd, g_dProtoNames[pThd->m_eProto], g_dThdStates[pThd->m_eThdState], pThd->m_sCommand ? pThd->m_sCommand : "-" ); } } // memory info #if SPH_ALLOCS_PROFILER sphWrite ( g_iLogFile, g_sMemoryStatBanner, sizeof ( g_sMemoryStatBanner )-1 ); sphMemStatDump ( g_iLogFile ); #endif sphSafeInfo ( g_iLogFile, "------- CRASH DUMP END -------" ); CRASH_EXIT; } void SphCrashLogger_c::SetLastQuery ( const CrashQuery_t & tQuery ) { m_tForkQuery = tQuery; SphCrashLogger_c * pCrashLogger = (SphCrashLogger_c *)sphThreadGet ( m_tLastQueryTLS ); if ( pCrashLogger ) { pCrashLogger->m_tQuery = tQuery; } } void SphCrashLogger_c::SetupTimePID () { char sTimeBuf[SPH_TIME_PID_MAX_SIZE]; sphFormatCurrentTime ( sTimeBuf, sizeof(sTimeBuf) ); g_iCrashInfoLen = snprintf ( g_sCrashInfo, SPH_TIME_PID_MAX_SIZE-1, "------- FATAL: CRASH DUMP -------\n[%s] [%5d]\n", sTimeBuf, (int)getpid() ); } void SphCrashLogger_c::SetupTLS () { Verify ( sphThreadSet ( m_tLastQueryTLS, this ) ); } CrashQuery_t SphCrashLogger_c::GetQuery() { SphCrashLogger_c * pCrashLogger = (SphCrashLogger_c *)sphThreadGet ( m_tLastQueryTLS ); return pCrashLogger ? pCrashLogger->m_tQuery : m_tForkQuery; } void SetSignalHandlers () { SphCrashLogger_c::Init(); #if !USE_WINDOWS struct sigaction sa; sigfillset ( &sa.sa_mask ); sa.sa_flags = SA_NOCLDSTOP; bool bSignalsSet = false; for ( ;; ) { sa.sa_handler = sigterm; if ( sigaction ( SIGTERM, &sa, NULL )!=0 ) break; sa.sa_handler = sigterm; if ( sigaction ( SIGINT, &sa, NULL )!=0 ) break; sa.sa_handler = sighup; if ( sigaction ( SIGHUP, &sa, NULL )!=0 ) break; sa.sa_handler = sigusr1; if ( sigaction ( SIGUSR1, &sa, NULL )!=0 ) break; sa.sa_handler = sigchld; if ( sigaction ( SIGCHLD, &sa, NULL )!=0 ) break; sa.sa_handler = SIG_IGN; if ( sigaction ( SIGPIPE, &sa, NULL )!=0 ) break; sa.sa_flags |= SA_RESETHAND; sa.sa_handler = SphCrashLogger_c::HandleCrash; if ( sigaction ( SIGSEGV, &sa, NULL )!=0 ) break; sa.sa_handler = SphCrashLogger_c::HandleCrash; if ( sigaction ( SIGBUS, &sa, NULL )!=0 ) break; sa.sa_handler = SphCrashLogger_c::HandleCrash; if ( sigaction ( SIGABRT, &sa, NULL )!=0 ) break; sa.sa_handler = SphCrashLogger_c::HandleCrash; if ( sigaction ( SIGILL, &sa, NULL )!=0 ) break; sa.sa_handler = SphCrashLogger_c::HandleCrash; if ( sigaction ( SIGFPE, &sa, NULL )!=0 ) break; bSignalsSet = true; break; } if ( !bSignalsSet ) sphFatal ( "sigaction(): %s", strerror(errno) ); #else snprintf ( g_sMinidump, SPH_TIME_PID_MAX_SIZE-1, "%s.%d", g_sPidFile ? 
g_sPidFile : "", (int)getpid() ); SetUnhandledExceptionFilter ( SphCrashLogger_c::HandleCrash ); #endif } ///////////////////////////////////////////////////////////////////////////// // NETWORK STUFF ///////////////////////////////////////////////////////////////////////////// const int WIN32_PIPE_BUFSIZE = 32; #if USE_WINDOWS /// on Windows, the wrapper just prevents the warnings void sphFDSet ( int fd, fd_set * fdset ) { #pragma warning(disable:4127) // conditional expr is const #pragma warning(disable:4389) // signed/unsigned mismatch FD_SET ( fd, fdset ); #pragma warning(default:4127) // conditional expr is const #pragma warning(default:4389) // signed/unsigned mismatch } #else // !USE_WINDOWS #define SPH_FDSET_OVERFLOW(_fd) ( (_fd)<0 || (_fd)>=(int)FD_SETSIZE ) /// on UNIX, we also check that the descript won't corrupt the stack void sphFDSet ( int fd, fd_set * set ) { if ( SPH_FDSET_OVERFLOW(fd) ) sphFatal ( "sphFDSet() failed fd=%d, FD_SETSIZE=%d", fd, FD_SETSIZE ); else FD_SET ( fd, set ); } #endif // USE_WINDOWS const char * sphSockError ( int iErr=0 ) { #if USE_WINDOWS if ( iErr==0 ) iErr = WSAGetLastError (); static char sBuf [ 256 ]; _snprintf ( sBuf, sizeof(sBuf), "WSA error %d", iErr ); return sBuf; #else return strerror ( errno ); #endif } int sphSockGetErrno () { #if USE_WINDOWS return WSAGetLastError(); #else return errno; #endif } void sphSockSetErrno ( int iErr ) { #if USE_WINDOWS WSASetLastError ( iErr ); #else errno = iErr; #endif } int sphSockPeekErrno () { int iRes = sphSockGetErrno(); sphSockSetErrno ( iRes ); return iRes; } /// formats IP address given in network byte order into sBuffer /// returns the buffer char * sphFormatIP ( char * sBuffer, int iBufferSize, DWORD uAddress ) { const BYTE *a = (const BYTE *)&uAddress; snprintf ( sBuffer, iBufferSize, "%u.%u.%u.%u", a[0], a[1], a[2], a[3] ); return sBuffer; } static const bool GETADDR_STRICT = true; ///< strict check, will die with sphFatal() on failure DWORD sphGetAddress ( const char * sHost, bool bFatal=false ) { struct hostent * pHost = gethostbyname ( sHost ); if ( pHost==NULL || pHost->h_addrtype!=AF_INET ) { if ( bFatal ) sphFatal ( "no AF_INET address found for: %s", sHost ); return 0; } struct in_addr ** ppAddrs = (struct in_addr **)pHost->h_addr_list; assert ( ppAddrs[0] ); assert ( sizeof(DWORD)==pHost->h_length ); DWORD uAddr; memcpy ( &uAddr, ppAddrs[0], sizeof(DWORD) ); if ( ppAddrs[1] ) { char sBuf [ SPH_ADDRESS_SIZE ]; sphWarning ( "multiple addresses found for '%s', using the first one (ip=%s)", sHost, sphFormatIP ( sBuf, sizeof(sBuf), uAddr ) ); } return uAddr; } #if !USE_WINDOWS int sphCreateUnixSocket ( const char * sPath ) { static struct sockaddr_un uaddr; size_t len = strlen ( sPath ); if ( len + 1 > sizeof( uaddr.sun_path ) ) sphFatal ( "UNIX socket path is too long (len=%d)", (int)len ); sphInfo ( "listening on UNIX socket %s", sPath ); memset ( &uaddr, 0, sizeof(uaddr) ); uaddr.sun_family = AF_UNIX; memcpy ( uaddr.sun_path, sPath, len + 1 ); int iSock = socket ( AF_UNIX, SOCK_STREAM, 0 ); if ( iSock==-1 ) sphFatal ( "failed to create UNIX socket: %s", sphSockError() ); if ( unlink ( sPath )==-1 ) { if ( errno!=ENOENT ) sphFatal ( "unlink() on UNIX socket file failed: %s", sphSockError() ); } int iMask = umask ( 0 ); if ( bind ( iSock, (struct sockaddr *)&uaddr, sizeof(uaddr) )!=0 ) sphFatal ( "bind() on UNIX socket failed: %s", sphSockError() ); umask ( iMask ); return iSock; } #endif // !USE_WINDOWS int sphCreateInetSocket ( DWORD uAddr, int iPort ) { char sAddress[SPH_ADDRESS_SIZE]; 
sphFormatIP ( sAddress, SPH_ADDRESS_SIZE, uAddr ); if ( uAddr==htonl ( INADDR_ANY ) ) sphInfo ( "listening on all interfaces, port=%d", iPort ); else sphInfo ( "listening on %s:%d", sAddress, iPort ); static struct sockaddr_in iaddr; memset ( &iaddr, 0, sizeof(iaddr) ); iaddr.sin_family = AF_INET; iaddr.sin_addr.s_addr = uAddr; iaddr.sin_port = htons ( (short)iPort ); int iSock = socket ( AF_INET, SOCK_STREAM, 0 ); if ( iSock==-1 ) sphFatal ( "failed to create TCP socket: %s", sphSockError() ); int iOn = 1; if ( setsockopt ( iSock, SOL_SOCKET, SO_REUSEADDR, (char*)&iOn, sizeof(iOn) ) ) sphFatal ( "setsockopt() failed: %s", sphSockError() ); int iTries = 12; int iRes; do { iRes = bind ( iSock, (struct sockaddr *)&iaddr, sizeof(iaddr) ); if ( iRes==0 ) break; sphInfo ( "bind() failed on %s, retrying...", sAddress ); sphSleepMsec ( 3000 ); } while ( --iTries>0 ); if ( iRes ) sphFatal ( "bind() failed on %s: %s", sAddress, sphSockError() ); return iSock; } inline bool IsPortInRange ( int iPort ) { return ( iPort>0 ) && ( iPort<=0xFFFF ); } void CheckPort ( int iPort ) { if ( !IsPortInRange(iPort) ) sphFatal ( "port %d is out of range", iPort ); } ProtocolType_e ProtoByName ( const CSphString & sProto ) { if ( sProto=="sphinx" ) return PROTO_SPHINX; else if ( sProto=="mysql41" ) return PROTO_MYSQL41; sphFatal ( "unknown listen protocol type '%s'", sProto.cstr() ? sProto.cstr() : "(NULL)" ); // funny magic // MSVC -O2 whines about unreachable code // everyone else whines about missing return value #if !(USE_WINDOWS && defined(NDEBUG)) return PROTO_SPHINX; #endif } struct ListenerDesc_t { ProtocolType_e m_eProto; CSphString m_sUnix; DWORD m_uIP; int m_iPort; }; ListenerDesc_t ParseListener ( const char * sSpec ) { ListenerDesc_t tRes; tRes.m_eProto = PROTO_SPHINX; tRes.m_sUnix = ""; tRes.m_uIP = htonl ( INADDR_ANY ); tRes.m_iPort = SPHINXAPI_PORT; // split by colon int iParts = 0; CSphString sParts[3]; const char * sPart = sSpec; for ( const char * p = sSpec; ; p++ ) if ( *p=='\0' || *p==':' ) { if ( iParts==3 ) sphFatal ( "invalid listen format (too many fields)" ); sParts[iParts++].SetBinary ( sPart, p-sPart ); if ( !*p ) break; // bail out on zero sPart = p+1; } assert ( iParts>=1 && iParts<=3 ); // handle UNIX socket case // might be either name on itself (1 part), or name+protocol (2 parts) sPart = sParts[0].cstr(); if ( sPart[0]=='/' ) { if ( iParts>2 ) sphFatal ( "invalid listen format (too many fields)" ); if ( iParts==2 ) tRes.m_eProto = ProtoByName ( sParts[1] ); #if USE_WINDOWS sphFatal ( "UNIX sockets are not supported on Windows" ); #else tRes.m_sUnix = sPart; return tRes; #endif } // check if it all starts with a valid port number sPart = sParts[0].cstr(); int iLen = strlen(sPart); bool bAllDigits = true; for ( int i=0; i0 ); int64_t tmMaxTimer = sphMicroTimer() + I64C(1000000)*Max ( 1, iReadTimeout ); // in microseconds int iLeftBytes = iLen; // bytes to read left char * pBuf = (char*) buf; int iRes = -1, iErr = 0; while ( iLeftBytes>0 ) { int64_t tmMicroLeft = tmMaxTimer - sphMicroTimer(); if ( tmMicroLeft<=0 ) break; // timed out fd_set fdRead; FD_ZERO ( &fdRead ); sphFDSet ( iSock, &fdRead ); fd_set fdExcept; FD_ZERO ( &fdExcept ); sphFDSet ( iSock, &fdExcept ); #if USE_WINDOWS // Windows EINTR emulation // Ctrl-C will not interrupt select on Windows, so let's handle that manually // forcibly limit select() to 100 ms, and check flag afterwards if ( bIntr ) tmMicroLeft = Min ( tmMicroLeft, 100000 ); #endif struct timeval tv; tv.tv_sec = (int)( tmMicroLeft / 1000000 ); tv.tv_usec 
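// For reference, ParseListener() above accepts the documented listen forms, for
// example (the addresses, ports and paths here are illustrative values only):
//	listen = 9312						# port only, all interfaces, SphinxAPI
//	listen = 192.168.0.1:9312			# ip:port
//	listen = localhost:9306:mysql41		# ip:port:protocol
//	listen = /var/run/searchd.sock		# UNIX socket path (not supported on Windows)
// where the optional trailing protocol is either "sphinx" (the default, SphinxAPI)
// or "mysql41" (SphinxQL over the MySQL wire protocol), as ProtoByName() resolves.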
= (int)( tmMicroLeft % 1000000 ); iRes = ::select ( iSock+1, &fdRead, NULL, &fdExcept, &tv ); // if there was EINTR, retry // if any other error, bail if ( iRes==-1 ) { // only let SIGTERM (of all them) to interrupt, and only if explicitly allowed iErr = sphSockGetErrno(); if ( iErr==EINTR && !( g_bGotSigterm && bIntr )) continue; if ( iErr==EINTR ) sphLogDebug ( "sphSockRead: select got SIGTERM, exit -1" ); sphSockSetErrno ( iErr ); return -1; } // if there was a timeout, report it as an error if ( iRes==0 ) { #if USE_WINDOWS // Windows EINTR emulation if ( bIntr ) { // got that SIGTERM if ( g_bGotSigterm ) { sphLogDebug ( "sphSockRead: got SIGTERM emulation on Windows, exit -1" ); sphSockSetErrno ( EINTR ); return -1; } // timeout might not be fully over just yet, so re-loop continue; } #endif sphSockSetErrno ( ETIMEDOUT ); return -1; } // try to receive next chunk iRes = sphSockRecv ( iSock, pBuf, iLeftBytes ); // if there was eof, we're done if ( iRes==0 ) { sphSockSetErrno ( ECONNRESET ); return -1; } // if there was EINTR, retry // if any other error, bail if ( iRes==-1 ) { // only let SIGTERM (of all them) to interrupt, and only if explicitly allowed iErr = sphSockGetErrno(); if ( iErr==EINTR && !( g_bGotSigterm && bIntr )) continue; if ( iErr==EINTR ) sphLogDebug ( "sphSockRead: select got SIGTERM, exit -1" ); sphSockSetErrno ( iErr ); return -1; } // update pBuf += iRes; iLeftBytes -= iRes; // avoid partial buffer loss in case of signal during the 2nd (!) read bIntr = false; } // if there was a timeout, report it as an error if ( iLeftBytes!=0 ) { sphSockSetErrno ( ETIMEDOUT ); return -1; } return iLen; } ///////////////////////////////////////////////////////////////////////////// // NETWORK BUFFERS ///////////////////////////////////////////////////////////////////////////// /// fixed-memory response buffer /// tracks usage, and flushes to network when necessary class NetOutputBuffer_c { public: explicit NetOutputBuffer_c ( int iSock ); bool SendInt ( int iValue ) { return SendT ( htonl ( iValue ) ); } bool SendAsDword ( int64_t iValue ) ///< sends the 32bit MAX_UINT if the value is greater than it. 
{ if ( iValue < 0 ) return SendDword ( 0 ); if ( iValue > UINT_MAX ) return SendDword ( UINT_MAX ); return SendDword ( DWORD(iValue) ); } bool SendDword ( DWORD iValue ) { return SendT ( htonl ( iValue ) ); } bool SendLSBDword ( DWORD v ) { SendByte ( (BYTE)( v&0xff ) ); SendByte ( (BYTE)( (v>>8)&0xff ) ); SendByte ( (BYTE)( (v>>16)&0xff ) ); return SendByte ( (BYTE)( (v>>24)&0xff) ); } bool SendWord ( WORD iValue ) { return SendT ( htons ( iValue ) ); } bool SendUint64 ( uint64_t iValue ) { SendT ( htonl ( (DWORD)(iValue>>32) ) ); return SendT ( htonl ( (DWORD)(iValue&0xffffffffUL) ) ); } bool SendFloat ( float fValue ) { return SendT ( htonl ( sphF2DW ( fValue ) ) ); } bool SendByte ( BYTE uValue ) { return SendT ( uValue ); } #if USE_64BIT bool SendDocid ( SphDocID_t iValue ) { return SendUint64 ( iValue ); } #else bool SendDocid ( SphDocID_t iValue ) { return SendDword ( iValue ); } #endif bool SendString ( const char * sStr ); bool SendMysqlString ( const char * sStr ); bool Flush ( bool bUnfreeze=false ); bool GetError () { return m_bError; } int GetSentCount () { return m_iSent; } void FreezeBlock ( const char * sError, int iLen ); protected: BYTE m_dBuffer[NETOUTBUF]; ///< my buffer BYTE * m_pBuffer; ///< my current buffer position int m_iSock; ///< my socket bool m_bError; ///< if there were any write errors int m_iSent; const char *m_sError; ///< fallback message if the frozen buf overloaded int m_iErrorLength; bool m_bFlushEnabled; ///< in frozen state we never flush until special command BYTE * m_pSize; ///< the pointer to the size of frozen block protected: bool SetError ( bool bValue ); ///< set error flag bool FlushIf ( int iToAdd ); ///< flush if there's not enough free space to add iToAdd bytes public: bool SendBytes ( const void * pBuf, int iLen ); ///< (was) protected to avoid network-vs-host order bugs template < typename T > bool SendT ( T tValue ); ///< (was) protected to avoid network-vs-host order bugs }; /// generic request buffer class InputBuffer_c { public: InputBuffer_c ( const BYTE * pBuf, int iLen ); virtual ~InputBuffer_c () {} int GetInt () { return ntohl ( GetT () ); } WORD GetWord () { return ntohs ( GetT () ); } DWORD GetDword () { return ntohl ( GetT () ); } DWORD GetLSBDword () { return GetByte() + ( GetByte()<<8 ) + ( GetByte()<<16 ) + ( GetByte()<<24 ); } uint64_t GetUint64() { uint64_t uRes = GetDword(); return (uRes<<32)+GetDword(); } BYTE GetByte () { return GetT (); } float GetFloat () { return sphDW2F ( ntohl ( GetT () ) ); } CSphString GetString (); CSphString GetRawString ( int iLen ); int GetDwords ( DWORD ** pBuffer, int iMax, const char * sErrorTemplate ); bool GetError () { return m_bError; } template < typename T > bool GetDwords ( CSphVector & dBuffer, int iMax, const char * sErrorTemplate ); template < typename T > bool GetQwords ( CSphVector & dBuffer, int iMax, const char * sErrorTemplate ); virtual void SendErrorReply ( const char *, ... ) __attribute__ ( ( format ( printf, 2, 3 ) ) ) = 0; protected: const BYTE * m_pBuf; const BYTE * m_pCur; bool m_bError; int m_iLen; protected: void SetError ( bool bError ) { m_bError = bError; } bool GetBytes ( void * pBuf, int iLen ); template < typename T > T GetT (); }; /// simple memory request buffer class MemInputBuffer_c : public InputBuffer_c { public: MemInputBuffer_c ( const BYTE * pBuf, int iLen ) : InputBuffer_c ( pBuf, iLen ) {} virtual void SendErrorReply ( const char *, ... 
) __attribute__ ( ( format ( printf, 2, 3 ) ) ) {} }; /// simple network request buffer class NetInputBuffer_c : public InputBuffer_c { public: explicit NetInputBuffer_c ( int iSock ); virtual ~NetInputBuffer_c (); bool ReadFrom ( int iLen, int iTimeout, bool bIntr=false, bool bAppend=false ); bool ReadFrom ( int iLen ) { return ReadFrom ( iLen, g_iReadTimeout ); } virtual void SendErrorReply ( const char *, ... ) __attribute__ ( ( format ( printf, 2, 3 ) ) ); const BYTE * GetBufferPtr () const { return m_pBuf; } bool IsIntr () const { return m_bIntr; } protected: static const int NET_MINIBUFFER_SIZE = 4096; int m_iSock; bool m_bIntr; BYTE m_dMinibufer[NET_MINIBUFFER_SIZE]; int m_iMaxibuffer; BYTE * m_pMaxibuffer; }; ///////////////////////////////////////////////////////////////////////////// NetOutputBuffer_c::NetOutputBuffer_c ( int iSock ) : m_pBuffer ( m_dBuffer ) , m_iSock ( iSock ) , m_bError ( false ) , m_iSent ( 0 ) , m_bFlushEnabled ( true ) { assert ( m_iSock>0 ); } template < typename T > bool NetOutputBuffer_c::SendT ( T tValue ) { if ( m_bError ) return false; FlushIf ( sizeof(T) ); sphUnalignedWrite ( m_pBuffer, tValue ); m_pBuffer += sizeof(T); assert ( m_pBuffer>8 ); return (void*)pOutput; } if ( iValue<=0xFFFFFF ) { *pOutput++ = '\xFD'; *pOutput++ = (char)iValue; *pOutput++ = (char)( iValue>>8 ); *pOutput++ = (char)( iValue>>16 ); return (void *) pOutput; } *pOutput++ = '\xFE'; *pOutput++ = (char)iValue; *pOutput++ = (char)( iValue>>8 ); *pOutput++ = (char)( iValue>>16 ); *pOutput++ = (char)( iValue>>24 ); *pOutput++ = 0; *pOutput++ = 0; *pOutput++ = 0; *pOutput++ = 0; return (void*)pOutput; } int MysqlUnpack ( InputBuffer_c & tReq, DWORD * pSize ) { assert ( pSize ); int iRes = tReq.GetByte(); --*pSize; if ( iRes < 251 ) return iRes; if ( iRes==0xFC ) { *pSize -=2; return tReq.GetByte() + ((int)tReq.GetByte()<<8); } if ( iRes==0xFD ) { *pSize -= 3; return tReq.GetByte() + ((int)tReq.GetByte()<<8) + ((int)tReq.GetByte()<<16); } if ( iRes==0xFE ) iRes = tReq.GetByte() + ((int)tReq.GetByte()<<8) + ((int)tReq.GetByte()<<16) + ((int)tReq.GetByte()<<24); tReq.GetByte(); tReq.GetByte(); tReq.GetByte(); tReq.GetByte(); *pSize -= 8; return iRes; } bool NetOutputBuffer_c::SendMysqlString ( const char * sStr ) { if ( m_bError ) return false; int iLen = strlen(sStr); BYTE dBuf[12]; BYTE * pBuf = (BYTE*) MysqlPack ( dBuf, iLen ); SendBytes ( dBuf, (int)( pBuf-dBuf ) ); return SendBytes ( sStr, iLen ); } bool NetOutputBuffer_c::SendBytes ( const void * pBuf, int iLen ) { BYTE * pMy = (BYTE*)pBuf; while ( iLen>0 && !m_bError ) { int iLeft = sizeof(m_dBuffer) - ( m_pBuffer - m_dBuffer ); if ( iLen<=iLeft ) { memcpy ( m_pBuffer, pMy, iLen ); m_pBuffer += iLen; break; } memcpy ( m_pBuffer, pMy, iLeft ); m_pBuffer += iLeft; Flush (); pMy += iLeft; iLen -= iLeft; } return !m_bError; } bool NetOutputBuffer_c::Flush ( bool bUnfreeze ) { if ( m_bError ) return false; int iLen = m_pBuffer-m_dBuffer; if ( iLen==0 ) return true; if ( g_bGotSigterm ) sphLogDebug ( "SIGTERM in NetOutputBuffer::Flush" ); if ( bUnfreeze ) { BYTE * pBuf = m_pBuffer; m_pBuffer = m_pSize; SendDword ( pBuf-m_pSize-4 ); m_pBuffer = pBuf; m_bFlushEnabled = true; } // buffer overloaded. It is fail. Send the error message. 
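// Note on MysqlPack()/MysqlUnpack() above: SphinxQL traffic uses the MySQL
// length-encoded integer format, so a value below 251 is sent as a single byte,
// values up to 0xFFFF as 0xFC plus two little-endian bytes, values up to 0xFFFFFF
// as 0xFD plus three bytes, and anything larger as 0xFE plus an 8-byte value.
// For example, a string length of 100 is packed as the single byte 0x64, while a
// length of 1000 (0x3E8) becomes the three bytes 0xFC 0xE8 0x03.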
if ( !m_bFlushEnabled ) { sphLogDebug ( "NetOutputBuffer with disabled flush is overloaded" ); m_pBuffer = m_dBuffer; SendBytes ( m_sError, m_iErrorLength ); iLen = m_pBuffer-m_dBuffer; if ( iLen==0 ) return true; } assert ( iLen>0 ); assert ( iLen<=(int)sizeof(m_dBuffer) ); char * pBuffer = (char *)&m_dBuffer[0]; const int64_t tmMaxTimer = sphMicroTimer() + g_iWriteTimeout*1000000; // in microseconds while ( !m_bError ) { int iRes = sphSockSend ( m_iSock, pBuffer, iLen ); if ( iRes < 0 ) { int iErrno = sphSockGetErrno(); if ( iErrno==EINTR ) // interrupted before any data was sent; just loop continue; if ( iErrno!=EAGAIN && iErrno!=EWOULDBLOCK ) { sphWarning ( "send() failed: %d: %s", iErrno, sphSockError(iErrno) ); m_bError = true; break; } } else { m_iSent += iRes; pBuffer += iRes; iLen -= iRes; if ( iLen==0 ) break; } int64_t tmMicroLeft = tmMaxTimer - sphMicroTimer(); if ( tmMicroLeft>0 ) { fd_set fdWrite; FD_ZERO ( &fdWrite ); sphFDSet ( m_iSock, &fdWrite ); struct timeval tvTimeout; tvTimeout.tv_sec = (int)( tmMicroLeft / 1000000 ); tvTimeout.tv_usec = (int)( tmMicroLeft % 1000000 ); iRes = select ( m_iSock+1, NULL, &fdWrite, NULL, &tvTimeout ); } else iRes = 0; switch ( iRes ) { case 1: // ready for writing break; case 0: // timed out { sphWarning ( "timed out while trying to flush network buffers" ); m_bError = true; break; } case -1: // error { int iErrno = sphSockGetErrno(); if ( iErrno==EINTR ) break; sphWarning ( "select() failed: %d: %s", iErrno, sphSockError(iErrno) ); m_bError = true; break; } } } m_pBuffer = m_dBuffer; return !m_bError; } void NetOutputBuffer_c::FreezeBlock ( const char * sError, int iLen ) { m_sError = sError; m_iErrorLength = iLen; m_bFlushEnabled = false; // reserve the DWORD for the size m_pSize = m_pBuffer; SendDword ( 0 ); } bool NetOutputBuffer_c::FlushIf ( int iToAdd ) { if ( ( m_pBuffer+iToAdd )>=( m_dBuffer+sizeof(m_dBuffer) ) ) return Flush (); return !m_bError; } ///////////////////////////////////////////////////////////////////////////// InputBuffer_c::InputBuffer_c ( const BYTE * pBuf, int iLen ) : m_pBuf ( pBuf ) , m_pCur ( pBuf ) , m_bError ( !pBuf || iLen<0 ) , m_iLen ( iLen ) {} template < typename T > T InputBuffer_c::GetT () { if ( m_bError || ( m_pCur+sizeof(T) > m_pBuf+m_iLen ) ) { SetError ( true ); return 0; } T iRes = sphUnalignedRead ( *(T*)m_pCur ); m_pCur += sizeof(T); return iRes; } CSphString InputBuffer_c::GetString () { CSphString sRes; int iLen = GetInt (); if ( m_bError || iLen<0 || iLen>g_iMaxPacketSize || ( m_pCur+iLen > m_pBuf+m_iLen ) ) { SetError ( true ); return sRes; } sRes.SetBinary ( (char*)m_pCur, iLen ); m_pCur += iLen; return sRes; } CSphString InputBuffer_c::GetRawString ( int iLen ) { CSphString sRes; if ( m_bError || iLen<0 || iLen>g_iMaxPacketSize || ( m_pCur+iLen > m_pBuf+m_iLen ) ) { SetError ( true ); return sRes; } sRes.SetBinary ( (char*)m_pCur, iLen ); m_pCur += iLen; return sRes; } bool InputBuffer_c::GetBytes ( void * pBuf, int iLen ) { assert ( pBuf ); assert ( iLen>0 && iLen<=g_iMaxPacketSize ); if ( m_bError || ( m_pCur+iLen > m_pBuf+m_iLen ) ) { SetError ( true ); return false; } memcpy ( pBuf, m_pCur, iLen ); m_pCur += iLen; return true; } int InputBuffer_c::GetDwords ( DWORD ** ppBuffer, int iMax, const char * sErrorTemplate ) { assert ( ppBuffer ); assert ( !(*ppBuffer) ); int iCount = GetInt (); if ( iCount<0 || iCount>iMax ) { SendErrorReply ( sErrorTemplate, iCount, iMax ); SetError ( true ); return -1; } if ( iCount ) { assert ( !(*ppBuffer) ); // potential leak (*ppBuffer) = new DWORD [ 
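// Note on the FreezeBlock()/Flush() pair above: FreezeBlock() remembers the current
// buffer position, writes a placeholder length dword, and disables network flushes;
// once the whole block has been buffered, Flush(true) temporarily rewinds to that
// placeholder, patches in the real block size, and re-enables flushing. If the
// buffer overflows while frozen, the pre-registered error message handed to
// FreezeBlock() is sent instead of the partial block.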
iCount ]; if ( !GetBytes ( (*ppBuffer), sizeof(DWORD)*iCount ) ) { SafeDeleteArray ( (*ppBuffer) ); return -1; } for ( int i=0; i bool InputBuffer_c::GetDwords ( CSphVector & dBuffer, int iMax, const char * sErrorTemplate ) { int iCount = GetInt (); if ( iCount<0 || iCount>iMax ) { SendErrorReply ( sErrorTemplate, iCount, iMax ); SetError ( true ); return false; } dBuffer.Resize ( iCount ); ARRAY_FOREACH ( i, dBuffer ) dBuffer[i] = GetDword (); if ( m_bError ) dBuffer.Reset (); return !m_bError; } template < typename T > bool InputBuffer_c::GetQwords ( CSphVector & dBuffer, int iMax, const char * sErrorTemplate ) { int iCount = GetInt (); if ( iCount<0 || iCount>iMax ) { SendErrorReply ( sErrorTemplate, iCount, iMax ); SetError ( true ); return false; } dBuffer.Resize ( iCount ); ARRAY_FOREACH ( i, dBuffer ) dBuffer[i] = GetUint64 (); if ( m_bError ) dBuffer.Reset (); return !m_bError; } ///////////////////////////////////////////////////////////////////////////// NetInputBuffer_c::NetInputBuffer_c ( int iSock ) : InputBuffer_c ( m_dMinibufer, sizeof(m_dMinibufer) ) , m_iSock ( iSock ) , m_bIntr ( false ) , m_iMaxibuffer ( 0 ) , m_pMaxibuffer ( NULL ) {} NetInputBuffer_c::~NetInputBuffer_c () { SafeDeleteArray ( m_pMaxibuffer ); } bool NetInputBuffer_c::ReadFrom ( int iLen, int iTimeout, bool bIntr, bool bAppend ) { assert (!( bAppend && m_pCur!=m_pBuf && m_pBuf!=m_pMaxibuffer )); // only allow appends to untouched maxi-buffers int iCur = bAppend ? m_iLen : 0; m_bIntr = false; if ( iLen<=0 || iLen>g_iMaxPacketSize || m_iSock<0 ) return false; BYTE * pBuf = m_dMinibufer + iCur; if ( ( iCur+iLen )>NET_MINIBUFFER_SIZE ) { if ( ( iCur+iLen )>m_iMaxibuffer ) { if ( iCur ) { BYTE * pNew = new BYTE [ iCur+iLen ]; memcpy ( pNew, m_pCur, iCur ); SafeDeleteArray ( m_pMaxibuffer ); m_pMaxibuffer = pNew; m_iMaxibuffer = iCur+iLen; } else { SafeDeleteArray ( m_pMaxibuffer ); m_pMaxibuffer = new BYTE [ iLen ]; m_iMaxibuffer = iLen; } } pBuf = m_pMaxibuffer; } m_pCur = m_pBuf = pBuf; int iGot = sphSockRead ( m_iSock, pBuf + iCur, iLen, iTimeout, bIntr ); if ( g_bGotSigterm ) { sphLogDebug ( "NetInputBuffer_c::ReadFrom: got SIGTERM, return false" ); m_bError = true; m_bIntr = true; return false; } m_bError = ( iGot!=iLen ); m_bIntr = m_bError && ( sphSockPeekErrno()==EINTR ); m_iLen = m_bError ? 0 : iCur+iLen; return !m_bError; } void NetInputBuffer_c::SendErrorReply ( const char * sTemplate, ... ) { char dBuf [ 2048 ]; const int iHeaderLen = 12; const int iMaxStrLen = sizeof(dBuf) - iHeaderLen - 1; // fill header WORD * p0 = (WORD*)&dBuf[0]; p0[0] = htons ( SEARCHD_ERROR ); // error code p0[1] = 0; // version doesn't matter // fill error string char * sBuf = dBuf + iHeaderLen; va_list ap; va_start ( ap, sTemplate ); vsnprintf ( sBuf, iMaxStrLen, sTemplate, ap ); va_end ( ap ); sBuf[iMaxStrLen] = '\0'; int iStrLen = strlen(sBuf); // fixup lengths DWORD * p4 = (DWORD*)&dBuf[4]; p4[0] = htonl ( 4+iStrLen ); p4[1] = htonl ( iStrLen ); // send! 
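	// NOTE: dBuf now holds a complete bare error reply; as assembled above, its
	// layout is:
	//   bytes 0..1   WORD   status = SEARCHD_ERROR (network byte order)
	//   bytes 2..3   WORD   version (zero, not meaningful for error replies)
	//   bytes 4..7   DWORD  remaining packet length = 4 + message length
	//   bytes 8..11  DWORD  message length
	//   bytes 12..   the message text itself (the trailing NUL is not sent)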
	sphSockSend ( m_iSock, dBuf, iHeaderLen+iStrLen );

	// --console logging
	if ( g_bOptNoDetach && g_eLogFormat!=LOG_FORMAT_SPHINXQL )
		sphInfo ( "query error: %s", sBuf );
}

// fix MSVC 2005 fuckup
#if USE_WINDOWS
#pragma conform(forScope,on)
#endif

/////////////////////////////////////////////////////////////////////////////
// DISTRIBUTED QUERIES
/////////////////////////////////////////////////////////////////////////////

/// remote agent descriptor (stored in a global hash)
struct AgentDesc_t
{
	CSphString		m_sHost;		///< remote searchd host
	int				m_iPort;		///< remote searchd port, 0 if local
	CSphString		m_sPath;		///< local searchd UNIX socket path
	CSphString		m_sIndexes;		///< remote index names to query
	bool			m_bBlackhole;	///< blackhole agent flag
	int				m_iFamily;		///< TCP or UNIX socket
	DWORD			m_uAddr;		///< IP address
	int				m_iStatsIndex;	///< index into global searchd stats array

public:
	AgentDesc_t ()
		: m_iPort ( -1 )
		, m_bBlackhole ( false )
		, m_iFamily ( AF_INET )
		, m_uAddr ( 0 )
		, m_iStatsIndex ( -1 )
	{}
};

/// remote agent state
enum AgentState_e
{
	AGENT_UNUSED,		///< agent is unused for this request
	AGENT_CONNECT,		///< connecting to agent
	AGENT_HELLO,		///< waiting for "VER x" hello
	AGENT_QUERY,		///< query sent, waiting for reply
	AGENT_PREREPLY,		///< query sent, activity detected, need to read reply
	AGENT_REPLY,		///< reading reply
	AGENT_RETRY			///< should retry
};

/// remote agent connection (local per-query state)
struct AgentConn_t : public AgentDesc_t
{
	int				m_iSock;		///< socket number, -1 if not connected
	AgentState_e	m_eState;		///< current state

	bool			m_bSuccess;		///< whether last request was successful (ie. there are available results)
	CSphString		m_sFailure;		///< failure message

	int				m_iReplyStatus;	///< reply status code
	int				m_iReplySize;	///< how many reply bytes are there
	int				m_iReplyRead;	///< how many reply bytes are already received
	BYTE *			m_pReplyBuf;	///< reply buffer

	CSphVector<CSphQueryResult>		m_dResults;		///< multi-query results
	int64_t			m_iWall;		///< wall time spent vs this agent

public:
	AgentConn_t ()
		: m_iSock ( -1 )
		, m_eState ( AGENT_UNUSED )
		, m_bSuccess ( false )
		, m_iReplyStatus ( -1 )
		, m_iReplySize ( 0 )
		, m_iReplyRead ( 0 )
		, m_pReplyBuf ( NULL )
		, m_iWall ( 0 )
	{}

	~AgentConn_t ()
	{
		Close ();
	}

	void Close ()
	{
		SafeDeleteArray ( m_pReplyBuf );
		if ( m_iSock>0 )
		{
			sphSockClose ( m_iSock );
			m_iSock = -1;
			if ( m_eState!=AGENT_RETRY )
				m_eState = AGENT_UNUSED;
		}
		m_iWall += sphMicroTimer ();
	}

	CSphString GetName() const
	{
		CSphString sName;
		switch ( m_iFamily )
		{
			case AF_INET: sName.SetSprintf ( "%s:%u", m_sHost.cstr(), m_iPort ); break;
			case AF_UNIX: sName = m_sPath; break;
		}
		return sName;
	}

	AgentConn_t & operator = ( const AgentDesc_t & rhs )
	{
		m_sHost = rhs.m_sHost;
		m_iPort = rhs.m_iPort;
		m_sPath = rhs.m_sPath;
		m_sIndexes = rhs.m_sIndexes;
		m_bBlackhole = rhs.m_bBlackhole;
		m_iFamily = rhs.m_iFamily;
		m_uAddr = rhs.m_uAddr;
		m_iStatsIndex = rhs.m_iStatsIndex;
		return *this;
	}
};

/// distributed index
struct DistributedIndex_t
{
	CSphVector<AgentDesc_t>		m_dAgents;					///< remote agents
	CSphVector<CSphString>		m_dLocal;					///< local indexes
	int							m_iAgentConnectTimeout;		///< in msec
	int							m_iAgentQueryTimeout;		///< in msec
	bool						m_bToDelete;				///< should be deleted

public:
	DistributedIndex_t ()
		: m_iAgentConnectTimeout ( 1000 )
		, m_iAgentQueryTimeout ( 3000 )
		, m_bToDelete ( false )
	{}
};

/// global distributed index definitions hash
static SmallStringHash_T < DistributedIndex_t > g_hDistIndexes;

/////////////////////////////////////////////////////////////////////////////

struct IRequestBuilder_t : public ISphNoncopyable
{
	virtual
~IRequestBuilder_t () {} // to avoid gcc4 warns virtual void BuildRequest ( const char * sIndexes, NetOutputBuffer_c & tOut, int iAgent ) const = 0; }; struct IReplyParser_t { virtual ~IReplyParser_t () {} // to avoid gcc4 warns virtual bool ParseReply ( MemInputBuffer_c & tReq, AgentConn_t & tAgent, int iAgent ) const = 0; }; #define AGENT_STATS_INC(_agent,_counter) \ if ( g_pStats && _agent.m_iStatsIndex>=0 && _agent.m_iStatsIndexm_dAgentStats [ _agent.m_iStatsIndex ]._counter++; \ g_tStatsMutex.Unlock (); \ } void ConnectToRemoteAgents ( CSphVector & dAgents, bool bRetryOnly ) { ARRAY_FOREACH ( iAgent, dAgents ) { AgentConn_t & tAgent = dAgents[iAgent]; if ( bRetryOnly && ( tAgent.m_eState!=AGENT_RETRY ) ) continue; tAgent.m_eState = AGENT_UNUSED; tAgent.m_bSuccess = false; socklen_t len = 0; struct sockaddr_storage ss; memset ( &ss, 0, sizeof(ss) ); ss.ss_family = (short)tAgent.m_iFamily; if ( ss.ss_family==AF_INET ) { struct sockaddr_in *in = (struct sockaddr_in *)&ss; in->sin_port = htons ( (unsigned short)tAgent.m_iPort ); in->sin_addr.s_addr = tAgent.m_uAddr; len = sizeof(*in); } #if !USE_WINDOWS else if ( ss.ss_family==AF_UNIX ) { struct sockaddr_un *un = (struct sockaddr_un *)&ss; snprintf ( un->sun_path, sizeof(un->sun_path), "%s", tAgent.m_sPath.cstr() ); len = sizeof(*un); } #endif tAgent.m_iSock = socket ( tAgent.m_iFamily, SOCK_STREAM, 0 ); if ( tAgent.m_iSock<0 ) { tAgent.m_sFailure.SetSprintf ( "socket() failed: %s", sphSockError() ); return; } if ( sphSetSockNB ( tAgent.m_iSock )<0 ) { tAgent.m_sFailure.SetSprintf ( "sphSetSockNB() failed: %s", sphSockError() ); return; } // count connects if ( g_pStats ) { g_tStatsMutex.Lock(); g_pStats->m_iAgentConnect++; if ( bRetryOnly ) g_pStats->m_iAgentRetry++; g_tStatsMutex.Unlock(); } if ( !bRetryOnly ) tAgent.m_iWall = 0; tAgent.m_iWall -= sphMicroTimer(); if ( connect ( tAgent.m_iSock, (struct sockaddr*)&ss, len )<0 ) { int iErr = sphSockGetErrno(); if ( iErr!=EINPROGRESS && iErr!=EINTR && iErr!=EWOULDBLOCK ) // check for EWOULDBLOCK is for winsock only { tAgent.Close (); tAgent.m_sFailure.SetSprintf ( "connect() failed: %s", sphSockError(iErr) ); tAgent.m_eState = AGENT_RETRY; // do retry on connect() failures AGENT_STATS_INC ( tAgent, m_iConnectFailures ); return; } else { // connection in progress tAgent.m_eState = AGENT_CONNECT; } } else { // socket connected, ready to read hello message tAgent.m_eState = AGENT_HELLO; } } } int QueryRemoteAgents ( CSphVector & dAgents, int iTimeout, const IRequestBuilder_t & tBuilder, int64_t * pWaited ) { int iAgents = 0; assert ( iTimeout>=0 ); int64_t tmMaxTimer = sphMicroTimer() + iTimeout*1000; // in microseconds for ( ;; ) { fd_set fdsRead, fdsWrite; FD_ZERO ( &fdsRead ); FD_ZERO ( &fdsWrite ); int iMax = 0; bool bDone = true; ARRAY_FOREACH ( i, dAgents ) { const AgentConn_t & tAgent = dAgents[i]; if ( tAgent.m_eState==AGENT_CONNECT || tAgent.m_eState==AGENT_HELLO || tAgent.m_eState==AGENT_QUERY ) { assert ( !tAgent.m_sPath.IsEmpty() || tAgent.m_iPort>0 ); assert ( tAgent.m_iSock>0 ); sphFDSet ( tAgent.m_iSock, ( tAgent.m_eState==AGENT_CONNECT ) ? &fdsWrite : &fdsRead ); iMax = Max ( iMax, tAgent.m_iSock ); if ( tAgent.m_eState!=AGENT_QUERY ) bDone = false; } } if ( bDone ) break; int64_t tmSelect = sphMicroTimer(); int64_t tmMicroLeft = tmMaxTimer - tmSelect; if ( tmMicroLeft<=0 ) break; // FIXME? what about iTimeout==0 case? 
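		// convert the remaining time budget into a timeval for select(); the budget
		// is recomputed from tmMaxTimer on every pass, so early wakeups (EINTR or
		// progress on only some agents) do not stretch the overall iTimeout deadline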
struct timeval tvTimeout; tvTimeout.tv_sec = (int)( tmMicroLeft/ 1000000 ); // full seconds tvTimeout.tv_usec = (int)( tmMicroLeft % 1000000 ); // microseconds // we don't care about exceptfds; they're for OOB only int iSelected = select ( 1+iMax, &fdsRead, &fdsWrite, NULL, &tvTimeout ); if ( pWaited ) *pWaited += sphMicroTimer() - tmSelect; if ( iSelected<=0 ) continue; ARRAY_FOREACH ( i, dAgents ) { AgentConn_t & tAgent = dAgents[i]; // check if connection completed // tricky part, we MUST use write-set ONLY here at this check // even though we can't tell connect() success from just OS send buffer availability // but any check involving read-set just never ever completes, so... if ( tAgent.m_eState==AGENT_CONNECT && FD_ISSET ( tAgent.m_iSock, &fdsWrite ) ) { int iErr = 0; socklen_t iErrLen = sizeof(iErr); getsockopt ( tAgent.m_iSock, SOL_SOCKET, SO_ERROR, (char*)&iErr, &iErrLen ); if ( iErr ) { // connect() failure tAgent.m_sFailure.SetSprintf ( "connect() failed: %s", sphSockError(iErr) ); tAgent.Close (); AGENT_STATS_INC ( tAgent, m_iConnectFailures ); } else { // connect() success tAgent.m_eState = AGENT_HELLO; } continue; } // check if hello was received if ( tAgent.m_eState==AGENT_HELLO && FD_ISSET ( tAgent.m_iSock, &fdsRead ) ) { // read reply int iRemoteVer; int iRes = sphSockRecv ( tAgent.m_iSock, (char*)&iRemoteVer, sizeof(iRemoteVer) ); if ( iRes!=sizeof(iRemoteVer) ) { tAgent.Close (); if ( iRes<0 ) { // network error int iErr = sphSockGetErrno(); tAgent.m_sFailure.SetSprintf ( "handshake failure (errno=%d, msg=%s)", iErr, sphSockError(iErr) ); AGENT_STATS_INC ( tAgent, m_iNetworkErrors ); } else if ( iRes>0 ) { // incomplete reply tAgent.m_sFailure.SetSprintf ( "handshake failure (exp=%d, recv=%d)", (int)sizeof(iRemoteVer), iRes ); AGENT_STATS_INC ( tAgent, m_iWrongReplies ); } else { // agent closed the connection // this might happen in out-of-sync connect-accept case; so let's retry tAgent.m_sFailure = "handshake failure (connection was closed)"; tAgent.m_eState = AGENT_RETRY; AGENT_STATS_INC ( tAgent, m_iUnexpectedClose ); } continue; } iRemoteVer = ntohl ( iRemoteVer ); if (!( iRemoteVer==SPHINX_SEARCHD_PROTO || iRemoteVer==0x01000000UL ) ) // workaround for all the revisions that sent it in host order... { tAgent.m_sFailure.SetSprintf ( "handshake failure (unexpected protocol version=%d)", iRemoteVer ); AGENT_STATS_INC ( tAgent, m_iWrongReplies ); tAgent.Close (); continue; } // send request NetOutputBuffer_c tOut ( tAgent.m_iSock ); tBuilder.BuildRequest ( tAgent.m_sIndexes.cstr(), tOut, i ); bool bFlushed = tOut.Flush (); // FIXME! handle flush failure? 
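				// the request has been flushed; below, TCP_NODELAY is set on TCP agents,
				// presumably to switch off Nagle buffering so that this small
				// request/reply exchange is not held back in the kernel send queue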
#ifdef TCP_NODELAY int iDisable = 1; if ( bFlushed && tAgent.m_iFamily==AF_INET ) setsockopt ( tAgent.m_iSock, IPPROTO_TCP, TCP_NODELAY, (char*)&iDisable, sizeof(iDisable) ); #endif tAgent.m_eState = AGENT_QUERY; iAgents++; continue; } // check if queried agent replied while we were querying others if ( tAgent.m_eState==AGENT_QUERY && FD_ISSET ( tAgent.m_iSock, &fdsRead ) ) { // do not account agent wall time from here; agent is probably ready tAgent.m_iWall += sphMicroTimer(); tAgent.m_eState = AGENT_PREREPLY; continue; } } } ARRAY_FOREACH ( i, dAgents ) { // check if connection timed out AgentConn_t & tAgent = dAgents[i]; if ( tAgent.m_eState!=AGENT_QUERY && tAgent.m_eState!=AGENT_UNUSED && tAgent.m_eState!=AGENT_RETRY && tAgent.m_eState!=AGENT_PREREPLY ) { // technically, we can end up here via two different routes // a) connect() never finishes in given time frame // b) agent actually accept()s the connection but keeps silence // however, there's no way to tell the two from each other // so we just account both cases as connect() failure tAgent.Close (); tAgent.m_sFailure.SetSprintf ( "connect() timed out" ); tAgent.m_eState = AGENT_RETRY; // do retry on connect() failures AGENT_STATS_INC ( tAgent, m_iTimeoutsConnect ); } } return iAgents; } int WaitForRemoteAgents ( CSphVector & dAgents, int iTimeout, IReplyParser_t & tParser, int64_t * pWaited ) { assert ( iTimeout>=0 ); int iAgents = 0; int64_t tmMaxTimer = sphMicroTimer() + iTimeout*1000; // in microseconds for ( ;; ) { fd_set fdsRead; FD_ZERO ( &fdsRead ); int iMax = 0; bool bDone = true; ARRAY_FOREACH ( iAgent, dAgents ) { AgentConn_t & tAgent = dAgents[iAgent]; if ( tAgent.m_bBlackhole ) continue; if ( tAgent.m_eState==AGENT_QUERY || tAgent.m_eState==AGENT_REPLY || tAgent.m_eState==AGENT_PREREPLY ) { assert ( !tAgent.m_sPath.IsEmpty() || tAgent.m_iPort>0 ); assert ( tAgent.m_iSock>0 ); sphFDSet ( tAgent.m_iSock, &fdsRead ); iMax = Max ( iMax, tAgent.m_iSock ); bDone = false; } } if ( bDone ) break; int64_t tmSelect = sphMicroTimer(); int64_t tmMicroLeft = tmMaxTimer - tmSelect; if ( tmMicroLeft<=0 ) // FIXME? what about iTimeout==0 case? 
break; struct timeval tvTimeout; tvTimeout.tv_sec = (int)( tmMicroLeft / 1000000 ); // full seconds tvTimeout.tv_usec = (int)( tmMicroLeft % 1000000 ); // microseconds int iSelected = select ( 1+iMax, &fdsRead, NULL, NULL, &tvTimeout ); if ( pWaited ) *pWaited += sphMicroTimer() - tmSelect; if ( iSelected<=0 ) continue; ARRAY_FOREACH ( iAgent, dAgents ) { AgentConn_t & tAgent = dAgents[iAgent]; if ( tAgent.m_bBlackhole ) continue; if (!( tAgent.m_eState==AGENT_QUERY || tAgent.m_eState==AGENT_REPLY || tAgent.m_eState==AGENT_PREREPLY )) continue; if ( !FD_ISSET ( tAgent.m_iSock, &fdsRead ) ) continue; // if there was no reply yet, read reply header bool bFailure = true; for ( ;; ) { if ( tAgent.m_eState==AGENT_QUERY || tAgent.m_eState==AGENT_PREREPLY ) { if ( tAgent.m_eState==AGENT_PREREPLY ) { tAgent.m_iWall -= sphMicroTimer(); tAgent.m_eState = AGENT_QUERY; } // try to read struct { WORD m_iStatus; WORD m_iVer; int m_iLength; } tReplyHeader; STATIC_SIZE_ASSERT ( tReplyHeader, 8 ); if ( sphSockRecv ( tAgent.m_iSock, (char*)&tReplyHeader, sizeof(tReplyHeader) )!=sizeof(tReplyHeader) ) { // bail out if failed tAgent.m_sFailure.SetSprintf ( "failed to receive reply header" ); AGENT_STATS_INC ( tAgent, m_iNetworkErrors ); break; } tReplyHeader.m_iStatus = ntohs ( tReplyHeader.m_iStatus ); tReplyHeader.m_iVer = ntohs ( tReplyHeader.m_iVer ); tReplyHeader.m_iLength = ntohl ( tReplyHeader.m_iLength ); // check the packet if ( tReplyHeader.m_iLength<0 || tReplyHeader.m_iLength>g_iMaxPacketSize ) // FIXME! add reasonable max packet len too { tAgent.m_sFailure.SetSprintf ( "invalid packet size (status=%d, len=%d, max_packet_size=%d)", tReplyHeader.m_iStatus, tReplyHeader.m_iLength, g_iMaxPacketSize ); AGENT_STATS_INC ( tAgent, m_iWrongReplies ); break; } // header received, switch the status assert ( tAgent.m_pReplyBuf==NULL ); tAgent.m_eState = AGENT_REPLY; tAgent.m_pReplyBuf = new BYTE [ tReplyHeader.m_iLength ]; tAgent.m_iReplySize = tReplyHeader.m_iLength; tAgent.m_iReplyRead = 0; tAgent.m_iReplyStatus = tReplyHeader.m_iStatus; if ( !tAgent.m_pReplyBuf ) { // bail out if failed tAgent.m_sFailure.SetSprintf ( "failed to alloc %d bytes for reply buffer", tAgent.m_iReplySize ); break; } } // if we are reading reply, read another chunk if ( tAgent.m_eState==AGENT_REPLY ) { // do read assert ( tAgent.m_iReplyRead0 ); assert ( tAgent.m_iReplyRead+iRes<=tAgent.m_iReplySize ); tAgent.m_iReplyRead += iRes; } // if reply was fully received, parse it if ( tAgent.m_eState==AGENT_REPLY && tAgent.m_iReplyRead==tAgent.m_iReplySize ) { MemInputBuffer_c tReq ( tAgent.m_pReplyBuf, tAgent.m_iReplySize ); // absolve thy former sins tAgent.m_sFailure = ""; // check for general errors/warnings first if ( tAgent.m_iReplyStatus==SEARCHD_WARNING ) { CSphString sAgentWarning = tReq.GetString (); tAgent.m_sFailure.SetSprintf ( "remote warning: %s", sAgentWarning.cstr() ); } else if ( tAgent.m_iReplyStatus==SEARCHD_RETRY ) { tAgent.m_eState = AGENT_RETRY; CSphString sAgentError = tReq.GetString (); tAgent.m_sFailure.SetSprintf ( "remote warning: %s", sAgentError.cstr() ); break; } else if ( tAgent.m_iReplyStatus!=SEARCHD_OK ) { CSphString sAgentError = tReq.GetString (); tAgent.m_sFailure.SetSprintf ( "remote error: %s", sAgentError.cstr() ); break; } // call parser if ( !tParser.ParseReply ( tReq, tAgent, iAgent ) ) break; // check if there was enough data if ( tReq.GetError() ) { tAgent.m_sFailure.SetSprintf ( "incomplete reply" ); AGENT_STATS_INC ( tAgent, m_iWrongReplies ); break; } // all is well iAgents++; 
tAgent.Close (); tAgent.m_bSuccess = true; } bFailure = false; break; } if ( bFailure ) { tAgent.Close (); tAgent.m_dResults.Reset (); } } } // close timed-out agents ARRAY_FOREACH ( iAgent, dAgents ) { AgentConn_t & tAgent = dAgents[iAgent]; if ( tAgent.m_bBlackhole ) tAgent.Close (); else if ( tAgent.m_eState==AGENT_QUERY || tAgent.m_eState==AGENT_PREREPLY ) { assert ( !tAgent.m_dResults.GetLength() ); assert ( !tAgent.m_bSuccess ); tAgent.Close (); tAgent.m_sFailure.SetSprintf ( "query timed out" ); AGENT_STATS_INC ( tAgent, m_iTimeoutsQuery ); } } return iAgents; } ///////////////////////////////////////////////////////////////////////////// // SEARCH HANDLER ///////////////////////////////////////////////////////////////////////////// struct SearchRequestBuilder_t : public IRequestBuilder_t { SearchRequestBuilder_t ( const CSphVector & dQueries, int iStart, int iEnd ) : m_dQueries ( dQueries ), m_iStart ( iStart ), m_iEnd ( iEnd ) {} virtual void BuildRequest ( const char * sIndexes, NetOutputBuffer_c & tOut, int ) const; protected: int CalcQueryLen ( const char * sIndexes, const CSphQuery & q ) const; void SendQuery ( const char * sIndexes, NetOutputBuffer_c & tOut, const CSphQuery & q ) const; protected: const CSphVector & m_dQueries; int m_iStart; int m_iEnd; }; struct SearchReplyParser_t : public IReplyParser_t, public ISphNoncopyable { SearchReplyParser_t ( int iStart, int iEnd, CSphVector & dMvaStorage, CSphVector & dStringsStorage ) : m_iStart ( iStart ) , m_iEnd ( iEnd ) , m_dMvaStorage ( dMvaStorage ) , m_dStringsStorage ( dStringsStorage ) {} virtual bool ParseReply ( MemInputBuffer_c & tReq, AgentConn_t & tAgent, int iAgent ) const; protected: int m_iStart; int m_iEnd; CSphVector & m_dMvaStorage; CSphVector & m_dStringsStorage; }; ///////////////////////////////////////////////////////////////////////////// int SearchRequestBuilder_t::CalcQueryLen ( const char * sIndexes, const CSphQuery & q ) const { int iReqSize = 108 + 2*sizeof(SphDocID_t) + 4*q.m_iWeights + q.m_sSortBy.Length() + strlen ( sIndexes ) + q.m_sGroupBy.Length() + q.m_sGroupSortBy.Length() + q.m_sGroupDistinct.Length() + q.m_sComment.Length() + q.m_sSelect.Length(); iReqSize += q.m_sRawQuery.IsEmpty() ? q.m_sQuery.Length() : q.m_sRawQuery.Length(); if ( q.m_eRanker==SPH_RANK_EXPR ) iReqSize += q.m_sRankerExpr.Length() + 4; ARRAY_FOREACH ( j, q.m_dFilters ) { const CSphFilterSettings & tFilter = q.m_dFilters[j]; iReqSize += 12 + tFilter.m_sAttrName.Length(); // string attr-name; int type; int exclude-flag switch ( tFilter.m_eType ) { case SPH_FILTER_VALUES: iReqSize += 4 + 8*tFilter.GetNumValues (); break; // int values-count; uint64[] values case SPH_FILTER_RANGE: iReqSize += 16; break; // uint64 min-val, max-val case SPH_FILTER_FLOATRANGE: iReqSize += 8; break; // int/float min-val,max-val } } if ( q.m_bGeoAnchor ) iReqSize += 16 + q.m_sGeoLatAttr.Length() + q.m_sGeoLongAttr.Length(); // string lat-attr, long-attr; float lat, long ARRAY_FOREACH ( i, q.m_dIndexWeights ) iReqSize += 8 + q.m_dIndexWeights[i].m_sName.Length(); // string index-name; int index-weight ARRAY_FOREACH ( i, q.m_dFieldWeights ) iReqSize += 8 + q.m_dFieldWeights[i].m_sName.Length(); // string field-name; int field-weight ARRAY_FOREACH ( i, q.m_dOverrides ) iReqSize += 12 + q.m_dOverrides[i].m_sAttr.Length() + // string attr-name; int type; int values-count ( q.m_dOverrides[i].m_eAttrType==SPH_ATTR_BIGINT ? 
16 : 12 )*q.m_dOverrides[i].m_dValues.GetLength(); // ( bigint id; int/float/bigint value )[] values return iReqSize; } void SearchRequestBuilder_t::SendQuery ( const char * sIndexes, NetOutputBuffer_c & tOut, const CSphQuery & q ) const { tOut.SendInt ( 0 ); // offset is 0 tOut.SendInt ( q.m_iMaxMatches ); // limit is MAX_MATCHES tOut.SendInt ( (DWORD)q.m_eMode ); // match mode tOut.SendInt ( (DWORD)q.m_eRanker ); // ranking mode if ( q.m_eRanker==SPH_RANK_EXPR ) tOut.SendString ( q.m_sRankerExpr.cstr() ); tOut.SendInt ( q.m_eSort ); // sort mode tOut.SendString ( q.m_sSortBy.cstr() ); // sort attr if ( q.m_sRawQuery.IsEmpty() ) tOut.SendString ( q.m_sQuery.cstr() ); else tOut.SendString ( q.m_sRawQuery.cstr() ); // query tOut.SendInt ( q.m_iWeights ); for ( int j=0; j0 ); tAgent.m_dResults.Resize ( iResults ); for ( int iRes=0; iResg_iMaxMatches ) { tAgent.m_sFailure.SetSprintf ( "invalid match count received (count=%d)", iMatches ); return false; } int bAgent64 = tReq.GetInt (); #if !USE_64BIT if ( bAgent64 ) tAgent.m_sFailure.SetSprintf ( "id64 agent, id32 master, docids might be wrapped" ); #endif assert ( !tRes.m_dMatches.GetLength() ); if ( iMatches ) { tRes.m_dMatches.Resize ( iMatches ); ARRAY_FOREACH ( i, tRes.m_dMatches ) { CSphMatch & tMatch = tRes.m_dMatches[i]; tMatch.Reset ( tSchema.GetRowSize() ); tMatch.m_iDocID = bAgent64 ? (SphDocID_t)tReq.GetUint64() : tReq.GetDword(); tMatch.m_iWeight = tReq.GetInt (); for ( int j=0; j>32 ) ); } } } else if ( tAttr.m_eAttrType==SPH_ATTR_FLOAT ) { float fRes = tReq.GetFloat(); tMatch.SetAttr ( tAttr.m_tLocator, sphF2DW(fRes) ); } else if ( tAttr.m_eAttrType==SPH_ATTR_BIGINT ) { tMatch.SetAttr ( tAttr.m_tLocator, tReq.GetUint64() ); } else if ( tAttr.m_eAttrType==SPH_ATTR_STRING ) { CSphString sValue = tReq.GetString(); int iLen = sValue.Length(); int iOff = m_dStringsStorage.GetLength(); tMatch.SetAttr ( tAttr.m_tLocator, iOff ); m_dStringsStorage.Resize ( iOff+3+iLen ); BYTE * pBuf = &m_dStringsStorage[iOff]; pBuf += sphPackStrlen ( pBuf, iLen ); memcpy ( pBuf, sValue.cstr(), iLen ); } else { tMatch.SetAttr ( tAttr.m_tLocator, tReq.GetDword() ); } } } } // read totals (retrieved count, total count, query time, word count) int iRetrieved = tReq.GetInt (); tRes.m_iTotalMatches = (unsigned int)tReq.GetInt (); tRes.m_iQueryTime = tReq.GetInt (); const int iWordsCount = tReq.GetInt (); // FIXME! sanity check? if ( iRetrieved!=iMatches ) { tAgent.m_sFailure.SetSprintf ( "expected %d retrieved documents, got %d", iMatches, iRetrieved ); return false; } // read per-word stats for ( int i=0; i dDst; for ( int i=0; i=0 ) { const CSphColumnInfo & tSrcAttr = tSrc.GetAttr ( iSrcIdx ); // should seamlessly convert ( bool > float ) | ( bool > int > bigint ) ESphAttr eDst = dDst[i].m_eAttrType; ESphAttr eSrc = tSrcAttr.m_eAttrType; bool bSame = ( eDst==eSrc ) || ( ( eDst==SPH_ATTR_FLOAT && eSrc==SPH_ATTR_BOOL ) || ( eDst==SPH_ATTR_BOOL && eSrc==SPH_ATTR_FLOAT ) ) || ( ( eDst==SPH_ATTR_BOOL || eDst==SPH_ATTR_INTEGER || eDst==SPH_ATTR_BIGINT ) && ( eSrc==SPH_ATTR_BOOL || eSrc==SPH_ATTR_INTEGER || eSrc==SPH_ATTR_BIGINT ) ); int iDstBitCount = dDst[i].m_tLocator.m_iBitCount; int iSrcBitCount = tSrcAttr.m_tLocator.m_iBitCount; if ( !bSame ) { // different types? remove the attr iSrcIdx = -1; bEqual = false; } else if ( iDstBitCount!=iSrcBitCount ) { // different bit sizes? 
choose the max one dDst[i].m_tLocator.m_iBitCount = Max ( iDstBitCount, iSrcBitCount ); bEqual = false; if ( iDstBitCountm_iOldVersion ) return true; if ( pQuery->m_iOldGroups>0 || pQuery->m_iOldMinGID!=0 || pQuery->m_iOldMaxGID!=UINT_MAX ) { int iAttr = -1; for ( int i=0; iGetAttrsCount(); i++ ) if ( pSchema->GetAttr(i).m_eAttrType==SPH_ATTR_INTEGER ) { iAttr = i; break; } if ( iAttr<0 ) { sError.SetSprintf ( "index '%s': no group attribute found", sIndexName ); return false; } CSphFilterSettings tFilter; tFilter.m_sAttrName = pSchema->GetAttr(iAttr).m_sName; tFilter.m_dValues.Resize ( pQuery->m_iOldGroups ); ARRAY_FOREACH ( i, tFilter.m_dValues ) tFilter.m_dValues[i] = pQuery->m_pOldGroups[i]; tFilter.m_uMinValue = pQuery->m_iOldMinGID; tFilter.m_uMaxValue = pQuery->m_iOldMaxGID; pQuery->m_dFilters.Add ( tFilter ); } if ( pQuery->m_iOldMinTS!=0 || pQuery->m_iOldMaxTS!=UINT_MAX ) { int iAttr = -1; for ( int i=0; iGetAttrsCount(); i++ ) if ( pSchema->GetAttr(i).m_eAttrType==SPH_ATTR_TIMESTAMP ) { iAttr = i; break; } if ( iAttr<0 ) { sError.SetSprintf ( "index '%s': no timestamp attribute found", sIndexName ); return false; } CSphFilterSettings tFilter; tFilter.m_sAttrName = pSchema->GetAttr(iAttr).m_sName; tFilter.m_uMinValue = pQuery->m_iOldMinTS; tFilter.m_uMaxValue = pQuery->m_iOldMaxTS; pQuery->m_dFilters.Add ( tFilter ); } pQuery->m_iOldVersion = 0; return true; } void ParseIndexList ( const CSphString & sIndexes, CSphVector & dOut ) { CSphString sSplit = sIndexes; char * p = (char*)sSplit.cstr(); while ( *p ) { // skip non-alphas while ( (*p) && !sphIsAlpha(*p) ) p++; if ( !(*p) ) break; // this is my next index name const char * sNext = p; while ( sphIsAlpha(*p) ) p++; assert ( sNext!=p ); if ( *p ) *p++ = '\0'; // if it was not the end yet, we'll continue from next char dOut.Add ( sNext ); } } void CheckQuery ( const CSphQuery & tQuery, CSphString & sError ) { sError = NULL; if ( tQuery.m_eMode<0 || tQuery.m_eMode>SPH_MATCH_TOTAL ) { sError.SetSprintf ( "invalid match mode %d", tQuery.m_eMode ); return; } if ( tQuery.m_eRanker<0 || tQuery.m_eRanker>SPH_RANK_TOTAL ) { sError.SetSprintf ( "invalid ranking mode %d", tQuery.m_eRanker ); return; } if ( tQuery.m_iMaxMatches<1 || tQuery.m_iMaxMatches>g_iMaxMatches ) { sError.SetSprintf ( "per-query max_matches=%d out of bounds (per-server max_matches=%d)", tQuery.m_iMaxMatches, g_iMaxMatches ); return; } if ( tQuery.m_iOffset<0 || tQuery.m_iOffset>=tQuery.m_iMaxMatches ) { sError.SetSprintf ( "offset out of bounds (offset=%d, max_matches=%d)", tQuery.m_iOffset, tQuery.m_iMaxMatches ); return; } if ( tQuery.m_iLimit<0 ) { sError.SetSprintf ( "limit out of bounds (limit=%d)", tQuery.m_iLimit ); return; } if ( tQuery.m_iCutoff<0 ) { sError.SetSprintf ( "cutoff out of bounds (cutoff=%d)", tQuery.m_iCutoff ); return; } if ( tQuery.m_iRetryCount<0 || tQuery.m_iRetryCount>MAX_RETRY_COUNT ) { sError.SetSprintf ( "retry count out of bounds (count=%d)", tQuery.m_iRetryCount ); return; } if ( tQuery.m_iRetryDelay<0 || tQuery.m_iRetryDelay>MAX_RETRY_DELAY ) { sError.SetSprintf ( "retry delay out of bounds (delay=%d)", tQuery.m_iRetryDelay ); return; } } void PrepareQueryEmulation ( CSphQuery * pQuery ) { // sort filters ARRAY_FOREACH ( i, pQuery->m_dFilters ) pQuery->m_dFilters[i].m_dValues.Sort(); // sort overrides ARRAY_FOREACH ( i, pQuery->m_dOverrides ) pQuery->m_dOverrides[i].m_dValues.Sort (); // fixup query pQuery->m_sQuery = pQuery->m_sRawQuery; if ( pQuery->m_eMode==SPH_MATCH_BOOLEAN ) pQuery->m_eRanker = SPH_RANK_NONE; if ( 
pQuery->m_eMode!=SPH_MATCH_ALL && pQuery->m_eMode!=SPH_MATCH_ANY && pQuery->m_eMode!=SPH_MATCH_PHRASE ) return; const char * szQuery = pQuery->m_sRawQuery.cstr (); int iQueryLen = szQuery ? strlen(szQuery) : 0; pQuery->m_sQuery.Reserve ( iQueryLen*2+8 ); char * szRes = (char*) pQuery->m_sQuery.cstr (); char c; if ( pQuery->m_eMode==SPH_MATCH_ANY || pQuery->m_eMode==SPH_MATCH_PHRASE ) *szRes++ = '\"'; while ( ( c = *szQuery++ )!=0 ) { // must be in sync with EscapeString (php api) const char sMagics[] = "<\\()|-!@~\"&/^$="; for ( const char * s = sMagics; *s; s++ ) if ( c==*s ) { *szRes++ = '\\'; break; } *szRes++ = c; } switch ( pQuery->m_eMode ) { case SPH_MATCH_ALL: pQuery->m_eRanker = SPH_RANK_PROXIMITY; *szRes = '\0'; break; case SPH_MATCH_ANY: pQuery->m_eRanker = SPH_RANK_MATCHANY; strncpy ( szRes, "\"/1", 8 ); break; case SPH_MATCH_PHRASE: pQuery->m_eRanker = SPH_RANK_PROXIMITY; *szRes++ = '\"'; *szRes = '\0'; break; default: return; } } bool ParseSearchQuery ( InputBuffer_c & tReq, CSphQuery & tQuery, int iVer, int iMasterVer ) { tQuery.m_iOldVersion = iVer; // v.1.0. mode, limits, weights, ID/TS ranges tQuery.m_iOffset = tReq.GetInt (); tQuery.m_iLimit = tReq.GetInt (); tQuery.m_eMode = (ESphMatchMode) tReq.GetInt (); if ( iVer>=0x110 ) { tQuery.m_eRanker = (ESphRankMode) tReq.GetInt (); if ( tQuery.m_eRanker==SPH_RANK_EXPR ) tQuery.m_sRankerExpr = tReq.GetString(); } tQuery.m_eSort = (ESphSortOrder) tReq.GetInt (); if ( iVer<=0x101 ) tQuery.m_iOldGroups = tReq.GetDwords ( &tQuery.m_pOldGroups, g_iMaxFilterValues, "invalid group count %d (should be in 0..%d range)" ); if ( iVer>=0x102 ) { tQuery.m_sSortBy = tReq.GetString (); tQuery.m_sSortBy.ToLower (); } tQuery.m_sRawQuery = tReq.GetString (); tQuery.m_iWeights = tReq.GetDwords ( (DWORD**)&tQuery.m_pWeights, SPH_MAX_FIELDS, "invalid weight count %d (should be in 0..%d range)" ); tQuery.m_sIndexes = tReq.GetString (); bool bIdrange64 = false; if ( iVer>=0x108 ) bIdrange64 = ( tReq.GetInt()!=0 ); SphDocID_t uMinID = 0; SphDocID_t uMaxID = DOCID_MAX; if ( bIdrange64 ) { uMinID = (SphDocID_t)tReq.GetUint64 (); uMaxID = (SphDocID_t)tReq.GetUint64 (); // FIXME? 
could report clamp here if I'm id32 and client passed id64 range, // but frequently this won't affect anything at all } else { uMinID = tReq.GetDword (); uMaxID = tReq.GetDword (); } if ( iVer<0x108 && uMaxID==0xffffffffUL ) uMaxID = 0; // fixup older clients which send 32-bit UINT_MAX by default if ( uMaxID==0 ) uMaxID = DOCID_MAX; // v.1.0, v.1.1 if ( iVer<=0x101 ) { tQuery.m_iOldMinTS = tReq.GetDword (); tQuery.m_iOldMaxTS = tReq.GetDword (); } // v.1.1 specific if ( iVer==0x101 ) { tQuery.m_iOldMinGID = tReq.GetDword (); tQuery.m_iOldMaxGID = tReq.GetDword (); } // v.1.2 if ( iVer>=0x102 ) { int iAttrFilters = tReq.GetInt (); if ( iAttrFilters>g_iMaxFilters ) { tReq.SendErrorReply ( "too much attribute filters (req=%d, max=%d)", iAttrFilters, g_iMaxFilters ); return false; } const int MAX_ERROR_SET_BUFFER = 128; char sSetError[MAX_ERROR_SET_BUFFER]; tQuery.m_dFilters.Resize ( iAttrFilters ); ARRAY_FOREACH ( iFilter, tQuery.m_dFilters ) { CSphFilterSettings & tFilter = tQuery.m_dFilters[iFilter]; tFilter.m_sAttrName = tReq.GetString (); tFilter.m_sAttrName.ToLower (); snprintf ( sSetError, MAX_ERROR_SET_BUFFER , "invalid attribute '%s'(%d) set length %s (should be in 0..%s range)", tFilter.m_sAttrName.cstr(), iFilter, "%d", "%d" ); if ( iVer>=0x10E ) { // v.1.14+ tFilter.m_eType = (ESphFilter) tReq.GetDword (); switch ( tFilter.m_eType ) { case SPH_FILTER_RANGE: tFilter.m_uMinValue = ( iVer>=0x114 ) ? tReq.GetUint64() : tReq.GetDword (); tFilter.m_uMaxValue = ( iVer>=0x114 ) ? tReq.GetUint64() : tReq.GetDword (); break; case SPH_FILTER_FLOATRANGE: tFilter.m_fMinValue = tReq.GetFloat (); tFilter.m_fMaxValue = tReq.GetFloat (); break; case SPH_FILTER_VALUES: { bool bRes = ( iVer>=0x114 ) ? tReq.GetQwords ( tFilter.m_dValues, g_iMaxFilterValues, sSetError ) : tReq.GetDwords ( tFilter.m_dValues, g_iMaxFilterValues, sSetError ); if ( !bRes ) return false; } break; default: tReq.SendErrorReply ( "unknown filter type (type-id=%d)", tFilter.m_eType ); return false; } } else { // pre-1.14 if ( !tReq.GetDwords ( tFilter.m_dValues, g_iMaxFilterValues, sSetError ) ) return false; if ( !tFilter.m_dValues.GetLength() ) { // 0 length means this is range, not set tFilter.m_uMinValue = tReq.GetDword (); tFilter.m_uMaxValue = tReq.GetDword (); } tFilter.m_eType = tFilter.m_dValues.GetLength() ? SPH_FILTER_VALUES : SPH_FILTER_RANGE; } if ( iVer>=0x106 ) tFilter.m_bExclude = !!tReq.GetDword (); } } // now add id range filter if ( uMinID!=0 || uMaxID!=DOCID_MAX ) { CSphFilterSettings & tFilter = tQuery.m_dFilters.Add(); tFilter.m_sAttrName = "@id"; tFilter.m_eType = SPH_FILTER_RANGE; tFilter.m_uMinValue = uMinID; tFilter.m_uMaxValue = uMaxID; } // v.1.3 if ( iVer>=0x103 ) { tQuery.m_eGroupFunc = (ESphGroupBy) tReq.GetDword (); tQuery.m_sGroupBy = tReq.GetString (); tQuery.m_sGroupBy.ToLower (); } // v.1.4 tQuery.m_iMaxMatches = g_iMaxMatches; if ( iVer>=0x104 ) tQuery.m_iMaxMatches = tReq.GetInt (); // v.1.5, v.1.7 if ( iVer>=0x107 ) { tQuery.m_sGroupSortBy = tReq.GetString (); } else if ( iVer>=0x105 ) { bool bSortByGroup = ( tReq.GetInt()!=0 ); if ( !bSortByGroup ) { char sBuf[256]; switch ( tQuery.m_eSort ) { case SPH_SORT_RELEVANCE: tQuery.m_sGroupSortBy = "@weight desc"; break; case SPH_SORT_ATTR_DESC: case SPH_SORT_ATTR_ASC: snprintf ( sBuf, sizeof(sBuf), "%s %s", tQuery.m_sSortBy.cstr(), tQuery.m_eSort==SPH_SORT_ATTR_ASC ? 
"asc" : "desc" ); tQuery.m_sGroupSortBy = sBuf; break; case SPH_SORT_EXTENDED: tQuery.m_sGroupSortBy = tQuery.m_sSortBy; break; default: tReq.SendErrorReply ( "INTERNAL ERROR: unsupported sort mode %d in groupby sort fixup", tQuery.m_eSort ); return false; } } } // v.1.9 if ( iVer>=0x109 ) tQuery.m_iCutoff = tReq.GetInt(); // v.1.10 if ( iVer>=0x10A ) { tQuery.m_iRetryCount = tReq.GetInt (); tQuery.m_iRetryDelay = tReq.GetInt (); } // v.1.11 if ( iVer>=0x10B ) { tQuery.m_sGroupDistinct = tReq.GetString (); tQuery.m_sGroupDistinct.ToLower(); } // v.1.14 if ( iVer>=0x10E ) { tQuery.m_bGeoAnchor = ( tReq.GetInt()!=0 ); if ( tQuery.m_bGeoAnchor ) { tQuery.m_sGeoLatAttr = tReq.GetString (); tQuery.m_sGeoLongAttr = tReq.GetString (); tQuery.m_fGeoLatitude = tReq.GetFloat (); tQuery.m_fGeoLongitude = tReq.GetFloat (); } } // v.1.15 if ( iVer>=0x10F ) { tQuery.m_dIndexWeights.Resize ( tReq.GetInt() ); // FIXME! add sanity check ARRAY_FOREACH ( i, tQuery.m_dIndexWeights ) { tQuery.m_dIndexWeights[i].m_sName = tReq.GetString (); tQuery.m_dIndexWeights[i].m_iValue = tReq.GetInt (); } } // v.1.17 if ( iVer>=0x111 ) tQuery.m_uMaxQueryMsec = tReq.GetDword (); // v.1.18 if ( iVer>=0x112 ) { tQuery.m_dFieldWeights.Resize ( tReq.GetInt() ); // FIXME! add sanity check ARRAY_FOREACH ( i, tQuery.m_dFieldWeights ) { tQuery.m_dFieldWeights[i].m_sName = tReq.GetString (); tQuery.m_dFieldWeights[i].m_iValue = tReq.GetInt (); } } // v.1.19 if ( iVer>=0x113 ) tQuery.m_sComment = tReq.GetString (); // v.1.21 if ( iVer>=0x115 ) { tQuery.m_dOverrides.Resize ( tReq.GetInt() ); // FIXME! add sanity check ARRAY_FOREACH ( i, tQuery.m_dOverrides ) { CSphAttrOverride & tOverride = tQuery.m_dOverrides[i]; tOverride.m_sAttr = tReq.GetString (); tOverride.m_eAttrType = (ESphAttr) tReq.GetDword (); tOverride.m_dValues.Resize ( tReq.GetInt() ); // FIXME! 
add sanity check ARRAY_FOREACH ( iVal, tOverride.m_dValues ) { CSphAttrOverride::IdValuePair_t & tEntry = tOverride.m_dValues[iVal]; tEntry.m_uDocID = (SphDocID_t) tReq.GetUint64 (); if ( tOverride.m_eAttrType==SPH_ATTR_FLOAT ) tEntry.m_fValue = tReq.GetFloat (); else if ( tOverride.m_eAttrType==SPH_ATTR_BIGINT ) tEntry.m_uValue = tReq.GetUint64 (); else tEntry.m_uValue = tReq.GetDword (); } } } // v.1.22 if ( iVer>=0x116 ) { tQuery.m_sSelect = tReq.GetString (); tQuery.m_bAgent = ( iMasterVer>0 ); CSphString sError; if ( !tQuery.ParseSelectList ( sError ) ) { tReq.SendErrorReply ( "select: %s", sError.cstr() ); return false; } } // master v.1.0 tQuery.m_eCollation = g_eCollation; if ( iMasterVer>=0x001 ) { tQuery.m_eCollation = (ESphCollation)tReq.GetDword(); } ///////////////////// // additional checks ///////////////////// if ( tReq.GetError() ) { tReq.SendErrorReply ( "invalid or truncated request" ); return false; } CSphString sError; CheckQuery ( tQuery, sError ); if ( !sError.IsEmpty() ) { tReq.SendErrorReply ( "%s", sError.cstr() ); return false; } // now prepare it for the engine PrepareQueryEmulation ( &tQuery ); // all ok return true; } ////////////////////////////////////////////////////////////////////////// void LogQueryPlain ( const CSphQuery & tQuery, const CSphQueryResult & tRes ) { assert ( g_eLogFormat==LOG_FORMAT_PLAIN ); if ( ( !g_bQuerySyslog && g_iQueryLogFile<0 ) || !tRes.m_sError.IsEmpty() ) return; char sBuf[2048]; char * p = sBuf; char * pMax = sBuf+sizeof(sBuf)-4; // [time] #if USE_SYSLOG if ( !g_bQuerySyslog ) { #endif *p++ = '['; p += sphFormatCurrentTime ( p, pMax-p ); *p++ = ']'; #if USE_SYSLOG } else p += snprintf ( p, pMax-p, "[query]" ); #endif // querytime sec int iQueryTime = Max ( tRes.m_iQueryTime, 0 ); p += snprintf ( p, pMax-p, " %d.%03d sec", iQueryTime/1000, iQueryTime%1000 ); // optional multi-query multiplier if ( tRes.m_iMultiplier>1 ) p += snprintf ( p, pMax-p, " x%d", tRes.m_iMultiplier ); // [matchmode/numfilters/sortmode matches (offset,limit) static const char * sModes [ SPH_MATCH_TOTAL ] = { "all", "any", "phr", "bool", "ext", "scan", "ext2" }; static const char * sSort [ SPH_SORT_TOTAL ] = { "rel", "attr-", "attr+", "tsegs", "ext", "expr" }; p += snprintf ( p, pMax-p, " [%s/%d/%s "INT64_FMT" (%d,%d)", sModes [ tQuery.m_eMode ], tQuery.m_dFilters.GetLength(), sSort [ tQuery.m_eSort ], tRes.m_iTotalMatches, tQuery.m_iOffset, tQuery.m_iLimit ); // optional groupby info if ( !tQuery.m_sGroupBy.IsEmpty() ) p += snprintf ( p, pMax-p, " @%s", tQuery.m_sGroupBy.cstr() ); // ] [indexes] p += snprintf ( p, pMax-p, "] [%s]", tQuery.m_sIndexes.cstr() ); // optional performance counters if ( g_bIOStats || g_bCpuStats ) { const CSphIOStats & IOStats = sphStopIOStats (); *p++ = ' '; char * pBracket = p; // can't fill yet, will be overwritten by sprintfs if ( g_bIOStats ) p += snprintf ( p, pMax-p, " ios=%d kb=%d.%d ioms=%d.%d", IOStats.m_iReadOps, (int)( IOStats.m_iReadBytes/1024 ), (int)( IOStats.m_iReadBytes%1024 )*10/1024, (int)( IOStats.m_iReadTime/1000 ), (int)( IOStats.m_iReadTime%1000 )/100 ); if ( g_bCpuStats ) p += snprintf ( p, pMax-p, " cpums=%d.%d", (int)( tRes.m_iCpuTime/1000 ), (int)( tRes.m_iCpuTime%1000 )/100 ); *pBracket = '['; if ( p0 ) { va_list ap; va_start ( ap, sFormat ); iLen = vsnprintf ( m_pCur, Left(), sFormat, ap ); va_end ( ap ); } if ( iLen!=-1 && Length()+iLen<=m_iSize ) { m_pCur += iLen; break; } else { Grow(); } } } void AppendEscapedFixupSpace ( const char * sText ) { if ( !sText || !*sText ) return; const char * pBuf = 
sText; int iEsc = 0; for ( ; *pBuf; ) { char s = *pBuf++; iEsc = ( s=='\\' || s=='\'' ) ? ( iEsc+1 ) : iEsc; } int iLen = pBuf-sText; if ( Left()( m_iSize*2 ) ) iNewLen = m_iSize+iAdd; int iUsed = Length(); char * pDynamic = new char [iNewLen]; memcpy ( pDynamic, m_pDynamic ? m_pDynamic : m_sStatic, iUsed ); SafeDeleteArray ( m_pDynamic ); m_pDynamic = pDynamic; m_pCur = pDynamic + iUsed; m_iSize = iNewLen; } }; void FormatOrderBy ( StringBuffer_c * pBuf, const char * sPrefix, ESphSortOrder eSort, const CSphString & sSort ) { assert ( pBuf ); if ( eSort==SPH_SORT_EXTENDED && sSort=="@weight desc" ) return; switch ( eSort ) { case SPH_SORT_ATTR_DESC: pBuf->Append ( " %s %s DESC", sPrefix, sSort.cstr() ); break; case SPH_SORT_ATTR_ASC: pBuf->Append ( " %s %s ASC", sPrefix, sSort.cstr() ); break; case SPH_SORT_TIME_SEGMENTS: pBuf->Append ( " %s TIME_SEGMENT(%s)", sPrefix, sSort.cstr() ); break; case SPH_SORT_EXTENDED: pBuf->Append ( " %s %s", sPrefix, sSort.cstr() ); break; case SPH_SORT_EXPR: pBuf->Append ( " %s BUILTIN_EXPR()", sPrefix ); break; default: pBuf->Append ( " %s mode-%d", sPrefix, (int)eSort ); break; } } void LogQuerySphinxql ( const CSphQuery & q, const CSphQueryResult & tRes, const CSphVector & dAgentTimes ) { assert ( g_eLogFormat==LOG_FORMAT_SPHINXQL ); if ( g_iQueryLogFile<0 ) return; StringBuffer_c tBuf; // get connection id int iCid = ( g_eWorkers!=MPM_THREADS ) ? g_iConnID : *(int*) sphThreadGet ( g_tConnKey ); // time, conn id, wall, found int iQueryTime = Max ( tRes.m_iQueryTime, 0 ); tBuf.Append ( "/""* " ); tBuf.AppendCurrentTime(); if ( tRes.m_iMultiplier>1 ) tBuf.Append ( " conn %d wall %d.%03d x%d found "INT64_FMT" *""/ ", iCid, iQueryTime/1000, iQueryTime%1000, tRes.m_iMultiplier, tRes.m_iTotalMatches ); else tBuf.Append ( " conn %d wall %d.%03d found "INT64_FMT" *""/ ", iCid, iQueryTime/1000, iQueryTime%1000, tRes.m_iTotalMatches ); /////////////////////////////////// // format request as SELECT query /////////////////////////////////// tBuf.Append ( "SELECT %s FROM %s", q.m_sSelect.cstr(), q.m_sIndexes.cstr() ); // WHERE clause // (m_sRawQuery is empty when using MySQL handler) const CSphString & sQuery = q.m_sQuery; if ( !sQuery.IsEmpty() || q.m_dFilters.GetLength() ) { bool bDeflowered = false; tBuf.Append ( " WHERE" ); if ( !sQuery.IsEmpty() ) { tBuf.Append ( " MATCH('" ); tBuf.AppendEscapedFixupSpace ( sQuery.cstr() ); tBuf.Append ( "')" ); bDeflowered = true; } ARRAY_FOREACH ( i, q.m_dFilters ) { if ( bDeflowered ) tBuf.Append ( " AND" ); const CSphFilterSettings & f = q.m_dFilters[i]; switch ( f.m_eType ) { case SPH_FILTER_VALUES: if ( f.m_dValues.GetLength()==1 ) { if ( f.m_bExclude ) tBuf.Append ( " %s!="INT64_FMT, f.m_sAttrName.cstr(), (int64_t)f.m_dValues[0] ); else tBuf.Append ( " %s="INT64_FMT, f.m_sAttrName.cstr(), (int64_t)f.m_dValues[0] ); } else { if ( f.m_bExclude ) tBuf.Append ( " %s NOT IN (", f.m_sAttrName.cstr() ); else tBuf.Append ( " %s IN (", f.m_sAttrName.cstr() ); ARRAY_FOREACH ( j, f.m_dValues ) { if ( j ) tBuf.Append ( ","INT64_FMT, (int64_t)f.m_dValues[j] ); else tBuf.Append ( INT64_FMT, (int64_t)f.m_dValues[j] ); } tBuf.Append ( ")" ); } break; case SPH_FILTER_RANGE: if ( f.m_bExclude ) tBuf.Append ( " %s NOT BETWEEN "INT64_FMT" AND "INT64_FMT, f.m_sAttrName.cstr(), (int64_t)f.m_uMinValue, (int64_t)f.m_uMaxValue ); else tBuf.Append ( " %s BETWEEN "INT64_FMT" AND "INT64_FMT, f.m_sAttrName.cstr(), (int64_t)f.m_uMinValue, (int64_t)f.m_uMaxValue ); break; case SPH_FILTER_FLOATRANGE: if ( f.m_bExclude ) tBuf.Append ( " %s NOT BETWEEN 
%f AND %f", f.m_sAttrName.cstr(), f.m_fMinValue, f.m_fMaxValue ); else tBuf.Append ( " %s BETWEEN %f AND %f", f.m_sAttrName.cstr(), f.m_fMinValue, f.m_fMaxValue ); break; default: tBuf.Append ( " 1 /""* oops, unknown filter type *""/" ); break; } } } // ORDER BY and/or GROUP BY clause if ( q.m_sGroupBy.IsEmpty() ) { if ( !q.m_sSortBy.IsEmpty() ) // case API SPH_MATCH_EXTENDED2 - SPH_SORT_RELEVANCE tBuf.Append ( " ORDER BY %s", q.m_sSortBy.cstr() ); } else { tBuf.Append ( " GROUP BY %s", q.m_sGroupBy.cstr() ); FormatOrderBy ( &tBuf, "WITHIN GROUP ORDER BY", q.m_eSort, q.m_sSortBy ); if ( q.m_sGroupSortBy!="@group desc" ) FormatOrderBy ( &tBuf, "ORDER BY", SPH_SORT_EXTENDED, q.m_sGroupSortBy ); } // LIMIT clause if ( q.m_iOffset!=0 || q.m_iLimit!=20 ) tBuf.Append ( " LIMIT %d,%d", q.m_iOffset, q.m_iLimit ); // OPTION clause int iOpts = 0; if ( q.m_iMaxMatches!=1000 ) { tBuf.Append ( iOpts++ ? ", " : " OPTION " ); tBuf.Append ( "max_matches=%d", q.m_iMaxMatches ); } if ( !q.m_sComment.IsEmpty() ) { tBuf.Append ( iOpts++ ? ", " : " OPTION " ); tBuf.Append ( "comment='%s'", q.m_sComment.cstr() ); // FIXME! escape, replace newlines.. } if ( q.m_eRanker!=SPH_RANK_DEFAULT ) { const char * sRanker = "proximity_bm25"; switch ( q.m_eRanker ) { case SPH_RANK_BM25: sRanker = "bm25"; break; case SPH_RANK_NONE: sRanker = "none"; break; case SPH_RANK_WORDCOUNT: sRanker = "wordcount"; break; case SPH_RANK_PROXIMITY: sRanker = "proximity"; break; case SPH_RANK_MATCHANY: sRanker = "matchany"; break; case SPH_RANK_FIELDMASK: sRanker = "fieldmask"; break; case SPH_RANK_SPH04: sRanker = "sph04"; break; case SPH_RANK_EXPR: sRanker = "expr"; break; default: break; } tBuf.Append ( iOpts++ ? ", " : " OPTION " ); tBuf.Append ( "ranker=%s", sRanker ); } // finish SQL statement tBuf.Append ( ";" ); /////////////// // query stats /////////////// if ( !tRes.m_sError.IsEmpty() ) { // all we have is an error tBuf.Append ( " # error=%s", tRes.m_sError.cstr() ); } else if ( g_bIOStats || g_bCpuStats || dAgentTimes.GetLength() || !tRes.m_sWarning.IsEmpty() ) { // got some extra data, add a comment tBuf.Append ( " #" ); // performance counters if ( g_bIOStats || g_bCpuStats ) { const CSphIOStats & IOStats = sphStopIOStats (); if ( g_bIOStats ) tBuf.Append ( " ios=%d kb=%d.%d ioms=%d.%d", IOStats.m_iReadOps, (int)( IOStats.m_iReadBytes/1024 ), (int)( IOStats.m_iReadBytes%1024 )*10/1024, (int)( IOStats.m_iReadTime/1000 ), (int)( IOStats.m_iReadTime%1000 )/100 ); if ( g_bCpuStats ) tBuf.Append ( " cpums=%d.%d", (int)( tRes.m_iCpuTime/1000 ), (int)( tRes.m_iCpuTime%1000 )/100 ); } // per-agent times if ( dAgentTimes.GetLength() ) { tBuf.Append ( " agents=(" ); ARRAY_FOREACH ( i, dAgentTimes ) tBuf.Append ( i ? 
", %d.%03d" : "%d.%03d", (int)(dAgentTimes[i]/1000000), (int)((dAgentTimes[i]/1000)%1000) ); tBuf.Append ( ")" ); } // warning if ( !tRes.m_sWarning.IsEmpty() ) tBuf.Append ( " warning=%s", tRes.m_sWarning.cstr() ); } // line feed tBuf.Append ( "\n" ); lseek ( g_iQueryLogFile, 0, SEEK_END ); sphWrite ( g_iQueryLogFile, tBuf.cstr(), tBuf.Length() ); } void LogQuery ( const CSphQuery & q, const CSphQueryResult & tRes, const CSphVector & dAgentTimes ) { switch ( g_eLogFormat ) { case LOG_FORMAT_PLAIN: LogQueryPlain ( q, tRes ); break; case LOG_FORMAT_SPHINXQL: LogQuerySphinxql ( q, tRes, dAgentTimes ); break; } } void LogSphinxqlError ( const char * sStmt, const char * sError ) { if ( g_eLogFormat!=LOG_FORMAT_SPHINXQL || g_iQueryLogFile<0 || !sStmt || !sError ) return; // time, conn id, query, error StringBuffer_c tBuf; int iCid = ( g_eWorkers!=MPM_THREADS ) ? g_iConnID : *(int*) sphThreadGet ( g_tConnKey ); tBuf.Append ( "/""* " ); tBuf.AppendCurrentTime(); tBuf.Append ( " conn %d *""/ %s # error=%s\n", iCid, sStmt, sError ); lseek ( g_iQueryLogFile, 0, SEEK_END ); sphWrite ( g_iQueryLogFile, tBuf.cstr(), tBuf.Length() ); } ////////////////////////////////////////////////////////////////////////// // internals attributes are last no need to send them static int SendGetAttrCount ( const CSphSchema & tSchema ) { int iCount = tSchema.GetAttrsCount(); if ( iCount && sphIsSortStringInternal ( tSchema.GetAttr ( iCount-1 ).m_sName.cstr() ) ) { for ( int i=iCount-1; i>=0 && sphIsSortStringInternal ( tSchema.GetAttr(i).m_sName.cstr() ); i-- ) { iCount = i; } } return iCount; } int CalcResultLength ( int iVer, const CSphQueryResult * pRes, const CSphVector & dTag2Pools, bool bExtendedStat ) { int iRespLen = 0; // query status if ( iVer>=0x10D ) { // multi-query status iRespLen += 4; // status code if ( !pRes->m_sError.IsEmpty() ) return iRespLen + 4 +strlen ( pRes->m_sError.cstr() ); if ( !pRes->m_sWarning.IsEmpty() ) iRespLen += 4+strlen ( pRes->m_sWarning.cstr() ); } else if ( iVer>=0x106 ) { // warning message if ( !pRes->m_sWarning.IsEmpty() ) iRespLen += 4 + strlen ( pRes->m_sWarning.cstr() ); } // query stats iRespLen += 20; int iAttrsCount = SendGetAttrCount ( pRes->m_tSchema ); // schema if ( iVer>=0x102 ) { iRespLen += 8; // 4 for field count, 4 for attr count ARRAY_FOREACH ( i, pRes->m_tSchema.m_dFields ) iRespLen += 4 + strlen ( pRes->m_tSchema.m_dFields[i].m_sName.cstr() ); // namelen, name for ( int i=0; im_tSchema.GetAttr(i).m_sName.cstr() ); // namelen, name, type } // matches if ( iVer<0x102 ) iRespLen += 16*pRes->m_iCount; // matches else if ( iVer<0x108 ) iRespLen += ( 8+4*iAttrsCount )*pRes->m_iCount; // matches else iRespLen += 4 + ( 8+4*USE_64BIT+4*iAttrsCount )*pRes->m_iCount; // id64 tag and matches if ( iVer>=0x114 ) { // 64bit matches int iWideAttrs = 0; for ( int i=0; im_tSchema.GetAttr(i).m_eAttrType==SPH_ATTR_BIGINT ) iWideAttrs++; iRespLen += 4*pRes->m_iCount*iWideAttrs; // extra 4 bytes per attr per match } // agents send additional flag from words statistics if ( bExtendedStat ) iRespLen += pRes->m_hWordStats.GetLength(); pRes->m_hWordStats.IterateStart(); while ( pRes->m_hWordStats.IterateNext() ) // per-word stats iRespLen += 12 + strlen ( pRes->m_hWordStats.IterateGetKey().cstr() ); // wordlen, word, docs, hits // MVA and string values CSphVector dMvaItems; CSphVector dStringItems; for ( int i=0; im_tSchema.GetAttr(i); if ( tCol.m_eAttrType==SPH_ATTR_UINT32SET || tCol.m_eAttrType==SPH_ATTR_UINT64SET ) dMvaItems.Add ( tCol.m_tLocator ); if ( 
tCol.m_eAttrType==SPH_ATTR_STRING ) dStringItems.Add ( tCol.m_tLocator ); } if ( iVer>=0x10C && dMvaItems.GetLength() ) { for ( int i=0; im_iCount; i++ ) { const CSphMatch & tMatch = pRes->m_dMatches [ pRes->m_iOffset+i ]; const DWORD * pMvaPool = dTag2Pools [ tMatch.m_iTag ].m_pMva; ARRAY_FOREACH ( j, dMvaItems ) { assert ( tMatch.GetAttr ( dMvaItems[j] )==0 || pMvaPool ); const DWORD * pMva = tMatch.GetAttrMVA ( dMvaItems[j], pMvaPool ); if ( pMva ) iRespLen += pMva[0]*4; // FIXME? maybe add some sanity check here } } } if ( iVer>=0x117 && dStringItems.GetLength() ) { for ( int i=0; im_iCount; i++ ) { const CSphMatch & tMatch = pRes->m_dMatches [ pRes->m_iOffset+i ]; const BYTE * pStrings = dTag2Pools [ tMatch.m_iTag ].m_pStrings; ARRAY_FOREACH ( j, dStringItems ) { DWORD uOffset = (DWORD) tMatch.GetAttr ( dStringItems[j] ); assert ( !uOffset || pStrings ); if ( uOffset ) // magic zero iRespLen += sphUnpackStr ( pStrings+uOffset, NULL ); } } } return iRespLen; } void SendResult ( int iVer, NetOutputBuffer_c & tOut, const CSphQueryResult * pRes, const CSphVector & dTag2Pools, bool bExtendedStat ) { // status if ( iVer>=0x10D ) { // multi-query status bool bError = !pRes->m_sError.IsEmpty(); bool bWarning = !bError && !pRes->m_sWarning.IsEmpty(); if ( bError ) { tOut.SendInt ( SEARCHD_ERROR ); tOut.SendString ( pRes->m_sError.cstr() ); if ( g_bOptNoDetach && g_eLogFormat!=LOG_FORMAT_SPHINXQL ) sphInfo ( "query error: %s", pRes->m_sError.cstr() ); return; } else if ( bWarning ) { tOut.SendInt ( SEARCHD_WARNING ); tOut.SendString ( pRes->m_sWarning.cstr() ); if ( g_bOptNoDetach && g_eLogFormat!=LOG_FORMAT_SPHINXQL ) sphInfo ( "query warning: %s", pRes->m_sWarning.cstr() ); } else { tOut.SendInt ( SEARCHD_OK ); } } else { // single-query warning if ( iVer>=0x106 && !pRes->m_sWarning.IsEmpty() ) tOut.SendString ( pRes->m_sWarning.cstr() ); } int iAttrsCount = SendGetAttrCount ( pRes->m_tSchema ); // send schema if ( iVer>=0x102 ) { tOut.SendInt ( pRes->m_tSchema.m_dFields.GetLength() ); ARRAY_FOREACH ( i, pRes->m_tSchema.m_dFields ) tOut.SendString ( pRes->m_tSchema.m_dFields[i].m_sName.cstr() ); tOut.SendInt ( iAttrsCount ); for ( int i=0; im_tSchema.GetAttr(i); tOut.SendString ( tCol.m_sName.cstr() ); tOut.SendDword ( (DWORD)tCol.m_eAttrType ); } } // send matches CSphAttrLocator iGIDLoc, iTSLoc; if ( iVer<=0x101 ) { for ( int i=0; im_tSchema.GetAttrsCount(); i++ ) { const CSphColumnInfo & tAttr = pRes->m_tSchema.GetAttr(i); if ( iTSLoc.m_iBitOffset<0 && tAttr.m_eAttrType==SPH_ATTR_TIMESTAMP ) iTSLoc = tAttr.m_tLocator; if ( iGIDLoc.m_iBitOffset<0 && tAttr.m_eAttrType==SPH_ATTR_INTEGER ) iGIDLoc = tAttr.m_tLocator; } } tOut.SendInt ( pRes->m_iCount ); if ( iVer>=0x108 ) tOut.SendInt ( USE_64BIT ); for ( int i=0; im_iCount; i++ ) { const CSphMatch & tMatch = pRes->m_dMatches [ pRes->m_iOffset+i ]; #if USE_64BIT if ( iVer>=0x108 ) tOut.SendUint64 ( tMatch.m_iDocID ); else #endif tOut.SendDword ( (DWORD)tMatch.m_iDocID ); if ( iVer<=0x101 ) { tOut.SendDword ( iGIDLoc.m_iBitOffset>=0 ? (DWORD) tMatch.GetAttr ( iGIDLoc ) : 1 ); tOut.SendDword ( iTSLoc.m_iBitOffset>=0 ? 
(DWORD) tMatch.GetAttr ( iTSLoc ) : 1 ); tOut.SendInt ( tMatch.m_iWeight ); } else { tOut.SendInt ( tMatch.m_iWeight ); const DWORD * pMvaPool = dTag2Pools [ tMatch.m_iTag ].m_pMva; const BYTE * pStrings = dTag2Pools [ tMatch.m_iTag ].m_pStrings; assert ( tMatch.m_pStatic || !pRes->m_tSchema.GetStaticSize() ); #if 0 // not correct any more because of internal attrs (such as string sorting ptrs) assert ( tMatch.m_pDynamic || !pRes->m_tSchema.GetDynamicSize() ); assert ( !tMatch.m_pDynamic || (int)tMatch.m_pDynamic[-1]==pRes->m_tSchema.GetDynamicSize() ); #endif for ( int j=0; jm_tSchema.GetAttr(j); if ( tAttr.m_eAttrType==SPH_ATTR_UINT32SET || tAttr.m_eAttrType==SPH_ATTR_UINT64SET ) { assert ( tMatch.GetAttr ( tAttr.m_tLocator )==0 || pMvaPool ); const DWORD * pValues = tMatch.GetAttrMVA ( tAttr.m_tLocator, pMvaPool ); if ( iVer<0x10C || !pValues ) { // for older clients, fixups column value to 0 // for newer clients, means that there are 0 values tOut.SendDword ( 0 ); } else { // send MVA values int iValues = *pValues++; tOut.SendDword ( iValues ); if ( tAttr.m_eAttrType==SPH_ATTR_UINT64SET ) { assert ( ( iValues%2 )==0 ); while ( iValues ) { uint64_t uVal = MVA_UPSIZE ( pValues ); tOut.SendUint64 ( uVal ); pValues += 2; iValues -= 2; } } else { while ( iValues-- ) tOut.SendDword ( *pValues++ ); } } } else if ( tAttr.m_eAttrType==SPH_ATTR_STRING ) { // send string attr if ( iVer<0x117 ) { // for older clients, just send int value of 0 tOut.SendDword ( 0 ); } else { // for newer clients, send binary string DWORD uOffset = (DWORD) tMatch.GetAttr ( tAttr.m_tLocator ); if ( !uOffset ) // magic zero { tOut.SendDword ( 0 ); // null string } else { const BYTE * pStr; assert ( pStrings ); int iLen = sphUnpackStr ( pStrings+uOffset, &pStr ); tOut.SendDword ( iLen ); tOut.SendBytes ( pStr, iLen ); } } } else { // send plain attr if ( tAttr.m_eAttrType==SPH_ATTR_FLOAT ) tOut.SendFloat ( tMatch.GetAttrFloat ( tAttr.m_tLocator ) ); else if ( iVer>=0x114 && tAttr.m_eAttrType==SPH_ATTR_BIGINT ) tOut.SendUint64 ( tMatch.GetAttr ( tAttr.m_tLocator ) ); else tOut.SendDword ( (DWORD)tMatch.GetAttr ( tAttr.m_tLocator ) ); } } } } tOut.SendInt ( pRes->m_dMatches.GetLength() ); tOut.SendAsDword ( pRes->m_iTotalMatches ); tOut.SendInt ( Max ( pRes->m_iQueryTime, 0 ) ); tOut.SendInt ( pRes->m_hWordStats.GetLength() ); pRes->m_hWordStats.IterateStart(); while ( pRes->m_hWordStats.IterateNext() ) { const CSphQueryResultMeta::WordStat_t & tStat = pRes->m_hWordStats.IterateGet(); tOut.SendString ( pRes->m_hWordStats.IterateGetKey().cstr() ); tOut.SendAsDword ( tStat.m_iDocs ); tOut.SendAsDword ( tStat.m_iHits ); if ( bExtendedStat ) tOut.SendByte ( tStat.m_bExpanded ); } } ///////////////////////////////////////////////////////////////////////////// struct AggrResult_t : CSphQueryResult { int m_iTag; ///< current tag CSphVector m_dSchemas; ///< aggregated resultsets schemas (for schema minimization) CSphVector m_dMatchCounts; ///< aggregated resultsets lengths (for schema minimization) CSphVector m_dLockedAttrs; ///< indexes which are hold in the memory untill sending result CSphVector m_dTag2Pools; ///< tag to MVA and strings storage pools mapping }; struct TaggedMatchSorter_fn : public SphAccessor_T { void CopyKey ( CSphMatch * pMed, CSphMatch * pVal ) const { pMed->m_iDocID = pVal->m_iDocID; pMed->m_iTag = pVal->m_iTag; } bool IsLess ( const CSphMatch & a, const CSphMatch & b ) const { return ( a.m_iDocID < b.m_iDocID ) || ( a.m_iDocID==b.m_iDocID && a.m_iTag > b.m_iTag ); } // inherited swap does not work on gcc 
void Swap ( CSphMatch * a, CSphMatch * b ) const { ::Swap ( *a, *b ); } }; // just to avoid the const_cast of the schema (i.e, return writable columns) // also to make possible several members refer to one and same locator. class CVirtualSchema : public CSphSchema { public: inline CSphColumnInfo & LastColumn() { return m_dAttrs.Last(); } inline CSphColumnInfo & GetWAttr ( int iIndex ) { return m_dAttrs[iIndex]; } inline CSphVector & GetWAttrs () { return m_dAttrs; } inline void AlignSizes ( const CSphSchema& tProof ) { m_dDynamicUsed.Resize ( tProof.GetDynamicSize() ); m_iStaticSize = tProof.GetStaticSize(); } }; void MkIdAttribute ( CSphColumnInfo * pId ) { pId->m_tLocator.m_bDynamic = true; pId->m_sName = "id"; pId->m_eAttrType = USE_64BIT ? SPH_ATTR_BIGINT : SPH_ATTR_INTEGER; pId->m_tLocator.m_iBitOffset = -8*(int)sizeof(SphDocID_t); pId->m_tLocator.m_iBitCount = 8*sizeof(SphDocID_t); } void AddIDAttribute ( CVirtualSchema * pSchema ) { assert ( pSchema ); if ( pSchema->GetAttrIndex("id")>=0 ) return; CSphColumnInfo tId; MkIdAttribute ( &tId ); pSchema->GetWAttrs().Insert ( 0, tId ); } inline bool IsIDAttribute ( const CSphColumnInfo & tTarget ) { return tTarget.m_tLocator.IsID(); } // swap the schema into the new one void AdoptSchema ( AggrResult_t * pRes, CSphSchema * pSchema ) { pSchema->m_dFields = pRes->m_tSchema.m_dFields; pRes->m_tSchema = *pSchema; } void AdoptAliasedSchema ( AggrResult_t & tRes, CVirtualSchema * pSchema ) { pSchema->AlignSizes ( tRes.m_tSchema ); AdoptSchema ( &tRes, pSchema ); } void RemapResult ( CSphSchema * pTarget, AggrResult_t * pRes, bool bMultiSchema=true ) { int iCur = 0; CSphVector dMapFrom ( pTarget->GetAttrsCount() ); ARRAY_FOREACH ( iSchema, pRes->m_dSchemas ) { dMapFrom.Resize ( 0 ); CSphSchema & dSchema = ( bMultiSchema ? pRes->m_dSchemas[iSchema] : pRes->m_tSchema ); for ( int i=0; iGetAttrsCount(); i++ ) { dMapFrom.Add ( dSchema.GetAttrIndex ( pTarget->GetAttr(i).m_sName.cstr() ) ); assert ( dMapFrom[i]>=0 || IsIDAttribute ( pTarget->GetAttr(i) ) || sphIsSortStringInternal ( pTarget->GetAttr(i).m_sName.cstr() ) ); } int iLimit = bMultiSchema ? (int)Min ( iCur + pRes->m_dMatchCounts[iSchema], pRes->m_dMatches.GetLength() ) : (int)Min ( pRes->m_iTotalMatches, pRes->m_dMatches.GetLength() ); for ( int i=iCur; im_dMatches[i]; // create new and shiny (and properly sized) match CSphMatch tRow; tRow.Reset ( pTarget->GetDynamicSize() ); tRow.m_iDocID = tMatch.m_iDocID; tRow.m_iWeight = tMatch.m_iWeight; tRow.m_iTag = tMatch.m_iTag; // remap attrs for ( int j=0; jGetAttrsCount(); j++ ) { const CSphColumnInfo & tDst = pTarget->GetAttr(j); // we could keep some of the rows static // and so, avoid the duplication of the data. if ( !tDst.m_tLocator.m_bDynamic ) { assert ( dMapFrom[j]<0 || !dSchema.GetAttr ( dMapFrom[j] ).m_tLocator.m_bDynamic ); tRow.m_pStatic = tMatch.m_pStatic; } else if ( dMapFrom[j]>=0 ) { const CSphColumnInfo & tSrc = dSchema.GetAttr ( dMapFrom[j] ); if ( tDst.m_eAttrType==SPH_ATTR_FLOAT && tSrc.m_eAttrType==SPH_ATTR_BOOL ) { tRow.SetAttrFloat ( tDst.m_tLocator, ( tMatch.GetAttr ( tSrc.m_tLocator )>0 ? 
1.0f : 0.0f ) ); } else { tRow.SetAttr ( tDst.m_tLocator, tMatch.GetAttr ( tSrc.m_tLocator ) ); } } } // swap out old (most likely wrong sized) match Swap ( tMatch, tRow ); } if ( !bMultiSchema ) break; iCur = iLimit; } if ( bMultiSchema ) assert ( iCur==pRes->m_dMatches.GetLength() ); if ( &pRes->m_tSchema!=pTarget ) AdoptSchema ( pRes, pTarget ); } // rebuild the results itemlist expanding stars const CSphVector * ExpandAsterisk ( const CSphSchema & tSchema, const CSphVector & tItems, CSphVector * pExpanded, bool bNoID=false ) { // the result schema usually is the index schema + calculated items + @-items // we need to extract the index schema only - so, look at the items // and cutoff from calculated or @. int iSchemaBound = tSchema.GetAttrsCount(); bool bStar = false; ARRAY_FOREACH ( i, tItems ) { const CSphQueryItem & tItem = tItems[i]; if ( tItem.m_sAlias.cstr() ) { int j = tSchema.GetAttrIndex ( tItem.m_sAlias.cstr() ); if ( j>=0 ) iSchemaBound = Min ( iSchemaBound, j ); } bStar = bStar || tItem.m_sExpr=="*"; } // no stars? Nothing to do. if ( !bStar ) return & tItems; while ( iSchemaBound && tSchema.GetAttr ( iSchemaBound-1 ).m_sName.cstr()[0]=='@' ) iSchemaBound--; ARRAY_FOREACH ( i, tItems ) { if ( tItems[i].m_sExpr=="*" ) { // asterisk expands to 'id' + all the items from the schema if ( tSchema.GetAttrIndex ( "id" )<0 && !bNoID ) { CSphQueryItem& tItem = pExpanded->Add(); tItem.m_sExpr = "id"; } for ( int j=0; jAdd(); tItem.m_sExpr = tSchema.GetAttr ( j ).m_sName; } } else pExpanded->Add ( tItems[i] ); } return pExpanded; } static void RemapStrings ( ISphMatchSorter * pSorter, AggrResult_t & tRes ) { // do match ptr pre-calc if its "order by string" case CSphVector dRemapAttr; if ( pSorter && pSorter->UsesAttrs() && sphSortGetStringRemap ( pSorter->GetSchema(), tRes.m_tSchema, dRemapAttr ) ) { int iCur = 0; ARRAY_FOREACH ( iSchema, tRes.m_dSchemas ) { for ( int i=iCur; iIsGroupby () ) { // groupby sorter does that automagically pSorter->SetMVAPool ( NULL ); // because we must be able to group on @groupby anyway pSorter->SetStringPool ( NULL ); ARRAY_FOREACH ( i, tRes.m_dMatches ) { CSphMatch & tMatch = tRes.m_dMatches[i]; if ( !pSorter->PushGrouped ( tMatch ) ) iDupes++; } } else { // normal sorter needs massasging // sort by docid and then by tag to guarantee the replacement order TaggedMatchSorter_fn fnSort; sphSort ( tRes.m_dMatches.Begin(), tRes.m_dMatches.GetLength(), fnSort, fnSort ); // fold them matches if ( tQuery.m_dIndexWeights.GetLength() ) { // if there were per-index weights, compute weighted ranks sum int iCur = 0; int iMax = tRes.m_dMatches.GetLength(); while ( iCurPush ( tMatch ); } } else { // by default, simply remove dupes (select first by tag) ARRAY_FOREACH ( i, tRes.m_dMatches ) { if ( i==0 || tRes.m_dMatches[i].m_iDocID!=tRes.m_dMatches[i-1].m_iDocID ) pSorter->Push ( tRes.m_dMatches[i] ); else iDupes++; } } } tRes.m_dMatches.Reset (); sphFlattenQueue ( pSorter, &tRes, -1 ); SafeDelete ( pSorter ); return iDupes; } static void RecoverAggregateFunctions ( const CSphQuery & tQuery, const AggrResult_t & tRes ) { ARRAY_FOREACH ( i, tQuery.m_dItems ) { const CSphQueryItem & tItem = tQuery.m_dItems[i]; if ( tItem.m_eAggrFunc==SPH_AGGR_NONE ) continue; for ( int j=0; j ( tRes.m_tSchema.GetAttr(j) ); if ( tCol.m_sName==tItem.m_sAlias ) { assert ( tCol.m_eAggrFunc==SPH_AGGR_NONE ); tCol.m_eAggrFunc = tItem.m_eAggrFunc; } } } } bool MinimizeAggrResult ( AggrResult_t & tRes, const CSphQuery & tQuery, bool bHadLocalIndexes, CSphSchema* pExtraSchema, bool 
bFromSphinxql=false ) { // sanity check int iExpected = 0; ARRAY_FOREACH ( i, tRes.m_dMatchCounts ) iExpected += tRes.m_dMatchCounts[i]; if ( iExpected!=tRes.m_dMatches.GetLength() ) { tRes.m_sError.SetSprintf ( "INTERNAL ERROR: expected %d matches in combined result set, got %d", iExpected, tRes.m_dMatches.GetLength() ); tRes.m_iSuccesses = 0; return false; } if ( !( bFromSphinxql || tRes.m_dMatches.GetLength() ) ) return true; // build minimal schema if ( !tRes.m_dSchemas.GetLength() && bFromSphinxql ) { AddIDAttribute ( (CVirtualSchema*) &tRes.m_tSchema ); return true; } tRes.m_tSchema = tRes.m_dSchemas[0]; bool bAllEqual = true; bool bAgent = tQuery.m_bAgent; bool bUsualApi = !( bAgent || bFromSphinxql ); for ( int i=1; i tExtItems; const CSphVector * pSelectItems = ExpandAsterisk ( tRes.m_tSchema, tQuery.m_dItems, &tExtItems, bUsualApi ); if ( !bUsualApi ) { AddIDAttribute ( (CVirtualSchema*) &tRes.m_tSchema ); ARRAY_FOREACH ( i, tRes.m_dSchemas ) AddIDAttribute ( (CVirtualSchema*) &tRes.m_dSchemas[i] ); } // the final result schema - for collections, etc // we can't construct the random final schema right now, since // the final sorter needs the schema fields in specific order: // shortcuts CSphString sCount("@count"); CSphString sWeight("@weight"); // truly virtual schema which contains unique necessary fields. CVirtualSchema tInternalSchema; // truly virtual schema for final result returning CVirtualSchema tFrontendSchema; tFrontendSchema.GetWAttrs().Resize ( pSelectItems->GetLength() ); CSphVector dKnownItems; int iKnownItems = 0; if ( pSelectItems->GetLength() ) { for ( int i=0; iGetAttrsCount() ) { for ( int j=0; jGetAttrsCount(); j++ ) if ( pExtraSchema->GetAttr(j).m_sName==tCol.m_sName ) bAdd = true; // the extra schema is not null, but empty - and we have no local agents // so, the schema of result is already aligned to the extra, just add it } else if ( !bHadLocalIndexes ) bAdd = true; } if ( !bAdd && bUsualApi && *tCol.m_sName.cstr()=='@' ) bAdd = true; if ( !bAdd ) continue; } // if before all schemas were proved as equal, and the tCol taken from current schema is static - // this is no reason now to make it dynamic. bool bDynamic = ( bAllEqual ? tCol.m_tLocator.m_bDynamic : true ); tInternalSchema.AddAttr ( tCol, bDynamic ); if ( !bDynamic ) { // all schemas are equal, so all offsets and bitcounts also equal. // If we Add the static attribute which already exists in result, we need // not to corrupt it's locator. So, in this case let us force the locator // to the old data. 
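// a locator is just a bit offset/bit count into the match row; AddAttr() above gives the freshly
// added column its own locator inside tInternalSchema, while the matches still carry their original
// static rows, so when all source schemas are equal the copy-back just below keeps the column
// pointing at the original static layout instead of the newly assigned one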
CSphColumnInfo & tNewCol = tInternalSchema.LastColumn(); assert ( !tNewCol.m_tLocator.m_bDynamic ); tNewCol.m_tLocator = tCol.m_tLocator; } } bAllEqual &= ( tRes.m_tSchema.GetAttrsCount()==tInternalSchema.GetAttrsCount() ); } // check if we actually have all required columns already if ( iKnownItemsGetLength() ) { tRes.m_iSuccesses = 0; dKnownItems.Sort(); ARRAY_FOREACH ( j, dKnownItems ) if ( j!=dKnownItems[j] ) { tRes.m_sError.SetSprintf ( "INTERNAL ERROR: the column '%s/%s' does not present in result set schema", (*pSelectItems)[j].m_sExpr.cstr(), (*pSelectItems)[j].m_sAlias.cstr() ); return false; } if ( dKnownItems.GetLength()==pSelectItems->GetLength()-1 ) { tRes.m_sError.SetSprintf ( "INTERNAL ERROR: the column '%s/%s' does not present in result set schema", pSelectItems->Last().m_sExpr.cstr(), pSelectItems->Last().m_sAlias.cstr() ); return false; } tRes.m_sError = "INTERNAL ERROR: some columns does not present in result set schema"; return false; } // finalize the tFrontendSchema - switch back m_iIndex field // and set up the locators for the fields if ( !bAgent ) { ARRAY_FOREACH ( i, tFrontendSchema.GetWAttrs() ) { CSphColumnInfo & tCol = tFrontendSchema.GetWAttr(i); const CSphColumnInfo & tSource = tInternalSchema.GetAttr ( tCol.m_iIndex ); tCol.m_tLocator = tSource.m_tLocator; tCol.m_eAttrType = tSource.m_eAttrType; tCol.m_iIndex = -1; } } // tricky bit // in purely distributed case, all schemas are received from the wire, and miss aggregate functions info // thus, we need to re-assign that info if ( !bHadLocalIndexes ) RecoverAggregateFunctions ( tQuery, tRes ); // we do not need to re-sort if there's exactly one result set if ( tRes.m_iSuccesses==1 ) { // convert all matches to minimal schema if ( !bAllEqual ) RemapResult ( &tInternalSchema, &tRes ); if ( !bAgent ) AdoptAliasedSchema ( tRes, &tFrontendSchema ); return true; } // if there's more than one result set, we need to re-sort the matches // so we need to bring matches to the schema that the *sorter* wants // so we need to create the sorter before conversion // // create queue // at this point, we do not need to compute anything; it all must be here ISphMatchSorter * pSorter = sphCreateQueue ( &tQuery, tRes.m_tSchema, tRes.m_sError, false ); if ( !pSorter ) return false; // reset bAllEqual flag if sorter makes new attributes if ( bAllEqual ) { // at first we count already existed internal attributes // then check if sorter makes more CSphVector dRemapAttr; sphSortGetStringRemap ( tRes.m_tSchema, tRes.m_tSchema, dRemapAttr ); int iRemapCount = dRemapAttr.GetLength(); sphSortGetStringRemap ( pSorter->GetSchema(), tRes.m_tSchema, dRemapAttr ); bAllEqual = ( dRemapAttr.GetLength()<=iRemapCount ); } // sorter expects this tRes.m_tSchema = pSorter->GetSchema(); // convert all matches to sorter schema - at least to manage all static to dynamic if ( !bAllEqual ) RemapResult ( &tRes.m_tSchema, &tRes ); RemapStrings ( pSorter, tRes ); tRes.m_iTotalMatches -= KillAllDupes ( pSorter, tRes, tQuery ); if ( !bAllEqual ) RemapResult ( &tInternalSchema, &tRes, false ); if ( !bAgent ) AdoptAliasedSchema ( tRes, &tFrontendSchema ); return true; } bool MinimizeAggrResultCompat ( AggrResult_t & tRes, const CSphQuery & tQuery, bool bHadLocalIndexes ) { // sanity check int iExpected = 0; ARRAY_FOREACH ( i, tRes.m_dMatchCounts ) iExpected += tRes.m_dMatchCounts[i]; if ( iExpected!=tRes.m_dMatches.GetLength() ) { tRes.m_sError.SetSprintf ( "INTERNAL ERROR: expected %d matches in combined result set, got %d", iExpected, 
tRes.m_dMatches.GetLength() ); return false; } if ( !tRes.m_dMatches.GetLength() ) return true; // build minimal schema bool bAllEqual = true; tRes.m_tSchema = tRes.m_dSchemas[0]; for ( int i=1; iGetWAttrs().Remove(iStar); } if ( !bStar && tQuery.m_dItems.GetLength() ) { CSphSchema tItems; for ( int i=0; i dRemapAttr; sphSortGetStringRemap ( tRes.m_tSchema, tRes.m_tSchema, dRemapAttr ); int iRemapCount = dRemapAttr.GetLength(); sphSortGetStringRemap ( pSorter->GetSchema(), tRes.m_tSchema, dRemapAttr ); bAllEqual = ( dRemapAttr.GetLength()<=iRemapCount ); } // sorter expects this tRes.m_tSchema = pSorter->GetSchema(); } // convert all matches to minimal schema if ( !bAllEqual ) RemapResult ( &tRes.m_tSchema, &tRes ); // we do not need to re-sort if there's exactly one result set if ( tRes.m_iSuccesses==1 ) return true; RemapStrings ( pSorter, tRes ); tRes.m_iTotalMatches -= KillAllDupes ( pSorter, tRes, tQuery ); return true; } void SetupKillListFilter ( CSphFilterSettings & tFilter, const SphAttr_t * pKillList, int nEntries ) { assert ( nEntries && pKillList ); tFilter.m_bExclude = true; tFilter.m_eType = SPH_FILTER_VALUES; tFilter.m_uMinValue = pKillList[0]; tFilter.m_uMaxValue = pKillList[nEntries-1]; tFilter.m_sAttrName = "@id"; tFilter.SetExternalValues ( pKillList, nEntries ); } ///////////////////////////////////////////////////////////////////////////// class CSphSchemaMT : public CSphSchema { public: explicit CSphSchemaMT ( const char * sName="(nameless)" ) : CSphSchema ( sName ), m_pLock ( NULL ) {} void AwareMT() { if ( m_pLock ) return; m_pLock = new CSphRwlock(); m_pLock->Init(); } ~CSphSchemaMT() { if ( m_pLock ) Verify ( m_pLock->Done() ); SafeDelete ( m_pLock ) } // get wlocked entry, only if it is not yet touched inline CSphSchemaMT * GetVirgin () { if ( !m_pLock ) return this; if ( m_pLock->WriteLock() ) { if ( m_dAttrs.GetLength()!=0 ) // not already a virgin { m_pLock->Unlock(); return NULL; } return this; } else { sphLogDebug ( "WriteLock %p failed", this ); assert ( false ); } return NULL; } inline CSphSchemaMT * RLock() { if ( !m_pLock ) return this; if ( !m_pLock->ReadLock() ) { sphLogDebug ( "ReadLock %p failed", this ); assert ( false ); } return this; } inline void UnLock() const { if ( m_pLock ) m_pLock->Unlock(); } private: mutable CSphRwlock * m_pLock; }; class UnlockOnDestroy { public: explicit UnlockOnDestroy ( const CSphSchemaMT * lock ) : m_pLock ( lock ) {} inline ~UnlockOnDestroy() { if ( m_pLock ) m_pLock->UnLock(); } private: const CSphSchemaMT * m_pLock; }; class SearchHandler_c { friend void LocalSearchThreadFunc ( void * pArg ); public: explicit SearchHandler_c ( int iQueries, bool bSphinxql=false ); ~SearchHandler_c(); void RunQueries (); ///< run all queries, get all results void RunUpdates ( const CSphQuery & tQuery, const CSphString & sIndex, CSphAttrUpdateEx * pUpdates ); ///< run Update command instead of Search public: CSphVector m_dQueries; ///< queries which i need to search CSphVector m_dResults; ///< results which i obtained CSphVector m_dFailuresSet; ///< failure logs for each query CSphVector < CSphVector > m_dAgentTimes; ///< per-agent time stats protected: void RunSubset ( int iStart, int iEnd ); ///< run queries against index(es) from first query in the subset void RunLocalSearches ( ISphMatchSorter * pLocalSorter, const char * sDistName ); void RunLocalSearchesMT (); bool RunLocalSearch ( int iLocal, ISphMatchSorter ** ppSorters, CSphQueryResult ** pResults ) const; bool HasExpresions ( int iStart, int iEnd ) const; CSphVector 
m_dMvaStorage; CSphVector m_dStringsStorage; int m_iStart; ///< subset start int m_iEnd; ///< subset end bool m_bMultiQueue; ///< whether current subset is subject to multi-queue optimization CSphVector m_dLocal; ///< local indexes for the current subset mutable CSphVector m_dExtraSchemas; ///< the extra fields for agents bool m_bSphinxql; ///< if the query get from sphinxql - to avoid applying sphinxql magick for others CSphAttrUpdateEx * m_pUpdates; ///< holder for updates mutable CSphMutex m_tLock; mutable SmallStringHash_T m_hUsed; const ServedIndex_t * UseIndex ( int iLocal ) const; void ReleaseIndex ( int iLocal ) const; void OnRunFinished (); }; SearchHandler_c::SearchHandler_c ( int iQueries, bool bSphinxql ) { m_iStart = m_iEnd = 0; m_bMultiQueue = false; m_dQueries.Resize ( iQueries ); m_dResults.Resize ( iQueries ); m_dFailuresSet.Resize ( iQueries ); m_dExtraSchemas.Resize ( iQueries ); m_dAgentTimes.Resize ( iQueries ); m_tLock.Init(); m_bSphinxql = bSphinxql; m_pUpdates = NULL; m_dMvaStorage.Reserve ( 1024 ); m_dMvaStorage.Add ( 0 ); // dummy value m_dStringsStorage.Reserve ( 1024 ); m_dStringsStorage.Add ( 0 ); // dummy value ARRAY_FOREACH ( i, m_dResults ) { m_dResults[i].m_iTag = 1; // first avail tag for local storage ptrs m_dResults[i].m_dTag2Pools.Add (); // reserved index 0 for remote mva storage ptr; we'll fix this up later } } SearchHandler_c::~SearchHandler_c () { m_tLock.Done(); m_hUsed.IterateStart(); while ( m_hUsed.IterateNext() ) { if ( m_hUsed.IterateGet()>0 ) g_pIndexes->GetUnlockedEntry ( m_hUsed.IterateGetKey() ).Unlock(); } } const ServedIndex_t * SearchHandler_c::UseIndex ( int iLocal ) const { assert ( iLocal>=0 && iLocalGetRlockedEntry ( sName ); m_tLock.Lock(); int * pUseCount = m_hUsed ( sName ); assert ( ( m_pUpdates && pUseCount && *pUseCount>0 ) || !m_pUpdates ); const ServedIndex_t * pServed = NULL; if ( pUseCount && *pUseCount>0 ) { pServed = &g_pIndexes->GetUnlockedEntry ( sName ); *pUseCount += ( pServed!=NULL ); } else { pServed = g_pIndexes->GetRlockedEntry ( sName ); if ( pServed ) { if ( pUseCount ) (*pUseCount)++; else m_hUsed.Add ( 1, sName ); } } m_tLock.Unlock(); return pServed; } void SearchHandler_c::ReleaseIndex ( int iLocal ) const { assert ( iLocal>=0 && iLocal=0 ); (*pUseCount)--; if ( !*pUseCount ) g_pIndexes->GetUnlockedEntry ( sName ).Unlock(); assert ( ( m_pUpdates && pUseCount && *pUseCount ) || !m_pUpdates ); m_tLock.Unlock(); } void SearchHandler_c::RunUpdates ( const CSphQuery & tQuery, const CSphString & sIndex, CSphAttrUpdateEx * pUpdates ) { m_pUpdates = pUpdates; m_dQueries[0] = tQuery; m_dQueries[0].m_sIndexes = sIndex; // lets add index to prevent deadlock // as index already r-locker or w-locked at this point m_dLocal.Add ( sIndex ); m_hUsed.Add ( 1, sIndex ); CheckQuery ( tQuery, *pUpdates->m_pError ); if ( !pUpdates->m_pError->IsEmpty() ) return; int64_t tmLocal = -sphMicroTimer(); if ( g_bIOStats ) sphStartIOStats (); RunLocalSearches ( NULL, NULL ); tmLocal += sphMicroTimer(); OnRunFinished(); CSphQueryResult & tRes = m_dResults[0]; tRes.m_iOffset = tQuery.m_iOffset; tRes.m_iCount = Max ( Min ( tQuery.m_iLimit, tRes.m_dMatches.GetLength()-tQuery.m_iOffset ), 0 ); tRes.m_iQueryTime += (int)(tmLocal/1000); tRes.m_iCpuTime += tmLocal; if ( !tRes.m_iSuccesses ) { StrBuf_t sFailures; m_dFailuresSet[0].BuildReport ( sFailures ); *pUpdates->m_pError = sFailures.cstr(); } else if ( !tRes.m_sError.IsEmpty() ) { StrBuf_t sFailures; m_dFailuresSet[0].BuildReport ( sFailures ); tRes.m_sWarning = sFailures.cstr(); // 
FIXME!!! commint warnings too } const CSphIOStats & tIO = sphStopIOStats (); if ( g_pStats ) { g_tStatsMutex.Lock(); g_pStats->m_iQueries += 1; g_pStats->m_iQueryTime += tmLocal; g_pStats->m_iQueryCpuTime += tmLocal; g_pStats->m_iDiskReads += tIO.m_iReadOps; g_pStats->m_iDiskReadTime += tIO.m_iReadTime; g_pStats->m_iDiskReadBytes += tIO.m_iReadBytes; g_tStatsMutex.Unlock(); } LogQuery ( m_dQueries[0], m_dResults[0], m_dAgentTimes[0] ); }; void SearchHandler_c::RunQueries () { /////////////////////////////// // choose path and run queries /////////////////////////////// // check if all queries are to the same index bool bSameIndex = false; if ( m_dQueries.GetLength()>1 ) { bSameIndex = true; ARRAY_FOREACH ( i, m_dQueries ) if ( m_dQueries[i].m_sIndexes!=m_dQueries[0].m_sIndexes ) { bSameIndex = false; break; } } if ( bSameIndex ) { /////////////////////////////// // batch queries to same index /////////////////////////////// RunSubset ( 0, m_dQueries.GetLength()-1 ); ARRAY_FOREACH ( i, m_dQueries ) LogQuery ( m_dQueries[i], m_dResults[i], m_dAgentTimes[i] ); } else { ///////////////////////////////////////////// // fallback; just work each query separately ///////////////////////////////////////////// ARRAY_FOREACH ( i, m_dQueries ) { RunSubset ( i, i ); LogQuery ( m_dQueries[i], m_dResults[i], m_dAgentTimes[i] ); } } OnRunFinished(); } // final fixup void SearchHandler_c::OnRunFinished() { ARRAY_FOREACH ( i, m_dResults ) { m_dResults[i].m_dTag2Pools[0].m_pMva = m_dMvaStorage.Begin(); m_dResults[i].m_dTag2Pools[0].m_pStrings = m_dStringsStorage.Begin(); m_dResults[i].m_iMatches = m_dResults[i].m_dMatches.GetLength(); } } /// return cpu time, in microseconds int64_t sphCpuTimer () { #ifdef HAVE_CLOCK_GETTIME if ( !g_bCpuStats ) return 0; #if defined(CLOCK_PROCESS_CPUTIME_ID) // CPU time (user+sys), Linux style #define LOC_CLOCK CLOCK_PROCESS_CPUTIME_ID #elif defined(CLOCK_PROF) // CPU time (user+sys), FreeBSD style #define LOC_CLOCK CLOCK_PROF #else // POSIX fallback (wall time) #define LOC_CLOCK CLOCK_REALTIME #endif struct timespec tp; if ( clock_gettime ( LOC_CLOCK, &tp ) ) return 0; return tp.tv_sec*1000000 + tp.tv_nsec/1000; #else return 0; #endif } struct LocalSearch_t { int m_iLocal; ISphMatchSorter ** m_ppSorters; CSphQueryResult ** m_ppResults; bool m_bResult; }; struct LocalSearchThreadContext_t { SphThread_t m_tThd; SearchHandler_c * m_pHandler; CSphVector m_pSearches; CrashQuery_t m_tCrashQuery; }; void LocalSearchThreadFunc ( void * pArg ) { LocalSearchThreadContext_t * pContext = (LocalSearchThreadContext_t*) pArg; // setup query guard for thread SphCrashLogger_c tQueryTLS; tQueryTLS.SetupTLS (); SphCrashLogger_c::SetLastQuery ( pContext->m_tCrashQuery ); ARRAY_FOREACH ( i, pContext->m_pSearches ) { LocalSearch_t * pCall = pContext->m_pSearches[i]; pCall->m_bResult = pContext->m_pHandler->RunLocalSearch ( pCall->m_iLocal, pCall->m_ppSorters, pCall->m_ppResults ); } } static void MergeWordStats ( CSphQueryResultMeta & tDstResult, const SmallStringHash_T & hSrc, SearchFailuresLog_c * pLog, const char * sIndex ) { assert ( pLog ); if ( !tDstResult.m_hWordStats.GetLength() ) { // nothing has been set yet; just copy tDstResult.m_hWordStats = hSrc; return; } hSrc.IterateStart(); while ( hSrc.IterateNext() ) { const CSphQueryResultMeta::WordStat_t * pDstStat = tDstResult.m_hWordStats ( hSrc.IterateGetKey() ); const CSphQueryResultMeta::WordStat_t & tSrcStat = hSrc.IterateGet(); // all indexes should produce same words from the query if ( !pDstStat && !tSrcStat.m_bExpanded ) { 
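// a word that is present in this index's stats but missing from the destination, and that was not
// produced by query expansion, means the indexes tokenized or expanded the query differently;
// report it as a per-index failure rather than silently merging mismatched stats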
pLog->SubmitEx ( sIndex, "query words mismatch '%s'", hSrc.IterateGetKey().cstr() ); } tDstResult.AddStat ( hSrc.IterateGetKey(), tSrcStat.m_iDocs, tSrcStat.m_iHits, tSrcStat.m_bExpanded ); } } static void FlattenToRes ( ISphMatchSorter * pSorter, AggrResult_t & tRes ) { assert ( pSorter ); if ( pSorter->GetLength() ) { tRes.m_dMatchCounts.Add ( pSorter->GetLength() ); tRes.m_dSchemas.Add ( tRes.m_tSchema ); PoolPtrs_t & tPoolPtrs = tRes.m_dTag2Pools.Add (); tPoolPtrs.m_pMva = tRes.m_pMva; tPoolPtrs.m_pStrings = tRes.m_pStrings; sphFlattenQueue ( pSorter, &tRes, tRes.m_iTag++ ); // clean up for next index search tRes.m_pMva = NULL; tRes.m_pStrings = NULL; } } void SearchHandler_c::RunLocalSearchesMT () { int64_t tmLocal = sphMicroTimer(); // setup local searches const int iQueries = m_iEnd-m_iStart+1; CSphVector dLocals ( m_dLocal.GetLength() ); CSphVector dResults ( m_dLocal.GetLength()*iQueries ); CSphVector pSorters ( m_dLocal.GetLength()*iQueries ); CSphVector pResults ( m_dLocal.GetLength()*iQueries ); ARRAY_FOREACH ( i, pResults ) pResults[i] = &dResults[i]; ARRAY_FOREACH ( i, m_dLocal ) { dLocals[i].m_iLocal = i; dLocals[i].m_ppSorters = &pSorters [ i*iQueries ]; dLocals[i].m_ppResults = &pResults [ i*iQueries ]; } // setup threads // FIXME! implement better than naive index:thread mapping // FIXME! maybe implement a thread-shared jobs queue CSphVector dThreads ( Min ( g_iDistThreads, dLocals.GetLength() ) ); int iCurThread = 0; ARRAY_FOREACH ( i, dLocals ) { dThreads[iCurThread].m_pSearches.Add ( &dLocals[i] ); iCurThread = ( iCurThread+1 ) % g_iDistThreads; } // prepare for multithread extra schema processing for ( int iQuery=m_iStart; iQuery<=m_iEnd; iQuery++ ) m_dExtraSchemas[iQuery].AwareMT(); CrashQuery_t tCrashQuery = SphCrashLogger_c::GetQuery(); // transfer query info for crash logger to new thread // fire searcher threads ARRAY_FOREACH ( i, dThreads ) { dThreads[i].m_pHandler = this; dThreads[i].m_tCrashQuery = tCrashQuery; sphThreadCreate ( &dThreads[i].m_tThd, LocalSearchThreadFunc, (void*)&dThreads[i] ); // FIXME! check result } // wait for them to complete ARRAY_FOREACH ( i, dThreads ) sphThreadJoin ( &dThreads[i].m_tThd ); // now merge the results ARRAY_FOREACH ( iLocal, dLocals ) { bool bResult = dLocals[iLocal].m_bResult; const char * sLocal = m_dLocal[iLocal].cstr(); if ( !bResult ) { // failed for ( int iQuery=m_iStart; iQuery<=m_iEnd; iQuery++ ) { int iResultIndex = iLocal*iQueries; if ( !m_bMultiQueue ) iResultIndex += iQuery - m_iStart; m_dFailuresSet[iQuery].Submit ( sLocal, dResults[iResultIndex].m_sError.cstr() ); } continue; } // multi-query succeeded for ( int iQuery=m_iStart; iQuery<=m_iEnd; iQuery++ ) { // base result set index // in multi-queue case, the only (!) 
result set actually filled with meta info // in non-multi-queue case, just a first index, we fix it below int iResultIndex = iLocal*iQueries; // current sorter ALWAYS resides at this index, in all cases // (current as in sorter for iQuery-th query against iLocal-th index) int iSorterIndex = iLocal*iQueries + iQuery - m_iStart; if ( !m_bMultiQueue ) { // non-multi-queue case // means that we have mere 1:1 mapping between results and sorters // so let's adjust result set index iResultIndex = iSorterIndex; } else if ( dResults[iResultIndex].m_iMultiplier==-1 ) { // multi-queue case // need to additionally check per-query failures of MultiQueryEx // those are reported through multiplier // note that iSorterIndex just below is NOT a typo // separate errors still go into separate result sets // even though regular meta does not m_dFailuresSet[iQuery].Submit ( sLocal, dResults[iSorterIndex].m_sError.cstr() ); continue; } // no sorter, no fun ISphMatchSorter * pSorter = pSorters[iSorterIndex]; if ( !pSorter ) continue; // this one seems OK AggrResult_t & tRes = m_dResults[iQuery]; CSphQueryResult & tRaw = dResults[iResultIndex]; tRes.m_iSuccesses++; tRes.m_tSchema = pSorter->GetSchema(); tRes.m_iTotalMatches += pSorter->GetTotalCount(); tRes.m_pMva = tRaw.m_pMva; tRes.m_pStrings = tRaw.m_pStrings; MergeWordStats ( tRes, tRaw.m_hWordStats, &m_dFailuresSet[iQuery], sLocal ); // move external attributes storage from tRaw to actual result tRaw.LeakStorages ( tRes ); tRes.m_iMultiplier = m_bMultiQueue ? iQueries : 1; tRes.m_iCpuTime += tRaw.m_iCpuTime / tRes.m_iMultiplier; // extract matches from sorter FlattenToRes ( pSorter, tRes ); } } ARRAY_FOREACH ( i, pSorters ) SafeDelete ( pSorters[i] ); // update our wall time for every result set tmLocal = sphMicroTimer() - tmLocal; for ( int iQuery=m_iStart; iQuery<=m_iEnd; iQuery++ ) m_dResults[iQuery].m_iQueryTime += (int)( tmLocal/1000 ); } // invoked from MT searches. So, must be MT-aware! bool SearchHandler_c::RunLocalSearch ( int iLocal, ISphMatchSorter ** ppSorters, CSphQueryResult ** ppResults ) const { const int iQueries = m_iEnd-m_iStart+1; const ServedIndex_t * pServed = UseIndex ( iLocal ); if ( !pServed ) { // FIXME! submit a failure? 
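// nothing has been r-locked for this index yet (UseIndex() returned NULL), so there is nothing to
// release on this path; every later early return in this function must go through ReleaseIndex()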
return false; } assert ( pServed->m_pIndex ); assert ( pServed->m_bEnabled ); // create sorters int iValidSorters = 0; for ( int i=0; im_sError; const CSphQuery & tQuery = m_dQueries[i+m_iStart]; CSphSchemaMT * pExtraSchemaMT = tQuery.m_bAgent?m_dExtraSchemas[i+m_iStart].GetVirgin():NULL; UnlockOnDestroy dSchemaLock ( pExtraSchemaMT ); assert ( !tQuery.m_iOldVersion || tQuery.m_iOldVersion>=0x102 ); ppSorters[i] = sphCreateQueue ( &tQuery, pServed->m_pIndex->GetMatchSchema(), sError, true, pExtraSchemaMT, m_pUpdates ); if ( ppSorters[i] ) iValidSorters++; } if ( !iValidSorters ) { ReleaseIndex ( iLocal ); return false; } CSphVector dLocked; // setup kill-lists CSphVector dKlists; for ( int i=iLocal+1; im_pIndex->GetKillListSize() ) { SetupKillListFilter ( dKlists.Add(), pKlistIndex->m_pIndex->GetKillList(), pKlistIndex->m_pIndex->GetKillListSize() ); dLocked.Add ( i ); } else { ReleaseIndex ( i ); } } // do the query bool bResult = false; pServed->m_pIndex->SetCacheSize ( g_iMaxCachedDocs, g_iMaxCachedHits ); if ( m_bMultiQueue ) { bResult = pServed->m_pIndex->MultiQuery ( &m_dQueries[m_iStart], ppResults[0], iQueries, ppSorters, &dKlists ); } else { bResult = pServed->m_pIndex->MultiQueryEx ( iQueries, &m_dQueries[m_iStart], ppResults, ppSorters, &dKlists ); } ARRAY_FOREACH ( i, dLocked ) ReleaseIndex ( dLocked[i] ); return bResult; } void SearchHandler_c::RunLocalSearches ( ISphMatchSorter * pLocalSorter, const char * sDistName ) { if ( g_iDistThreads>1 && m_dLocal.GetLength()>1 ) { RunLocalSearchesMT(); return; } CSphVector dLocked; ARRAY_FOREACH ( iLocal, m_dLocal ) { const char * sLocal = m_dLocal[iLocal].cstr(); const ServedIndex_t * pServed = UseIndex ( iLocal ); if ( !pServed ) { if ( sDistName ) for ( int i=m_iStart; i<=m_iEnd; i++ ) m_dFailuresSet[i].SubmitEx ( sDistName, "local index %s missing", sLocal ); continue; } assert ( pServed->m_pIndex ); assert ( pServed->m_bEnabled ); // create sorters CSphVector dSorters ( m_iEnd-m_iStart+1 ); ARRAY_FOREACH ( i, dSorters ) dSorters[i] = NULL; int iValidSorters = 0; for ( int iQuery=m_iStart; iQuery<=m_iEnd; iQuery++ ) { CSphString sError; CSphQuery & tQuery = m_dQueries[iQuery]; CSphSchemaMT * pExtraSchema = tQuery.m_bAgent?m_dExtraSchemas[iQuery].GetVirgin():NULL; UnlockOnDestroy dSchemaLock ( pExtraSchema ); // create sorter, if needed ISphMatchSorter * pSorter = pLocalSorter; if ( !pLocalSorter ) { // fixup old queries if ( !FixupQuery ( &tQuery, &pServed->m_pIndex->GetMatchSchema(), sLocal, sError ) ) { m_dFailuresSet[iQuery].Submit ( sLocal, sError.cstr() ); continue; } // create queue pSorter = sphCreateQueue ( &tQuery, pServed->m_pIndex->GetMatchSchema(), sError, true, pExtraSchema, m_pUpdates ); if ( !pSorter ) { m_dFailuresSet[iQuery].Submit ( sLocal, sError.cstr() ); continue; } if ( !sError.IsEmpty() ) m_dFailuresSet[iQuery].Submit ( sLocal, sError.cstr() ); if ( !sError.IsEmpty() ) m_dFailuresSet[iQuery].Submit ( sLocal, sError.cstr() ); } dSorters[iQuery-m_iStart] = pSorter; iValidSorters++; } if ( !iValidSorters ) { ReleaseIndex ( iLocal ); continue; } // me shortcuts AggrResult_t tStats; CSphQuery * pQuery = &m_dQueries[m_iStart]; // set kill-list int iNumFilters = pQuery->m_dFilters.GetLength (); for ( int i=iLocal+1; im_pIndex->GetKillListSize () ) { CSphFilterSettings tKillListFilter; SetupKillListFilter ( tKillListFilter, pServed->m_pIndex->GetKillList (), pServed->m_pIndex->GetKillListSize () ); pQuery->m_dFilters.Add ( tKillListFilter ); dLocked.Add ( i ); } else { ReleaseIndex ( i ); } } // do the query bool 
bResult = false; pServed->m_pIndex->SetCacheSize ( g_iMaxCachedDocs, g_iMaxCachedHits ); if ( m_bMultiQueue ) { bResult = pServed->m_pIndex->MultiQuery ( &m_dQueries[m_iStart], &tStats, dSorters.GetLength(), &dSorters[0], NULL ); } else { CSphVector dResults ( m_dResults.GetLength() ); ARRAY_FOREACH ( i, m_dResults ) dResults[i] = &m_dResults[i]; bResult = pServed->m_pIndex->MultiQueryEx ( dSorters.GetLength(), &m_dQueries[m_iStart], &dResults[m_iStart], &dSorters[0], NULL ); } // handle results if ( !bResult ) { // failed for ( int iQuery=m_iStart; iQuery<=m_iEnd; iQuery++ ) m_dFailuresSet[iQuery].Submit ( sLocal, m_dResults [ m_bMultiQueue ? m_iStart : iQuery ].m_sError.cstr() ); } else { // multi-query succeeded for ( int iQuery=m_iStart; iQuery<=m_iEnd; iQuery++ ) { // but some of the sorters could had failed at "create sorter" stage ISphMatchSorter * pSorter = dSorters [ iQuery-m_iStart ]; if ( !pSorter ) continue; // this one seems OK AggrResult_t & tRes = m_dResults[iQuery]; // multi-queue only returned one result set meta, so we need to replicate it if ( m_bMultiQueue ) { // these times will be overridden below, but let's be clean tRes.m_iQueryTime += tStats.m_iQueryTime / ( m_iEnd-m_iStart+1 ); tRes.m_iCpuTime += tStats.m_iCpuTime / ( m_iEnd-m_iStart+1 ); tRes.m_pMva = tStats.m_pMva; tRes.m_pStrings = tStats.m_pStrings; MergeWordStats ( tRes, tStats.m_hWordStats, &m_dFailuresSet[iQuery], sLocal ); tRes.m_iMultiplier = m_iEnd-m_iStart+1; } else if ( tRes.m_iMultiplier==-1 ) { m_dFailuresSet[iQuery].Submit ( sLocal, tRes.m_sError.cstr() ); continue; } tRes.m_iSuccesses++; tRes.m_tSchema = pSorter->GetSchema(); tRes.m_iTotalMatches += pSorter->GetTotalCount(); // extract matches from sorter FlattenToRes ( pSorter, tRes ); // move external attributes storage from tStats to actual result tStats.LeakStorages ( tRes ); } } // cleanup kill-list pQuery->m_dFilters.Resize ( iNumFilters ); ARRAY_FOREACH ( i, dLocked ) ReleaseIndex ( dLocked[i] ); dLocked.Resize ( 0 ); // cleanup sorters if ( !pLocalSorter ) ARRAY_FOREACH ( i, dSorters ) SafeDelete ( dSorters[i] ); } } // check expressions into a query to make sure that it's ready for multi query optimization bool SearchHandler_c::HasExpresions ( int iStart, int iEnd ) const { ARRAY_FOREACH ( i, m_dLocal ) { const ServedIndex_t * pServedIndex = UseIndex ( i ); // check that it exists if ( !pServedIndex || !pServedIndex->m_bEnabled ) { if ( pServedIndex ) ReleaseIndex ( i ); continue; } bool bHasExpression = false; const CSphSchema & tSchema = pServedIndex->m_pIndex->GetMatchSchema(); for ( int iCheck=iStart; iCheck<=iEnd && !bHasExpression; iCheck++ ) bHasExpression = sphHasExpressions ( m_dQueries[iCheck], tSchema ); ReleaseIndex ( i ); if ( bHasExpression ) return true; } return false; } void SearchHandler_c::RunSubset ( int iStart, int iEnd ) { m_iStart = iStart; m_iEnd = iEnd; m_dLocal.Reset(); // all my stats int64_t tmSubset = sphMicroTimer(); int64_t tmLocal = 0; int64_t tmWait = 0; int64_t tmCpu = sphCpuTimer (); if ( g_bIOStats ) sphStartIOStats (); // prepare for descent CSphQuery & tFirst = m_dQueries[iStart]; for ( int iRes=iStart; iRes<=iEnd; iRes++ ) m_dResults[iRes].m_iSuccesses = 0; //////////////////////////////////////////////////////////////// // check for single-query, multi-queue optimization possibility //////////////////////////////////////////////////////////////// m_bMultiQueue = ( iStart dAgents; CSphVector dDistLocal; bool bDist = false; int iAgentConnectTimeout = 0, iAgentQueryTimeout = 0; { g_tDistLock.Lock(); 
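// look up the distributed index under g_tDistLock and copy out everything needed later
// (timeouts, local index list, agent descriptors), so the lock can be dropped before any
// long-running connect/query work; the rest of the subset runs on these private copies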
DistributedIndex_t * pDist = g_hDistIndexes ( tFirst.m_sIndexes ); if ( pDist ) { bDist = true; iAgentConnectTimeout = pDist->m_iAgentConnectTimeout; iAgentQueryTimeout = pDist->m_iAgentQueryTimeout; dDistLocal = pDist->m_dLocal; dAgents.Resize ( pDist->m_dAgents.GetLength() ); ARRAY_FOREACH ( i, pDist->m_dAgents ) dAgents[i] = pDist->m_dAgents[i]; } g_tDistLock.Unlock(); } if ( !bDist ) { // they're all local, build the list if ( tFirst.m_sIndexes=="*" ) { // search through all local indexes for ( IndexHashIterator_c it ( g_pIndexes ); it.Next(); ) if ( it.Get ().m_bEnabled ) m_dLocal.Add ( it.GetKey() ); } else { // search through specified local indexes ParseIndexList ( tFirst.m_sIndexes, m_dLocal ); // there should be no distributed indexes in multi-index query int iDistFound = -1; g_tDistLock.Lock(); ARRAY_FOREACH ( i, m_dLocal ) if ( g_hDistIndexes.Exists ( m_dLocal[i] ) ) { iDistFound = i; break; } g_tDistLock.Unlock(); if ( iDistFound!=-1 ) { for ( int iRes=iStart; iRes<=iEnd; iRes++ ) m_dResults[iRes].m_sError.SetSprintf ( "distributed index '%s' in multi-index query found", m_dLocal[iDistFound].cstr() ); return; } ARRAY_FOREACH ( i, m_dLocal ) { const ServedIndex_t * pServedIndex = UseIndex ( i ); // check that it exists if ( !pServedIndex ) { for ( int iRes=iStart; iRes<=iEnd; iRes++ ) m_dResults[iRes].m_sError.SetSprintf ( "unknown local index '%s' in search request", m_dLocal[i].cstr() ); return; } bool bEnabled = pServedIndex->m_bEnabled; ReleaseIndex ( i ); // if it exists but is not enabled, remove it from the list and force recheck if ( !bEnabled ) m_dLocal.Remove ( i-- ); } } // sanity check if ( !m_dLocal.GetLength() ) { for ( int iRes=iStart; iRes<=iEnd; iRes++ ) m_dResults[iRes].m_sError.SetSprintf ( "no enabled local indexes to search" ); return; } } else { // copy local indexes list from distributed definition, but filter out disabled ones ARRAY_FOREACH ( i, dDistLocal ) { int iDistLocal = m_dLocal.GetLength(); m_dLocal.Add ( dDistLocal[i] ); const ServedIndex_t * pServedIndex = UseIndex ( iDistLocal ); bool bValidLocalIndex = pServedIndex && pServedIndex->m_bEnabled; if ( pServedIndex ) ReleaseIndex ( iDistLocal ); if ( !bValidLocalIndex ) m_dLocal.Pop(); } } ///////////////////////////////////////////////////// // optimize single-query, same-schema local searches ///////////////////////////////////////////////////// ISphMatchSorter * pLocalSorter = NULL; while ( iStart==iEnd && m_dLocal.GetLength()>1 ) { CSphString sError; // check if all schemes are equal bool bAllEqual = true; const ServedIndex_t * pFirstIndex = UseIndex ( 0 ); if ( !pFirstIndex ) break; const CSphSchema & tFirstSchema = pFirstIndex->m_pIndex->GetMatchSchema(); for ( int i=1; im_pIndex->GetMatchSchema(), sError ) ) bAllEqual = false; ReleaseIndex ( i ); } // we can reuse the very same sorter if ( bAllEqual && FixupQuery ( &m_dQueries[iStart], &tFirstSchema, "local-sorter", sError ) ) { CSphSchemaMT * pExtraSchemaMT = m_dQueries[iStart].m_bAgent?m_dExtraSchemas[iStart].GetVirgin():NULL; UnlockOnDestroy ExtraLocker ( pExtraSchemaMT ); pLocalSorter = sphCreateQueue ( &m_dQueries[iStart], tFirstSchema, sError, true, pExtraSchemaMT ); } ReleaseIndex ( 0 ); break; } // select lists must have no expressions if ( m_bMultiQueue ) { m_bMultiQueue = !HasExpresions ( iStart, iEnd ); } // these are mutual exclusive assert ( !( m_bMultiQueue && pLocalSorter ) ); /////////////////////////////////////////////////////////// // main query loop (with multiple retries for distributed) 
/////////////////////////////////////////////////////////// tFirst.m_iRetryCount = Min ( Max ( tFirst.m_iRetryCount, 0 ), MAX_RETRY_COUNT ); // paranoid clamp if ( !bDist ) tFirst.m_iRetryCount = 0; for ( int iRetry=0; iRetry<=tFirst.m_iRetryCount; iRetry++ ) { //////////////////////// // issue remote queries //////////////////////// // delay between retries if ( iRetry>0 ) sphSleepMsec ( tFirst.m_iRetryDelay ); // connect to remote agents and query them, if required int iRemote = 0; if ( bDist ) { ConnectToRemoteAgents ( dAgents, iRetry!=0 ); SearchRequestBuilder_t tReqBuilder ( m_dQueries, iStart, iEnd ); iRemote = QueryRemoteAgents ( dAgents, iAgentConnectTimeout, tReqBuilder, &tmWait ); } ///////////////////// // run local queries ////////////////////// // while the remote queries are running, do local searches // FIXME! what if the remote agents finish early, could they timeout? if ( iRetry==0 ) { if ( bDist && !iRemote && !m_dLocal.GetLength() ) { for ( int iRes=iStart; iRes<=iEnd; iRes++ ) m_dResults[iRes].m_sError = "all remote agents unreachable and no available local indexes found"; SafeDelete ( pLocalSorter ); return; } tmLocal = -sphMicroTimer(); RunLocalSearches ( pLocalSorter, bDist ? tFirst.m_sIndexes.cstr() : NULL ); tmLocal += sphMicroTimer(); } /////////////////////// // poll remote queries /////////////////////// // wait for remote queries to complete if ( iRemote ) { SearchReplyParser_t tParser ( iStart, iEnd, m_dMvaStorage, m_dStringsStorage ); int iMsecLeft = iAgentQueryTimeout - (int)( tmLocal/1000 ); int iReplys = WaitForRemoteAgents ( dAgents, Max ( iMsecLeft, 0 ), tParser, &tmWait ); // check if there were valid (though might be 0-matches) replys, and merge them if ( iReplys ) ARRAY_FOREACH ( iAgent, dAgents ) { AgentConn_t & tAgent = dAgents[iAgent]; if ( !tAgent.m_bSuccess ) continue; // merge this agent's results for ( int iRes=iStart; iRes<=iEnd; iRes++ ) { const CSphQueryResult & tRemoteResult = tAgent.m_dResults[iRes-iStart]; // copy errors or warnings if ( !tRemoteResult.m_sError.IsEmpty() ) m_dFailuresSet[iRes].SubmitEx ( tFirst.m_sIndexes.cstr(), "agent %s: remote query error: %s", tAgent.GetName().cstr(), tRemoteResult.m_sError.cstr() ); if ( !tRemoteResult.m_sWarning.IsEmpty() ) m_dFailuresSet[iRes].SubmitEx ( tFirst.m_sIndexes.cstr(), "agent %s: remote query warning: %s", tAgent.GetName().cstr(), tRemoteResult.m_sWarning.cstr() ); if ( tRemoteResult.m_iSuccesses<=0 ) continue; AggrResult_t & tRes = m_dResults[iRes]; tRes.m_iSuccesses++; ARRAY_FOREACH ( i, tRemoteResult.m_dMatches ) { tRes.m_dMatches.Add(); tRes.m_dMatches.Last().Clone ( tRemoteResult.m_dMatches[i], tRemoteResult.m_tSchema.GetRowSize() ); tRes.m_dMatches.Last().m_iTag = 0; // all remote MVA values go to special pool which is at index 0 } tRes.m_dMatchCounts.Add ( tRemoteResult.m_dMatches.GetLength() ); tRes.m_dSchemas.Add ( tRemoteResult.m_tSchema ); // note how we do NOT add per-index weight here; remote agents are all tagged 0 (which contains weight 1) // merge this agent's stats tRes.m_iTotalMatches += tRemoteResult.m_iTotalMatches; tRes.m_iQueryTime += tRemoteResult.m_iQueryTime; // merge this agent's words MergeWordStats ( tRes, tRemoteResult.m_hWordStats, &m_dFailuresSet[iRes], tFirst.m_sIndexes.cstr() ); } // dismissed tAgent.m_dResults.Reset (); tAgent.m_bSuccess = false; tAgent.m_sFailure = ""; } } // check if we need to retry again int iToRetry = 0; if ( bDist ) ARRAY_FOREACH ( i, dAgents ) if ( dAgents[i].m_eState==AGENT_RETRY ) iToRetry++; if ( !iToRetry ) break; } // submit 
failures from failed agents // copy timings from all agents if ( bDist ) { ARRAY_FOREACH ( i, dAgents ) { const AgentConn_t & tAgent = dAgents[i]; for ( int j=iStart; j<=iEnd; j++ ) m_dAgentTimes[j].Add ( tAgent.m_iWall / ( iEnd-iStart+1 ) ); if ( !tAgent.m_bSuccess && !tAgent.m_sFailure.IsEmpty() ) for ( int j=iStart; j<=iEnd; j++ ) m_dFailuresSet[j].SubmitEx ( tFirst.m_sIndexes.cstr(), tAgent.m_bBlackhole ? "blackhole %s: %s" : "agent %s: %s", tAgent.GetName().cstr(), tAgent.m_sFailure.cstr() ); } } ARRAY_FOREACH ( i, m_dResults ) assert ( m_dResults[i].m_iTag==m_dResults[i].m_dTag2Pools.GetLength() ); // cleanup bool bWasLocalSorter = pLocalSorter!=NULL; SafeDelete ( pLocalSorter ); ///////////////////// // merge all results ///////////////////// for ( int iRes=iStart; iRes<=iEnd; iRes++ ) { AggrResult_t & tRes = m_dResults[iRes]; CSphQuery & tQuery = m_dQueries[iRes]; CSphSchemaMT * pExtraSchema = tQuery.m_bAgent?&m_dExtraSchemas[bWasLocalSorter?0:iRes]:NULL; // minimize sorters needs these pointers tRes.m_dTag2Pools[0].m_pMva = m_dMvaStorage.Begin(); tRes.m_dTag2Pools[0].m_pStrings = m_dStringsStorage.Begin(); // if there were no succesful searches at all, this is an error if ( !tRes.m_iSuccesses ) { StrBuf_t sFailures; m_dFailuresSet[iRes].BuildReport ( sFailures ); tRes.m_sError = sFailures.cstr(); continue; } // minimize schema and remove dupes if ( tRes.m_dSchemas.GetLength() ) tRes.m_tSchema = tRes.m_dSchemas[0]; if ( tRes.m_iSuccesses>1 || tQuery.m_dItems.GetLength() ) { if ( g_bCompatResults && !tQuery.m_bAgent ) { if ( !MinimizeAggrResultCompat ( tRes, tQuery, m_dLocal.GetLength()!=0 ) ) return; } else { if ( pExtraSchema ) pExtraSchema->RLock(); UnlockOnDestroy SchemaLocker ( pExtraSchema ); if ( !MinimizeAggrResult ( tRes, tQuery, m_dLocal.GetLength()!=0, pExtraSchema, m_bSphinxql ) ) return; } } if ( !m_dFailuresSet[iRes].IsEmpty() ) { StrBuf_t sFailures; m_dFailuresSet[iRes].BuildReport ( sFailures ); tRes.m_sWarning = sFailures.cstr(); } //////////// // finalize //////////// tRes.m_iOffset = tQuery.m_iOffset; tRes.m_iCount = Max ( Min ( tQuery.m_iLimit, tRes.m_dMatches.GetLength()-tQuery.m_iOffset ), 0 ); } // stats tmSubset = sphMicroTimer() - tmSubset; tmCpu = sphCpuTimer() - tmCpu; // in multi-queue case (1 actual call per N queries), just divide overall query time evenly // otherwise (N calls per N queries), divide common query time overheads evenly const int iQueries = iEnd-iStart+1; if ( m_bMultiQueue ) { for ( int iRes=iStart; iRes<=iEnd; iRes++ ) { m_dResults[iRes].m_iQueryTime = (int)( tmSubset/1000/iQueries ); m_dResults[iRes].m_iCpuTime = tmCpu/iQueries; } } else { int64_t tmAccountedWall = 0; int64_t tmAccountedCpu = 0; for ( int iRes=iStart; iRes<=iEnd; iRes++ ) { tmAccountedWall += m_dResults[iRes].m_iQueryTime*1000; tmAccountedCpu += m_dResults[iRes].m_iCpuTime; } int64_t tmDeltaWall = ( tmSubset - tmAccountedWall ) / iQueries; int64_t tmDeltaCpu = ( tmCpu - tmAccountedCpu ) / iQueries; for ( int iRes=iStart; iRes<=iEnd; iRes++ ) { m_dResults[iRes].m_iQueryTime += (int)(tmDeltaWall/1000); m_dResults[iRes].m_iCpuTime += tmDeltaCpu; } } const CSphIOStats & tIO = sphStopIOStats (); if ( g_pStats ) { g_tStatsMutex.Lock(); g_pStats->m_iQueries += iQueries; g_pStats->m_iQueryTime += tmSubset; g_pStats->m_iQueryCpuTime += tmCpu; if ( bDist && dAgents.GetLength() ) { // do *not* count queries to dist indexes w/o actual remote agents g_pStats->m_iDistQueries++; g_pStats->m_iDistWallTime += tmSubset; g_pStats->m_iDistLocalTime += tmLocal; g_pStats->m_iDistWaitTime += 
tmWait; } g_pStats->m_iDiskReads += tIO.m_iReadOps; g_pStats->m_iDiskReadTime += tIO.m_iReadTime; g_pStats->m_iDiskReadBytes += tIO.m_iReadBytes; g_tStatsMutex.Unlock(); } } bool CheckCommandVersion ( int iVer, int iDaemonVersion, InputBuffer_c & tReq ) { if ( (iVer>>8)!=(iDaemonVersion>>8) ) { tReq.SendErrorReply ( "major command version mismatch (expected v.%d.x, got v.%d.%d)", iDaemonVersion>>8, iVer>>8, iVer&0xff ); return false; } if ( iVer>iDaemonVersion ) { tReq.SendErrorReply ( "client version is higher than daemon version (client is v.%d.%d, daemon is v.%d.%d)", iVer>>8, iVer&0xff, iDaemonVersion>>8, iDaemonVersion&0xff ); return false; } return true; } void SendSearchResponse ( SearchHandler_c & tHandler, InputBuffer_c & tReq, int iSock, int iVer, int iMasterVer ) { // serve the response NetOutputBuffer_c tOut ( iSock ); int iReplyLen = 0; bool bExtendedStat = ( iMasterVer>0 ); if ( iVer<=0x10C ) { assert ( tHandler.m_dQueries.GetLength()==1 ); assert ( tHandler.m_dResults.GetLength()==1 ); const AggrResult_t & tRes = tHandler.m_dResults[0]; if ( !tRes.m_sError.IsEmpty() ) { tReq.SendErrorReply ( "%s", tRes.m_sError.cstr() ); return; } iReplyLen = CalcResultLength ( iVer, &tRes, tRes.m_dTag2Pools, bExtendedStat ); bool bWarning = ( iVer>=0x106 && !tRes.m_sWarning.IsEmpty() ); // send it tOut.SendWord ( (WORD)( bWarning ? SEARCHD_WARNING : SEARCHD_OK ) ); tOut.SendWord ( VER_COMMAND_SEARCH ); tOut.SendInt ( iReplyLen ); SendResult ( iVer, tOut, &tRes, tRes.m_dTag2Pools, bExtendedStat ); } else { ARRAY_FOREACH ( i, tHandler.m_dQueries ) iReplyLen += CalcResultLength ( iVer, &tHandler.m_dResults[i], tHandler.m_dResults[i].m_dTag2Pools, bExtendedStat ); // send it tOut.SendWord ( (WORD)SEARCHD_OK ); tOut.SendWord ( VER_COMMAND_SEARCH ); tOut.SendInt ( iReplyLen ); ARRAY_FOREACH ( i, tHandler.m_dQueries ) SendResult ( iVer, tOut, &tHandler.m_dResults[i], tHandler.m_dResults[i].m_dTag2Pools, bExtendedStat ); } tOut.Flush (); assert ( tOut.GetError()==true || tOut.GetSentCount()==iReplyLen+8 ); // clean up ARRAY_FOREACH ( i, tHandler.m_dQueries ) SafeDeleteArray ( tHandler.m_dQueries[i].m_pWeights ); } void HandleCommandSearch ( int iSock, int iVer, InputBuffer_c & tReq ) { MEMORY ( SPH_MEM_SEARCH_NONSQL ); if ( !CheckCommandVersion ( iVer, VER_COMMAND_SEARCH, tReq ) ) return; int iMasterVer = 0; if ( iVer>=0x118 ) iMasterVer = tReq.GetInt(); // parse request int iQueries = 1; if ( iVer>=0x10D ) iQueries = tReq.GetDword (); if ( g_iMaxBatchQueries>0 && ( iQueries<=0 || iQueries>g_iMaxBatchQueries ) ) { tReq.SendErrorReply ( "bad multi-query count %d (must be in 1..%d range)", iQueries, g_iMaxBatchQueries ); return; } SearchHandler_c tHandler ( iQueries ); ARRAY_FOREACH ( i, tHandler.m_dQueries ) if ( !ParseSearchQuery ( tReq, tHandler.m_dQueries[i], iVer, iMasterVer ) ) return; // run queries, send response tHandler.RunQueries (); SendSearchResponse ( tHandler, tReq, iSock, iVer, iMasterVer ); } ////////////////////////////////////////////////////////////////////////// // SQL PARSER ////////////////////////////////////////////////////////////////////////// enum SqlStmt_e { STMT_PARSE_ERROR = 0, STMT_DUMMY, STMT_SELECT, STMT_INSERT, STMT_REPLACE, STMT_DELETE, STMT_SHOW_WARNINGS, STMT_SHOW_STATUS, STMT_SHOW_META, STMT_SET, STMT_BEGIN, STMT_COMMIT, STMT_ROLLBACK, STMT_CALL, STMT_DESC, STMT_SHOW_TABLES, STMT_UPDATE, STMT_CREATE_FUNC, STMT_DROP_FUNC, STMT_ATTACH_INDEX, STMT_FLUSH_RTINDEX, STMT_SHOW_VARIABLES, STMT_TOTAL }; const char * g_dSqlStmts[STMT_TOTAL] = { "parse_error", "select", 
"insert", "replace", "delete", "show_warnings", "show_status", "show_meta", "set", "begin", "commit", "rollback", "call", "desc", "show_tables", "update" }; /// refcounted vector template < typename T > class RefcountedVector_c : public CSphVector, public ISphRefcounted { }; typedef CSphRefcountedPtr < RefcountedVector_c > AttrValues_p; /// insert value struct SqlInsert_t { int m_iType; CSphString m_sVal; // OPTIMIZE? use char* and point to node? int64_t m_iVal; float m_fVal; AttrValues_p m_pVals; SqlInsert_t () : m_pVals ( NULL ) {} }; /// parser view on a generic node /// CAUTION, nodes get copied in the parser all the time, must keep assignment slim struct SqlNode_t { int m_iStart; int m_iEnd; CSphString m_sValue; int64_t m_iValue; float m_fValue; int m_iInstype; // REMOVE? should not we know this somehow else? AttrValues_p m_pValues; // FIXME? replace with numeric handles into parser state? SqlNode_t() : m_iValue ( 0 ) , m_pValues ( NULL ) {} }; #define YYSTYPE SqlNode_t enum SqlSet_e { SET_LOCAL, SET_GLOBAL_UVAR, SET_GLOBAL_SVAR }; /// parsing result /// one day, we will start subclassing this struct SqlStmt_t { SqlStmt_e m_eStmt; int m_iRowsAffected; const char * m_sStmt; // for error reporting // SELECT specific CSphQuery m_tQuery; CSphVector < CSphRefcountedPtr > m_dRefs; // used by INSERT, DELETE, CALL, DESC, ATTACH CSphString m_sIndex; // INSERT (and CALL) specific CSphVector m_dInsertValues; // reused by CALL CSphVector m_dInsertSchema; int m_iSchemaSz; // DELETE specific CSphVector m_dDeleteIds; // SET specific CSphString m_sSetName; // reused by ATTACH SqlSet_e m_eSet; int m_iSetValue; CSphString m_sSetValue; CSphVector m_dSetValues; bool m_bSetNull; // CALL specific CSphString m_sCallProc; CSphVector m_dCallOptNames; CSphVector m_dCallOptValues; CSphVector m_dCallStrings; // UPDATE specific CSphAttrUpdate m_tUpdate; int m_iListStart; // < the position of start and end of index's definition in original query. int m_iListEnd; // CREATE/DROP FUNCTION specific CSphString m_sUdfName; CSphString m_sUdfLib; ESphAttr m_eUdfType; SqlStmt_t () : m_eStmt ( STMT_PARSE_ERROR ) , m_iRowsAffected ( 0 ) , m_sStmt ( NULL ) , m_iSchemaSz ( 0 ) , m_eSet ( SET_LOCAL ) , m_iSetValue ( 0 ) , m_bSetNull ( false ) , m_iListStart ( -1 ) , m_iListEnd ( -1 ) { m_tQuery.m_eMode = SPH_MATCH_EXTENDED2; // only new and shiny matching and sorting m_tQuery.m_eSort = SPH_SORT_EXTENDED; m_tQuery.m_sSortBy = "@weight desc"; // default order m_tQuery.m_sOrderBy = "@weight desc"; } bool AddSchemaItem ( const char * psName ) { m_dInsertSchema.Add ( psName ); m_dInsertSchema.Last().ToLower(); m_iSchemaSz = m_dInsertSchema.GetLength(); return true; // stub; check if the given field actually exists in the schema } // check if the number of fields which would be inserted is in accordance to the given schema bool CheckInsertIntegrity() { // cheat: if no schema assigned, assume the size of schema as the size of the first row. 
// (if it is wrong, it will be revealed later) if ( !m_iSchemaSz ) m_iSchemaSz = m_dInsertValues.GetLength(); m_iRowsAffected++; return m_dInsertValues.GetLength()==m_iRowsAffected*m_iSchemaSz; } }; struct SqlParser_c : ISphNoncopyable { public: void * m_pScanner; const char * m_pBuf; const char * m_pLastTokenStart; CSphString * m_pParseError; CSphQuery * m_pQuery; bool m_bGotQuery; SqlStmt_t * m_pStmt; CSphVector & m_dStmt; ESphCollation m_eCollation; BYTE m_uSyntaxFlags; public: explicit SqlParser_c ( CSphVector & dStmt, ESphCollation eCollation ); void PushQuery (); bool AddOption ( const SqlNode_t & tIdent, const SqlNode_t & tValue ); bool AddOption ( const SqlNode_t & tIdent, const SqlNode_t & tValue, const CSphString & sArg ); bool AddOption ( const SqlNode_t & tIdent, CSphVector & dNamed ); void AddItem ( SqlNode_t * pExpr, ESphAggrFunc eFunc=SPH_AGGR_NONE, SqlNode_t * pStart=NULL, SqlNode_t * pEnd=NULL ); bool AddItem ( const char * pToken, SqlNode_t * pStart=NULL, SqlNode_t * pEnd=NULL ); void AliasLastItem ( SqlNode_t * pAlias ); void SetSelect ( SqlNode_t * pStart, SqlNode_t * pEnd=NULL ) { if ( m_pQuery ) { if ( pStart && ( m_pQuery->m_iSQLSelectStart<0 || m_pQuery->m_iSQLSelectStart>pStart->m_iStart ) ) m_pQuery->m_iSQLSelectStart = pStart->m_iStart; if ( !pEnd ) pEnd = pStart; if ( pEnd && ( m_pQuery->m_iSQLSelectEnd<0 || m_pQuery->m_iSQLSelectEndm_iEnd ) ) m_pQuery->m_iSQLSelectEnd = pEnd->m_iEnd; } } bool AddSchemaItem ( SqlNode_t * pNode ); void SetValue ( const char * sName, const SqlNode_t& tValue ); bool SetMatch ( const SqlNode_t& tValue ); void AddConst ( int iList, const SqlNode_t& tValue ); void SetStatement ( const SqlNode_t& tName, SqlSet_e eSet ); bool AddFloatRangeFilter ( const CSphString & sAttr, float fMin, float fMax ); bool AddUintRangeFilter ( const CSphString & sAttr, DWORD uMin, DWORD uMax ); bool AddUservarFilter ( const CSphString & sCol, const CSphString & sVar, bool bExclude ); bool AddDistinct ( SqlNode_t * pNewExpr, SqlNode_t * pStart, SqlNode_t * pEnd ); CSphFilterSettings * AddFilter ( const CSphString & sCol, ESphFilter eType ); inline CSphFilterSettings * AddValuesFilter ( const SqlNode_t& sCol ) { return AddFilter ( sCol.m_sValue, SPH_FILTER_VALUES ); } inline bool SetOldSyntax() { m_uSyntaxFlags |= 1; return IsGoodSyntax (); } inline bool SetNewSyntax() { m_uSyntaxFlags |= 2; return IsGoodSyntax (); } bool IsGoodSyntax (); inline bool IsDeprecatedSyntax () const { return m_uSyntaxFlags & 1; } int AllocNamedVec (); CSphVector & GetNamedVec ( int iIndex ); void FreeNamedVec ( int iIndex ); bool UpdateStatement ( SqlNode_t * pNode ); void UpdateAttr ( const CSphString&, const SqlNode_t * pValue, ESphAttr eType = SPH_ATTR_INTEGER ); void UpdateMVAAttr ( const CSphString& sName, const SqlNode_t& dValues ); private: void AutoAlias ( CSphQueryItem & tItem, SqlNode_t * pStart, SqlNode_t * pEnd ); void AddUpdatedAttr ( const CSphString&, ESphAttr eType ); protected: bool m_bNamedVecBusy; CSphVector m_dNamedVec; }; static void AddInsval ( CSphVector & dVec, const SqlNode_t & tNode ) { SqlInsert_t & tIns = dVec.Add(); tIns.m_iType = tNode.m_iInstype; tIns.m_iVal = tNode.m_iValue; // OPTIMIZE? copy conditionally based on type? 
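// SqlInsert_t deliberately keeps every representation of the literal (integer, float, string, and
// the MVA value list); consumers such as CSphMatchVariant::SetAttr() below pick the right field
// based on m_iType, so all of them are copied here unconditionally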
tIns.m_fVal = tNode.m_fValue; tIns.m_sVal = tNode.m_sValue; tIns.m_pVals = tNode.m_pValues; } ////////////////////////////////////////////////////////////////////////// // unused parameter, simply to avoid type clash between all my yylex() functions #define YYLEX_PARAM pParser->m_pScanner, pParser #ifdef NDEBUG #define YY_DECL int yylex ( YYSTYPE * lvalp, void * yyscanner, SqlParser_c * pParser ) #else #define YY_DECL int yylexd ( YYSTYPE * lvalp, void * yyscanner, SqlParser_c * pParser ) #endif #include "llsphinxql.c" void yyerror ( SqlParser_c * pParser, const char * sMessage ) { // flex put a zero at last token boundary; make it undo that yylex_unhold ( pParser->m_pScanner ); // create our error message pParser->m_pParseError->SetSprintf ( "sphinxql: %s near '%s'", sMessage, pParser->m_pLastTokenStart ? pParser->m_pLastTokenStart : "(null)" ); // fixup TOK_xxx thingies char * s = const_cast ( pParser->m_pParseError->cstr() ); char * d = s; while ( *s ) { if ( strncmp ( s, "TOK_", 4 )==0 ) s += 4; else *d++ = *s++; } *d = '\0'; } #ifndef NDEBUG // using a proxy to be possible to debug inside yylex int yylex ( YYSTYPE * lvalp, void * yyscanner, SqlParser_c * pParser ) { int res = yylexd ( lvalp, yyscanner, pParser ); return res; } #endif #include "yysphinxql.c" ////////////////////////////////////////////////////////////////////////// class CSphMatchVariant : public CSphMatch { public: inline static SphAttr_t ToInt ( const SqlInsert_t & tVal ) { switch ( tVal.m_iType ) { case TOK_QUOTED_STRING : return strtoul ( tVal.m_sVal.cstr(), NULL, 10 ); // FIXME? report conversion error? case TOK_CONST_INT: return int(tVal.m_iVal); case TOK_CONST_FLOAT: return int(tVal.m_fVal); // FIXME? report conversion error } return 0; } inline static SphAttr_t ToBigInt ( const SqlInsert_t & tVal ) { switch ( tVal.m_iType ) { case TOK_QUOTED_STRING : return strtoll ( tVal.m_sVal.cstr(), NULL, 10 ); // FIXME? report conversion error? case TOK_CONST_INT: return tVal.m_iVal; case TOK_CONST_FLOAT: return int(tVal.m_fVal); // FIXME? report conversion error? } return 0; } #if USE_64BIT #define ToDocid ToBigInt #else #define ToDocid ToInt #endif // USE_64BIT bool SetAttr ( const CSphAttrLocator & tLoc, const SqlInsert_t & tVal, ESphAttr eTargetType ) { switch ( eTargetType ) { case SPH_ATTR_INTEGER: case SPH_ATTR_TIMESTAMP: CSphMatch::SetAttr ( tLoc, ToInt(tVal) ); break; case SPH_ATTR_BIGINT: CSphMatch::SetAttr ( tLoc, ToBigInt(tVal) ); break; case SPH_ATTR_FLOAT: if ( tVal.m_iType==TOK_QUOTED_STRING ) SetAttrFloat ( tLoc, (float)strtod ( tVal.m_sVal.cstr(), NULL ) ); // FIXME? report conversion error? else if ( tVal.m_iType==TOK_CONST_INT ) SetAttrFloat ( tLoc, float(tVal.m_iVal) ); // FIXME? report conversion error? 
else if ( tVal.m_iType==TOK_CONST_FLOAT ) SetAttrFloat ( tLoc, tVal.m_fVal ); break; case SPH_ATTR_STRING: case SPH_ATTR_UINT32SET: case SPH_ATTR_UINT64SET: CSphMatch::SetAttr ( tLoc, 0 ); break; default: return false; }; return true; } inline bool SetDefaultAttr ( const CSphAttrLocator & tLoc, ESphAttr eTargetType ) { SqlInsert_t tVal; tVal.m_iType = TOK_CONST_INT; tVal.m_iVal = 0; return SetAttr ( tLoc, tVal, eTargetType ); } }; SqlParser_c::SqlParser_c ( CSphVector & dStmt, ESphCollation eCollation ) : m_pQuery ( NULL ) , m_pStmt ( NULL ) , m_dStmt ( dStmt ) , m_eCollation ( eCollation ) , m_uSyntaxFlags ( 0 ) , m_bNamedVecBusy ( false ) { assert ( !m_dStmt.GetLength() ); PushQuery (); } void SqlParser_c::PushQuery () { assert ( m_dStmt.GetLength() || ( !m_pQuery && !m_pStmt ) ); // post set proper result-set order if ( m_dStmt.GetLength() ) { if ( m_pQuery->m_sGroupBy.IsEmpty() ) m_pQuery->m_sSortBy = m_pQuery->m_sOrderBy; else m_pQuery->m_sGroupSortBy = m_pQuery->m_sOrderBy; } // add new m_dStmt.Add ( SqlStmt_t() ); m_pStmt = &m_dStmt.Last(); m_pQuery = &m_pStmt->m_tQuery; m_pQuery->m_eCollation = m_eCollation; m_bGotQuery = false; } bool SqlParser_c::AddOption ( const SqlNode_t& tIdent, const SqlNode_t& tValue ) { CSphString sOpt = tIdent.m_sValue; CSphString sVal = tValue.m_sValue; sOpt.ToLower (); sVal.ToLower (); if ( sOpt=="ranker" ) { if ( sVal=="proximity_bm25" ) m_pQuery->m_eRanker = SPH_RANK_PROXIMITY_BM25; else if ( sVal=="bm25" ) m_pQuery->m_eRanker = SPH_RANK_BM25; else if ( sVal=="none" ) m_pQuery->m_eRanker = SPH_RANK_NONE; else if ( sVal=="wordcount" ) m_pQuery->m_eRanker = SPH_RANK_WORDCOUNT; else if ( sVal=="proximity" ) m_pQuery->m_eRanker = SPH_RANK_PROXIMITY; else if ( sVal=="matchany" ) m_pQuery->m_eRanker = SPH_RANK_MATCHANY; else if ( sVal=="fieldmask" ) m_pQuery->m_eRanker = SPH_RANK_FIELDMASK; else if ( sVal=="sph04" ) m_pQuery->m_eRanker = SPH_RANK_SPH04; else if ( sVal=="expr" ) { m_pParseError->SetSprintf ( "missing ranker expression (use OPTION ranker=expr('1+2') for example)" ); return false; } else { m_pParseError->SetSprintf ( "unknown ranker '%s'", sVal.cstr() ); return false; } } else if ( sOpt=="max_matches" ) { m_pQuery->m_iMaxMatches = (int)tValue.m_iValue; } else if ( sOpt=="cutoff" ) { m_pQuery->m_iCutoff = (int)tValue.m_iValue; } else if ( sOpt=="max_query_time" ) { m_pQuery->m_uMaxQueryMsec = (int)tValue.m_iValue; } else if ( sOpt=="retry_count" ) { m_pQuery->m_iRetryCount = (int)tValue.m_iValue; } else if ( sOpt=="retry_delay" ) { m_pQuery->m_iRetryDelay = (int)tValue.m_iValue; } else if ( sOpt=="reverse_scan" ) { m_pQuery->m_bReverseScan = ( tValue.m_iValue!=0 ); } else if ( sOpt=="comment" ) { m_pQuery->m_sComment = tValue.m_sValue; } else { m_pParseError->SetSprintf ( "unknown option '%s' (or bad argument type)", tIdent.m_sValue.cstr() ); return false; } return true; } bool SqlParser_c::AddOption ( const SqlNode_t & tIdent, const SqlNode_t & tValue, const CSphString & sArg ) { CSphString sOpt = tIdent.m_sValue; CSphString sVal = tValue.m_sValue; sOpt.ToLower (); sVal.ToLower (); if ( sOpt=="ranker" && sVal=="expr" ) { m_pQuery->m_eRanker = SPH_RANK_EXPR; m_pQuery->m_sRankerExpr = sArg; return true; } else { m_pParseError->SetSprintf ( "unknown option or extra argument to '%s=%s'", tIdent.m_sValue.cstr(), tValue.m_sValue.cstr() ); return false; } } bool SqlParser_c::AddOption ( const SqlNode_t & tIdent, CSphVector & dNamed ) { CSphString sOpt = tIdent.m_sValue; sOpt.ToLower (); if ( sOpt=="field_weights" ) { 
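// field_weights and index_weights arrive from the grammar as a named integer list and are swapped
// straight into the query object to avoid a copy; for illustration only (index and column names
// are made up), a statement such as
//   SELECT id FROM test1 WHERE MATCH('query') OPTION ranker=proximity_bm25, field_weights=(title=10, body=1);
// reaches this point with dNamed holding { "title"=10, "body"=1 }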
m_pQuery->m_dFieldWeights.SwapData ( dNamed ); } else if ( sOpt=="index_weights" ) { m_pQuery->m_dIndexWeights.SwapData ( dNamed ); } else { m_pParseError->SetSprintf ( "unknown option '%s' (or bad argument type)", tIdent.m_sValue.cstr() ); return false; } return true; } void SqlParser_c::AliasLastItem ( SqlNode_t * pAlias ) { if ( pAlias ) { CSphQueryItem & tItem = m_pQuery->m_dItems.Last(); tItem.m_sAlias.SetBinary ( m_pBuf + pAlias->m_iStart, pAlias->m_iEnd - pAlias->m_iStart ); tItem.m_sAlias.ToLower(); SetSelect ( pAlias ); } } void SqlParser_c::AutoAlias ( CSphQueryItem & tItem, SqlNode_t * pStart, SqlNode_t * pEnd ) { if ( pStart && pEnd ) { tItem.m_sAlias.SetBinary ( m_pBuf + pStart->m_iStart, pEnd->m_iEnd - pStart->m_iStart ); tItem.m_sAlias.ToLower(); } else tItem.m_sAlias = tItem.m_sExpr; SetSelect ( pStart, pEnd ); } void SqlParser_c::AddItem ( SqlNode_t * pExpr, ESphAggrFunc eAggrFunc, SqlNode_t * pStart, SqlNode_t * pEnd ) { CSphQueryItem & tItem = m_pQuery->m_dItems.Add(); tItem.m_sExpr.SetBinary ( m_pBuf + pExpr->m_iStart, pExpr->m_iEnd - pExpr->m_iStart ); tItem.m_sExpr.ToLower(); tItem.m_eAggrFunc = eAggrFunc; AutoAlias ( tItem, pStart?pStart:pExpr, pEnd?pEnd:pExpr ); } bool SqlParser_c::AddItem ( const char * pToken, SqlNode_t * pStart, SqlNode_t * pEnd ) { CSphQueryItem & tItem = m_pQuery->m_dItems.Add(); tItem.m_sExpr = pToken; tItem.m_eAggrFunc = SPH_AGGR_NONE; tItem.m_sExpr.ToLower(); AutoAlias ( tItem, pStart, pEnd ); return SetNewSyntax(); } bool SqlParser_c::AddDistinct ( SqlNode_t * pNewExpr, SqlNode_t * pStart, SqlNode_t * pEnd ) { if ( !m_pQuery->m_sGroupDistinct.IsEmpty() ) { yyerror ( this, "too many COUNT(DISTINCT) clauses" ); return false; } m_pQuery->m_sGroupDistinct = pNewExpr->m_sValue; return AddItem ( "@distinct", pStart, pEnd ); } bool SqlParser_c::AddSchemaItem ( YYSTYPE * pNode ) { assert ( m_pStmt ); CSphString sItem; sItem.SetBinary ( m_pBuf + pNode->m_iStart, pNode->m_iEnd - pNode->m_iStart ); return m_pStmt->AddSchemaItem ( sItem.cstr() ); } bool SqlParser_c::SetMatch ( const YYSTYPE& tValue ) { if ( m_bGotQuery ) { yyerror ( this, "too many MATCH() clauses" ); return false; }; m_pQuery->m_sQuery = tValue.m_sValue; m_pQuery->m_sRawQuery = tValue.m_sValue; return m_bGotQuery = true; } void SqlParser_c::AddConst ( int iList, const YYSTYPE& tValue ) { CSphVector & dVec = GetNamedVec ( iList ); dVec.Add(); dVec.Last().m_sName = tValue.m_sValue; dVec.Last().m_sName.ToLower(); dVec.Last().m_iValue = (int) tValue.m_iValue; } void SqlParser_c::SetStatement ( const YYSTYPE& tName, SqlSet_e eSet ) { m_pStmt->m_eStmt = STMT_SET; m_pStmt->m_eSet = eSet; m_pStmt->m_sSetName = tName.m_sValue; } bool SqlParser_c::UpdateStatement ( SqlNode_t * pNode ) { m_pStmt->m_eStmt = STMT_UPDATE; m_pStmt->m_iListStart = pNode->m_iStart; m_pStmt->m_iListEnd = pNode->m_iEnd; m_pStmt->m_sIndex.SetBinary ( m_pBuf + pNode->m_iStart, pNode->m_iEnd - pNode->m_iStart ); m_pStmt->m_tUpdate.m_dRowOffset.Add ( 0 ); return true; } void SqlParser_c::AddUpdatedAttr ( const CSphString& sName, ESphAttr eType ) { CSphAttrUpdate & tUpd = m_pStmt->m_tUpdate; CSphColumnInfo & tAttr = tUpd.m_dAttrs.Add(); tAttr.m_sName = sName; tAttr.m_sName.ToLower(); tAttr.m_eAttrType = eType; // sorry, ints only for now, riding on legacy shit! 
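// The parser above never copies tokens eagerly: SqlNode_t carries byte offsets
// (m_iStart/m_iEnd) into the raw query buffer, and strings such as item aliases
// or the UPDATE index name are cut out later with SetBinary(). A rough standalone
// sketch of that idea, assuming std::string in place of CSphString; TokenSpan_t
// and SliceToken are invented names for this example.
#if 0
#include <string>
#include <algorithm>
#include <cctype>

struct TokenSpan_t
{
	int m_iStart;	// byte offset of the first token character
	int m_iEnd;		// byte offset one past the last token character
};

static std::string SliceToken ( const char * pBuf, const TokenSpan_t & tSpan, bool bLowercase )
{
	std::string sOut ( pBuf + tSpan.m_iStart, pBuf + tSpan.m_iEnd );
	if ( bLowercase )
		std::transform ( sOut.begin(), sOut.end(), sOut.begin(),
			[] ( unsigned char c ) { return (char) std::tolower ( c ); } );
	return sOut;
}

// e.g. in "SELECT id, weight() w FROM rt", the alias node spans just "w", so
// SliceToken ( pQuery, tAliasSpan, true ) yields "w" without re-tokenizing.
#endif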
}

void SqlParser_c::UpdateAttr ( const CSphString& sName, const SqlNode_t * pValue, ESphAttr eType )
{
	assert ( eType==SPH_ATTR_FLOAT || eType==SPH_ATTR_INTEGER || eType==SPH_ATTR_BIGINT );
	if ( eType==SPH_ATTR_FLOAT )
	{
		m_pStmt->m_tUpdate.m_dPool.Add ( *(const DWORD*)( &pValue->m_fValue ) );

	} else if ( eType==SPH_ATTR_INTEGER || eType==SPH_ATTR_BIGINT )
	{
		m_pStmt->m_tUpdate.m_dPool.Add ( (DWORD) pValue->m_iValue );
		DWORD uHi = (DWORD) ( pValue->m_iValue>>32 );
		if ( uHi )
		{
			m_pStmt->m_tUpdate.m_dPool.Add ( uHi );
			eType = SPH_ATTR_BIGINT;
		}
	}
	AddUpdatedAttr ( sName, eType );
}

void SqlParser_c::UpdateMVAAttr ( const CSphString & sName, const SqlNode_t & dValues )
{
	CSphAttrUpdate & tUpd = m_pStmt->m_tUpdate;
	ESphAttr eType = SPH_ATTR_UINT32SET;

	if ( dValues.m_pValues.Ptr() && dValues.m_pValues->GetLength()>0 )
	{
		// got MVA values, let's process them
		dValues.m_pValues->Uniq(); // don't need dupes within MVA
		tUpd.m_dPool.Add ( dValues.m_pValues->GetLength()*2 );
		SphAttr_t * pVal = dValues.m_pValues.Ptr()->Begin();
		SphAttr_t * pValMax = pVal + dValues.m_pValues->GetLength();
		for ( ; pVal<pValMax; pVal++ )
		{
			SphAttr_t uVal = *pVal;
			if ( uVal>UINT_MAX )
			{
				eType = SPH_ATTR_UINT64SET;
			}
			tUpd.m_dPool.Add ( (DWORD)uVal );
			tUpd.m_dPool.Add ( (DWORD)( uVal>>32 ) );
		}
	} else
	{
		// no values, means we should delete the attribute
		// we signal that to the update code by putting a single zero
		// to the values pool (meaning a zero-length MVA values list)
		tUpd.m_dPool.Add ( 0 );
	}

	AddUpdatedAttr ( sName, eType );
}

CSphFilterSettings * SqlParser_c::AddFilter ( const CSphString & sCol, ESphFilter eType )
{
	if ( sCol=="@weight" || sCol=="@count" || sCol=="count(*)" || sCol=="weight()" )
	{
		yyerror ( this, "Aggregates in 'where' clause prohibited" );
		return NULL;
	}
	CSphFilterSettings * pFilter = &m_pQuery->m_dFilters.Add();
	pFilter->m_sAttrName = sCol;
	pFilter->m_eType = eType;
	pFilter->m_sAttrName.ToLower();
	return pFilter;
}

bool SqlParser_c::AddFloatRangeFilter ( const CSphString & sAttr, float fMin, float fMax )
{
	CSphFilterSettings * pFilter = AddFilter ( sAttr, SPH_FILTER_FLOATRANGE );
	if ( !pFilter )
		return false;
	pFilter->m_fMinValue = fMin;
	pFilter->m_fMaxValue = fMax;
	return true;
}

bool SqlParser_c::AddUintRangeFilter ( const CSphString & sAttr, DWORD uMin, DWORD uMax )
{
	CSphFilterSettings * pFilter = AddFilter ( sAttr, SPH_FILTER_RANGE );
	if ( !pFilter )
		return false;
	pFilter->m_uMinValue = uMin;
	pFilter->m_uMaxValue = uMax;
	return true;
}

bool SqlParser_c::AddUservarFilter ( const CSphString & sCol, const CSphString & sVar, bool bExclude )
{
	CSphScopedLock<CSphMutex> tLock ( g_tUservarsMutex );
	Uservar_t * pVar = g_hUservars ( sVar );
	if ( !pVar )
	{
		yyerror ( this, "undefined global variable in IN clause" );
		return false;
	}
	assert ( pVar->m_eType==USERVAR_INT_SET );
	CSphFilterSettings * pFilter = AddFilter ( sCol, SPH_FILTER_VALUES );
	if ( !pFilter )
		return false;
	pFilter->m_bExclude = bExclude;

	// tricky black magic
	// we want to avoid copying the data, hence external values in the filter
	// we need to guarantee the data (uservar value) lifetime, then
	// suddenly, enter mutex-protected refcounted value objects
	// suddenly, we need to track those values in the statement object, too
	assert ( pVar->m_pVal );
	CSphRefcountedPtr<UservarIntSet_c> & tRef = m_pStmt->m_dRefs.Add();
	tRef = pVar->m_pVal; // take over semantics, and thus NO (!)
automatic addref pVar->m_pVal->AddRef(); // so do that addref manually pFilter->SetExternalValues ( pVar->m_pVal->Begin(), pVar->m_pVal->GetLength() ); return true; } bool SqlParser_c::IsGoodSyntax () { if ( ( m_uSyntaxFlags & 3 )!=3 ) return true; yyerror ( this, "Mixing the old-fashion internal vars (@id, @count, @weight) with new acronyms like count(*), weight() is prohibited" ); return false; } int SqlParser_c::AllocNamedVec () { // we only allow one such vector at a time, right now assert ( !m_bNamedVecBusy ); m_bNamedVecBusy = true; m_dNamedVec.Resize ( 0 ); return 0; } #ifndef NDEBUG CSphVector & SqlParser_c::GetNamedVec ( int iIndex ) #else CSphVector & SqlParser_c::GetNamedVec ( int ) #endif { assert ( m_bNamedVecBusy && iIndex==0 ); return m_dNamedVec; } #ifndef NDEBUG void SqlParser_c::FreeNamedVec ( int iIndex ) #else void SqlParser_c::FreeNamedVec ( int ) #endif { assert ( m_bNamedVecBusy && iIndex==0 ); m_bNamedVecBusy = false; m_dNamedVec.Resize ( 0 ); } bool ParseSqlQuery ( const CSphString & sQuery, CSphVector & dStmt, CSphString & sError, ESphCollation eCollation ) { SqlParser_c tParser ( dStmt, eCollation ); tParser.m_pBuf = sQuery.cstr(); tParser.m_pLastTokenStart = NULL; tParser.m_pParseError = &sError; tParser.m_eCollation = eCollation; int iLen = strlen ( sQuery.cstr() ); char * sEnd = (char*)sQuery.cstr() + iLen; sEnd[0] = 0; // prepare for yy_scan_buffer sEnd[1] = 0; // this is ok because string allocates a small gap yylex_init ( &tParser.m_pScanner ); YY_BUFFER_STATE tLexerBuffer = yy_scan_buffer ( (char*)sQuery.cstr(), iLen+2, tParser.m_pScanner ); if ( !tLexerBuffer ) { sError = "internal error: yy_scan_buffer() failed"; return false; } int iRes = yyparse ( &tParser ); yy_delete_buffer ( tLexerBuffer, tParser.m_pScanner ); yylex_destroy ( tParser.m_pScanner ); dStmt.Pop(); // last query is always dummy ARRAY_FOREACH ( i, dStmt ) { CSphQuery & tQuery = dStmt[i].m_tQuery; if ( tQuery.m_iSQLSelectStart>=0 ) { tQuery.m_sSelect.SetBinary ( tParser.m_pBuf + tQuery.m_iSQLSelectStart, tQuery.m_iSQLSelectEnd - tQuery.m_iSQLSelectStart ); } } if ( iRes!=0 || !dStmt.GetLength() ) return false; if ( tParser.IsDeprecatedSyntax() ) sError = "Using the old-fashion @variables (@count, @weight, etc.) 
is deprecated"; return true; } ///////////////////////////////////////////////////////////////////////////// int sphGetPassageBoundary ( const CSphString & sPassageBoundaryMode ) { if ( sPassageBoundaryMode.IsEmpty() ) return 0; int iMode = 0; if ( sPassageBoundaryMode=="sentence" ) iMode = MAGIC_CODE_SENTENCE; else if ( sPassageBoundaryMode=="paragraph" ) iMode = MAGIC_CODE_PARAGRAPH; else if ( sPassageBoundaryMode=="zone" ) iMode = MAGIC_CODE_ZONE; return iMode; } bool sphCheckOptionsSPZ ( const ExcerptQuery_t & q, const CSphString & sPassageBoundaryMode, CSphString & sError ) { if ( q.m_iPassageBoundary ) { if ( q.m_iAround==0 ) { sError.SetSprintf ( "invalid combination of passage_boundary=%s and around=%d", sPassageBoundaryMode.cstr(), q.m_iAround ); return false; } else if ( q.m_bUseBoundaries ) { sError.SetSprintf ( "invalid combination of passage_boundary=%s and use_boundaries", sPassageBoundaryMode.cstr() ); return false; } } if ( q.m_bEmitZones ) { if ( q.m_iPassageBoundary!=MAGIC_CODE_ZONE ) { sError.SetSprintf ( "invalid combination of passage_boundary=%s and emit_zones", sPassageBoundaryMode.cstr() ); return false; } if ( !( q.m_sStripMode=="strip" || q.m_sStripMode=="index" ) ) { sError.SetSprintf ( "invalid combination of strip=%s and emit_zones", q.m_sStripMode.cstr() ); return false; } } return true; } ///////////////////////////////////////////////////////////////////////////// // EXCERPTS HANDLER ///////////////////////////////////////////////////////////////////////////// enum eExcerpt_Flags { EXCERPT_FLAG_REMOVESPACES = 1, EXCERPT_FLAG_EXACTPHRASE = 2, EXCERPT_FLAG_SINGLEPASSAGE = 4, EXCERPT_FLAG_USEBOUNDARIES = 8, EXCERPT_FLAG_WEIGHTORDER = 16, EXCERPT_FLAG_QUERY = 32, EXCERPT_FLAG_FORCE_ALL_WORDS = 64, EXCERPT_FLAG_LOAD_FILES = 128, EXCERPT_FLAG_ALLOW_EMPTY = 256, EXCERPT_FLAG_EMIT_ZONES = 512, EXCERPT_FLAG_FILES_SCATTERED = 1024 }; enum { PROCESSED_ITEM = -2, EOF_ITEM = -1 }; struct SnippetWorker_t { int64_t m_iTotal; int m_iHead; bool m_bLocal; SnippetWorker_t() : m_iTotal ( 0 ) , m_iHead ( EOF_ITEM ) , m_bLocal ( false ) {} }; struct SnippetsRemote_t : ISphNoncopyable { CSphVector m_dAgents; CSphVector m_dWorkers; CSphVector & m_dQueries; int m_iAgentConnectTimeout; int m_iAgentQueryTimeout; explicit SnippetsRemote_t ( CSphVector & dQueries ) : m_dQueries ( dQueries ) , m_iAgentConnectTimeout ( 0 ) , m_iAgentQueryTimeout ( 0 ) {} }; struct SnippetThread_t { SphThread_t m_tThd; CSphMutex * m_pLock; int m_iQueries; ExcerptQuery_t * m_pQueries; volatile int * m_pCurQuery; CSphIndex * m_pIndex; CrashQuery_t m_tCrashQuery; SnippetThread_t() : m_pLock ( NULL ) , m_iQueries ( 0 ) , m_pQueries ( NULL ) , m_pCurQuery ( NULL ) , m_pIndex ( NULL ) {} }; struct SnippetRequestBuilder_t : public IRequestBuilder_t { explicit SnippetRequestBuilder_t ( const SnippetsRemote_t * pWorker ) : m_pWorker ( pWorker ) , m_iLastAgent ( -1 ) , m_iLastWorker ( -1 ) , m_iNumDocs ( -1 ) , m_iReqLen ( -1 ) , m_bScattered ( false ) {} virtual void BuildRequest ( const char * sIndexes, NetOutputBuffer_c & tOut, int iNumAgent ) const; private: const SnippetsRemote_t * m_pWorker; mutable int m_iLastAgent; ///< just a helper to optimize consequental linear search mutable int m_iLastWorker; ///< just a helper to optimize consequental linear search mutable int m_iNumDocs; ///< optimize numdocs/length calculation in scattered case mutable int m_iReqLen; mutable bool m_bScattered; }; struct SnippetReplyParser_t : public IReplyParser_t { explicit SnippetReplyParser_t ( SnippetsRemote_t * pWorker ) : 
m_pWorker ( pWorker ) , m_iLastAgent ( -1 ) , m_iLastWorker ( -1 ) {} virtual bool ParseReply ( MemInputBuffer_c & tReq, AgentConn_t &, int iNumAgent ) const; private: SnippetsRemote_t * m_pWorker; mutable int m_iLastAgent; ///< just a helper to optimize consequental linear search mutable int m_iLastWorker; ///< just a helper to optimize consequental linear search }; static int SnippetGetCurrentWorker ( const int iNumAgent, const int m_iLastAgent, const int m_iLastWorker, const SnippetsRemote_t * m_pWorker ) { int iCurrentWorker = 0; if ( iNumAgent==m_iLastAgent+1 ) { for ( int i=m_iLastWorker+1; im_dWorkers.GetLength(); i++ ) if ( !m_pWorker->m_dWorkers[i].m_bLocal ) { iCurrentWorker = i; break; } } else { int j = iNumAgent; ARRAY_FOREACH ( i, m_pWorker->m_dWorkers ) if ( !m_pWorker->m_dWorkers[i].m_bLocal ) { if ( j-- ) continue; iCurrentWorker = i; break; } } return iCurrentWorker; } void SnippetRequestBuilder_t::BuildRequest ( const char * sIndex, NetOutputBuffer_c & tOut, int iNumAgent ) const { m_iLastWorker = SnippetGetCurrentWorker ( iNumAgent, m_iLastAgent, m_iLastWorker, m_pWorker ); m_iLastAgent = iNumAgent; const CSphVector & dQueries = m_pWorker->m_dQueries; const ExcerptQuery_t & q = dQueries[0]; const SnippetWorker_t & tWorker = m_pWorker->m_dWorkers[m_iLastWorker]; if ( m_iNumDocs < 0 ) m_bScattered = ( q.m_iLoadFiles & 2 )!=0; if ( !m_bScattered || ( m_bScattered && m_iNumDocs<0 ) ) { m_iReqLen = 60 // 15 ints/dwords - params, strlens, etc. + strlen ( sIndex ) + q.m_sWords.Length() + q.m_sBeforeMatch.Length() + q.m_sAfterMatch.Length() + q.m_sChunkSeparator.Length() + q.m_sStripMode.Length() + q.m_sRawPassageBoundary.Length(); m_iNumDocs = 0; for ( int iDoc = tWorker.m_iHead; iDoc!=EOF_ITEM; iDoc=dQueries[iDoc].m_iNext ) { ++m_iNumDocs; m_iReqLen += 4 + dQueries[iDoc].m_sSource.Length(); } } tOut.SendDword ( SPHINX_SEARCHD_PROTO ); tOut.SendWord ( SEARCHD_COMMAND_EXCERPT ); tOut.SendWord ( VER_COMMAND_EXCERPT ); tOut.SendInt ( m_iReqLen ); tOut.SendInt ( 0 ); if ( m_bScattered ) tOut.SendInt ( q.m_iRawFlags & ~EXCERPT_FLAG_LOAD_FILES ); tOut.SendString ( sIndex ); tOut.SendString ( q.m_sWords.cstr() ); tOut.SendString ( q.m_sBeforeMatch.cstr() ); tOut.SendString ( q.m_sAfterMatch.cstr() ); tOut.SendString ( q.m_sChunkSeparator.cstr() ); tOut.SendInt ( q.m_iLimit ); tOut.SendInt ( q.m_iAround ); tOut.SendInt ( q.m_iLimitPassages ); tOut.SendInt ( q.m_iLimitWords ); tOut.SendInt ( q.m_iPassageId ); tOut.SendString ( q.m_sStripMode.cstr() ); tOut.SendString ( q.m_sRawPassageBoundary.cstr() ); tOut.SendInt ( m_iNumDocs ); for ( int iDoc = tWorker.m_iHead; iDoc!=EOF_ITEM; iDoc=dQueries[iDoc].m_iNext ) tOut.SendString ( dQueries[iDoc].m_sSource.cstr() ); } bool SnippetReplyParser_t::ParseReply ( MemInputBuffer_c & tReq, AgentConn_t &, int iNumAgent ) const { m_iLastWorker = SnippetGetCurrentWorker ( iNumAgent, m_iLastAgent, m_iLastWorker, m_pWorker ); m_iLastAgent = iNumAgent; CSphVector & dQueries = m_pWorker->m_dQueries; const SnippetWorker_t & tWorker = m_pWorker->m_dWorkers[m_iLastWorker]; int iDoc = tWorker.m_iHead; bool bOk = true; while ( iDoc!=EOF_ITEM ) { if ( ( dQueries[iDoc].m_iLoadFiles&2 )!=0 ) // NOLINT { char * sRes = tReq.GetString().Leak(); if ( sRes && !strlen(sRes) ) SafeDelete ( sRes ); if ( sRes ) { if ( dQueries[iDoc].m_sRes && strlen ( dQueries[iDoc].m_sRes )!=0 ) { if ( strcmp ( sRes, dQueries[iDoc].m_sRes )!=0 ) bOk = false; SafeDelete ( dQueries[iDoc].m_sRes ); } else dQueries[iDoc].m_sError = ""; dQueries[iDoc].m_sRes = sRes; } iDoc = 
dQueries[iDoc].m_iNext; continue; } dQueries[iDoc].m_sRes = tReq.GetString().Leak(); int iNextDoc = dQueries[iDoc].m_iNext; dQueries[iDoc].m_iNext = PROCESSED_ITEM; iDoc = iNextDoc; } return bOk; } static bool SnippetTransformPassageMacros ( CSphString & sSrc, CSphString & sPost ) { const char sPassageMacro[] = "%PASSAGE_ID%"; const char * sPass = NULL; if ( !sSrc.IsEmpty() ) sPass = strstr ( sSrc.cstr(), sPassageMacro ); if ( !sPass ) return false; int iSrcLen = sSrc.Length(); int iPassLen = sizeof ( sPassageMacro ) - 1; int iTailLen = iSrcLen - iPassLen - ( sPass - sSrc.cstr() ); // copy tail if ( iTailLen ) sPost.SetBinary ( sPass+iPassLen, iTailLen ); CSphString sPre; sPre.SetBinary ( sSrc.cstr(), sPass - sSrc.cstr() ); sSrc.Swap ( sPre ); return true; } static bool IsSPZEnabled ( const ExcerptQuery_t & q ) { return ( q.m_iPassageBoundary || ( q.m_sStripMode=="retain" && q.m_bHighlightQuery ) ); } static bool SetupStripperSPZ ( const CSphIndexSettings & tSettings, const ExcerptQuery_t & q, CSphScopedPtr & tStripper, ISphTokenizer * pTokenizer, CSphString & sError ) { bool bSetupSPZ = IsSPZEnabled ( q ); if ( bSetupSPZ && ( !pTokenizer->EnableSentenceIndexing ( sError ) || !pTokenizer->EnableZoneIndexing ( sError ) ) ) { return false; } if ( q.m_sStripMode=="strip" || q.m_sStripMode=="retain" || ( q.m_sStripMode=="index" && tSettings.m_bHtmlStrip ) ) { // don't strip HTML markup in 'retain' mode - proceed zones only tStripper = new CSphHTMLStripper ( q.m_sStripMode!="retain" ); if ( q.m_sStripMode=="index" ) { if ( !tStripper->SetIndexedAttrs ( tSettings.m_sHtmlIndexAttrs.cstr (), sError ) || !tStripper->SetRemovedElements ( tSettings.m_sHtmlRemoveElements.cstr (), sError ) ) { sError.SetSprintf ( "HTML stripper config error: %s", sError.cstr() ); return false; } } if ( bSetupSPZ ) { tStripper->EnableParagraphs(); } // handle zone(s) in special mode only when passage_boundary enabled if ( bSetupSPZ && !tStripper->SetZones ( tSettings.m_sZones.cstr (), sError ) ) { sError.SetSprintf ( "HTML stripper config error: %s", sError.cstr() ); return false; } } return true; } static CSphDict * SetupExactDict ( const CSphIndexSettings & tSettings, const ExcerptQuery_t & q, CSphScopedPtr & tExact, CSphDict * pDict, ISphTokenizer * pTokenizer ) { // handle index_exact_words if ( !( q.m_bHighlightQuery && tSettings.m_bIndexExactWords ) ) return pDict; CSphRemapRange tEq ( '=', '=', '=' ); // FIXME? 
check and warn if star was already there pTokenizer->AddCaseFolding ( tEq ); tExact = new CSphDictExact ( pDict ); return tExact.Ptr(); } class SnippetContext_t : ISphNoncopyable { private: CSphScopedPtr m_tDictCloned; CSphScopedPtr m_tExactDict; CSphScopedPtr m_tQueryTokenizer; public: CSphDict * m_pDict; CSphScopedPtr m_tTokenizer; CSphScopedPtr m_tStripper; ISphTokenizer * m_pQueryTokenizer; SnippetContext_t() : m_tDictCloned ( NULL ) , m_tExactDict ( NULL ) , m_tQueryTokenizer ( NULL ) , m_pDict ( NULL ) , m_tTokenizer ( NULL ) , m_tStripper ( NULL ) , m_pQueryTokenizer ( NULL ) { } bool Setup ( CSphIndex * pIndex, const ExcerptQuery_t & tQuery, CSphString & sError ) { CSphScopedPtr tDictCloned ( NULL ); m_pDict = pIndex->GetDictionary(); if ( m_pDict->HasState() ) { m_tDictCloned = m_pDict = m_pDict->Clone(); } m_tTokenizer = pIndex->GetTokenizer()->Clone ( true ); m_pQueryTokenizer = m_tTokenizer.Ptr(); if ( !SetupStripperSPZ ( pIndex->GetSettings(), tQuery, m_tStripper, m_tTokenizer.Ptr(), sError ) ) return false; if ( IsSPZEnabled ( tQuery ) ) { m_tQueryTokenizer = pIndex->GetTokenizer()->Clone ( true ); m_pQueryTokenizer = m_tQueryTokenizer.Ptr(); } //////////////////////////// // setup exact dictionary if needed //////////////////////////// m_pDict = SetupExactDict ( pIndex->GetSettings(), tQuery, m_tExactDict, m_pDict, m_tTokenizer.Ptr() ); return true; } }; void SnippetThreadFunc ( void * pArg ) { SnippetThread_t * pDesc = (SnippetThread_t*) pArg; // setup query guard for thread SphCrashLogger_c tQueryTLS; tQueryTLS.SetupTLS (); SphCrashLogger_c::SetLastQuery ( pDesc->m_tCrashQuery ); SnippetContext_t tCtx; tCtx.Setup ( pDesc->m_pIndex, *pDesc->m_pQueries, pDesc->m_pQueries->m_sError ); for ( ;; ) { pDesc->m_pLock->Lock(); if ( *pDesc->m_pCurQuery==pDesc->m_iQueries ) { pDesc->m_pLock->Unlock(); return; } ExcerptQuery_t * pQuery = pDesc->m_pQueries + (*pDesc->m_pCurQuery); (*pDesc->m_pCurQuery)++; bool bDone = ( *pDesc->m_pCurQuery==pDesc->m_iQueries ); pDesc->m_pLock->Unlock(); if ( pQuery->m_iNext!=PROCESSED_ITEM ) continue; pQuery->m_sRes = sphBuildExcerpt ( *pQuery, tCtx.m_pDict, tCtx.m_tTokenizer.Ptr(), &pDesc->m_pIndex->GetMatchSchema(), pDesc->m_pIndex, pQuery->m_sError, tCtx.m_tStripper.Ptr(), tCtx.m_pQueryTokenizer ); if ( bDone ) return; } } int GetRawSnippetFlags ( const ExcerptQuery_t& q ) { int iRawFlags = 0; iRawFlags |= q.m_bRemoveSpaces ? EXCERPT_FLAG_REMOVESPACES : 0; iRawFlags |= q.m_bUseBoundaries ? EXCERPT_FLAG_USEBOUNDARIES : 0; iRawFlags |= q.m_bWeightOrder ? EXCERPT_FLAG_WEIGHTORDER : 0; iRawFlags |= q.m_bHighlightQuery ? EXCERPT_FLAG_QUERY : 0; iRawFlags |= q.m_bForceAllWords ? EXCERPT_FLAG_FORCE_ALL_WORDS : 0; iRawFlags |= q.m_iLimitPassages ? EXCERPT_FLAG_SINGLEPASSAGE : 0; iRawFlags |= ( q.m_iLoadFiles & 1 ) ? EXCERPT_FLAG_LOAD_FILES : 0; iRawFlags |= ( q.m_iLoadFiles & 2 ) ? EXCERPT_FLAG_FILES_SCATTERED : 0; iRawFlags |= q.m_bAllowEmpty ? EXCERPT_FLAG_ALLOW_EMPTY : 0; iRawFlags |= q.m_bEmitZones ? EXCERPT_FLAG_EMIT_ZONES : 0; return iRawFlags; } bool MakeSnippets ( CSphString sIndex, CSphVector & dQueries, CSphString & sError ) { SnippetsRemote_t dRemoteSnippets ( dQueries ); CSphVector dDistLocal; ExcerptQuery_t & q = dQueries[0]; g_tDistLock.Lock(); DistributedIndex_t * pDist = g_hDistIndexes ( sIndex ); bool bRemote = pDist!=NULL; // hack! load_files && load_files_scattered is the 'final' call. It will report the absent files as errors. // simple load_files_scattered without load_files just omits the absent files (returns empty strings). 
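// The two file-loading bits of the wire-level flags word are folded into
// q.m_iLoadFiles: bit 0 ("load files", report missing sources) and bit 1
// ("scattered", silently skip missing sources unless bit 0 is also set).
// A tiny sketch of that encoding, reusing the EXCERPT_FLAG_* values defined
// earlier in this file; the helper name is invented for the example.
#if 0
static int LoadFilesModeFromFlags ( int iRawFlags )
{
	int iMode = 0;
	iMode |= ( iRawFlags & EXCERPT_FLAG_LOAD_FILES ) ? 1 : 0;		// 128 on the wire
	iMode |= ( iRawFlags & EXCERPT_FLAG_FILES_SCATTERED ) ? 2 : 0;	// 1024 on the wire
	return iMode;
}

// decoding then mirrors the two lines right below:
//   bScattered       = ( iMode & 2 )!=0
//   bSkipAbsentFiles = !( iMode & 1 )
#endif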
bool bScattered = ( q.m_iLoadFiles & 2 )!=0; bool bSkipAbsentFiles = !( q.m_iLoadFiles & 1 ); if ( bRemote ) { dRemoteSnippets.m_iAgentConnectTimeout = pDist->m_iAgentConnectTimeout; dRemoteSnippets.m_iAgentQueryTimeout = pDist->m_iAgentQueryTimeout; dDistLocal = pDist->m_dLocal; dRemoteSnippets.m_dAgents.Resize ( pDist->m_dAgents.GetLength() ); ARRAY_FOREACH ( i, pDist->m_dAgents ) dRemoteSnippets.m_dAgents[i] = pDist->m_dAgents[i]; } g_tDistLock.Unlock(); if ( pDist ) { if ( pDist->m_dLocal.GetLength()!=1 ) { sError.SetSprintf ( "%s", "The distributed index for snippets must have exactly one local agent" ); return false; } if ( !q.m_iLoadFiles ) { sError.SetSprintf ( "%s", "The distributed index for snippets available only when using external files" ); return false; } sIndex = dDistLocal[0]; // no remote - roll back to simple local query if ( dRemoteSnippets.m_dAgents.GetLength()==0 ) bRemote = false; } const ServedIndex_t * pServed = g_pIndexes->GetRlockedEntry ( sIndex ); if ( !pServed || !pServed->m_bEnabled || !pServed->m_pIndex ) { sError.SetSprintf ( "unknown local index '%s' in search request", sIndex.cstr() ); if ( pServed ) pServed->Unlock(); return false; } CSphIndex * pIndex = pServed->m_pIndex; SnippetContext_t tCtx; if ( !tCtx.Setup ( pIndex, q, sError ) ) // same path for single - threaded snippets, bail out here on error { sError.SetSprintf ( "%s", sError.cstr() ); pServed->Unlock(); return false; } /////////////////// // do highlighting /////////////////// bool bOk = true; int iAbsentHead = EOF_ITEM; if ( g_iDistThreads<=1 || dQueries.GetLength()<2 ) { // boring single threaded loop ARRAY_FOREACH ( i, dQueries ) { dQueries[i].m_sRes = sphBuildExcerpt ( dQueries[i], tCtx.m_pDict, tCtx.m_tTokenizer.Ptr(), &pIndex->GetMatchSchema(), pIndex, sError, tCtx.m_tStripper.Ptr(), tCtx.m_pQueryTokenizer ); if ( !dQueries[i].m_sRes ) { bOk = false; break; } } } else { // get file sizes ARRAY_FOREACH ( i, dQueries ) { dQueries[i].m_iNext = PROCESSED_ITEM; if ( dQueries[i].m_iLoadFiles ) { struct stat st; if ( ::stat ( dQueries[i].m_sSource.cstr(), &st )<0 ) { if ( !bScattered ) { sError.SetSprintf ( "failed to stat %s: %s", dQueries[i].m_sSource.cstr(), strerror(errno) ); pServed->Unlock(); return false; } dQueries[i].m_iNext = EOF_ITEM; } dQueries[i].m_iSize = -st.st_size; // so that sort would put bigger ones first } else { dQueries[i].m_iSize = -dQueries[i].m_sSource.Length(); } dQueries[i].m_iSeq = i; } // tough jobs first if ( !bScattered ) dQueries.Sort ( bind ( &ExcerptQuery_t::m_iSize ) ); ARRAY_FOREACH ( i, dQueries ) if ( dQueries[i].m_iNext==EOF_ITEM ) { dQueries[i].m_iNext = iAbsentHead; iAbsentHead = i; if ( !bSkipAbsentFiles ) dQueries[i].m_sError.SetSprintf ( "failed to stat %s: %s", dQueries[i].m_sSource.cstr(), strerror(errno) ); } // check if all files are available locally. if ( bScattered && iAbsentHead==EOF_ITEM ) { bRemote = false; dRemoteSnippets.m_dAgents.Reset(); } if ( bRemote ) { // schedule jobs across workers (the worker is either local thread or instance, either remote agent). // simple LPT (Least Processing Time) scheduling for now // might add dynamic programming or something later if needed int iLocalPart = 1; // one instance = one worker. Or set to = g_iDistThreads, one local thread = one worker. 
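// The comment above promises "simple LPT (Least Processing Time) scheduling":
// hand the biggest remaining job to whichever worker currently carries the
// smallest accumulated total. A compact standalone sketch of that greedy rule,
// using plain std::vector totals instead of the SnippetWorker_t linked lists;
// ScheduleLPT and its parameters are made up for this illustration.
#if 0
#include <vector>
#include <algorithm>
#include <cstdint>

static std::vector<int> ScheduleLPT ( const std::vector<int64_t> & dJobSizes, int iWorkers )
{
	std::vector<int64_t> dTotals ( iWorkers, 0 );
	std::vector<int> dAssignment ( dJobSizes.size() );

	// visit jobs from biggest to smallest ("tough jobs first", as above)
	std::vector<size_t> dOrder ( dJobSizes.size() );
	for ( size_t i=0; i<dOrder.size(); i++ )
		dOrder[i] = i;
	std::sort ( dOrder.begin(), dOrder.end(),
		[&] ( size_t a, size_t b ) { return dJobSizes[a]>dJobSizes[b]; } );

	for ( size_t iJob : dOrder )
	{
		int iBest = (int)( std::min_element ( dTotals.begin(), dTotals.end() ) - dTotals.begin() );
		dAssignment[iJob] = iBest;			// least-loaded worker gets the job
		dTotals[iBest] += dJobSizes[iJob];
	}
	return dAssignment;
}
#endif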
int iRemoteAgents = dRemoteSnippets.m_dAgents.GetLength(); dRemoteSnippets.m_dWorkers.Resize ( iLocalPart + iRemoteAgents ); for ( int i=0; i dThreads ( g_iDistThreads ); for ( int i=0; iUnlock(); return bOk; } void HandleCommandExcerpt ( int iSock, int iVer, InputBuffer_c & tReq ) { if ( !CheckCommandVersion ( iVer, VER_COMMAND_EXCERPT, tReq ) ) return; ///////////////////////////// // parse and process request ///////////////////////////// const int EXCERPT_MAX_ENTRIES = 1024; // v.1.1 ExcerptQuery_t q; tReq.GetInt (); // mode field is for now reserved and ignored int iFlags = tReq.GetInt (); q.m_iRawFlags = iFlags; CSphString sIndex = tReq.GetString (); q.m_sWords = tReq.GetString (); q.m_sBeforeMatch = tReq.GetString (); q.m_sAfterMatch = tReq.GetString (); q.m_sChunkSeparator = tReq.GetString (); q.m_iLimit = tReq.GetInt (); q.m_iAround = tReq.GetInt (); if ( iVer>=0x102 ) { q.m_iLimitPassages = tReq.GetInt(); q.m_iLimitWords = tReq.GetInt(); q.m_iPassageId = tReq.GetInt(); q.m_sStripMode = tReq.GetString(); if ( q.m_sStripMode!="none" && q.m_sStripMode!="index" && q.m_sStripMode!="strip" && q.m_sStripMode!="retain" ) { tReq.SendErrorReply ( "unknown html_strip_mode=%s", q.m_sStripMode.cstr() ); return; } } q.m_bHasBeforePassageMacro = SnippetTransformPassageMacros ( q.m_sBeforeMatch, q.m_sBeforeMatchPassage ); q.m_bHasAfterPassageMacro = SnippetTransformPassageMacros ( q.m_sAfterMatch, q.m_sAfterMatchPassage ); CSphString sPassageBoundaryMode; if ( iVer>=0x103 ) q.m_sRawPassageBoundary = tReq.GetString(); q.m_bRemoveSpaces = ( iFlags & EXCERPT_FLAG_REMOVESPACES )!=0; q.m_bExactPhrase = ( iFlags & EXCERPT_FLAG_EXACTPHRASE )!=0; q.m_bUseBoundaries = ( iFlags & EXCERPT_FLAG_USEBOUNDARIES )!=0; q.m_bWeightOrder = ( iFlags & EXCERPT_FLAG_WEIGHTORDER )!=0; q.m_bHighlightQuery = ( iFlags & EXCERPT_FLAG_QUERY )!=0; q.m_bForceAllWords = ( iFlags & EXCERPT_FLAG_FORCE_ALL_WORDS )!=0; if ( iFlags & EXCERPT_FLAG_SINGLEPASSAGE ) q.m_iLimitPassages = 1; q.m_iLoadFiles = (( iFlags & EXCERPT_FLAG_LOAD_FILES )!=0)?1:0; bool bScattered = ( iFlags & EXCERPT_FLAG_FILES_SCATTERED )!=0; q.m_iLoadFiles |= bScattered?2:0; q.m_bAllowEmpty = ( iFlags & EXCERPT_FLAG_ALLOW_EMPTY )!=0; q.m_bEmitZones = ( iFlags & EXCERPT_FLAG_EMIT_ZONES )!=0; int iCount = tReq.GetInt (); if ( iCount<=0 || iCount>EXCERPT_MAX_ENTRIES ) { tReq.SendErrorReply ( "invalid entries count %d", iCount ); return; } q.m_iPassageBoundary = sphGetPassageBoundary ( q.m_sRawPassageBoundary ); CSphString sError; if ( !sphCheckOptionsSPZ ( q, q.m_sRawPassageBoundary, sError ) ) { tReq.SendErrorReply ( "%s", sError.cstr() ); return; } CSphVector dQueries ( iCount ); ARRAY_FOREACH ( i, dQueries ) { dQueries[i] = q; // copy settings dQueries[i].m_sSource = tReq.GetString (); // fetch data if ( tReq.GetError() ) { tReq.SendErrorReply ( "invalid or truncated request" ); return; } } if ( !MakeSnippets ( sIndex, dQueries, sError ) ) { tReq.SendErrorReply ( "%s", sError.cstr() ); return; } //////////////// // serve result //////////////// int iRespLen = 0; ARRAY_FOREACH ( i, dQueries ) { // handle errors if ( !dQueries[i].m_sRes ) { if ( !bScattered ) { tReq.SendErrorReply ( "highlighting failed: %s", dQueries[i].m_sError.cstr() ); ARRAY_FOREACH ( j, dQueries ) SafeDeleteArray ( dQueries[j].m_sRes ); return; } iRespLen += 4; } else iRespLen += 4 + strlen ( dQueries[i].m_sRes ); } NetOutputBuffer_c tOut ( iSock ); tOut.SendWord ( SEARCHD_OK ); tOut.SendWord ( VER_COMMAND_EXCERPT ); tOut.SendInt ( iRespLen ); ARRAY_FOREACH ( i, dQueries ) { if ( 
dQueries[i].m_sRes ) { tOut.SendString ( dQueries[i].m_sRes ); SafeDeleteArray ( dQueries[i].m_sRes ); } else tOut.SendString ( "" ); } tOut.Flush (); assert ( tOut.GetError()==true || tOut.GetSentCount()==iRespLen+8 ); } ///////////////////////////////////////////////////////////////////////////// // KEYWORDS HANDLER ///////////////////////////////////////////////////////////////////////////// void HandleCommandKeywords ( int iSock, int iVer, InputBuffer_c & tReq ) { if ( !CheckCommandVersion ( iVer, VER_COMMAND_KEYWORDS, tReq ) ) return; CSphString sQuery = tReq.GetString (); CSphString sIndex = tReq.GetString (); bool bGetStats = !!tReq.GetInt (); const ServedIndex_t * pIndex = g_pIndexes->GetRlockedEntry ( sIndex ); if ( !pIndex ) { tReq.SendErrorReply ( "unknown local index '%s' in search request", sIndex.cstr() ); return; } CSphString sError; CSphVector < CSphKeywordInfo > dKeywords; if ( !pIndex->m_pIndex->GetKeywords ( dKeywords, sQuery.cstr (), bGetStats, sError ) ) { tReq.SendErrorReply ( "error generating keywords: %s", sError.cstr () ); pIndex->Unlock(); return; } pIndex->Unlock(); int iRespLen = 4; ARRAY_FOREACH ( i, dKeywords ) { iRespLen += 4 + strlen ( dKeywords[i].m_sTokenized.cstr () ); iRespLen += 4 + strlen ( dKeywords[i].m_sNormalized.cstr () ); if ( bGetStats ) iRespLen += 8; } NetOutputBuffer_c tOut ( iSock ); tOut.SendWord ( SEARCHD_OK ); tOut.SendWord ( VER_COMMAND_KEYWORDS ); tOut.SendInt ( iRespLen ); tOut.SendInt ( dKeywords.GetLength () ); ARRAY_FOREACH ( i, dKeywords ) { tOut.SendString ( dKeywords[i].m_sTokenized.cstr () ); tOut.SendString ( dKeywords[i].m_sNormalized.cstr () ); if ( bGetStats ) { tOut.SendInt ( dKeywords[i].m_iDocs ); tOut.SendInt ( dKeywords[i].m_iHits ); } } tOut.Flush (); assert ( tOut.GetError()==true || tOut.GetSentCount()==iRespLen+8 ); } ///////////////////////////////////////////////////////////////////////////// // UPDATES HANDLER ///////////////////////////////////////////////////////////////////////////// struct UpdateRequestBuilder_t : public IRequestBuilder_t { explicit UpdateRequestBuilder_t ( const CSphAttrUpdate & pUpd ) : m_tUpd ( pUpd ) {} virtual void BuildRequest ( const char * sIndexes, NetOutputBuffer_c & tOut, int ) const; protected: const CSphAttrUpdate & m_tUpd; }; struct UpdateReplyParser_t : public IReplyParser_t { explicit UpdateReplyParser_t ( int * pUpd ) : m_pUpdated ( pUpd ) {} virtual bool ParseReply ( MemInputBuffer_c & tReq, AgentConn_t &, int ) const { *m_pUpdated += tReq.GetDword (); return true; } protected: int * m_pUpdated; }; void UpdateRequestBuilder_t::BuildRequest ( const char * sIndexes, NetOutputBuffer_c & tOut, int ) const { int iReqSize = 4+strlen(sIndexes); // indexes string iReqSize += 4; // attrs array len, data ARRAY_FOREACH ( i, m_tUpd.m_dAttrs ) iReqSize += 8 + strlen ( m_tUpd.m_dAttrs[i].m_sName.cstr() ); iReqSize += 4; // number of updates iReqSize += 8*m_tUpd.m_dDocids.GetLength() + 4*m_tUpd.m_dPool.GetLength(); // 64bit ids, 32bit values // header tOut.SendDword ( SPHINX_SEARCHD_PROTO ); tOut.SendWord ( SEARCHD_COMMAND_UPDATE ); tOut.SendWord ( VER_COMMAND_UPDATE ); tOut.SendInt ( iReqSize ); tOut.SendString ( sIndexes ); tOut.SendInt ( m_tUpd.m_dAttrs.GetLength() ); ARRAY_FOREACH ( i, m_tUpd.m_dAttrs ) { tOut.SendString ( m_tUpd.m_dAttrs[i].m_sName.cstr() ); tOut.SendInt ( ( m_tUpd.m_dAttrs[i].m_eAttrType==SPH_ATTR_UINT32SET || m_tUpd.m_dAttrs[i].m_eAttrType==SPH_ATTR_UINT64SET ) ? 
1 : 0 ); } tOut.SendInt ( m_tUpd.m_dDocids.GetLength() ); ARRAY_FOREACH ( i, m_tUpd.m_dDocids ) { int iHead = m_tUpd.m_dRowOffset[i]; int iTail = ( (i+1)m_pIndex || !pServed->m_bEnabled ) { dFails.Submit ( sIndex, "index not available" ); return; } CSphString sError; int iUpd = pServed->m_pIndex->UpdateAttributes ( tUpd, -1, sError ); if ( iUpd<0 ) { dFails.Submit ( sIndex, sError.cstr() ); } else { iUpdated += iUpd; iSuccesses++; } } static const ServedIndex_t * UpdateGetLockedIndex ( const CSphString & sName, bool bMvaUpdate ) { const ServedIndex_t * pLocked = g_pIndexes->GetRlockedEntry ( sName ); if ( !pLocked ) return NULL; if ( !( bMvaUpdate && pLocked->m_bRT ) ) return pLocked; pLocked->Unlock(); return g_pIndexes->GetWlockedEntry ( sName ); } void HandleCommandUpdate ( int iSock, int iVer, InputBuffer_c & tReq ) { if ( !CheckCommandVersion ( iVer, VER_COMMAND_UPDATE, tReq ) ) return; // parse request CSphString sIndexes = tReq.GetString (); CSphAttrUpdate tUpd; CSphVector dMva; bool bMvaUpdate = false; tUpd.m_dAttrs.Resize ( tReq.GetDword() ); // FIXME! check this ARRAY_FOREACH ( i, tUpd.m_dAttrs ) { tUpd.m_dAttrs[i].m_sName = tReq.GetString (); tUpd.m_dAttrs[i].m_sName.ToLower (); tUpd.m_dAttrs[i].m_eAttrType = SPH_ATTR_INTEGER; if ( iVer>=0x102 ) { if ( tReq.GetDword() ) { tUpd.m_dAttrs[i].m_eAttrType = SPH_ATTR_UINT32SET; bMvaUpdate = true; } } } int iNumUpdates = tReq.GetInt (); // FIXME! check this tUpd.m_dDocids.Reserve ( iNumUpdates ); tUpd.m_dRowOffset.Reserve ( iNumUpdates ); for ( int i=0; i=0x101 ) ? tReq.GetUint64 () : tReq.GetDword (); tUpd.m_dDocids.Add ( (SphDocID_t)uDocid ); // FIXME! check this tUpd.m_dRowOffset.Add ( tUpd.m_dPool.GetLength() ); ARRAY_FOREACH ( iAttr, tUpd.m_dAttrs ) { if ( tUpd.m_dAttrs[iAttr].m_eAttrType==SPH_ATTR_UINT32SET ) { DWORD uCount = tReq.GetDword (); if ( !uCount ) { tUpd.m_dPool.Add ( 0 ); continue; } dMva.Resize ( uCount ); for ( DWORD j=0; j mva64 } } else { tUpd.m_dPool.Add ( tReq.GetDword() ); } } } if ( tReq.GetError() ) { tReq.SendErrorReply ( "invalid or truncated request" ); return; } // check index names CSphVector dIndexNames; ParseIndexList ( sIndexes, dIndexNames ); if ( !dIndexNames.GetLength() ) { tReq.SendErrorReply ( "no valid indexes in update request" ); return; } CSphVector dDistributed ( dIndexNames.GetLength() ); // lock safe storage for distributed indexes ARRAY_FOREACH ( i, dIndexNames ) { if ( !g_pIndexes->Exists ( dIndexNames[i] ) ) { // search amongst distributed and copy for further processing g_tDistLock.Lock(); const DistributedIndex_t * pDistIndex = g_hDistIndexes ( dIndexNames[i] ); if ( pDistIndex ) { dDistributed[i] = *pDistIndex; } g_tDistLock.Unlock(); if ( pDistIndex ) continue; else { tReq.SendErrorReply ( "unknown index '%s' in update request", dIndexNames[i].cstr() ); return; } } } // do update SearchFailuresLog_c dFails; int iSuccesses = 0; int iUpdated = 0; ARRAY_FOREACH ( iIdx, dIndexNames ) { const char * sReqIndex = dIndexNames[iIdx].cstr(); const ServedIndex_t * pLocked = UpdateGetLockedIndex ( sReqIndex, bMvaUpdate ); if ( pLocked ) { DoCommandUpdate ( sReqIndex, tUpd, iSuccesses, iUpdated, dFails, pLocked ); pLocked->Unlock(); } else { assert ( dDistributed[iIdx].m_dLocal.GetLength() || dDistributed[iIdx].m_dAgents.GetLength() ); CSphVector& dLocal = dDistributed[iIdx].m_dLocal; ARRAY_FOREACH ( i, dLocal ) { const char * sLocal = dLocal[i].cstr(); const ServedIndex_t * pServed = UpdateGetLockedIndex ( sLocal, bMvaUpdate ); DoCommandUpdate ( sLocal, tUpd, iSuccesses, iUpdated, dFails, 
pServed ); if ( pServed ) pServed->Unlock(); } } // update remote agents if ( dDistributed[iIdx].m_dAgents.GetLength() ) { DistributedIndex_t & tDist = dDistributed[iIdx]; CSphVector dAgents ( tDist.m_dAgents.GetLength() ); ARRAY_FOREACH ( i, dAgents ) dAgents[i] = tDist.m_dAgents[i]; // connect to remote agents and query them ConnectToRemoteAgents ( dAgents, false ); UpdateRequestBuilder_t tReqBuilder ( tUpd ); int iRemote = QueryRemoteAgents ( dAgents, tDist.m_iAgentConnectTimeout, tReqBuilder, NULL ); // FIXME? profile update time too? if ( iRemote ) { UpdateReplyParser_t tParser ( &iUpdated ); iSuccesses += WaitForRemoteAgents ( dAgents, tDist.m_iAgentQueryTimeout, tParser, NULL ); // FIXME? profile update time too? } } } // serve reply to client StrBuf_t sReport; dFails.BuildReport ( sReport ); if ( !iSuccesses ) { tReq.SendErrorReply ( "%s", sReport.cstr() ); return; } NetOutputBuffer_c tOut ( iSock ); if ( dFails.IsEmpty() ) { tOut.SendWord ( SEARCHD_OK ); tOut.SendWord ( VER_COMMAND_UPDATE ); tOut.SendInt ( 4 ); } else { tOut.SendWord ( SEARCHD_WARNING ); tOut.SendWord ( VER_COMMAND_UPDATE ); tOut.SendInt ( 8 + strlen ( sReport.cstr() ) ); tOut.SendString ( sReport.cstr() ); } tOut.SendInt ( iUpdated ); tOut.Flush (); } ////////////////////////////////////////////////////////////////////////// // STATUS HANDLER ////////////////////////////////////////////////////////////////////////// static inline void FormatMsec ( CSphString & sOut, int64_t tmTime ) { sOut.SetSprintf ( "%d.%03d", (int)( tmTime/1000000 ), (int)( (tmTime%1000000)/1000 ) ); } void BuildStatus ( CSphVector & dStatus ) { assert ( g_pStats ); const char * FMT64 = INT64_FMT; const char * OFF = "OFF"; const int64_t iQueriesDiv = Max ( g_pStats->m_iQueries, 1 ); const int64_t iDistQueriesDiv = Max ( g_pStats->m_iDistQueries, 1 ); // FIXME? non-transactional!!! 
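// FormatMsec() above turns a microsecond counter into a "seconds.milliseconds"
// string: divide by 1,000,000 for whole seconds, then take the remainder and
// divide by 1,000 for the zero-padded millisecond part. The same arithmetic
// with plain snprintf (standalone sketch, function name invented for the example):
#if 0
#include <cstdio>
#include <cstdint>

static void FormatMsecStd ( char * sOut, size_t iSize, int64_t tmTime )
{
	// 1234567 us -> "1.234"
	snprintf ( sOut, iSize, "%d.%03d", (int)( tmTime/1000000 ), (int)( ( tmTime%1000000 )/1000 ) );
}
#endif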
dStatus.Add ( "uptime" ); dStatus.Add().SetSprintf ( "%u", (DWORD)time(NULL)-g_pStats->m_uStarted ); dStatus.Add ( "connections" ); dStatus.Add().SetSprintf ( FMT64, g_pStats->m_iConnections ); dStatus.Add ( "maxed_out" ); dStatus.Add().SetSprintf ( FMT64, g_pStats->m_iMaxedOut ); dStatus.Add ( "command_search" ); dStatus.Add().SetSprintf ( FMT64, g_pStats->m_iCommandCount[SEARCHD_COMMAND_SEARCH] ); dStatus.Add ( "command_excerpt" ); dStatus.Add().SetSprintf ( FMT64, g_pStats->m_iCommandCount[SEARCHD_COMMAND_EXCERPT] ); dStatus.Add ( "command_update" ); dStatus.Add().SetSprintf ( FMT64, g_pStats->m_iCommandCount[SEARCHD_COMMAND_UPDATE] ); dStatus.Add ( "command_keywords" ); dStatus.Add().SetSprintf ( FMT64, g_pStats->m_iCommandCount[SEARCHD_COMMAND_KEYWORDS] ); dStatus.Add ( "command_persist" ); dStatus.Add().SetSprintf ( FMT64, g_pStats->m_iCommandCount[SEARCHD_COMMAND_PERSIST] ); dStatus.Add ( "command_status" ); dStatus.Add().SetSprintf ( FMT64, g_pStats->m_iCommandCount[SEARCHD_COMMAND_STATUS] ); dStatus.Add ( "command_flushattrs" ); dStatus.Add().SetSprintf ( FMT64, g_pStats->m_iCommandCount[SEARCHD_COMMAND_FLUSHATTRS] ); dStatus.Add ( "agent_connect" ); dStatus.Add().SetSprintf ( FMT64, g_pStats->m_iAgentConnect ); dStatus.Add ( "agent_retry" ); dStatus.Add().SetSprintf ( FMT64, g_pStats->m_iAgentRetry ); dStatus.Add ( "queries" ); dStatus.Add().SetSprintf ( FMT64, g_pStats->m_iQueries ); dStatus.Add ( "dist_queries" ); dStatus.Add().SetSprintf ( FMT64, g_pStats->m_iDistQueries ); g_tDistLock.Lock(); g_hDistIndexes.IterateStart(); while ( g_hDistIndexes.IterateNext() ) { const char * sIdx = g_hDistIndexes.IterateGetKey().cstr(); CSphVector & dAgents = g_hDistIndexes.IterateGet().m_dAgents; ARRAY_FOREACH ( i, dAgents ) { int iIndex = dAgents[i].m_iStatsIndex; if ( iIndex<0 || iIndex>=STATS_MAX_AGENTS ) continue; AgentStats_t & tStats = g_pStats->m_dAgentStats[iIndex]; dStatus.Add().SetSprintf ( "ag_%s_%d_query_timeouts", sIdx, i ); dStatus.Add().SetSprintf ( FMT64, tStats.m_iTimeoutsQuery ); dStatus.Add().SetSprintf ( "ag_%s_%d_connect_timeouts", sIdx, i ); dStatus.Add().SetSprintf ( FMT64, tStats.m_iTimeoutsConnect ); dStatus.Add().SetSprintf ( "ag_%s_%d_connect_failures", sIdx, i ); dStatus.Add().SetSprintf ( FMT64, tStats.m_iConnectFailures ); dStatus.Add().SetSprintf ( "ag_%s_%d_network_errors", sIdx, i ); dStatus.Add().SetSprintf ( FMT64, tStats.m_iNetworkErrors ); dStatus.Add().SetSprintf ( "ag_%s_%d_wrong_replies", sIdx, i ); dStatus.Add().SetSprintf ( FMT64, tStats.m_iWrongReplies ); dStatus.Add().SetSprintf ( "ag_%s_%d_unexpected_closings", sIdx, i ); dStatus.Add().SetSprintf ( FMT64, tStats.m_iUnexpectedClose ); } } g_tDistLock.Unlock(); dStatus.Add ( "query_wall" ); FormatMsec ( dStatus.Add(), g_pStats->m_iQueryTime ); dStatus.Add ( "query_cpu" ); if ( g_bCpuStats ) FormatMsec ( dStatus.Add(), g_pStats->m_iQueryCpuTime ); else dStatus.Add() = OFF; dStatus.Add ( "dist_wall" ); FormatMsec ( dStatus.Add(), g_pStats->m_iDistWallTime ); dStatus.Add ( "dist_local" ); FormatMsec ( dStatus.Add(), g_pStats->m_iDistLocalTime ); dStatus.Add ( "dist_wait" ); FormatMsec ( dStatus.Add(), g_pStats->m_iDistWaitTime ); if ( g_bIOStats ) { dStatus.Add ( "query_reads" ); dStatus.Add().SetSprintf ( FMT64, g_pStats->m_iDiskReads ); dStatus.Add ( "query_readkb" ); dStatus.Add().SetSprintf ( FMT64, g_pStats->m_iDiskReadBytes/1024 ); dStatus.Add ( "query_readtime" ); FormatMsec ( dStatus.Add(), g_pStats->m_iDiskReadTime ); } else { dStatus.Add ( "query_reads" ); dStatus.Add() = OFF; dStatus.Add ( 
"query_readkb" ); dStatus.Add() = OFF; dStatus.Add ( "query_readtime" ); dStatus.Add() = OFF; } dStatus.Add ( "avg_query_wall" ); FormatMsec ( dStatus.Add(), g_pStats->m_iQueryTime / iQueriesDiv ); dStatus.Add ( "avg_query_cpu" ); if ( g_bCpuStats ) FormatMsec ( dStatus.Add(), g_pStats->m_iQueryCpuTime / iQueriesDiv ); else dStatus.Add ( OFF ); dStatus.Add ( "avg_dist_wall" ); FormatMsec ( dStatus.Add(), g_pStats->m_iDistWallTime / iDistQueriesDiv ); dStatus.Add ( "avg_dist_local" ); FormatMsec ( dStatus.Add(), g_pStats->m_iDistLocalTime / iDistQueriesDiv ); dStatus.Add ( "avg_dist_wait" ); FormatMsec ( dStatus.Add(), g_pStats->m_iDistWaitTime / iDistQueriesDiv ); if ( g_bIOStats ) { dStatus.Add ( "avg_query_reads" ); dStatus.Add().SetSprintf ( "%.1f", (float)( g_pStats->m_iDiskReads*10/iQueriesDiv )/10.0f ); dStatus.Add ( "avg_query_readkb" ); dStatus.Add().SetSprintf ( "%.1f", (float)( g_pStats->m_iDiskReadBytes/iQueriesDiv )/1024.0f ); dStatus.Add ( "avg_query_readtime" ); FormatMsec ( dStatus.Add(), g_pStats->m_iDiskReadTime/iQueriesDiv ); } else { dStatus.Add ( "avg_query_reads" ); dStatus.Add() = OFF; dStatus.Add ( "avg_query_readkb" ); dStatus.Add() = OFF; dStatus.Add ( "avg_query_readtime" ); dStatus.Add() = OFF; } } void BuildMeta ( CSphVector & dStatus, const CSphQueryResultMeta & tMeta ) { if ( !tMeta.m_sError.IsEmpty() ) { dStatus.Add ( "error" ); dStatus.Add ( tMeta.m_sError ); } if ( !tMeta.m_sWarning.IsEmpty() ) { dStatus.Add ( "warning" ); dStatus.Add ( tMeta.m_sWarning ); } dStatus.Add ( "total" ); dStatus.Add().SetSprintf ( "%d", tMeta.m_iMatches ); dStatus.Add ( "total_found" ); dStatus.Add().SetSprintf ( INT64_FMT, tMeta.m_iTotalMatches ); dStatus.Add ( "time" ); dStatus.Add().SetSprintf ( "%d.%03d", tMeta.m_iQueryTime/1000, tMeta.m_iQueryTime%1000 ); int iWord = 0; tMeta.m_hWordStats.IterateStart(); while ( tMeta.m_hWordStats.IterateNext() ) { const CSphQueryResultMeta::WordStat_t & tStat = tMeta.m_hWordStats.IterateGet(); dStatus.Add().SetSprintf ( "keyword[%d]", iWord ); dStatus.Add ( tMeta.m_hWordStats.IterateGetKey() ); dStatus.Add().SetSprintf ( "docs[%d]", iWord ); dStatus.Add().SetSprintf ( INT64_FMT, tStat.m_iDocs ); dStatus.Add().SetSprintf ( "hits[%d]", iWord ); dStatus.Add().SetSprintf ( INT64_FMT, tStat.m_iHits ); iWord++; } } void HandleCommandStatus ( int iSock, int iVer, InputBuffer_c & tReq ) { if ( !CheckCommandVersion ( iVer, VER_COMMAND_STATUS, tReq ) ) return; if ( !g_pStats ) { tReq.SendErrorReply ( "performance counters disabled" ); return; } CSphVector dStatus; BuildStatus ( dStatus ); int iRespLen = 8; // int rows, int cols ARRAY_FOREACH ( i, dStatus ) iRespLen += 4 + strlen ( dStatus[i].cstr() ); NetOutputBuffer_c tOut ( iSock ); tOut.SendWord ( SEARCHD_OK ); tOut.SendWord ( VER_COMMAND_STATUS ); tOut.SendInt ( iRespLen ); tOut.SendInt ( dStatus.GetLength()/2 ); // rows tOut.SendInt ( 2 ); // cols ARRAY_FOREACH ( i, dStatus ) tOut.SendString ( dStatus[i].cstr() ); tOut.Flush (); assert ( tOut.GetError()==true || tOut.GetSentCount()==8+iRespLen ); } ////////////////////////////////////////////////////////////////////////// // FLUSH HANDLER ////////////////////////////////////////////////////////////////////////// void HandleCommandFlush ( int iSock, int iVer, InputBuffer_c & tReq ) { if ( !CheckCommandVersion ( iVer, VER_COMMAND_FLUSHATTRS, tReq ) ) return; // only if flushes are enabled if ( g_iAttrFlushPeriod<=0 ) { // flushes are disabled sphLogDebug ( "attrflush: attr_flush_period<=0, command ignored" ); } else if ( g_eWorkers==MPM_NONE ) { 
// --console mode, no async thread/process to handle the check sphLogDebug ( "attrflush: --console mode, command ignored" ); } else { // force a check in head process, and wait it until completes // FIXME! semi active wait.. sphLogDebug ( "attrflush: forcing check, tag=%d", g_pFlush->m_iFlushTag ); g_pFlush->m_bForceCheck = true; while ( g_pFlush->m_bForceCheck ) sphSleepMsec ( 1 ); // if we are flushing now, wait until flush completes while ( g_pFlush->m_bFlushing ) sphSleepMsec ( 10 ); sphLogDebug ( "attrflush: check finished, tag=%d", g_pFlush->m_iFlushTag ); } // return last flush tag, just for the fun of it NetOutputBuffer_c tOut ( iSock ); tOut.SendWord ( SEARCHD_OK ); tOut.SendWord ( VER_COMMAND_FLUSHATTRS ); tOut.SendInt ( 4 ); // resplen, 1 dword tOut.SendInt ( g_pFlush->m_iFlushTag ); tOut.Flush (); assert ( tOut.GetError()==true || tOut.GetSentCount()==12 ); // 8+resplen } ///////////////////////////////////////////////////////////////////////////// // GENERAL HANDLER ///////////////////////////////////////////////////////////////////////////// #define THD_STATE(_state) { if ( pThd ) pThd->m_eThdState = _state; } void HandleCommandSphinxql ( int iSock, int iVer, InputBuffer_c & tReq ); // definition is below void HandleClientSphinx ( int iSock, const char * sClientIP, ThdDesc_t * pThd ) { MEMORY ( SPH_MEM_HANDLE_NONSQL ); THD_STATE ( THD_HANDSHAKE ); bool bPersist = false; int iTimeout = g_iReadTimeout; // wait 5 sec until first command NetInputBuffer_c tBuf ( iSock ); // send my version DWORD uServer = htonl ( SPHINX_SEARCHD_PROTO ); if ( sphSockSend ( iSock, (char*)&uServer, sizeof(DWORD) )!=sizeof(DWORD) ) { sphWarning ( "failed to send server version (client=%s)", sClientIP ); return; } // get client version and request tBuf.ReadFrom ( 4 ); // FIXME! magic int iMagic = tBuf.GetInt (); // client version is for now unused sphLogDebugv ( "conn %s: got handshake, major v.%d, err %d", sClientIP, iMagic, (int)tBuf.GetError() ); if ( tBuf.GetError() ) { sphLogDebugv ( "conn %s: exiting on handshake error", sClientIP ); return; } int iPconnIdle = 0; do { // in "persistent connection" mode, we want interruptible waits // so that the worker child could be forcibly restarted // // currently, the only signal allowed to interrupt this read is SIGTERM // letting SIGHUP interrupt causes trouble under query/rotation pressure // see sphSockRead() and ReadFrom() for details THD_STATE ( THD_NET_READ ); bool bCommand = tBuf.ReadFrom ( 8, iTimeout, bPersist ); // on SIGTERM, bail unconditionally and immediately, at all times if ( !bCommand && g_bGotSigterm ) { sphLogDebugv ( "conn %s: bailing on SIGTERM", sClientIP ); break; } // on SIGHUP vs pconn, bail if a pconn was idle for 1 sec if ( bPersist && !bCommand && g_bGotSighup && sphSockPeekErrno()==ETIMEDOUT ) { sphLogDebugv ( "conn %s: bailing idle pconn on SIGHUP", sClientIP ); break; } // on pconn that was idle for 300 sec (client_timeout), bail if ( bPersist && !bCommand && sphSockPeekErrno()==ETIMEDOUT ) { iPconnIdle += iTimeout; if ( iPconnIdle>=g_iClientTimeout ) { sphLogDebugv ( "conn %s: bailing idle pconn on client_timeout", sClientIP ); break; } continue; } else iPconnIdle = 0; // on any other signals vs pconn, ignore and keep looping // (redundant for now, as the only allowed interruption is SIGTERM, but.. 
let's keep it) if ( bPersist && !bCommand && tBuf.IsIntr() ) continue; // okay, signal related mess should be over, try to parse the command // (but some other socket error still might had happened, so beware) int iCommand = tBuf.GetWord (); int iCommandVer = tBuf.GetWord (); int iLength = tBuf.GetInt (); if ( tBuf.GetError() ) { // under high load, there can be pretty frequent accept() vs connect() timeouts // lets avoid agent log flood // // sphWarning ( "failed to receive client version and request (client=%s, error=%s)", sClientIP, sphSockError() ); sphLogDebugv ( "conn %s: bailing on failed request header (sockerr=%s)", sClientIP, sphSockError() ); return; } // check request if ( iCommand<0 || iCommand>=SEARCHD_COMMAND_TOTAL || iLength<0 || iLength>g_iMaxPacketSize ) { // unknown command, default response header tBuf.SendErrorReply ( "invalid command (code=%d, len=%d)", iCommand, iLength ); // if request length is insane, low level comm is broken, so we bail out if ( iLength<0 || iLength>g_iMaxPacketSize ) sphWarning ( "ill-formed client request (length=%d out of bounds)", iLength ); // if command is insane, low level comm is broken, so we bail out if ( iCommand<0 || iCommand>=SEARCHD_COMMAND_TOTAL ) sphWarning ( "ill-formed client request (command=%d, SEARCHD_COMMAND_TOTAL=%d)", iCommand, SEARCHD_COMMAND_TOTAL ); return; } // count commands if ( g_pStats && iCommand>=0 && iCommandm_iCommandCount[iCommand]++; g_tStatsMutex.Unlock(); } // get request body assert ( iLength>=0 && iLength<=g_iMaxPacketSize ); if ( iLength && !tBuf.ReadFrom ( iLength ) ) { sphWarning ( "failed to receive client request body (client=%s, exp=%d, error='%s')", sClientIP, iLength, sphSockError() ); return; } // set on query guard CrashQuery_t tCrashQuery; tCrashQuery.m_pQuery = tBuf.GetBufferPtr(); tCrashQuery.m_iSize = iLength; tCrashQuery.m_bMySQL = false; tCrashQuery.m_uCMD = (WORD)iCommand; tCrashQuery.m_uVer = (WORD)iCommandVer; SphCrashLogger_c::SetLastQuery ( tCrashQuery ); // handle known commands assert ( iCommand>=0 && iCommandm_sCommand = g_dApiCommands[iCommand]; THD_STATE ( THD_QUERY ); sphLogDebugv ( "conn %s: got command %d, handling", sClientIP, iCommand ); switch ( iCommand ) { case SEARCHD_COMMAND_SEARCH: HandleCommandSearch ( iSock, iCommandVer, tBuf ); break; case SEARCHD_COMMAND_EXCERPT: HandleCommandExcerpt ( iSock, iCommandVer, tBuf ); break; case SEARCHD_COMMAND_KEYWORDS: HandleCommandKeywords ( iSock, iCommandVer, tBuf ); break; case SEARCHD_COMMAND_UPDATE: HandleCommandUpdate ( iSock, iCommandVer, tBuf ); break; case SEARCHD_COMMAND_PERSIST: bPersist = ( tBuf.GetInt()!=0 ); iTimeout = 1; sphLogDebugv ( "conn %s: pconn is now %s", sClientIP, bPersist ? 
"on" : "off" ); break; case SEARCHD_COMMAND_STATUS: HandleCommandStatus ( iSock, iCommandVer, tBuf ); break; case SEARCHD_COMMAND_FLUSHATTRS:HandleCommandFlush ( iSock, iCommandVer, tBuf ); break; case SEARCHD_COMMAND_SPHINXQL: HandleCommandSphinxql ( iSock, iCommandVer, tBuf ); break; default: assert ( 0 && "INTERNAL ERROR: unhandled command" ); break; } // set off query guard SphCrashLogger_c::SetLastQuery ( CrashQuery_t() ); } while ( bPersist ); sphLogDebugv ( "conn %s: exiting", sClientIP ); } ////////////////////////////////////////////////////////////////////////// // MYSQLD PRETENDER ////////////////////////////////////////////////////////////////////////// // our copy of enum_field_types // we can't rely on mysql_com.h because it might be unavailable // // MYSQL_TYPE_DECIMAL = 0 // MYSQL_TYPE_TINY = 1 // MYSQL_TYPE_SHORT = 2 // MYSQL_TYPE_LONG = 3 // MYSQL_TYPE_FLOAT = 4 // MYSQL_TYPE_DOUBLE = 5 // MYSQL_TYPE_NULL = 6 // MYSQL_TYPE_TIMESTAMP = 7 // MYSQL_TYPE_LONGLONG = 8 // MYSQL_TYPE_INT24 = 9 // MYSQL_TYPE_DATE = 10 // MYSQL_TYPE_TIME = 11 // MYSQL_TYPE_DATETIME = 12 // MYSQL_TYPE_YEAR = 13 // MYSQL_TYPE_NEWDATE = 14 // MYSQL_TYPE_VARCHAR = 15 // MYSQL_TYPE_BIT = 16 // MYSQL_TYPE_NEWDECIMAL = 246 // MYSQL_TYPE_ENUM = 247 // MYSQL_TYPE_SET = 248 // MYSQL_TYPE_TINY_BLOB = 249 // MYSQL_TYPE_MEDIUM_BLOB = 250 // MYSQL_TYPE_LONG_BLOB = 251 // MYSQL_TYPE_BLOB = 252 // MYSQL_TYPE_VAR_STRING = 253 // MYSQL_TYPE_STRING = 254 // MYSQL_TYPE_GEOMETRY = 255 enum MysqlColumnType_e { MYSQL_COL_DECIMAL = 0, MYSQL_COL_LONG = 3, MYSQL_COL_FLOAT = 4, MYSQL_COL_LONGLONG = 8, MYSQL_COL_STRING = 254 }; void SendMysqlFieldPacket ( NetOutputBuffer_c & tOut, BYTE uPacketID, const char * sCol, MysqlColumnType_e eType ) { const char * sDB = ""; const char * sTable = ""; int iLen = 17 + MysqlPackedLen(sDB) + 2*( MysqlPackedLen(sTable) + MysqlPackedLen(sCol) ); int iColLen = 0; switch ( eType ) { case MYSQL_COL_DECIMAL: iColLen = 20; break; case MYSQL_COL_LONG: iColLen = 11; break; case MYSQL_COL_FLOAT: iColLen = 20; break; case MYSQL_COL_LONGLONG: iColLen = 20; break; case MYSQL_COL_STRING: iColLen = 255; break; } tOut.SendLSBDword ( (uPacketID<<24) + iLen ); tOut.SendMysqlString ( "def" ); // catalog tOut.SendMysqlString ( sDB ); // db tOut.SendMysqlString ( sTable ); // table tOut.SendMysqlString ( sTable ); // org_table tOut.SendMysqlString ( sCol ); // name tOut.SendMysqlString ( sCol ); // org_name tOut.SendByte ( 12 ); // filler, must be 12 (following pseudo-string length) tOut.SendByte ( 8 ); // charset_nr, 8 is latin1 tOut.SendByte ( 0 ); // charset_nr tOut.SendLSBDword ( iColLen ); // length tOut.SendByte ( BYTE(eType) ); // type (0=decimal) tOut.SendWord ( 0 ); // flags tOut.SendByte ( 0 ); // decimals tOut.SendWord ( 0 ); // filler } // from mysqld_error.h enum MysqlErrors_e { MYSQL_ERR_UNKNOWN_COM_ERROR = 1047, MYSQL_ERR_SERVER_SHUTDOWN = 1053, MYSQL_ERR_PARSE_ERROR = 1064, MYSQL_ERR_FIELD_SPECIFIED_TWICE = 1110, MYSQL_ERR_NO_SUCH_TABLE = 1146 }; void SendMysqlErrorPacket ( NetOutputBuffer_c & tOut, BYTE uPacketID, const char * sStmt, const char * sError, MysqlErrors_e iErr=MYSQL_ERR_PARSE_ERROR ) { if ( sError==NULL ) sError = "(null)"; LogSphinxqlError ( sStmt, sError ); int iErrorLen = strlen(sError)+1; // including the trailing zero int iLen = 9 + iErrorLen; int iError = iErr; // pretend to be mysql syntax error for now // send packet header tOut.SendLSBDword ( (uPacketID<<24) + iLen ); tOut.SendByte ( 0xff ); // field count, always 0xff for error packet tOut.SendByte ( (BYTE)( iError & 
0xff ) ); tOut.SendByte ( (BYTE)( iError>>8 ) ); // send sqlstate (1 byte marker, 5 byte state) switch ( iErr ) { case MYSQL_ERR_SERVER_SHUTDOWN: case MYSQL_ERR_UNKNOWN_COM_ERROR: tOut.SendBytes ( "#08S01", 6 ); break; case MYSQL_ERR_NO_SUCH_TABLE: tOut.SendBytes ( "#42S02", 6 ); break; default: tOut.SendBytes ( "#42000", 6 ); break; } // send error message tOut.SendBytes ( sError, iErrorLen ); } void SendMysqlErrorPacketEx ( NetOutputBuffer_c & tOut, BYTE uPacketID, MysqlErrors_e iErr, const char * sTemplate, ... ) { char sBuf[1024]; va_list ap; va_start ( ap, sTemplate ); vsnprintf ( sBuf, sizeof(sBuf), sTemplate, ap ); va_end ( ap ); SendMysqlErrorPacket ( tOut, uPacketID, NULL, sBuf, iErr ); } void SendMysqlEofPacket ( NetOutputBuffer_c & tOut, BYTE uPacketID, int iWarns, bool bMoreResults=false ) { if ( iWarns<0 ) iWarns = 0; if ( iWarns>65535 ) iWarns = 65535; if ( bMoreResults ) #if USE_MYSQL iWarns |= ( SERVER_MORE_RESULTS_EXISTS<<16 ); #else iWarns = iWarns; #endif tOut.SendLSBDword ( (uPacketID<<24) + 5 ); tOut.SendByte ( 0xfe ); tOut.SendLSBDword ( iWarns ); // N warnings, 0 status } void SendMysqlOkPacket ( NetOutputBuffer_c & tOut, BYTE uPacketID, int iAffectedRows=0, int iWarns=0, const char * sMessage=NULL ) { DWORD iInsert_id = 0; char sVarLen[20] = {0}; // max 18 for packed number, +1 more just for fun void * pBuf = sVarLen; pBuf = MysqlPack ( pBuf, iAffectedRows ); pBuf = MysqlPack ( pBuf, iInsert_id ); int iLen = (char *) pBuf - sVarLen; int iMsgLen = 0; if ( sMessage ) iMsgLen = strlen(sMessage) + 1; // FIXME! does or doesn't the trailing zero necessary in Ok packet? tOut.SendLSBDword ( (uPacketID<<24) + iLen + iMsgLen + 5); tOut.SendByte ( 0 ); // ok packet tOut.SendBytes ( sVarLen, iLen ); // packed affected rows & insert_id if ( iWarns<0 ) iWarns = 0; if ( iWarns>65535 ) iWarns = 65535; DWORD uWarnStatus = iWarns<<16; tOut.SendLSBDword ( uWarnStatus ); // N warnings, 0 status if ( iMsgLen > 0 ) tOut.SendBytes ( sMessage, iMsgLen ); } struct CmpColumns_fn { inline bool IsLess ( const CSphString & a, const CSphString & b ) const { return CmpString ( a, b )<0; } }; void HandleMysqlInsert ( const SqlStmt_t & tStmt, NetOutputBuffer_c & tOut, BYTE uPacketID, bool bReplace, bool bCommit ) { MEMORY ( SPH_MEM_INSERT_SQL ); CSphString sError; // get that index const ServedIndex_t * pServed = g_pIndexes->GetRlockedEntry ( tStmt.m_sIndex ); if ( !pServed ) { sError.SetSprintf ( "no such index '%s'", tStmt.m_sIndex.cstr() ); SendMysqlErrorPacket ( tOut, uPacketID, tStmt.m_sStmt, sError.cstr() ); return; } if ( !pServed->m_bRT || !pServed->m_bEnabled ) { pServed->Unlock(); sError.SetSprintf ( "index '%s' does not support INSERT (enabled=%d)", tStmt.m_sIndex.cstr(), pServed->m_bEnabled ); SendMysqlErrorPacket ( tOut, uPacketID, tStmt.m_sStmt, sError.cstr() ); return; } ISphRtIndex * pIndex = dynamic_cast ( pServed->m_pIndex ); // FIXME? remove dynamic_cast? 
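// SendMysqlOkPacket() above relies on MysqlPack()/MysqlPackedLen() to emit
// MySQL "length-encoded integers". Those helpers are defined elsewhere in this
// file; the sketch below only illustrates the wire format as documented by the
// MySQL client/server protocol (values below 251 take one byte, larger ones get
// a 0xFC/0xFD/0xFE prefix plus 2, 3 or 8 little-endian payload bytes).
// PackLenEncInt is an invented name and may differ from MysqlPack() in detail.
#if 0
#include <cstdint>

static uint8_t * PackLenEncInt ( uint8_t * p, uint64_t uValue )
{
	if ( uValue<251 )
	{
		*p++ = (uint8_t)uValue;
		return p;
	}

	int iBytes;
	if ( uValue < ( 1ULL<<16 ) )		{ *p++ = 0xFC; iBytes = 2; }
	else if ( uValue < ( 1ULL<<24 ) )	{ *p++ = 0xFD; iBytes = 3; }
	else								{ *p++ = 0xFE; iBytes = 8; }

	for ( int i=0; i<iBytes; i++ )
		*p++ = (uint8_t)( uValue>>( 8*i ) );	// little-endian payload
	return p;
}
#endif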
	assert ( pIndex );

	// get schema, check values count
	const CSphSchema & tSchema = pIndex->GetInternalSchema();
	int iSchemaSz = tSchema.GetAttrsCount() + tSchema.m_dFields.GetLength() + 1;
	int iExp = tStmt.m_iSchemaSz;
	int iGot = tStmt.m_dInsertValues.GetLength();
	if ( !tStmt.m_dInsertSchema.GetLength() && ( iSchemaSz!=tStmt.m_iSchemaSz ) )
	{
		pServed->Unlock();
		sError.SetSprintf ( "column count does not match schema (expected %d, got %d)", iSchemaSz, iGot );
		SendMysqlErrorPacket ( tOut, uPacketID, tStmt.m_sStmt, sError.cstr() );
		return;
	}

	if ( ( iGot % iExp )!=0 )
	{
		pServed->Unlock();
		sError.SetSprintf ( "column count does not match value count (expected %d, got %d)", iExp, iGot );
		SendMysqlErrorPacket ( tOut, uPacketID, tStmt.m_sStmt, sError.cstr() );
		return;
	}

	CSphVector<int> dAttrSchema ( tSchema.GetAttrsCount() );
	CSphVector<int> dFieldSchema ( tSchema.m_dFields.GetLength() );
	int iIdIndex = 0;
	if ( !tStmt.m_dInsertSchema.GetLength() )
	{
		// no columns list, use index schema
		ARRAY_FOREACH ( i, dFieldSchema )
			dFieldSchema[i] = i+1;
		int iFields = dFieldSchema.GetLength();
		ARRAY_FOREACH ( j, dAttrSchema )
			dAttrSchema[j] = j+iFields+1;
	} else
	{
		// got a list of columns, check for 1) existence, 2) dupes
		CSphVector<CSphString> dCheck = tStmt.m_dInsertSchema;
		ARRAY_FOREACH ( i, dCheck )
			// OPTIMIZE! GetAttrIndex and GetFieldIndex use linear search. Maybe hash instead?
			if ( dCheck[i]!="id" && tSchema.GetAttrIndex ( dCheck[i].cstr() )==-1 && tSchema.GetFieldIndex ( dCheck[i].cstr() )==-1 )
			{
				pServed->Unlock();
				sError.SetSprintf ( "unknown column: '%s'", dCheck[i].cstr() );
				SendMysqlErrorPacket ( tOut, uPacketID, tStmt.m_sStmt, sError.cstr(), MYSQL_ERR_PARSE_ERROR );
				return;
			}

		dCheck.Sort ( CmpColumns_fn() );

		ARRAY_FOREACH ( i, dCheck )
			if ( i>0 && dCheck[i-1]==dCheck[i] )
			{
				pServed->Unlock();
				sError.SetSprintf ( "column '%s' specified twice", dCheck[i].cstr() );
				SendMysqlErrorPacket ( tOut, uPacketID, tStmt.m_sStmt, sError.cstr(), MYSQL_ERR_FIELD_SPECIFIED_TWICE );
				return;
			}

		// hash column list
		// OPTIMIZE! hash index columns once (!)
instead SmallStringHash_T dInsertSchema; ARRAY_FOREACH ( i, tStmt.m_dInsertSchema ) dInsertSchema.Add ( i, tStmt.m_dInsertSchema[i] ); // get id index if ( !dInsertSchema.Exists("id") ) { pServed->Unlock(); SendMysqlErrorPacket ( tOut, uPacketID, tStmt.m_sStmt, "column list must contain an 'id' column" ); return; } iIdIndex = dInsertSchema["id"]; // map fields bool bIdDupe = false; ARRAY_FOREACH ( i, dFieldSchema ) { if ( dInsertSchema.Exists ( tSchema.m_dFields[i].m_sName ) ) { int iField = dInsertSchema[tSchema.m_dFields[i].m_sName]; if ( iField==iIdIndex ) { bIdDupe = true; break; } dFieldSchema[i] = iField; } else dFieldSchema[i] = -1; } if ( bIdDupe ) { pServed->Unlock(); SendMysqlErrorPacket ( tOut, uPacketID, tStmt.m_sStmt, "fields must never be named 'id' (fix your config)" ); return; } // map attrs ARRAY_FOREACH ( j, dAttrSchema ) { if ( dInsertSchema.Exists ( tSchema.GetAttr(j).m_sName ) ) { int iField = dInsertSchema[tSchema.GetAttr(j).m_sName]; if ( iField==iIdIndex ) { bIdDupe = true; break; } dAttrSchema[j] = iField; } else dAttrSchema[j] = -1; } if ( bIdDupe ) { pServed->Unlock(); sError.SetSprintf ( "attributes must never be named 'id' (fix your config)" ); SendMysqlErrorPacket ( tOut, uPacketID, tStmt.m_sStmt, sError.cstr() ); return; } } CSphVector dStrings; CSphVector dMvas; // convert attrs for ( int c=0; cUniq(); iLen = tVal.m_pVals->GetLength(); } if ( tCol.m_eAttrType==SPH_ATTR_UINT64SET ) { dMvas.Add ( iLen*2 ); for ( int j=0; j>32 ); dMvas.Add ( uLow ); dMvas.Add ( uHi ); } } else { dMvas.Add ( iLen ); for ( int j=0; j dFields; ARRAY_FOREACH ( i, tSchema.m_dFields ) { int iQuerySchemaIdx = dFieldSchema[i]; if ( iQuerySchemaIdx < 0 ) dFields.Add ( "" ); // default value else { if ( tStmt.m_dInsertValues [ iQuerySchemaIdx + c * iExp ].m_iType!=TOK_QUOTED_STRING ) { sError.SetSprintf ( "row %d, column %d: string expected", 1+c, 1+iQuerySchemaIdx ); // 1 for human base break; } dFields.Add ( tStmt.m_dInsertValues[ iQuerySchemaIdx + c * iExp ].m_sVal.cstr() ); } } if ( !sError.IsEmpty() ) break; // do add pIndex->AddDocument ( dFields.GetLength(), dFields.Begin(), tDoc, bReplace, dStrings.Begin(), dMvas, sError ); if ( !sError.IsEmpty() ) break; } // fire exit if ( !sError.IsEmpty() ) { pServed->Unlock(); SendMysqlErrorPacket ( tOut, uPacketID, tStmt.m_sStmt, sError.cstr() ); return; } // no errors so far if ( bCommit ) pIndex->Commit (); pServed->Unlock(); // my OK packet SendMysqlOkPacket ( tOut, uPacketID, tStmt.m_iRowsAffected ); } // our copy of enum_server_command // we can't rely on mysql_com.h because it might be unavailable // // MYSQL_COM_SLEEP = 0 // MYSQL_COM_QUIT = 1 // MYSQL_COM_INIT_DB = 2 // MYSQL_COM_QUERY = 3 // MYSQL_COM_FIELD_LIST = 4 // MYSQL_COM_CREATE_DB = 5 // MYSQL_COM_DROP_DB = 6 // MYSQL_COM_REFRESH = 7 // MYSQL_COM_SHUTDOWN = 8 // MYSQL_COM_STATISTICS = 9 // MYSQL_COM_PROCESS_INFO = 10 // MYSQL_COM_CONNECT = 11 // MYSQL_COM_PROCESS_KILL = 12 // MYSQL_COM_DEBUG = 13 // MYSQL_COM_PING = 14 // MYSQL_COM_TIME = 15 // MYSQL_COM_DELAYED_INSERT = 16 // MYSQL_COM_CHANGE_USER = 17 // MYSQL_COM_BINLOG_DUMP = 18 // MYSQL_COM_TABLE_DUMP = 19 // MYSQL_COM_CONNECT_OUT = 20 // MYSQL_COM_REGISTER_SLAVE = 21 // MYSQL_COM_STMT_PREPARE = 22 // MYSQL_COM_STMT_EXECUTE = 23 // MYSQL_COM_STMT_SEND_LONG_DATA = 24 // MYSQL_COM_STMT_CLOSE = 25 // MYSQL_COM_STMT_RESET = 26 // MYSQL_COM_SET_OPTION = 27 // MYSQL_COM_STMT_FETCH = 28 enum { MYSQL_COM_QUIT = 1, MYSQL_COM_INIT_DB = 2, MYSQL_COM_QUERY = 3, MYSQL_COM_PING = 14, MYSQL_COM_SET_OPTION = 27 }; void 
HandleMysqlCallSnippets ( NetOutputBuffer_c & tOut, BYTE uPacketID, SqlStmt_t & tStmt ) { CSphString sError; // check arguments // string data, string index, string query, [named opts] if ( tStmt.m_dInsertValues.GetLength()!=3 ) { SendMysqlErrorPacket ( tOut, uPacketID, tStmt.m_sStmt, "SNIPPETS() expectes exactly 3 arguments (data, index, query)" ); return; } if ( tStmt.m_dInsertValues[0].m_iType!=TOK_QUOTED_STRING && tStmt.m_dInsertValues[0].m_iType!=TOK_CONST_STRINGS ) { SendMysqlErrorPacket ( tOut, uPacketID, tStmt.m_sStmt, "SNIPPETS() argument 1 must be a string or a string list" ); return; } if ( tStmt.m_dInsertValues[1].m_iType!=TOK_QUOTED_STRING ) { SendMysqlErrorPacket ( tOut, uPacketID, tStmt.m_sStmt, "SNIPPETS() argument 2 must be a string" ); return; } if ( tStmt.m_dInsertValues[2].m_iType!=TOK_QUOTED_STRING ) { SendMysqlErrorPacket ( tOut, uPacketID, tStmt.m_sStmt, "SNIPPETS() argument 3 must be a string" ); return; } // do magics CSphString sIndex = tStmt.m_dInsertValues[1].m_sVal; ExcerptQuery_t q; q.m_sWords = tStmt.m_dInsertValues[2].m_sVal; ARRAY_FOREACH ( i, tStmt.m_dCallOptNames ) { CSphString & sOpt = tStmt.m_dCallOptNames[i]; const SqlInsert_t & v = tStmt.m_dCallOptValues[i]; sOpt.ToLower(); int iExpType = -1; if ( sOpt=="before_match" ) { q.m_sBeforeMatch = v.m_sVal; iExpType = TOK_QUOTED_STRING; } else if ( sOpt=="after_match" ) { q.m_sAfterMatch = v.m_sVal; iExpType = TOK_QUOTED_STRING; } else if ( sOpt=="chunk_separator" ) { q.m_sChunkSeparator = v.m_sVal; iExpType = TOK_QUOTED_STRING; } else if ( sOpt=="html_strip_mode" ) { q.m_sStripMode = v.m_sVal; iExpType = TOK_QUOTED_STRING; } else if ( sOpt=="passage_boundary" ) { q.m_sRawPassageBoundary = v.m_sVal; iExpType = TOK_QUOTED_STRING; } else if ( sOpt=="limit" ) { q.m_iLimit = (int)v.m_iVal; iExpType = TOK_CONST_INT; } else if ( sOpt=="limit_words" ) { q.m_iLimitWords = (int)v.m_iVal; iExpType = TOK_CONST_INT; } else if ( sOpt=="limit_passages" ) { q.m_iLimitPassages = (int)v.m_iVal; iExpType = TOK_CONST_INT; } else if ( sOpt=="around" ) { q.m_iAround = (int)v.m_iVal; iExpType = TOK_CONST_INT; } else if ( sOpt=="start_passage_id" ) { q.m_iPassageId = (int)v.m_iVal; iExpType = TOK_CONST_INT; } else if ( sOpt=="exact_phrase" ) { q.m_bExactPhrase = ( v.m_iVal!=0 ); iExpType = TOK_CONST_INT; } else if ( sOpt=="use_boundaries" ) { q.m_bUseBoundaries = ( v.m_iVal!=0 ); iExpType = TOK_CONST_INT; } else if ( sOpt=="weight_order" ) { q.m_bWeightOrder = ( v.m_iVal!=0 ); iExpType = TOK_CONST_INT; } else if ( sOpt=="query_mode" ) { q.m_bHighlightQuery = ( v.m_iVal!=0 ); iExpType = TOK_CONST_INT; } else if ( sOpt=="force_all_words" ) { q.m_bForceAllWords = ( v.m_iVal!=0 ); iExpType = TOK_CONST_INT; } else if ( sOpt=="load_files" ) { q.m_iLoadFiles = ( v.m_iVal!=0 ); iExpType = TOK_CONST_INT; } else if ( sOpt=="load_files_scattered" ) { q.m_iLoadFiles |= ( v.m_iVal!=0 )?2:0; iExpType = TOK_CONST_INT; } else if ( sOpt=="allow_empty" ) { q.m_bAllowEmpty = ( v.m_iVal!=0 ); iExpType = TOK_CONST_INT; } else if ( sOpt=="emit_zones" ) { q.m_bEmitZones = ( v.m_iVal!=0 ); iExpType = TOK_CONST_INT; } else { sError.SetSprintf ( "unknown option %s", sOpt.cstr() ); break; } // post-conf type check if ( iExpType!=v.m_iType ) { sError.SetSprintf ( "unexpected option %s type", sOpt.cstr() ); break; } } if ( !sError.IsEmpty() ) { SendMysqlErrorPacket ( tOut, uPacketID, tStmt.m_sStmt, sError.cstr() ); return; } q.m_iPassageBoundary = sphGetPassageBoundary ( q.m_sRawPassageBoundary ); if ( !sphCheckOptionsSPZ ( q, q.m_sRawPassageBoundary, sError 
) ) { SendMysqlErrorPacket ( tOut, uPacketID, tStmt.m_sStmt, sError.cstr() ); return; } q.m_bHasBeforePassageMacro = SnippetTransformPassageMacros ( q.m_sBeforeMatch, q.m_sBeforeMatchPassage ); q.m_bHasAfterPassageMacro = SnippetTransformPassageMacros ( q.m_sAfterMatch, q.m_sAfterMatchPassage ); q.m_iRawFlags = GetRawSnippetFlags ( q ); CSphVector dQueries; if ( tStmt.m_dInsertValues[0].m_iType==TOK_QUOTED_STRING ) { q.m_sSource = tStmt.m_dInsertValues[0].m_sVal; // OPTIMIZE? dQueries.Add ( q ); } else { dQueries.Resize ( tStmt.m_dCallStrings.GetLength() ); ARRAY_FOREACH ( i, tStmt.m_dCallStrings ) { dQueries[i] = q; // copy the settings dQueries[i].m_sSource = tStmt.m_dCallStrings[i]; // OPTIMIZE? } } if ( !MakeSnippets ( sIndex, dQueries, sError ) ) { SendMysqlErrorPacket ( tOut, uPacketID, tStmt.m_sStmt, sError.cstr() ); return; } CSphVector dResults ( dQueries.GetLength() ); ARRAY_FOREACH ( i, dResults ) dResults[i] = dQueries[i].m_sRes; bool bGotData = ARRAY_ANY ( bGotData, dResults, dResults[_any]!=NULL ); if ( !bGotData ) { // just one last error instead of all errors is hopefully ok sError.SetSprintf ( "highlighting failed: %s", sError.cstr() ); SendMysqlErrorPacket ( tOut, uPacketID, tStmt.m_sStmt, sError.cstr() ); return; } // result set header packet tOut.SendLSBDword ( ((uPacketID++)<<24) + 2 ); tOut.SendByte ( 1 ); // field count (snippet) tOut.SendByte ( 0 ); // extra // fields SendMysqlFieldPacket ( tOut, uPacketID++, "snippet", MYSQL_COL_STRING ); SendMysqlEofPacket ( tOut, uPacketID++, 0 ); // data ARRAY_FOREACH ( i, dResults ) { const char * sResult = dResults[i] ? dResults[i] : ""; tOut.SendLSBDword ( ((uPacketID++)<<24) + MysqlPackedLen ( sResult ) ); tOut.SendMysqlString ( sResult ); } SendMysqlEofPacket ( tOut, uPacketID++, 0 ); ARRAY_FOREACH ( i, dResults ) SafeDeleteArray ( dResults[i] ); } void HandleMysqlCallKeywords ( NetOutputBuffer_c & tOut, BYTE uPacketID, SqlStmt_t & tStmt ) { CSphString sError; // string query, string index, [bool hits] int iArgs = tStmt.m_dInsertValues.GetLength(); if ( iArgs<2 || iArgs>3 || tStmt.m_dInsertValues[0].m_iType!=TOK_QUOTED_STRING || tStmt.m_dInsertValues[1].m_iType!=TOK_QUOTED_STRING || ( iArgs==3 && tStmt.m_dInsertValues[2].m_iType!=TOK_CONST_INT ) ) { SendMysqlErrorPacket ( tOut, uPacketID, tStmt.m_sStmt, "bad argument count or types in KEYWORDS() call" ); return; } const ServedIndex_t * pServed = g_pIndexes->GetRlockedEntry ( tStmt.m_dInsertValues[1].m_sVal ); if ( !pServed || !pServed->m_bEnabled || !pServed->m_pIndex ) { sError.SetSprintf ( "no such index %s", tStmt.m_dInsertValues[1].m_sVal.cstr() ); SendMysqlErrorPacket ( tOut, uPacketID, tStmt.m_sStmt, sError.cstr() ); return; } CSphVector dKeywords; bool bStats = ( iArgs==3 && tStmt.m_dInsertValues[2].m_iVal!=0 ); bool bRes = pServed->m_pIndex->GetKeywords ( dKeywords, tStmt.m_dInsertValues[0].m_sVal.cstr(), bStats, sError ); pServed->Unlock (); if ( !bRes ) { sError.SetSprintf ( "keyword extraction failed: %s", sError.cstr() ); SendMysqlErrorPacket ( tOut, uPacketID, tStmt.m_sStmt, sError.cstr() ); return; } // result set header packet tOut.SendLSBDword ( ((uPacketID++)<<24) + 2 ); tOut.SendByte ( 2 + ( bStats ? 
2 : 0 ) ); // field count (tokenized, normalized, docs, hits) tOut.SendByte ( 0 ); // extra // fields SendMysqlFieldPacket ( tOut, uPacketID++, "tokenized", MYSQL_COL_STRING ); SendMysqlFieldPacket ( tOut, uPacketID++, "normalized", MYSQL_COL_STRING ); if ( bStats ) { SendMysqlFieldPacket ( tOut, uPacketID++, "docs", MYSQL_COL_STRING ); SendMysqlFieldPacket ( tOut, uPacketID++, "hits", MYSQL_COL_STRING ); } SendMysqlEofPacket ( tOut, uPacketID++, 0 ); // data ARRAY_FOREACH ( i, dKeywords ) { char sDocs[16], sHits[16]; snprintf ( sDocs, sizeof(sDocs), "%d", dKeywords[i].m_iDocs ); snprintf ( sHits, sizeof(sHits), "%d", dKeywords[i].m_iHits ); int iPacketLen = MysqlPackedLen ( dKeywords[i].m_sTokenized.cstr() ) + MysqlPackedLen ( dKeywords[i].m_sNormalized.cstr() ); if ( bStats ) iPacketLen += MysqlPackedLen ( sDocs ) + MysqlPackedLen ( sHits ); tOut.SendLSBDword ( ((uPacketID++)<<24) + iPacketLen ); tOut.SendMysqlString ( dKeywords[i].m_sTokenized.cstr() ); tOut.SendMysqlString ( dKeywords[i].m_sNormalized.cstr() ); if ( bStats ) { tOut.SendMysqlString ( sDocs ); tOut.SendMysqlString ( sHits ); } } SendMysqlEofPacket ( tOut, uPacketID++, 0 ); } void HandleMysqlDescribe ( NetOutputBuffer_c & tOut, BYTE uPacketID, SqlStmt_t & tStmt ) { const ServedIndex_t * pServed = g_pIndexes->GetRlockedEntry ( tStmt.m_sIndex ); if ( !pServed || !pServed->m_bEnabled || !pServed->m_pIndex ) { CSphString sError; sError.SetSprintf ( "no such index '%s'", tStmt.m_sIndex.cstr() ); SendMysqlErrorPacket ( tOut, uPacketID, tStmt.m_sStmt, sError.cstr(), MYSQL_ERR_NO_SUCH_TABLE ); return; } // result set header packet tOut.SendLSBDword ( ((uPacketID++)<<24) + 2 ); tOut.SendByte ( 2 ); // field count (field, type) tOut.SendByte ( 0 ); // extra // fields SendMysqlFieldPacket ( tOut, uPacketID++, "Field", MYSQL_COL_STRING ); SendMysqlFieldPacket ( tOut, uPacketID++, "Type", MYSQL_COL_STRING ); SendMysqlEofPacket ( tOut, uPacketID++, 0 ); // data const char * sIdType = USE_64BIT ? "bigint" : "integer"; tOut.SendLSBDword ( ((uPacketID++)<<24) + 3 + MysqlPackedLen ( sIdType ) ); tOut.SendMysqlString ( "id" ); tOut.SendMysqlString ( sIdType ); const CSphSchema & tSchema = pServed->m_pIndex->GetMatchSchema(); ARRAY_FOREACH ( i, tSchema.m_dFields ) { const CSphColumnInfo & tCol = tSchema.m_dFields[i]; tOut.SendLSBDword ( ((uPacketID++)<<24) + MysqlPackedLen ( tCol.m_sName.cstr() ) + 6 ); tOut.SendMysqlString ( tCol.m_sName.cstr() ); tOut.SendMysqlString ( "field" ); } for ( int i=0; iUnlock(); SendMysqlEofPacket ( tOut, uPacketID++, 0 ); } struct IndexNameLess_fn { inline bool IsLess ( const CSphNamedInt & a, const CSphNamedInt & b ) const { return strcasecmp ( a.m_sName.cstr(), b.m_sName.cstr() )<0; } }; void HandleMysqlShowTables ( NetOutputBuffer_c & tOut, BYTE uPacketID ) { // result set header packet tOut.SendLSBDword ( ((uPacketID++)<<24) + 2 ); tOut.SendByte ( 2 ); // field count (index, type) tOut.SendByte ( 0 ); // extra // fields SendMysqlFieldPacket ( tOut, uPacketID++, "Index", MYSQL_COL_STRING ); SendMysqlFieldPacket ( tOut, uPacketID++, "Type", MYSQL_COL_STRING ); SendMysqlEofPacket ( tOut, uPacketID++, 0 ); // all the indexes // 0 local, 1 distributed, 2 rt CSphVector dIndexes; for ( IndexHashIterator_c it ( g_pIndexes ); it.Next(); ) if ( it.Get().m_bEnabled ) { CSphNamedInt & tIdx = dIndexes.Add(); tIdx.m_sName = it.GetKey(); tIdx.m_iValue = it.Get().m_bRT ? 
2 : 0; } g_tDistLock.Lock(); g_hDistIndexes.IterateStart(); while ( g_hDistIndexes.IterateNext() ) { CSphNamedInt & tIdx = dIndexes.Add(); tIdx.m_sName = g_hDistIndexes.IterateGetKey(); tIdx.m_iValue = 1; } g_tDistLock.Unlock(); dIndexes.Sort ( IndexNameLess_fn() ); ARRAY_FOREACH ( i, dIndexes ) { const char * sType = "?"; switch ( dIndexes[i].m_iValue ) { case 0: sType = "local"; break; case 1: sType = "distributed"; break; case 2: sType = "rt"; break; } tOut.SendLSBDword ( ((uPacketID++)<<24) + MysqlPackedLen ( dIndexes[i].m_sName.cstr() ) + MysqlPackedLen ( sType ) ); tOut.SendMysqlString ( dIndexes[i].m_sName.cstr() ); tOut.SendMysqlString ( sType ); } SendMysqlEofPacket ( tOut, uPacketID++, 0 ); } ///////////////////////////////////////////////////////////////////////////// // SMART UPDATES HANDLER ///////////////////////////////////////////////////////////////////////////// struct SphinxqlRequestBuilder_t : public IRequestBuilder_t { explicit SphinxqlRequestBuilder_t ( const CSphString sQuery, const SqlStmt_t & tStmt ) { m_sBegin.SetBinary ( sQuery.cstr(), tStmt.m_iListStart ); m_sEnd.SetBinary ( sQuery.cstr() + tStmt.m_iListEnd, sQuery.Length() - tStmt.m_iListEnd ); } virtual void BuildRequest ( const char * sIndexes, NetOutputBuffer_c & tOut, int ) const; protected: CSphString m_sBegin; CSphString m_sEnd; }; struct SphinxqlReplyParser_t : public IReplyParser_t { explicit SphinxqlReplyParser_t ( int * pUpd, int * pWarns ) : m_pUpdated ( pUpd ) , m_pWarns ( pWarns ) {} virtual bool ParseReply ( MemInputBuffer_c & tReq, AgentConn_t &, int ) const { DWORD uSize = ( tReq.GetLSBDword() & 0x00FFFFFF ) - 1; BYTE uCommand = tReq.GetByte(); int iAffected = 0; int iWarns = 0; int iError = 0; int iInsert_id = 0; CSphString sMessage; switch ( uCommand ) { case 0: // ok packet iAffected = MysqlUnpack ( tReq, &uSize ); iInsert_id = MysqlUnpack ( tReq, &uSize ); iWarns = tReq.GetLSBDword(); uSize -= 4; if ( uSize ) sMessage = tReq.GetRawString ( uSize ); break; case 0xff: // error packet iError = tReq.GetByte() + ((int)tReq.GetByte()<<8); uSize -= 2; if ( uSize ) sMessage = tReq.GetRawString ( uSize ); break; default: break; } *m_pUpdated += iAffected; return true; } protected: int * m_pUpdated; int * m_pWarns; }; void SphinxqlRequestBuilder_t::BuildRequest ( const char * sIndexes, NetOutputBuffer_c & tOut, int ) const { int iReqSize = strlen(sIndexes) + m_sBegin.Length() + m_sEnd.Length(); // indexes string // header tOut.SendDword ( SPHINX_SEARCHD_PROTO ); tOut.SendWord ( SEARCHD_COMMAND_SPHINXQL ); tOut.SendWord ( VER_COMMAND_SPHINXQL ); tOut.SendInt ( iReqSize + 4 ); tOut.SendInt ( iReqSize ); tOut.SendBytes ( m_sBegin.cstr(), m_sBegin.Length() ); tOut.SendBytes ( sIndexes, strlen(sIndexes) ); tOut.SendBytes ( m_sEnd.cstr(), m_sEnd.Length() ); } ////////////////////////////////////////////////////////////////////////// static void DoExtendedUpdate ( const char * sIndex, const SqlStmt_t & tStmt, int & iSuccesses, int & iUpdated, bool bCommit, SearchFailuresLog_c & dFails, const ServedIndex_t * pServed ) { if ( !pServed || !pServed->m_pIndex || !pServed->m_bEnabled ) { if ( pServed ) pServed->Unlock(); dFails.Submit ( sIndex, "index not available" ); return; } SearchHandler_c tHandler ( 1, true ); // handler unlocks index at destructor - no need to do it manually CSphAttrUpdateEx tUpdate; CSphString sError; tUpdate.m_pUpdate = &tStmt.m_tUpdate; tUpdate.m_pIndex = pServed->m_pIndex; tUpdate.m_pError = &sError; tHandler.RunUpdates ( tStmt.m_tQuery, sIndex, &tUpdate ); if ( sError.Length() ) { 
dFails.Submit ( sIndex, sError.cstr() ); return; } if ( bCommit && pServed->m_bRT ) { ISphRtIndex * pIndex = static_cast ( pServed->m_pIndex ); pIndex->Commit (); } iUpdated += tUpdate.m_iAffected; iSuccesses++; } void HandleMysqlUpdate ( NetOutputBuffer_c & tOut, BYTE uPacketID, const SqlStmt_t & tStmt, const CSphString & sQuery, bool bCommit ) { CSphString sError; // check index names CSphVector dIndexNames; ParseIndexList ( tStmt.m_sIndex, dIndexNames ); if ( !dIndexNames.GetLength() ) { sError.SetSprintf ( "no such index '%s'", tStmt.m_sIndex.cstr() ); SendMysqlErrorPacket ( tOut, uPacketID, tStmt.m_sStmt, sError.cstr() ); return; } CSphVector dDistributed ( dIndexNames.GetLength() ); // lock safe storage for distributed indexes ARRAY_FOREACH ( i, dIndexNames ) { if ( !g_pIndexes->Exists ( dIndexNames[i] ) ) { // search amongst distributed and copy for further processing g_tDistLock.Lock(); const DistributedIndex_t * pDistIndex = g_hDistIndexes ( dIndexNames[i] ); if ( pDistIndex ) { dDistributed[i] = *pDistIndex; } g_tDistLock.Unlock(); if ( pDistIndex ) continue; else { sError.SetSprintf ( "unknown index '%s' in update request", dIndexNames[i].cstr() ); SendMysqlErrorPacket ( tOut, uPacketID, tStmt.m_sStmt, sError.cstr() ); return; } } } // do update SearchFailuresLog_c dFails; int iSuccesses = 0; int iUpdated = 0; int iWarns = 0; bool bMvaUpdate = false; ARRAY_FOREACH_COND ( i, tStmt.m_tUpdate.m_dAttrs, !bMvaUpdate ) { bMvaUpdate = ( tStmt.m_tUpdate.m_dAttrs[i].m_eAttrType==SPH_ATTR_UINT32SET || tStmt.m_tUpdate.m_dAttrs[i].m_eAttrType==SPH_ATTR_UINT64SET ); } ARRAY_FOREACH ( iIdx, dIndexNames ) { const char * sReqIndex = dIndexNames[iIdx].cstr(); const ServedIndex_t * pLocked = UpdateGetLockedIndex ( sReqIndex, bMvaUpdate ); if ( pLocked ) { DoExtendedUpdate ( sReqIndex, tStmt, iSuccesses, iUpdated, bCommit, dFails, pLocked ); } else { assert ( dDistributed[iIdx].m_dLocal.GetLength() || dDistributed[iIdx].m_dAgents.GetLength() ); CSphVector& dLocal = dDistributed[iIdx].m_dLocal; ARRAY_FOREACH ( i, dLocal ) { const char * sLocal = dLocal[i].cstr(); const ServedIndex_t * pServed = UpdateGetLockedIndex ( sLocal, bMvaUpdate ); DoExtendedUpdate ( sLocal, tStmt, iSuccesses, iUpdated, bCommit, dFails, pServed ); } } // update remote agents if ( dDistributed[iIdx].m_dAgents.GetLength() ) { DistributedIndex_t & tDist = dDistributed[iIdx]; CSphVector dAgents ( tDist.m_dAgents.GetLength() ); ARRAY_FOREACH ( i, dAgents ) dAgents[i] = tDist.m_dAgents[i]; // connect to remote agents and query them ConnectToRemoteAgents ( dAgents, false ); SphinxqlRequestBuilder_t tReqBuilder ( sQuery, tStmt ); int iRemote = QueryRemoteAgents ( dAgents, tDist.m_iAgentConnectTimeout, tReqBuilder, NULL ); // FIXME? profile update time too? if ( iRemote ) { SphinxqlReplyParser_t tParser ( &iUpdated, &iWarns ); iSuccesses += WaitForRemoteAgents ( dAgents, tDist.m_iAgentQueryTimeout, tParser, NULL ); // FIXME? profile update time too? 
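	// UPDATE fan-out: plain local indexes are updated directly through
	// DoExtendedUpdate(); a distributed index is expanded into its local
	// members plus its remote agents, and the agents receive the original
	// SphinxQL statement with the index list swapped in over the searchd API
	// (SEARCHD_COMMAND_SPHINXQL). Per-index failures are collected in dFails;
	// if nothing succeeded the report goes out as an error packet, otherwise
	// the summed affected-row count goes into the final OK packet.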
} } } StrBuf_t sReport; dFails.BuildReport ( sReport ); if ( !iSuccesses ) { SendMysqlErrorPacket ( tOut, uPacketID, tStmt.m_sStmt, sReport.cstr() ); return; } SendMysqlOkPacket ( tOut, uPacketID, iUpdated, iWarns ); } ////////////////////////////////////////////////////////////////////////// #define SPH_MAX_NUMERIC_STR 64 class SqlRowBuffer_c { public: SqlRowBuffer_c () : m_pBuf ( NULL ) , m_iLen ( 0 ) , m_iLimit ( sizeof ( m_dBuf ) ) { } ~SqlRowBuffer_c () { SafeDeleteArray ( m_pBuf ); } char * Reserve ( int iLen ) { int iNewSize = m_iLen+iLen; if ( iNewSize<=m_iLimit ) return Get(); int iNewLimit = Max ( m_iLimit*2, iNewSize ); char * pBuf = new char [iNewLimit]; memcpy ( pBuf, m_pBuf ? m_pBuf : m_dBuf, m_iLen ); SafeDeleteArray ( m_pBuf ); m_pBuf = pBuf; m_iLimit = iNewLimit; return Get(); } char * Get () { return m_pBuf ? m_pBuf+m_iLen : m_dBuf+m_iLen; } char * Off ( int iOff ) { assert ( iOff void PutNumeric ( const char * sFormat, T tVal ) { Reserve ( SPH_MAX_NUMERIC_STR ); int iLen = snprintf ( Get()+1, SPH_MAX_NUMERIC_STR-1, sFormat, tVal ); *Get() = BYTE(iLen); IncPtr ( 1+iLen ); } void PutString ( const char * sMsg ) { int iLen = sMsg ? strlen ( sMsg ) : 0; Reserve ( 1+iLen ); char * pBegin = Get(); char * pStr = (char *)MysqlPack ( pBegin, iLen ); if ( pStr>pBegin ) { memcpy ( pStr, sMsg, iLen ); IncPtr ( ( pStr-pBegin )+iLen ); } } private: char m_dBuf[4096]; char * m_pBuf; int m_iLen; int m_iLimit; }; bool HandleMysqlSelect ( NetOutputBuffer_c & tOut, BYTE & uPacketID, SearchHandler_c & tHandler ) { // lets check all query for errors CSphString sError; CSphVector dAgentTimes; // dummy for error reporting ARRAY_FOREACH ( i, tHandler.m_dQueries ) { CheckQuery ( tHandler.m_dQueries[i], tHandler.m_dResults[i].m_sError ); if ( !tHandler.m_dResults[i].m_sError.IsEmpty() ) { LogQuery ( tHandler.m_dQueries[i], tHandler.m_dResults[i], dAgentTimes ); if ( sError.IsEmpty() ) sError.SetSprintf ( "query %d error: %s", i, tHandler.m_dResults[i].m_sError.cstr() ); else sError.SetSprintf ( "%s; query %d error: %s", sError.cstr(), i, tHandler.m_dResults[i].m_sError.cstr() ); } } if ( sError.Length() ) { // stmt is intentionally NULL, as we did all the reporting just above SendMysqlErrorPacket ( tOut, uPacketID, NULL, sError.cstr() ); return false; } // actual searching tHandler.RunQueries (); if ( g_bGotSigterm ) { sphLogDebug ( "HandleClientMySQL: got SIGTERM, sending the packet MYSQL_ERR_SERVER_SHUTDOWN" ); SendMysqlErrorPacket ( tOut, uPacketID, NULL, "Server shutdown in progress", MYSQL_ERR_SERVER_SHUTDOWN ); return false; } return true; } void SendMysqlSelectResult ( NetOutputBuffer_c & tOut, BYTE & uPacketID, SqlRowBuffer_c & dRows, const AggrResult_t & tRes, bool bMoreResultsFollow ) { if ( !tRes.m_iSuccesses ) { // at this point, SELECT error logging should have been handled, so pass a NULL stmt to logger SendMysqlErrorPacket ( tOut, uPacketID++, NULL, tRes.m_sError.cstr() ); return; } // empty result sets just might carry the full uberschema // bummer! 
lets protect ourselves against that int iSchemaAttrsCount = 0; int iAttrsCount = 1; if ( tRes.m_dMatches.GetLength() ) { iSchemaAttrsCount = SendGetAttrCount ( tRes.m_tSchema ); iAttrsCount = iSchemaAttrsCount; if ( g_bCompatResults ) iAttrsCount += 2; } if ( iAttrsCount>=251 ) { // this will show up as success in query log, as the query itself was ok // but we need some kind of a notice anyway, to nail down issues based on logs only sphWarning ( "selecting more than 250 columns is not supported yet" ); SendMysqlErrorPacket ( tOut, uPacketID++, NULL, "selecting more than 250 columns is not supported yet" ); return; } // result set header packet tOut.SendLSBDword ( ((uPacketID++)<<24) + 2 ); tOut.SendByte ( BYTE(iAttrsCount) ); tOut.SendByte ( 0 ); // extra // field packets if ( !tRes.m_dMatches.GetLength() ) { // in case there are no matches, send a dummy schema SendMysqlFieldPacket ( tOut, uPacketID++, "id", USE_64BIT ? MYSQL_COL_LONGLONG : MYSQL_COL_LONG ); } else { // send result set schema if ( g_bCompatResults ) { SendMysqlFieldPacket ( tOut, uPacketID++, "id", USE_64BIT ? MYSQL_COL_LONGLONG : MYSQL_COL_LONG ); SendMysqlFieldPacket ( tOut, uPacketID++, "weight", MYSQL_COL_LONG ); } for ( int i=0; i ( DOCID_FMT, tMatch.m_iDocID ); dRows.PutNumeric ( "%u", tMatch.m_iWeight ); } const CSphSchema & tSchema = tRes.m_tSchema; for ( int i=0; i ( INT64_FMT, tMatch.GetAttr(tLoc) ); else dRows.PutNumeric ( "%u", (DWORD)tMatch.GetAttr(tLoc) ); break; case SPH_ATTR_FLOAT: dRows.PutNumeric ( "%f", tMatch.GetAttrFloat(tLoc) ); break; case SPH_ATTR_UINT64SET: case SPH_ATTR_UINT32SET: { int iLenOff = dRows.Length(); dRows.Reserve ( 4 ); dRows.IncPtr ( 4 ); assert ( tMatch.GetAttr ( tLoc )==0 || tRes.m_dTag2Pools [ tMatch.m_iTag ].m_pMva ); const DWORD * pValues = tMatch.GetAttrMVA ( tLoc, tRes.m_dTag2Pools [ tMatch.m_iTag ].m_pMva ); if ( pValues ) { DWORD nValues = *pValues++; assert ( eAttrType==SPH_ATTR_UINT32SET || ( nValues%2 )==0 ); if ( eAttrType==SPH_ATTR_UINT32SET ) { while ( nValues-- ) { dRows.Reserve ( SPH_MAX_NUMERIC_STR ); int iLen = snprintf ( dRows.Get(), SPH_MAX_NUMERIC_STR, nValues>0 ? "%u," : "%u", *pValues++ ); dRows.IncPtr ( iLen ); } } else { for ( ; nValues; nValues-=2, pValues+=2 ) { uint64_t uVal = MVA_UPSIZE ( pValues ); dRows.Reserve ( SPH_MAX_NUMERIC_STR ); int iLen = snprintf ( dRows.Get(), SPH_MAX_NUMERIC_STR, nValues>2 ? 
UINT64_FMT"," : UINT64_FMT, uVal ); dRows.IncPtr ( iLen ); } } } // manually pack length, forcibly into exactly 3 bytes int iLen = dRows.Length()-iLenOff-4; char * pLen = dRows.Off ( iLenOff ); pLen[0] = (BYTE)0xfd; pLen[1] = (BYTE)( iLen & 0xff ); pLen[2] = (BYTE)( ( iLen>>8 ) & 0xff ); pLen[3] = (BYTE)( ( iLen>>16 ) & 0xff ); break; } case SPH_ATTR_STRING: { const BYTE * pStrings = tRes.m_dTag2Pools [ tMatch.m_iTag ].m_pStrings; // get that string const BYTE * pStr = NULL; int iLen = 0; DWORD uOffset = (DWORD) tMatch.GetAttr ( tLoc ); if ( uOffset ) { assert ( pStrings ); iLen = sphUnpackStr ( pStrings+uOffset, &pStr ); } // send length dRows.Reserve ( iLen+4 ); char * pOutStr = (char*)MysqlPack ( dRows.Get(), iLen ); // send string data if ( iLen ) memcpy ( pOutStr, pStr, iLen ); dRows.IncPtr ( pOutStr-dRows.Get()+iLen ); break; } default: char * pDef = dRows.Reserve ( 2 ); pDef[0] = 1; pDef[1] = '-'; dRows.IncPtr ( 2 ); break; } } tOut.SendLSBDword ( ((uPacketID++)<<24) + ( dRows.Length() ) ); tOut.SendBytes ( dRows.Off ( 0 ), dRows.Length() ); dRows.Reset(); } // eof packet SendMysqlEofPacket ( tOut, uPacketID++, iWarns, bMoreResultsFollow ); } void HandleMysqlWarning ( NetOutputBuffer_c & tOut, BYTE & uPacketID, const CSphQueryResultMeta & tLastMeta, SqlRowBuffer_c & dRows, bool bMoreResultsFollow ) { // can't send simple ok if there are more results to send // as it breaks order of multi-result output if ( tLastMeta.m_sWarning.IsEmpty() && !bMoreResultsFollow ) { SendMysqlOkPacket ( tOut, uPacketID ); return; } // result set header packet tOut.SendLSBDword ( ((uPacketID++)<<24) + 2 ); tOut.SendByte ( 3 ); // field count (level+code+message) tOut.SendByte ( 0 ); // extra // field packets SendMysqlFieldPacket ( tOut, uPacketID++, "Level", MYSQL_COL_STRING ); SendMysqlFieldPacket ( tOut, uPacketID++, "Code", MYSQL_COL_DECIMAL ); SendMysqlFieldPacket ( tOut, uPacketID++, "Message", MYSQL_COL_STRING ); SendMysqlEofPacket ( tOut, uPacketID++, 0, bMoreResultsFollow ); // row dRows.Reset(); dRows.PutString ( "warning" ); dRows.PutString ( "1000" ); dRows.PutString ( tLastMeta.m_sWarning.cstr() ); tOut.SendLSBDword ( ((uPacketID++)<<24) + ( dRows.Length() ) ); tOut.SendBytes ( dRows.Off ( 0 ), dRows.Length() ); dRows.Reset(); // cleanup SendMysqlEofPacket ( tOut, uPacketID++, 0, bMoreResultsFollow ); } void HandleMysqlMeta ( NetOutputBuffer_c & tOut, BYTE & uPacketID, const CSphQueryResultMeta & tLastMeta, SqlRowBuffer_c & dRows, bool bStatus, bool bMoreResultsFollow ) { CSphVector dStatus; if ( bStatus ) BuildStatus ( dStatus ); else BuildMeta ( dStatus, tLastMeta ); // result set header packet tOut.SendLSBDword ( ((uPacketID++)<<24) + 2 ); tOut.SendByte ( 2 ); // field count (level+code+message) tOut.SendByte ( 0 ); // extra // field packets SendMysqlFieldPacket ( tOut, uPacketID++, "Variable_name", MYSQL_COL_STRING ); SendMysqlFieldPacket ( tOut, uPacketID++, "Value", MYSQL_COL_STRING ); SendMysqlEofPacket ( tOut, uPacketID++, 0, bMoreResultsFollow ); // send rows dRows.Reset(); for ( int iRow=0; iRowGetRlockedEntry ( tStmt.m_sIndex ); if ( !pServed ) { sError.SetSprintf ( "no such index '%s'", tStmt.m_sIndex.cstr() ); SendMysqlErrorPacket ( tOut, uPacketID, tStmt.m_sStmt, sError.cstr() ); return; } if ( !pServed->m_bRT || !pServed->m_bEnabled ) { pServed->Unlock(); sError.SetSprintf ( "index '%s' does not support DELETE (enabled=%d)", tStmt.m_sIndex.cstr(), pServed->m_bEnabled ); SendMysqlErrorPacket ( tOut, uPacketID, tStmt.m_sStmt, sError.cstr() ); return; } ISphRtIndex * pIndex = 
static_cast ( pServed->m_pIndex ); if ( !pIndex->DeleteDocument ( tStmt.m_dDeleteIds.Begin(), tStmt.m_dDeleteIds.GetLength(), sError ) ) { pServed->Unlock(); SendMysqlErrorPacket ( tOut, uPacketID, tStmt.m_sStmt, sError.cstr() ); return; } if ( bCommit ) pIndex->Commit (); pServed->Unlock(); SendMysqlOkPacket ( tOut, uPacketID ); // FIXME? affected rows } void HandleMysqlMultiStmt ( NetOutputBuffer_c & tOut, BYTE uPacketID, const CSphVector & dStmt, CSphQueryResultMeta & tLastMeta, SqlRowBuffer_c & dRows, ThdDesc_t * pThd, const CSphString& sWarning ) { // select count int iSelect = 0; ARRAY_FOREACH ( i, dStmt ) if ( dStmt[i].m_eStmt==STMT_SELECT ) iSelect++; if ( !iSelect ) { tLastMeta = CSphQueryResultMeta(); return; } if ( pThd ) pThd->m_sCommand = g_dSqlStmts[STMT_SELECT]; // setup query for searching SearchHandler_c tHandler ( iSelect, true ); iSelect = 0; ARRAY_FOREACH ( i, dStmt ) { if ( dStmt[i].m_eStmt==STMT_SELECT ) tHandler.m_dQueries[iSelect++] = dStmt[i].m_tQuery; } // do search bool bSearchOK = HandleMysqlSelect ( tOut, uPacketID, tHandler ); // save meta for SHOW * CSphQueryResultMeta tPrevMeta = tLastMeta; tLastMeta = tHandler.m_dResults.Last(); if ( !bSearchOK ) return; // send multi-result set iSelect = 0; ARRAY_FOREACH ( i, dStmt ) { SqlStmt_e eStmt = dStmt[i].m_eStmt; THD_STATE ( THD_QUERY ); if ( pThd ) pThd->m_sCommand = g_dSqlStmts[eStmt]; const CSphQueryResultMeta & tMeta = iSelect-1>=0 ? tHandler.m_dResults[iSelect-1] : tPrevMeta; bool bMoreResultsFollow = (i+1)SetSprintf ( "Unknown collation: '%s'", sName.cstr() ); return SPH_COLLATION_DEFAULT; } void HandleMysqlSet ( NetOutputBuffer_c & tOut, BYTE & uPacketID, SqlStmt_t & tStmt, SessionVars_t & tVars ) { MEMORY ( SPH_MEM_COMMIT_SET_SQL ); CSphString sError; tStmt.m_sSetName.ToLower(); switch ( tStmt.m_eSet ) { case SET_LOCAL: if ( tStmt.m_sSetName=="autocommit" ) { // per-session AUTOCOMMIT tVars.m_bAutoCommit = ( tStmt.m_iSetValue!=0 ); tVars.m_bInTransaction = false; // commit all pending changes if ( tVars.m_bAutoCommit ) { ISphRtIndex * pIndex = sphGetCurrentIndexRT(); if ( pIndex ) pIndex->Commit(); } } else if ( tStmt.m_sSetName=="collation_connection" ) { // per-session COLLATION_CONNECTION CSphString & sVal = tStmt.m_sSetValue; sVal.ToLower(); tVars.m_eCollation = sphCollationFromName ( sVal, &sError ); if ( !sError.IsEmpty() ) { SendMysqlErrorPacket ( tOut, uPacketID, tStmt.m_sStmt, sError.cstr() ); return; } } else if ( tStmt.m_sSetName=="character_set_results" || tStmt.m_sSetName=="sql_auto_is_null" || tStmt.m_sSetName=="sql_mode" ) { // per-session CHARACTER_SET_RESULTS et al; just ignore for now } else { // unknown variable, return error sError.SetSprintf ( "Unknown session variable '%s' in SET statement", tStmt.m_sSetName.cstr() ); SendMysqlErrorPacket ( tOut, uPacketID, tStmt.m_sStmt, sError.cstr() ); return; } break; case SET_GLOBAL_UVAR: { // global user variable if ( g_eWorkers!=MPM_THREADS ) { SendMysqlErrorPacket ( tOut, uPacketID, tStmt.m_sStmt, "SET GLOBAL currently requires workers=threads" ); return; } // INT_SET type must be sorted tStmt.m_dSetValues.Sort(); // create or update the variable g_tUservarsMutex.Lock(); Uservar_t * pVar = g_hUservars ( tStmt.m_sSetName ); if ( pVar ) { // variable exists, release previous value // actual destruction of the value (aka data) might happen later // as the concurrent queries might still be using and holding that data // from here, the old value becomes nameless, though assert ( pVar->m_eType==USERVAR_INT_SET ); assert ( pVar->m_pVal ); 
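	// SET GLOBAL @uservar storage: user variables live in the global g_hUservars
	// hash guarded by g_tUservarsMutex; each value is a refcounted, pre-sorted
	// integer set (UservarIntSet_c). Replacing a value only releases this hash's
	// reference, so concurrent queries that still hold the old set keep using it
	// until they drop their own reference.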
pVar->m_pVal->Release(); pVar->m_pVal = NULL; } else { // create a shiny new variable Uservar_t tVar; g_hUservars.Add ( tVar, tStmt.m_sSetName ); pVar = g_hUservars ( tStmt.m_sSetName ); } // swap in the new value assert ( pVar ); assert ( !pVar->m_pVal ); pVar->m_eType = USERVAR_INT_SET; pVar->m_pVal = new UservarIntSet_c(); pVar->m_pVal->SwapData ( tStmt.m_dSetValues ); g_tUservarsMutex.Unlock(); break; } case SET_GLOBAL_SVAR: // global server variable if ( g_eWorkers!=MPM_THREADS ) { SendMysqlErrorPacket ( tOut, uPacketID, tStmt.m_sStmt, "SET GLOBAL currently requires workers=threads" ); return; } if ( tStmt.m_sSetName=="query_log_format" ) { if ( tStmt.m_sSetValue=="plain" ) g_eLogFormat = LOG_FORMAT_PLAIN; else if ( tStmt.m_sSetValue=="sphinxql" ) g_eLogFormat = LOG_FORMAT_SPHINXQL; else { SendMysqlErrorPacket ( tOut, uPacketID, tStmt.m_sStmt, "Unknown query_log_format value (must be plain or sphinxql)" ); return; } } else if ( tStmt.m_sSetName=="log_level" ) { if ( tStmt.m_sSetValue=="info" ) g_eLogLevel = SPH_LOG_INFO; else if ( tStmt.m_sSetValue=="debug" ) g_eLogLevel = SPH_LOG_DEBUG; else if ( tStmt.m_sSetValue=="debugv" ) g_eLogLevel = SPH_LOG_VERBOSE_DEBUG; else if ( tStmt.m_sSetValue=="debugvv" ) g_eLogLevel = SPH_LOG_VERY_VERBOSE_DEBUG; else { SendMysqlErrorPacket ( tOut, uPacketID, tStmt.m_sStmt, "Unknown log_level value (must be one of info, debug, debugv, debugvv)" ); return; } } else { sError.SetSprintf ( "Unknown system variable '%s'", tStmt.m_sSetName.cstr() ); SendMysqlErrorPacket ( tOut, uPacketID, tStmt.m_sStmt, sError.cstr() ); return; } break; default: sError.SetSprintf ( "INTERNAL ERROR: unhandle SET mode %d", (int)tStmt.m_eSet ); SendMysqlErrorPacket ( tOut, uPacketID, tStmt.m_sStmt, sError.cstr() ); return; } // it went ok SendMysqlOkPacket ( tOut, uPacketID ); } void HandleMysqlAttach ( const SqlStmt_t & tStmt, NetOutputBuffer_c & tOut, BYTE uPacketID ) { const CSphString & sFrom = tStmt.m_sIndex; const CSphString & sTo = tStmt.m_sSetName; CSphString sError; ServedIndex_t * pFrom = g_pIndexes->GetWlockedEntry ( sFrom ); const ServedIndex_t * pTo = g_pIndexes->GetRlockedEntry ( sTo ); if ( !pFrom || !pFrom->m_bEnabled || !pTo || !pTo->m_bEnabled || pFrom->m_bRT || !pTo->m_bRT ) { if ( !pFrom || !pFrom->m_bEnabled ) SendMysqlErrorPacketEx ( tOut, uPacketID, MYSQL_ERR_PARSE_ERROR, "no such index '%s'", sFrom.cstr() ); else if ( !pTo || !pTo->m_bEnabled ) SendMysqlErrorPacketEx ( tOut, uPacketID, MYSQL_ERR_PARSE_ERROR, "no such index '%s'", sTo.cstr() ); else if ( pFrom->m_bRT ) SendMysqlErrorPacket ( tOut, uPacketID, tStmt.m_sStmt, "1st argument to ATTACH must be a plain index" ); else if ( pTo->m_bRT ) SendMysqlErrorPacket ( tOut, uPacketID, tStmt.m_sStmt, "2nd argument to ATTACH must be a RT index" ); if ( pFrom ) pFrom->Unlock(); if ( pTo ) pTo->Unlock(); return; } ISphRtIndex * pRtTo = dynamic_cast ( pTo->m_pIndex ); assert ( pRtTo ); if ( !pRtTo->AttachDiskIndex ( pFrom->m_pIndex, sError ) ) { pFrom->Unlock(); pTo->Unlock(); SendMysqlErrorPacket ( tOut, uPacketID, tStmt.m_sStmt, sError.cstr() ); return; } pFrom->m_pIndex = NULL; // after a succesfull Attach() RT index owns it pFrom->m_bEnabled = false; // so we need to disable the disk index until further notice pFrom->Unlock(); pTo->Unlock(); SendMysqlOkPacket ( tOut, uPacketID ); } void HandleMysqlFlush ( const SqlStmt_t & tStmt, NetOutputBuffer_c & tOut, BYTE uPacketID ) { CSphString sError; const ServedIndex_t * pIndex = g_pIndexes->GetRlockedEntry ( tStmt.m_sIndex ); if ( !pIndex || !pIndex->m_bEnabled || 
!pIndex->m_bRT ) { if ( pIndex ) pIndex->Unlock(); SendMysqlErrorPacket ( tOut, uPacketID, tStmt.m_sStmt, "FLUSH RTINDEX requires an existing RT index" ); return; } ISphRtIndex * pRt = dynamic_cast ( pIndex->m_pIndex ); assert ( pRt ); pRt->ForceRamFlush(); pIndex->Unlock(); SendMysqlOkPacket ( tOut, uPacketID ); } void SendMysqlPair ( NetOutputBuffer_c & tOut, BYTE & uPacketID, SqlRowBuffer_c & dRows, const char * sKey, const char * sValue ) { dRows.PutString ( sKey ); dRows.PutString ( sValue ); tOut.SendLSBDword ( ((uPacketID++)<<24) + ( dRows.Length() ) ); tOut.SendBytes ( dRows.Off ( 0 ), dRows.Length() ); dRows.Reset(); } const char * sphCollationToName ( ESphCollation eColl ) { switch ( eColl ) { case SPH_COLLATION_LIBC_CI: return "libc_ci"; case SPH_COLLATION_LIBC_CS: return "libc_cs"; case SPH_COLLATION_UTF8_GENERAL_CI: return "utf8_general_ci"; case SPH_COLLATION_BINARY: return "binary"; default: return "unknown"; } } static const char * LogLevelName ( ESphLogLevel eLevel ) { switch ( eLevel ) { case SPH_LOG_FATAL: return "fatal"; case SPH_LOG_WARNING: return "warning"; case SPH_LOG_INFO: return "info"; case SPH_LOG_DEBUG: return "debug"; case SPH_LOG_VERBOSE_DEBUG: return "debugv"; case SPH_LOG_VERY_VERBOSE_DEBUG: return "debugvv"; default: return "unknown"; } } void HandleMysqlShowVariables ( const SqlStmt_t &, NetOutputBuffer_c & tOut, BYTE uPacketID, SqlRowBuffer_c & dRows, SessionVars_t & tVars ) { // result set header packet tOut.SendLSBDword ( ((uPacketID++)<<24) + 2 ); tOut.SendByte ( 2 ); // field count (level+code+message) tOut.SendByte ( 0 ); // extra // field packets SendMysqlFieldPacket ( tOut, uPacketID++, "Variable_name", MYSQL_COL_STRING ); SendMysqlFieldPacket ( tOut, uPacketID++, "Value", MYSQL_COL_STRING ); SendMysqlEofPacket ( tOut, uPacketID++, 0 ); // send rows dRows.Reset(); // sessions vars SendMysqlPair ( tOut, uPacketID, dRows, "autocommit", tVars.m_bAutoCommit ? "1" : "0" ); SendMysqlPair ( tOut, uPacketID, dRows, "collation_connection", sphCollationToName ( tVars.m_eCollation ) ); // server vars SendMysqlPair ( tOut, uPacketID, dRows, "query_log_format", g_eLogFormat==LOG_FORMAT_PLAIN ? 
"plain" : "sphinxql" ); SendMysqlPair ( tOut, uPacketID, dRows, "log_level", LogLevelName ( g_eLogLevel ) ); // cleanup SendMysqlEofPacket ( tOut, uPacketID++, 0 ); } class CSphinxqlSession : public ISphNoncopyable { CSphString & m_sError; public: CSphQueryResultMeta m_tLastMeta; SessionVars_t m_tVars; public: explicit CSphinxqlSession ( CSphString & sError ) : m_sError ( sError ) {} // just execute one sphinxql statement void Execute ( const CSphString & sQuery, NetOutputBuffer_c & tOut, BYTE & uPacketID, ThdDesc_t * pThd=NULL ) { // set on query guard CrashQuery_t tCrashQuery; tCrashQuery.m_pQuery = (const BYTE *)sQuery.cstr(); tCrashQuery.m_iSize = sQuery.Length(); tCrashQuery.m_bMySQL = true; SphCrashLogger_c::SetLastQuery ( tCrashQuery ); // parse SQL query CSphVector dStmt; bool bParsedOK = ParseSqlQuery ( sQuery, dStmt, m_sError, m_tVars.m_eCollation ); SqlStmt_e eStmt = STMT_PARSE_ERROR; if ( bParsedOK ) { eStmt = dStmt[0].m_eStmt; dStmt[0].m_sStmt = sQuery.cstr(); } SqlStmt_t * pStmt = dStmt.Begin(); assert ( !bParsedOK || pStmt ); if ( pThd ) pThd->m_sCommand = g_dSqlStmts[eStmt]; THD_STATE ( THD_QUERY ); SqlRowBuffer_c dRows; // handle multi SQL query if ( bParsedOK && dStmt.GetLength()>1 ) { HandleMysqlMultiStmt ( tOut, uPacketID, dStmt, m_tLastMeta, dRows, pThd, m_sError ); return; } // handle SQL query switch ( eStmt ) { case STMT_PARSE_ERROR: m_tLastMeta = CSphQueryResultMeta(); m_tLastMeta.m_sError = m_sError; m_tLastMeta.m_sWarning = ""; SendMysqlErrorPacket ( tOut, uPacketID, sQuery.cstr(), m_sError.cstr() ); return; case STMT_SELECT: { MEMORY ( SPH_MEM_SELECT_SQL ); SearchHandler_c tHandler ( 1, true ); tHandler.m_dQueries[0] = dStmt.Begin()->m_tQuery; if ( HandleMysqlSelect ( tOut, uPacketID, tHandler ) ) { // query just completed ok; reset out error message m_sError = ""; AggrResult_t & tLast = tHandler.m_dResults.Last(); SendMysqlSelectResult ( tOut, uPacketID, dRows, tLast, false ); } // save meta for SHOW META m_tLastMeta = tHandler.m_dResults.Last(); return; } case STMT_SHOW_WARNINGS: HandleMysqlWarning ( tOut, uPacketID, m_tLastMeta, dRows, false ); return; case STMT_SHOW_STATUS: case STMT_SHOW_META: HandleMysqlMeta ( tOut, uPacketID, m_tLastMeta, dRows, eStmt==STMT_SHOW_STATUS, false ); return; case STMT_INSERT: case STMT_REPLACE: HandleMysqlInsert ( *pStmt, tOut, uPacketID, eStmt==STMT_REPLACE, m_tVars.m_bAutoCommit && !m_tVars.m_bInTransaction ); return; case STMT_DELETE: HandleMysqlDelete ( tOut, uPacketID, *pStmt, m_tVars.m_bAutoCommit && !m_tVars.m_bInTransaction ); return; case STMT_SET: HandleMysqlSet ( tOut, uPacketID, *pStmt, m_tVars ); return; case STMT_BEGIN: { MEMORY ( SPH_MEM_COMMIT_BEGIN_SQL ); m_tVars.m_bInTransaction = true; ISphRtIndex * pIndex = sphGetCurrentIndexRT(); if ( pIndex ) pIndex->Commit(); SendMysqlOkPacket ( tOut, uPacketID ); return; } case STMT_COMMIT: case STMT_ROLLBACK: { MEMORY ( SPH_MEM_COMMIT_SQL ); m_tVars.m_bInTransaction = false; ISphRtIndex * pIndex = sphGetCurrentIndexRT(); if ( pIndex ) { if ( eStmt==STMT_COMMIT ) pIndex->Commit(); else pIndex->RollBack(); } SendMysqlOkPacket ( tOut, uPacketID ); return; } case STMT_CALL: pStmt->m_sCallProc.ToUpper(); if ( pStmt->m_sCallProc=="SNIPPETS" ) HandleMysqlCallSnippets ( tOut, uPacketID, *pStmt ); else if ( pStmt->m_sCallProc=="KEYWORDS" ) HandleMysqlCallKeywords ( tOut, uPacketID, *pStmt ); else { m_sError.SetSprintf ( "no such builtin procedure %s", pStmt->m_sCallProc.cstr() ); SendMysqlErrorPacket ( tOut, uPacketID, sQuery.cstr(), m_sError.cstr() ); } return; case STMT_DESC: 
HandleMysqlDescribe ( tOut, uPacketID, *pStmt ); return; case STMT_SHOW_TABLES: HandleMysqlShowTables ( tOut, uPacketID ); return; case STMT_UPDATE: HandleMysqlUpdate ( tOut, uPacketID, *pStmt, sQuery, m_tVars.m_bAutoCommit && !m_tVars.m_bInTransaction ); return; case STMT_DUMMY: SendMysqlOkPacket ( tOut, uPacketID ); return; case STMT_CREATE_FUNC: if ( !sphUDFCreate ( pStmt->m_sUdfLib.cstr(), pStmt->m_sUdfName.cstr(), pStmt->m_eUdfType, m_sError ) ) SendMysqlErrorPacket ( tOut, uPacketID, sQuery.cstr(), m_sError.cstr() ); else SendMysqlOkPacket ( tOut, uPacketID ); return; case STMT_DROP_FUNC: if ( !sphUDFDrop ( pStmt->m_sUdfName.cstr(), m_sError ) ) SendMysqlErrorPacket ( tOut, uPacketID, sQuery.cstr(), m_sError.cstr() ); else SendMysqlOkPacket ( tOut, uPacketID ); return; case STMT_ATTACH_INDEX: HandleMysqlAttach ( *pStmt, tOut, uPacketID ); return; case STMT_FLUSH_RTINDEX: HandleMysqlFlush ( *pStmt, tOut, uPacketID ); return; case STMT_SHOW_VARIABLES: HandleMysqlShowVariables ( *pStmt, tOut, uPacketID, dRows, m_tVars ); return; default: m_sError.SetSprintf ( "internal error: unhandled statement type (value=%d)", eStmt ); SendMysqlErrorPacket ( tOut, uPacketID, sQuery.cstr(), m_sError.cstr() ); return; } // switch } }; /// sphinxql command over API void HandleCommandSphinxql ( int iSock, int iVer, InputBuffer_c & tReq ) { if ( !CheckCommandVersion ( iVer, VER_COMMAND_SPHINXQL, tReq ) ) return; // parse request CSphString sCommand = tReq.GetString (); NetOutputBuffer_c tOut ( iSock ); BYTE uDummy = 0; CSphString sError; // todo: move upper, if the session variables are also necessary in API access mode. CSphinxqlSession tSession ( sError ); tOut.Flush(); tOut.SendWord ( SEARCHD_OK ); tOut.SendWord ( VER_COMMAND_SPHINXQL ); // assume that the whole answer could fit in output buffer without flush. // Otherwise the error will be fired. // SEARCHD_ERROR + strlen (32) + the message tOut.FreezeBlock ( "\x01\x00\x20\x00\x00\x00The output buffer is overloaded.", 38 ); tSession.Execute ( sCommand, tOut, uDummy ); tOut.Flush ( true ); } void HandleClientMySQL ( int iSock, const char * sClientIP, ThdDesc_t * pThd ) { MEMORY ( SPH_MEM_HANDLE_SQL ); THD_STATE ( THD_HANDSHAKE ); const int INTERACTIVE_TIMEOUT = 900; NetInputBuffer_c tIn ( iSock ); NetOutputBuffer_c tOut ( iSock ); // OPTIMIZE? looks like buffer size matters a lot.. if ( sphSockSend ( iSock, g_sMysqlHandshake, g_iMysqlHandshake )!=g_iMysqlHandshake ) { int iErrno = sphSockGetErrno (); sphWarning ( "failed to send server version (client=%s, error: %d '%s')", sClientIP, iErrno, sphSockError ( iErrno ) ); return; } bool bAuthed = false; BYTE uPacketID = 1; CSphString sError; CSphinxqlSession tSession ( sError ); // session variables and state CSphString sQuery; // to keep data alive for SphCrashQuery_c for ( ;; ) { // set off query guard CrashQuery_t tCrashQuery; tCrashQuery.m_bMySQL = true; SphCrashLogger_c::SetLastQuery ( tCrashQuery ); // send the packet formed on the previous cycle THD_STATE ( THD_NET_WRITE ); if ( !tOut.Flush() ) break; // get next packet // we want interruptible calls here, so that shutdowns could be honoured THD_STATE ( THD_NET_READ ); if ( !tIn.ReadFrom ( 4, INTERACTIVE_TIMEOUT, true ) ) break; const int MAX_PACKET_LEN = 0xffffffL; // 16777215 bytes, max low level packet size DWORD uPacketHeader = tIn.GetLSBDword (); int iPacketLen = ( uPacketHeader & MAX_PACKET_LEN ); if ( !tIn.ReadFrom ( iPacketLen, INTERACTIVE_TIMEOUT, true ) ) break; // handle it! 
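		// MySQL wire framing, as parsed below: each packet starts with a 4-byte
		// little-endian header holding a 3-byte payload length and a 1-byte
		// sequence id; a payload of exactly 0xffffff bytes means the next packet
		// is a continuation that must be appended before parsing. Illustrative
		// example: a header of 2f 00 00 03 describes a 47-byte payload with
		// sequence id 3, so the server replies starting from sequence id 4.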
uPacketID = 1 + (BYTE)( uPacketHeader>>24 ); // client will expect this id // handle big packets if ( iPacketLen==MAX_PACKET_LEN ) { NetInputBuffer_c tIn2 ( iSock ); int iAddonLen = -1; do { if ( !tIn2.ReadFrom ( 4, INTERACTIVE_TIMEOUT, true ) ) break; DWORD uAddon = tIn2.GetLSBDword(); uPacketID = 1 + (BYTE)( uAddon>>24 ); iAddonLen = ( uAddon & MAX_PACKET_LEN ); if ( !tIn.ReadFrom ( iAddonLen, INTERACTIVE_TIMEOUT, true, true ) ) { iAddonLen = -1; break; } iPacketLen += iAddonLen; } while ( iAddonLen==MAX_PACKET_LEN ); if ( iAddonLen<0 ) break; } // handle auth packet if ( !bAuthed ) { bAuthed = true; SendMysqlOkPacket ( tOut, uPacketID ); continue; } // get command, handle special packets const BYTE uMysqlCmd = tIn.GetByte (); if ( uMysqlCmd==MYSQL_COM_QUIT ) { // client is done break; } else if ( uMysqlCmd==MYSQL_COM_PING || uMysqlCmd==MYSQL_COM_INIT_DB ) { // client wants a pong SendMysqlOkPacket ( tOut, uPacketID ); continue; } else if ( uMysqlCmd==MYSQL_COM_SET_OPTION ) { // bMulti = ( tIn.GetWord()==MYSQL_OPTION_MULTI_STATEMENTS_ON ); // that's how we could double check and validate multi query // server reporting success in response to COM_SET_OPTION and COM_DEBUG SendMysqlEofPacket ( tOut, uPacketID, 0 ); continue; } else if ( uMysqlCmd!=MYSQL_COM_QUERY ) { // default case, unknown command // (and query is handled just below) sError.SetSprintf ( "unknown command (code=%d)", uMysqlCmd ); SendMysqlErrorPacket ( tOut, uPacketID, sQuery.cstr(), sError.cstr(), MYSQL_ERR_UNKNOWN_COM_ERROR ); continue; } // handle query packet assert ( uMysqlCmd==MYSQL_COM_QUERY ); sQuery = tIn.GetRawString ( iPacketLen-1 ); tSession.Execute ( sQuery, tOut, uPacketID, pThd ); } // for (;;) // set off query guard SphCrashLogger_c::SetLastQuery ( CrashQuery_t() ); } ////////////////////////////////////////////////////////////////////////// // HANDLE-BY-LISTENER ////////////////////////////////////////////////////////////////////////// void HandleClient ( ProtocolType_e eProto, int iSock, const char * sClientIP, ThdDesc_t * pThd ) { switch ( eProto ) { case PROTO_SPHINX: HandleClientSphinx ( iSock, sClientIP, pThd ); break; case PROTO_MYSQL41: HandleClientMySQL ( iSock, sClientIP, pThd ); break; default: assert ( 0 && "unhandled protocol type" ); break; } } ///////////////////////////////////////////////////////////////////////////// // INDEX ROTATION ///////////////////////////////////////////////////////////////////////////// bool TryRename ( const char * sIndex, const char * sPrefix, const char * sFromPostfix, const char * sToPostfix, bool bFatal ) { char sFrom [ SPH_MAX_FILENAME_LEN ]; char sTo [ SPH_MAX_FILENAME_LEN ]; snprintf ( sFrom, sizeof(sFrom), "%s%s", sPrefix, sFromPostfix ); snprintf ( sTo, sizeof(sTo), "%s%s", sPrefix, sToPostfix ); #if USE_WINDOWS ::unlink ( sTo ); #endif if ( rename ( sFrom, sTo ) ) { if ( bFatal ) { sphFatal ( "rotating index '%s': rollback rename '%s' to '%s' failed: %s", sIndex, sFrom, sTo, strerror(errno) ); } else { sphWarning ( "rotating index '%s': rename '%s' to '%s' failed: %s", sIndex, sFrom, sTo, strerror(errno) ); } return false; } return true; } bool HasFiles ( const ServedIndex_t & tIndex, const char ** dExts ) { char sFile [ SPH_MAX_FILENAME_LEN ]; const char * sPath = tIndex.m_sIndexPath.cstr(); for ( int i=0; i0 ) { if ( tIndex.m_bOnlyNew ) sphWarning ( "rotating index '%s': '%s' unreadable: %s; NOT SERVING", sIndex, sFile, strerror(errno) ); else sphWarning ( "rotating index '%s': '%s' unreadable: %s; using old index", sIndex, sFile, strerror(errno) ); } 
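	// Greedy (non-seamless) rotation, continued below: once every .new file is
	// readable, the current index files are renamed to .old, the .new files are
	// renamed into their place (with a rollback attempt if any rename fails),
	// the index is preallocated and preread again, and the .old files are
	// unlinked afterwards when g_bUnlinkOld is set.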
return false; } } sphLogDebug ( "RotateIndexGreedy: new index is readable" ); if ( !tIndex.m_bOnlyNew ) { // rename current to old for ( int i=0; iLeakTokenizer (); // FIXME! disable support of that old indexes and remove this bullshit CSphDict * pDictionary = tIndex.m_pIndex->LeakDictionary (); if ( !tIndex.m_pIndex->Prealloc ( tIndex.m_bMlock, g_bStripPath, sWarning ) || !tIndex.m_pIndex->Preread() ) { if ( tIndex.m_bOnlyNew ) { sphWarning ( "rotating index '%s': .new preload failed: %s; NOT SERVING", sIndex, tIndex.m_pIndex->GetLastError().cstr() ); return false; } else { sphWarning ( "rotating index '%s': .new preload failed: %s", sIndex, tIndex.m_pIndex->GetLastError().cstr() ); // try to recover for ( int j=0; jPrealloc ( tIndex.m_bMlock, g_bStripPath, sWarning ) || !tIndex.m_pIndex->Preread() ) { sphWarning ( "rotating index '%s': .new preload failed; ROLLBACK FAILED; INDEX UNUSABLE", sIndex ); tIndex.m_bEnabled = false; } else { tIndex.m_bEnabled = true; bPreread = true; sphWarning ( "rotating index '%s': .new preload failed; using old index", sIndex ); } if ( !sWarning.IsEmpty() ) sphWarning ( "rotating index '%s': %s", sIndex, sWarning.cstr() ); if ( !tIndex.m_pIndex->GetTokenizer () ) tIndex.m_pIndex->SetTokenizer ( pTokenizer ); else SafeDelete ( pTokenizer ); if ( !tIndex.m_pIndex->GetDictionary () ) tIndex.m_pIndex->SetDictionary ( pDictionary ); else SafeDelete ( pDictionary ); } return bPreread; } else { bPreread = true; if ( !sWarning.IsEmpty() ) sphWarning ( "rotating index '%s': %s", sIndex, sWarning.cstr() ); } if ( !tIndex.m_pIndex->GetTokenizer () ) tIndex.m_pIndex->SetTokenizer ( pTokenizer ); else SafeDelete ( pTokenizer ); if ( !tIndex.m_pIndex->GetDictionary () ) tIndex.m_pIndex->SetDictionary ( pDictionary ); else SafeDelete ( pDictionary ); // unlink .old if ( g_bUnlinkOld && !tIndex.m_bOnlyNew ) { for ( int i=0; i0 && tmLastLog+iAllocLogPeriodGetSettings (); if ( ( tSettings.m_iMinPrefixLen>0 || tSettings.m_iMinInfixLen>0 ) && !pIndex->IsStarEnabled() ) { CSphDict * pDict = pIndex->GetDictionary (); assert ( pDict ); if ( pDict->HasMorphology () ) { sError = "infixes and morphology are enabled, enable_star=0"; return false; } } return true; } static bool CheckServedEntry ( const ServedIndex_t * pEntry, const char * sIndex ) { if ( !pEntry ) { sphWarning ( "rotating index '%s': INTERNAL ERROR, index went AWOL", sIndex ); return false; } if ( pEntry->m_bToDelete || !pEntry->m_pIndex ) { if ( pEntry->m_bToDelete ) sphWarning ( "rotating index '%s': INTERNAL ERROR, entry marked for deletion", sIndex ); if ( !pEntry->m_pIndex ) sphWarning ( "rotating index '%s': INTERNAL ERROR, entry does not have an index", sIndex ); return false; } return true; } #define SPH_RT_AUTO_FLUSH_CHECK_PERIOD ( 5000000 ) static void RtFlushThreadFunc ( void * ) { int64_t tmNextCheck = sphMicroTimer() + SPH_RT_AUTO_FLUSH_CHECK_PERIOD; while ( !g_bRtFlushShutdown ) { // stand still till save time if ( tmNextCheck>sphMicroTimer() ) { sphSleepMsec ( 50 ); continue; } // collecting available rt indexes at save time CSphVector dRtIndexes; for ( IndexHashIterator_c it ( g_pIndexes ); it.Next(); ) if ( it.Get().m_bRT ) dRtIndexes.Add ( it.GetKey() ); // do check+save ARRAY_FOREACH_COND ( i, dRtIndexes, !g_bRtFlushShutdown ) { const ServedIndex_t * pServed = g_pIndexes->GetRlockedEntry ( dRtIndexes[i] ); if ( !pServed ) continue; if ( !pServed->m_bEnabled ) { pServed->Unlock(); continue; } ISphRtIndex * pRT = (ISphRtIndex *)pServed->m_pIndex; pRT->CheckRamFlush(); pServed->Unlock(); } tmNextCheck 
= sphMicroTimer() + SPH_RT_AUTO_FLUSH_CHECK_PERIOD; } } static void RotateIndexMT ( const CSphString & sIndex ) { assert ( g_eWorkers==MPM_THREADS ); ////////////////// // load new index ////////////////// // create new index, copy some settings from existing one const ServedIndex_t * pRotating = g_pIndexes->GetRlockedEntry ( sIndex ); if ( !CheckServedEntry ( pRotating, sIndex.cstr() ) ) { if ( pRotating ) pRotating->Unlock(); return; } sphInfo ( "rotating index '%s': started", sIndex.cstr() ); ServedIndex_t tNewIndex; tNewIndex.m_bOnlyNew = pRotating->m_bOnlyNew; tNewIndex.m_pIndex = sphCreateIndexPhrase ( NULL, NULL ); // FIXME! check if it's ok tNewIndex.m_pIndex->SetEnableStar ( pRotating->m_bStar ); tNewIndex.m_pIndex->m_bExpandKeywords = pRotating->m_bExpand; tNewIndex.m_pIndex->SetPreopen ( pRotating->m_bPreopen || g_bPreopenIndexes ); tNewIndex.m_pIndex->SetWordlistPreload ( !pRotating->m_bOnDiskDict && !g_bOnDiskDicts ); // rebase new index char sNewPath [ SPH_MAX_FILENAME_LEN ]; snprintf ( sNewPath, sizeof(sNewPath), "%s.new", pRotating->m_sIndexPath.cstr() ); tNewIndex.m_pIndex->SetBase ( sNewPath ); // don't need to hold the existing index any more now pRotating->Unlock(); pRotating = NULL; // prealloc enough RAM and lock new index sphLogDebug ( "prealloc enough RAM and lock new index" ); CSphString sWarn, sError; if ( !tNewIndex.m_pIndex->Prealloc ( tNewIndex.m_bMlock, g_bStripPath, sWarn ) ) { sphWarning ( "rotating index '%s': prealloc: %s; using old index", sIndex.cstr(), tNewIndex.m_pIndex->GetLastError().cstr() ); return; } if ( !tNewIndex.m_pIndex->Lock() ) { sphWarning ( "rotating index '%s': lock: %s; using old index", sIndex.cstr (), tNewIndex.m_pIndex->GetLastError().cstr() ); return; } // fixup settings if needed sphLogDebug ( "fixup settings if needed" ); g_tRotateConfigMutex.Lock (); if ( tNewIndex.m_bOnlyNew && g_pCfg && g_pCfg->m_tConf ( "index" ) && g_pCfg->m_tConf["index"]( sIndex.cstr() ) ) { if ( !sphFixupIndexSettings ( tNewIndex.m_pIndex, g_pCfg->m_tConf["index"][sIndex.cstr()], sError ) ) { sphWarning ( "rotating index '%s': fixup: %s; using old index", sIndex.cstr(), sError.cstr() ); g_tRotateConfigMutex.Unlock (); return; } } g_tRotateConfigMutex.Unlock(); if ( !CheckIndex ( tNewIndex.m_pIndex, sError ) ) { sphWarning ( "rotating index '%s': check: %s; using old index", sIndex.cstr(), sError.cstr() ); return; } if ( !tNewIndex.m_pIndex->Preread() ) { sphWarning ( "rotating index '%s': preread failed: %s; using old index", sIndex.cstr(), tNewIndex.m_pIndex->GetLastError().cstr() ); return; } ////////////////////// // activate new index ////////////////////// sphLogDebug ( "activate new index" ); ServedIndex_t * pServed = g_pIndexes->GetWlockedEntry ( sIndex ); if ( !CheckServedEntry ( pServed, sIndex.cstr() ) ) { if ( pServed ) pServed->Unlock(); return; } CSphIndex * pOld = pServed->m_pIndex; CSphIndex * pNew = tNewIndex.m_pIndex; // rename files // FIXME! factor out a common function w/ non-threaded rotation code char sOld [ SPH_MAX_FILENAME_LEN ]; snprintf ( sOld, sizeof(sOld), "%s.old", pServed->m_sIndexPath.cstr() ); if ( !pServed->m_bOnlyNew && !pOld->Rename ( sOld ) ) { // FIXME! rollback inside Rename() call potentially fail sphWarning ( "rotating index '%s': cur to old rename failed: %s", sIndex.cstr(), pOld->GetLastError().cstr() ); } else { // FIXME! at this point there's no cur lock file; ie. potential race sphLogDebug ( "no cur lock file; ie. 
potential race" ); if ( !pNew->Rename ( pServed->m_sIndexPath.cstr() ) ) { sphWarning ( "rotating index '%s': new to cur rename failed: %s", sIndex.cstr(), pNew->GetLastError().cstr() ); if ( !pServed->m_bOnlyNew && !pOld->Rename ( pServed->m_sIndexPath.cstr() ) ) { sphWarning ( "rotating index '%s': old to cur rename failed: %s; INDEX UNUSABLE", sIndex.cstr(), pOld->GetLastError().cstr() ); pServed->m_bEnabled = false; } } else { // all went fine; swap them sphLogDebug ( "all went fine; swap them" ); if ( !tNewIndex.m_pIndex->GetTokenizer() ) tNewIndex.m_pIndex->SetTokenizer ( pServed->m_pIndex->LeakTokenizer() ); if ( !tNewIndex.m_pIndex->GetDictionary() ) tNewIndex.m_pIndex->SetDictionary ( pServed->m_pIndex->LeakDictionary() ); Swap ( pServed->m_pIndex, tNewIndex.m_pIndex ); pServed->m_bEnabled = true; // unlink .old sphLogDebug ( "unlink .old" ); if ( g_bUnlinkOld && !pServed->m_bOnlyNew ) { char sFile [ SPH_MAX_FILENAME_LEN ]; for ( int i=0; im_bOnlyNew = false; sphInfo ( "rotating index '%s': success", sIndex.cstr() ); } } pServed->Unlock(); } void RotationThreadFunc ( void * ) { assert ( g_eWorkers==MPM_THREADS ); while ( !g_bRotateShutdown ) { // check if we have work to do if ( !g_iRotateCount ) { sphSleepMsec ( 50 ); continue; } g_tRotateQueueMutex.Lock(); if ( !g_dRotateQueue.GetLength() ) { g_tRotateQueueMutex.Unlock(); sphSleepMsec ( 50 ); continue; } CSphString sIndex = g_dRotateQueue.Pop(); g_sPrereading = sIndex.cstr(); g_tRotateQueueMutex.Unlock(); RotateIndexMT ( sIndex ); g_tRotateQueueMutex.Lock(); if ( !g_dRotateQueue.GetLength() ) { g_iRotateCount = Max ( 0, g_iRotateCount-1 ); sphInfo ( "rotating index: all indexes done" ); } g_sPrereading = NULL; g_tRotateQueueMutex.Unlock(); } } void IndexRotationDone () { #if !USE_WINDOWS if ( g_iRotationThrottle && g_eWorkers==MPM_PREFORK ) { ARRAY_FOREACH ( i, g_dChildren ) g_dTermChildren.Add ( g_dChildren[i] ); } else { // forcibly restart children serving persistent connections and/or preforked ones // FIXME! check how both signals are handled in both cases int iSignal = ( g_eWorkers==MPM_PREFORK ) ? SIGTERM : SIGHUP; ARRAY_FOREACH ( i, g_dChildren ) kill ( g_dChildren[i], iSignal ); } #endif g_iRotateCount = Max ( 0, g_iRotateCount-1 ); sphInfo ( "rotating finished" ); } void SeamlessTryToForkPrereader () { sphLogDebug ( "Invoked SeamlessTryToForkPrereader" ); // next in line const char * sPrereading = g_dRotating.Pop (); if ( !sPrereading || !g_pIndexes->Exists ( sPrereading ) ) { sphWarning ( "INTERNAL ERROR: preread attempt on unknown index '%s'", sPrereading ? sPrereading : "(NULL)" ); return; } const ServedIndex_t & tServed = g_pIndexes->GetUnlockedEntry ( sPrereading ); // alloc buffer index (once per run) if ( !g_pPrereading ) g_pPrereading = sphCreateIndexPhrase ( NULL, NULL ); // FIXME! 
check if it's ok g_pPrereading->SetEnableStar ( tServed.m_bStar ); g_pPrereading->m_bExpandKeywords = tServed.m_bExpand; g_pPrereading->SetPreopen ( tServed.m_bPreopen || g_bPreopenIndexes ); g_pPrereading->SetWordlistPreload ( !tServed.m_bOnDiskDict && !g_bOnDiskDicts ); // rebase buffer index char sNewPath [ SPH_MAX_FILENAME_LEN ]; snprintf ( sNewPath, sizeof(sNewPath), "%s.new", tServed.m_sIndexPath.cstr() ); g_pPrereading->SetBase ( sNewPath ); // prealloc enough RAM and lock new index sphLogDebug ( "prealloc enough RAM and lock new index" ); CSphString sWarn, sError; if ( !g_pPrereading->Prealloc ( tServed.m_bMlock, g_bStripPath, sWarn ) ) { sphWarning ( "rotating index '%s': prealloc: %s; using old index", sPrereading, g_pPrereading->GetLastError().cstr() ); if ( !sWarn.IsEmpty() ) sphWarning ( "rotating index: %s", sWarn.cstr() ); return; } if ( !sWarn.IsEmpty() ) sphWarning ( "rotating index: %s: %s", sPrereading, sWarn.cstr() ); if ( !g_pPrereading->Lock() ) { sphWarning ( "rotating index '%s': lock: %s; using old index", sPrereading, g_pPrereading->GetLastError().cstr() ); g_pPrereading->Dealloc (); return; } if ( tServed.m_bOnlyNew && g_pCfg && g_pCfg->m_tConf.Exists ( "index" ) && g_pCfg->m_tConf["index"].Exists ( sPrereading ) ) if ( !sphFixupIndexSettings ( g_pPrereading, g_pCfg->m_tConf["index"][sPrereading], sError ) ) { sphWarning ( "rotating index '%s': fixup: %s; using old index", sPrereading, sError.cstr() ); return; } if ( !CheckIndex ( g_pPrereading, sError ) ) { sphWarning ( "rotating index '%s': check: %s; using old index", sPrereading, sError.cstr() ); return; } // fork async reader sphLogDebug ( "fork async reader" ); g_sPrereading = sPrereading; int iPipeFD = PipeAndFork ( true, SPH_PIPE_PREREAD ); // in parent, wait for prereader process to finish if ( g_bHeadDaemon ) return; // in child, do preread bool bRes = g_pPrereading->Preread (); if ( !bRes ) sphWarning ( "rotating index '%s': preread failed: %s; using old index", g_sPrereading, g_pPrereading->GetLastError().cstr() ); // report and exit DWORD uTmp = SPH_PIPE_PREREAD; sphWrite ( iPipeFD, &uTmp, sizeof(DWORD) ); // FIXME? add buffering/checks? 
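// Prereader child/parent protocol recap: the forked child writes exactly two DWORDs to the
// notification pipe - the handler id (SPH_PIPE_PREREAD, sent above) and the preread success
// flag (sent below) - and then exits. The parent picks these up in CheckPipes() and
// HandlePipePreread(); in workers=threads mode the equivalent work is done in-process by
// RotateIndexMT() instead. Parent-side consumption, roughly:
//   PipeReader_t tPipe ( iFD );      // wraps the read end, switches it back to blocking mode
//   int iOK = tPipe.GetInt();        // preread success flag reported by the child
//   if ( !tPipe.IsError() && iOK ) { /* rename files and swap the new index in */ }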
uTmp = bRes; sphWrite ( iPipeFD, &uTmp, sizeof(DWORD) ); ::close ( iPipeFD ); sphLogDebug ( "SeamlessTryToForkPrereader: finishing the fork and invoking exit ( 0 )" ); exit ( 0 ); } void SeamlessForkPrereader () { sphLogDebug ( "Invoked SeamlessForkPrereader" ); // sanity checks if ( g_sPrereading ) { sphWarning ( "INTERNAL ERROR: preread attempt before previous completion" ); return; } // try candidates one by one while ( g_dRotating.GetLength() && !g_sPrereading ) SeamlessTryToForkPrereader (); // if there's no more candidates, and nothing in the works, we're done if ( !g_sPrereading && !g_dRotating.GetLength() ) IndexRotationDone (); } /// simple wrapper to simplify reading from pipes struct PipeReader_t { explicit PipeReader_t ( int iFD ) : m_iFD ( iFD ) , m_bError ( false ) { #if !USE_WINDOWS if ( fcntl ( iFD, F_SETFL, 0 )<0 ) sphWarning ( "fcntl(0) on pipe failed (error=%s)", strerror(errno) ); #endif } ~PipeReader_t () { SafeClose ( m_iFD ); } int GetFD () const { return m_iFD; } bool IsError () const { return m_bError; } int GetInt () { int iTmp; if ( !GetBytes ( &iTmp, sizeof(iTmp) ) ) iTmp = 0; return iTmp; } CSphString GetString () { int iLen = GetInt (); CSphString sRes; sRes.Reserve ( iLen ); if ( !GetBytes ( const_cast ( sRes.cstr() ), iLen ) ) sRes = ""; return sRes; } protected: bool GetBytes ( void * pBuf, int iCount ) { if ( m_bError ) return false; if ( m_iFD<0 ) { m_bError = true; sphWarning ( "invalid pipe fd" ); return false; } for ( ;; ) { int iRes = ::read ( m_iFD, pBuf, iCount ); if ( iRes<0 && errno==EINTR ) continue; if ( iRes!=iCount ) { m_bError = true; sphWarning ( "pipe read failed (exp=%d, res=%d, error=%s)", iCount, iRes, iRes>0 ? "(none)" : strerror(errno) ); return false; } return true; } } protected: int m_iFD; bool m_bError; }; /// handle pipe notifications from prereading void HandlePipePreread ( PipeReader_t & tPipe, bool bFailure ) { if ( bFailure ) { // clean up previous one and launch next one g_sPrereading = NULL; // in any case, buffer index should now be deallocated g_pPrereading->Dealloc (); g_pPrereading->Unlock (); // work next one SeamlessForkPrereader (); return; } assert ( g_iRotateCount && g_bSeamlessRotate && g_sPrereading ); // whatever the outcome, we will be done with this one const char * sPrereading = g_sPrereading; g_sPrereading = NULL; // notice that this will block! int iRes = tPipe.GetInt(); if ( !tPipe.IsError() && iRes ) { // if preread was succesful, exchange served index and prereader buffer index ServedIndex_t & tServed = g_pIndexes->GetUnlockedEntry ( sPrereading ); CSphIndex * pOld = tServed.m_pIndex; CSphIndex * pNew = g_pPrereading; char sOld [ SPH_MAX_FILENAME_LEN ]; snprintf ( sOld, sizeof(sOld), "%s.old", tServed.m_sIndexPath.cstr() ); if ( !tServed.m_bOnlyNew && !pOld->Rename ( sOld ) ) { // FIXME! rollback inside Rename() call potentially fail sphWarning ( "rotating index '%s': cur to old rename failed: %s", sPrereading, pOld->GetLastError().cstr() ); } else { // FIXME! at this point there's no cur lock file; ie. 
potential race if ( !pNew->Rename ( tServed.m_sIndexPath.cstr() ) ) { sphWarning ( "rotating index '%s': new to cur rename failed: %s", sPrereading, pNew->GetLastError().cstr() ); if ( !tServed.m_bOnlyNew && !pOld->Rename ( tServed.m_sIndexPath.cstr() ) ) { sphWarning ( "rotating index '%s': old to cur rename failed: %s; INDEX UNUSABLE", sPrereading, pOld->GetLastError().cstr() ); tServed.m_bEnabled = false; } } else { // all went fine; swap them if ( !g_pPrereading->GetTokenizer () ) g_pPrereading->SetTokenizer ( tServed.m_pIndex->LeakTokenizer () ); if ( !g_pPrereading->GetDictionary () ) g_pPrereading->SetDictionary ( tServed.m_pIndex->LeakDictionary () ); Swap ( tServed.m_pIndex, g_pPrereading ); tServed.m_bEnabled = true; // unlink .old if ( g_bUnlinkOld && !tServed.m_bOnlyNew ) { char sFile [ SPH_MAX_FILENAME_LEN ]; for ( int i=0; iDealloc (); g_pPrereading->Unlock (); // work next one SeamlessForkPrereader (); } /// check if there are any notifications from the children and handle them void CheckPipes () { ARRAY_FOREACH ( i, g_dPipes ) { // try to get status code DWORD uStatus; int iRes = ::read ( g_dPipes[i].m_iFD, &uStatus, sizeof(DWORD) ); // no data yet? if ( iRes==-1 && errno==EAGAIN ) continue; // either if there's eof, or error, or valid data - this pipe is over PipeReader_t tPipe ( g_dPipes[i].m_iFD ); int iHandler = g_dPipes[i].m_iHandler; g_dPipes.Remove ( i-- ); // check for eof/error bool bFailure = false; if ( iRes!=sizeof(DWORD) ) { bFailure = true; if ( iHandler<0 ) continue; // no handler; we're not expecting anything if ( iRes!=0 || iHandler>=0 ) sphWarning ( "pipe status read failed (handler=%d)", iHandler ); } // check for handler/status mismatch if ( !bFailure && ( iHandler>=0 && (int)uStatus!=iHandler ) ) { bFailure = true; sphWarning ( "INTERNAL ERROR: pipe status mismatch (handler=%d, status=%d)", iHandler, uStatus ); } // check for handler promotion (ie: we did not expect anything particular, but something happened anyway) if ( !bFailure && iHandler<0 ) iHandler = (int)uStatus; // run the proper handler switch ( iHandler ) { case SPH_PIPE_PREREAD: HandlePipePreread ( tPipe, bFailure ); break; default: if ( !bFailure ) sphWarning ( "INTERNAL ERROR: unknown pipe handler (handler=%d, status=%d)", iHandler, uStatus ); break; } } } void ConfigureIndex ( ServedIndex_t & tIdx, const CSphConfigSection & hIndex ) { tIdx.m_bMlock = ( hIndex.GetInt ( "mlock", 0 )!=0 ) && !g_bOptNoLock; tIdx.m_bStar = ( hIndex.GetInt ( "enable_star", 0 )!=0 ); tIdx.m_bExpand = ( hIndex.GetInt ( "expand_keywords", 0 )!=0 ); tIdx.m_bPreopen = ( hIndex.GetInt ( "preopen", 0 )!=0 ); tIdx.m_bOnDiskDict = ( hIndex.GetInt ( "ondisk_dict", 0 )!=0 ); } bool PrereadNewIndex ( ServedIndex_t & tIdx, const CSphConfigSection & hIndex, const char * szIndexName ) { CSphString sWarning; if ( !tIdx.m_pIndex->Prealloc ( tIdx.m_bMlock, g_bStripPath, sWarning ) || !tIdx.m_pIndex->Preread() ) { sphWarning ( "index '%s': preload: %s; NOT SERVING", szIndexName, tIdx.m_pIndex->GetLastError().cstr() ); return false; } if ( !sWarning.IsEmpty() ) sphWarning ( "index '%s': %s", szIndexName, sWarning.cstr() ); CSphString sError; if ( !sphFixupIndexSettings ( tIdx.m_pIndex, hIndex, sError ) ) { sphWarning ( "index '%s': %s - NOT SERVING", szIndexName, sError.cstr() ); return false; } // try to lock it if ( !g_bOptNoLock && !tIdx.m_pIndex->Lock() ) { sphWarning ( "index '%s': lock: %s; NOT SERVING", szIndexName, tIdx.m_pIndex->GetLastError().cstr() ); return false; } return true; } bool ConfigureAgent ( AgentDesc_t & 
tAgent, const CSphVariant * pAgent, const char * szIndexName, bool bBlackhole ) { // extract host name or path const char * p = pAgent->cstr(); while ( sphIsAlpha(*p) || *p=='.' || *p=='-' || *p=='/' ) p++; if ( p==pAgent->cstr() ) { sphWarning ( "index '%s': agent '%s': host name or path expected - SKIPPING AGENT", szIndexName, pAgent->cstr() ); return false; } if ( *p++!=':' ) { sphWarning ( "index '%s': agent '%s': colon expected near '%s' - SKIPPING AGENT", szIndexName, pAgent->cstr(), p ); return false; } CSphString sSub = pAgent->SubString ( 0, p-1-pAgent->cstr() ); if ( sSub.cstr()[0]=='/' ) { #if USE_WINDOWS sphWarning ( "index '%s': agent '%s': UNIX sockets are not supported on Windows - SKIPPING AGENT", szIndexName, pAgent->cstr() ); return false; #else if ( strlen ( sSub.cstr() ) + 1 > sizeof(((struct sockaddr_un *)0)->sun_path) ) { sphWarning ( "index '%s': agent '%s': UNIX socket path is too long - SKIPPING AGENT", szIndexName, pAgent->cstr() ); return false; } tAgent.m_iFamily = AF_UNIX; tAgent.m_sPath = sSub; p--; #endif } else { tAgent.m_iFamily = AF_INET; tAgent.m_sHost = sSub; // extract port if ( !isdigit(*p) ) { sphWarning ( "index '%s': agent '%s': port number expected near '%s' - SKIPPING AGENT", szIndexName, pAgent->cstr(), p ); return false; } tAgent.m_iPort = atoi(p); if ( !IsPortInRange ( tAgent.m_iPort ) ) { sphWarning ( "index '%s': agent '%s': invalid port number near '%s' - SKIPPING AGENT", szIndexName, pAgent->cstr(), p ); return false; } while ( isdigit(*p) ) p++; } // extract index list if ( *p++!=':' ) { sphWarning ( "index '%s': agent '%s': colon expected near '%s' - SKIPPING AGENT", szIndexName, pAgent->cstr(), p ); return false; } while ( isspace(*p) ) p++; const char * sIndexList = p; while ( sphIsAlpha(*p) || isspace(*p) || *p==',' ) p++; if ( *p ) { sphWarning ( "index '%s': agent '%s': index list expected near '%s' - SKIPPING AGENT", szIndexName, pAgent->cstr(), p ); return false; } tAgent.m_sIndexes = sIndexList; // lookup address (if needed) if ( tAgent.m_iFamily==AF_INET ) { tAgent.m_uAddr = sphGetAddress ( tAgent.m_sHost.cstr() ); if ( tAgent.m_uAddr==0 ) { sphWarning ( "index '%s': agent '%s': failed to lookup host name '%s' (error=%s) - SKIPPING AGENT", szIndexName, pAgent->cstr(), tAgent.m_sHost.cstr(), sphSockError() ); return false; } } tAgent.m_bBlackhole = bBlackhole; // allocate stats slot if ( g_pStats ) { g_tStatsMutex.Lock(); for ( int i=0; im_bmAgentStats[i]!=0xffffffffUL ) { int j = FindBit ( g_pStats->m_bmAgentStats[i] ); g_pStats->m_bmAgentStats[i] |= ( 1<m_dAgentStats[tAgent.m_iStatsIndex], 0, sizeof(AgentStats_t) ); break; } g_tStatsMutex.Unlock(); } return true; } static DistributedIndex_t ConfigureDistributedIndex ( const char * szIndexName, const CSphConfigSection & hIndex ) { assert ( hIndex("type") && hIndex["type"]=="distributed" ); DistributedIndex_t tIdx; // add local agents for ( CSphVariant * pLocal = hIndex("local"); pLocal; pLocal = pLocal->m_pNext ) { if ( !g_pIndexes->Exists ( pLocal->cstr() ) ) { sphWarning ( "index '%s': no such local index '%s' - SKIPPING LOCAL INDEX", szIndexName, pLocal->cstr() ); continue; } tIdx.m_dLocal.Add ( pLocal->cstr() ); } // add remote agents for ( CSphVariant * pAgent = hIndex("agent"); pAgent; pAgent = pAgent->m_pNext ) { AgentDesc_t tAgent; if ( ConfigureAgent ( tAgent, pAgent, szIndexName, false ) ) tIdx.m_dAgents.Add ( tAgent ); } for ( CSphVariant * pAgent = hIndex("agent_blackhole"); pAgent; pAgent = pAgent->m_pNext ) { AgentDesc_t tAgent; if ( ConfigureAgent ( tAgent, pAgent, 
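// Agent spec format (as parsed by ConfigureAgent above): "host:port:idx1[,idx2...]" for TCP
// agents, or "/unix/socket/path:idx1[,idx2...]" for local UNIX sockets (rejected on Windows).
// Illustrative sphinx.conf lines only, not taken from a real setup:
//   agent           = searchbox1:9312:shard1,shard2
//   agent_blackhole = /var/run/searchd.sock:shard3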
szIndexName, true ) ) tIdx.m_dAgents.Add ( tAgent ); } // configure options if ( hIndex("agent_connect_timeout") ) { if ( hIndex["agent_connect_timeout"].intval()<=0 ) sphWarning ( "index '%s': connect_timeout must be positive, ignored", szIndexName ); else tIdx.m_iAgentConnectTimeout = hIndex["agent_connect_timeout"].intval(); } if ( hIndex("agent_query_timeout") ) { if ( hIndex["agent_query_timeout"].intval()<=0 ) sphWarning ( "index '%s': query_timeout must be positive, ignored", szIndexName ); else tIdx.m_iAgentQueryTimeout = hIndex["agent_query_timeout"].intval(); } return tIdx; } void FreeAgentStats ( DistributedIndex_t & tIndex ) { if ( !g_pStats ) return; g_tStatsMutex.Lock(); ARRAY_FOREACH ( i, tIndex.m_dAgents ) { int iIndex = tIndex.m_dAgents[i].m_iStatsIndex; if ( iIndex<0 || iIndex>=STATS_MAX_AGENTS ) continue; assert ( g_pStats->m_bmAgentStats[iIndex>>5] & ( 1UL<<( iIndex & 31 ) ) ); g_pStats->m_bmAgentStats[iIndex>>5] &= ~( 1UL<<( iIndex & 31 ) ); } g_tStatsMutex.Unlock(); } ESphAddIndex AddIndex ( const char * szIndexName, const CSphConfigSection & hIndex ) { if ( hIndex("type") && hIndex["type"]=="distributed" ) { /////////////////////////////// // configure distributed index /////////////////////////////// DistributedIndex_t tIdx = ConfigureDistributedIndex ( szIndexName, hIndex ); // finally, check and add distributed index to global table if ( tIdx.m_dAgents.GetLength()==0 && tIdx.m_dLocal.GetLength()==0 ) { FreeAgentStats ( tIdx ); sphWarning ( "index '%s': no valid local/remote indexes in distributed index - NOT SERVING", szIndexName ); return ADD_ERROR; } else { g_tDistLock.Lock (); if ( !g_hDistIndexes.Add ( tIdx, szIndexName ) ) { g_tDistLock.Unlock (); FreeAgentStats ( tIdx ); sphWarning ( "index '%s': duplicate name - NOT SERVING", szIndexName ); return ADD_ERROR; } g_tDistLock.Unlock (); } return ADD_DISTR; } else if ( hIndex("type") && hIndex["type"]=="rt" ) { //////////////////////////// // configure realtime index //////////////////////////// if ( g_eWorkers!=MPM_THREADS ) { sphWarning ( "index '%s': RT index requires workers=threads - NOT SERVING", szIndexName ); return ADD_ERROR; } CSphString sError; CSphSchema tSchema ( szIndexName ); if ( !sphRTSchemaConfigure ( hIndex, &tSchema, &sError ) ) { sphWarning ( "index '%s': %s - NOT SERVING", szIndexName, sError.cstr() ); return ADD_ERROR; } // path if ( !hIndex("path") ) { sphWarning ( "index '%s': path must be specified - NOT SERVING", szIndexName ); return ADD_ERROR; } // RAM chunk size DWORD uRamSize = hIndex.GetSize ( "rt_mem_limit", 32*1024*1024 ); if ( uRamSize<128*1024 ) { sphWarning ( "index '%s': rt_mem_limit extremely low, using 128K instead", szIndexName ); uRamSize = 128*1024; } else if ( uRamSize<8*1024*1024 ) sphWarning ( "index '%s': rt_mem_limit very low (under 8 MB)", szIndexName ); // index ServedIndex_t tIdx; bool bWordDict = strcmp ( hIndex.GetStr ( "dict", "" ), "keywords" )==0; tIdx.m_pIndex = sphCreateIndexRT ( tSchema, szIndexName, uRamSize, hIndex["path"].cstr(), bWordDict ); tIdx.m_bEnabled = false; tIdx.m_sIndexPath = hIndex["path"]; tIdx.m_bRT = true; ConfigureIndex ( tIdx, hIndex ); tIdx.m_pIndex->SetEnableStar ( tIdx.m_bStar ); tIdx.m_pIndex->m_iExpansionLimit = g_iExpansionLimit; tIdx.m_pIndex->SetPreopen ( tIdx.m_bPreopen || g_bPreopenIndexes ); tIdx.m_pIndex->SetWordlistPreload ( !tIdx.m_bOnDiskDict && !g_bOnDiskDicts ); // pick config settings // they should be overriden later by Preload() if needed CSphIndexSettings tSettings; if ( !sphConfIndex ( hIndex, tSettings, sError ) 
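// Illustrative RT index section (example names and values only) matching the checks above:
// workers=threads is mandatory, rt_mem_limit defaults to 32M and is clamped up to 128K with a
// warning (plus a softer warning under 8M), and dict=keywords switches to the keywords dict.
//   index rt_example
//   {
//       type         = rt
//       path         = /var/data/rt_example
//       rt_mem_limit = 64M
//       rt_field     = title
//       rt_attr_uint = group_id
//   }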
) { sphWarning ( "ERROR: index '%s': %s - NOT SERVING", szIndexName, sError.cstr() ); return ADD_ERROR; } tIdx.m_pIndex->Setup ( tSettings ); // hash it if ( !g_pIndexes->Add ( tIdx, szIndexName ) ) { sphWarning ( "INTERNAL ERROR: index '%s': hash add failed - NOT SERVING", szIndexName ); return ADD_ERROR; } tIdx.Reset (); // so that the dtor wouln't delete everything return ADD_RT; } else if ( !hIndex("type") || hIndex["type"]=="plain" ) { ///////////////////////// // configure local index ///////////////////////// ServedIndex_t tIdx; // check path if ( !hIndex.Exists ( "path" ) ) { sphWarning ( "index '%s': key 'path' not found - NOT SERVING", szIndexName ); return ADD_ERROR; } // check name if ( g_pIndexes->Exists ( szIndexName ) ) { sphWarning ( "index '%s': duplicate name - NOT SERVING", szIndexName ); return ADD_ERROR; } // configure memlocking, star ConfigureIndex ( tIdx, hIndex ); // try to create index CSphString sWarning; tIdx.m_pIndex = sphCreateIndexPhrase ( szIndexName, hIndex["path"].cstr() ); tIdx.m_pIndex->SetEnableStar ( tIdx.m_bStar ); tIdx.m_pIndex->m_bExpandKeywords = tIdx.m_bExpand; tIdx.m_pIndex->m_iExpansionLimit = g_iExpansionLimit; tIdx.m_pIndex->SetPreopen ( tIdx.m_bPreopen || g_bPreopenIndexes ); tIdx.m_pIndex->SetWordlistPreload ( !tIdx.m_bOnDiskDict && !g_bOnDiskDicts ); tIdx.m_bEnabled = false; // done tIdx.m_sIndexPath = hIndex["path"]; if ( !g_pIndexes->Add ( tIdx, szIndexName ) ) { sphWarning ( "INTERNAL ERROR: index '%s': hash add failed - NOT SERVING", szIndexName ); return ADD_ERROR; } tIdx.Reset (); // so that the dtor wouldn't delete everything return ADD_LOCAL; } else { // unknown type sphWarning ( "index '%s': unknown type '%s' - NOT SERVING", szIndexName, hIndex["type"].cstr() ); return ADD_ERROR; } } bool CheckConfigChanges () { struct stat tStat; memset ( &tStat, 0, sizeof ( tStat ) ); if ( stat ( g_sConfigFile.cstr (), &tStat ) < 0 ) memset ( &tStat, 0, sizeof ( tStat ) ); DWORD uCRC32 = 0; sphCalcFileCRC32 ( g_sConfigFile.cstr (), uCRC32 ); if ( g_uCfgCRC32==uCRC32 && tStat.st_mtime==g_tCfgStat.st_mtime && tStat.st_ctime==g_tCfgStat.st_ctime && tStat.st_size==g_tCfgStat.st_size ) return false; g_uCfgCRC32 = uCRC32; g_tCfgStat = tStat; return true; } void ReloadIndexSettings ( CSphConfigParser * pCP ) { assert ( pCP ); if ( !pCP->Parse ( g_sConfigFile.cstr () ) ) { sphWarning ( "failed to parse config file '%s'; using previous settings", g_sConfigFile.cstr () ); return; } g_bDoDelete = false; for ( IndexHashIterator_c it ( g_pIndexes ); it.Next(); ) it.Get().m_bToDelete = true; ///< FIXME! What about write lock before doing this? 
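// Config reload recap: ReloadIndexSettings() is driven by CheckConfigChanges() above, which
// re-reads the file only when its CRC32 or stat() signature (mtime/ctime/size) differs from
// the cached one. Every served index is first marked m_bToDelete; each section found in the
// fresh config then clears that flag (or adds the index as new), and whatever stays marked is
// purged later by CheckDelete(), so a section removed from sphinx.conf drops out of the daemon.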
g_hDistIndexes.IterateStart (); while ( g_hDistIndexes.IterateNext () ) g_hDistIndexes.IterateGet().m_bToDelete = true; int nTotalIndexes = g_pIndexes->GetLength () + g_hDistIndexes.GetLength (); int nChecked = 0; const CSphConfig & hConf = pCP->m_tConf; hConf["index"].IterateStart (); while ( hConf["index"].IterateNext() ) { const CSphConfigSection & hIndex = hConf["index"].IterateGet(); const char * sIndexName = hConf["index"].IterateGetKey().cstr(); ServedIndex_t * pServedIndex = g_pIndexes->GetWlockedEntry ( sIndexName ); if ( pServedIndex ) { ConfigureIndex ( *pServedIndex, hIndex ); pServedIndex->m_bToDelete = false; nChecked++; pServedIndex->Unlock(); } else if ( g_hDistIndexes.Exists ( sIndexName ) && hIndex.Exists("type") && hIndex["type"]=="distributed" ) { DistributedIndex_t tIdx = ConfigureDistributedIndex ( sIndexName, hIndex ); // finally, check and add distributed index to global table if ( tIdx.m_dAgents.GetLength()==0 && tIdx.m_dLocal.GetLength()==0 ) { FreeAgentStats ( tIdx ); sphWarning ( "index '%s': no valid local/remote indexes in distributed index; using last valid definition", sIndexName ); g_hDistIndexes[sIndexName].m_bToDelete = false; } else { g_tDistLock.Lock(); FreeAgentStats ( g_hDistIndexes[sIndexName] ); g_hDistIndexes[sIndexName] = tIdx; g_tDistLock.Unlock(); } nChecked++; } else if ( AddIndex ( sIndexName, hIndex )==ADD_LOCAL ) { ServedIndex_t * pIndex = g_pIndexes->GetWlockedEntry ( sIndexName ); if ( pIndex ) { pIndex->m_bOnlyNew = true; pIndex->Unlock(); } } } if ( nChecked < nTotalIndexes ) g_bDoDelete = true; } void CheckDelete () { if ( !g_bDoDelete ) return; if ( g_dChildren.GetLength() ) return; CSphVector dToDelete; CSphVector dDistToDelete; dToDelete.Reserve ( 8 ); dDistToDelete.Reserve ( 8 ); for ( IndexHashIterator_c it ( g_pIndexes ); it.Next(); ) { ServedIndex_t & tIndex = it.Get(); if ( tIndex.m_bToDelete ) dToDelete.Add ( &it.GetKey() ); } g_hDistIndexes.IterateStart (); while ( g_hDistIndexes.IterateNext () ) { DistributedIndex_t & tIndex = g_hDistIndexes.IterateGet (); if ( tIndex.m_bToDelete ) dDistToDelete.Add ( &g_hDistIndexes.IterateGetKey () ); } ARRAY_FOREACH ( i, dToDelete ) g_pIndexes->Delete ( *dToDelete[i] ); // should result in automatic CSphIndex::Unlock() via dtor call g_tDistLock.Lock(); ARRAY_FOREACH ( i, dDistToDelete ) { FreeAgentStats ( g_hDistIndexes [ *dDistToDelete[i] ] ); g_hDistIndexes.Delete ( *dDistToDelete[i] ); } g_tDistLock.Unlock(); g_bDoDelete = false; } void CheckRotate () { // do we need to rotate now? 
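// Rotation entry point: g_iRotateCount is bumped by SIGHUP (see CheckSignals), and CheckRotate()
// then either performs a RAM-greedy rotate in place (seamless_rotate=0 or prefork workers:
// reload settings, RotateIndexGreedy per index) or queues indexes for seamless rotation - into
// g_dRotateQueue for workers=threads, or into g_dRotating plus a forked prereader otherwise.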
if ( !g_iRotateCount ) return; sphLogDebug ( "CheckRotate invoked" ); ///////////////////// // RAM-greedy rotate ///////////////////// if ( !g_bSeamlessRotate || g_eWorkers==MPM_PREFORK ) { // wait until there's no running queries if ( g_dChildren.GetLength() && g_eWorkers!=MPM_PREFORK ) return; CSphConfigParser * pCP = NULL; if ( CheckConfigChanges () ) { pCP = new CSphConfigParser; ReloadIndexSettings ( pCP ); } for ( IndexHashIterator_c it ( g_pIndexes ); it.Next(); ) { ServedIndex_t & tIndex = it.Get(); tIndex.WriteLock(); const char * sIndex = it.GetKey().cstr(); assert ( tIndex.m_pIndex ); bool bWasAdded = tIndex.m_bOnlyNew; RotateIndexGreedy ( tIndex, sIndex ); if ( bWasAdded && tIndex.m_bEnabled ) { if ( !pCP ) { pCP = new CSphConfigParser; ReloadIndexSettings ( pCP ); } const CSphConfigType & hConf = pCP->m_tConf ["index"]; if ( hConf.Exists ( sIndex ) ) { CSphString sError; if ( !sphFixupIndexSettings ( tIndex.m_pIndex, hConf [sIndex], sError ) ) { sphWarning ( "index '%s': %s - NOT SERVING", sIndex, sError.cstr() ); tIndex.m_bEnabled = false; } if ( tIndex.m_bEnabled && !CheckIndex ( tIndex.m_pIndex, sError ) ) { sphWarning ( "index '%s': %s - NOT SERVING", sIndex, sError.cstr() ); tIndex.m_bEnabled = false; } } } tIndex.Unlock(); } SafeDelete ( pCP ); IndexRotationDone (); return; } /////////////////// // seamless rotate /////////////////// if ( g_dRotating.GetLength() || g_dRotateQueue.GetLength() || g_sPrereading ) return; // rotate in progress already; will be handled in CheckPipes() g_tRotateConfigMutex.Lock(); SafeDelete ( g_pCfg ); if ( CheckConfigChanges() ) { g_pCfg = new CSphConfigParser; ReloadIndexSettings ( g_pCfg ); } g_tRotateConfigMutex.Unlock(); int iRotIndexes = 0; // check what indexes need to be rotated for ( IndexHashIterator_c it ( g_pIndexes ); it.Next(); ) { const ServedIndex_t & tIndex = it.Get(); const CSphString & sIndex = it.GetKey(); assert ( tIndex.m_pIndex ); CSphString sNewPath; sNewPath.SetSprintf ( "%s.new", tIndex.m_sIndexPath.cstr() ); // check if there's a .new index incoming // FIXME? move this code to index, and also check for exists-but-not-readable CSphString sTmp; sTmp.SetSprintf ( "%s.sph", sNewPath.cstr() ); if ( !sphIsReadable ( sTmp.cstr() ) ) { sphLogDebug ( "%s.sph is not readable. 
Skipping", sNewPath.cstr() ); continue; } if ( g_eWorkers==MPM_THREADS ) { g_tRotateQueueMutex.Lock(); g_dRotateQueue.Add ( sIndex ); g_tRotateQueueMutex.Unlock(); } else { g_dRotating.Add ( sIndex.cstr() ); if ( !( tIndex.m_bPreopen || g_bPreopenIndexes ) ) sphWarning ( "rotating index '%s' without preopen option; use per-index propen=1 or searchd preopen_indexes=1", sIndex.cstr() ); } iRotIndexes++; } if ( !iRotIndexes ) { sphWarning ( "INTERNAL ERROR: nothing to rotate after SIGHUP" ); g_iRotateCount = Max ( 0, g_iRotateCount-1 ); } if ( g_eWorkers!=MPM_THREADS && iRotIndexes ) SeamlessForkPrereader (); } void CheckReopen () { if ( !g_bGotSigusr1 ) return; // reopen searchd log if ( g_iLogFile>=0 && !g_bLogTty ) { int iFD = ::open ( g_sLogFile.cstr(), O_CREAT | O_RDWR | O_APPEND, S_IREAD | S_IWRITE ); if ( iFD<0 ) { sphWarning ( "failed to reopen log file '%s': %s", g_sLogFile.cstr(), strerror(errno) ); } else { ::close ( g_iLogFile ); g_iLogFile = iFD; g_bLogTty = ( isatty ( g_iLogFile )!=0 ); sphInfo ( "log reopened" ); } } // reopen query log if ( !g_bQuerySyslog && g_iQueryLogFile!=g_iLogFile && g_iQueryLogFile>=0 && !isatty ( g_iQueryLogFile ) ) { int iFD = ::open ( g_sQueryLogFile.cstr(), O_CREAT | O_RDWR | O_APPEND, S_IREAD | S_IWRITE ); if ( iFD<0 ) { sphWarning ( "failed to reopen query log file '%s': %s", g_sQueryLogFile.cstr(), strerror(errno) ); } else { ::close ( g_iQueryLogFile ); g_iQueryLogFile = iFD; sphInfo ( "query log reopened" ); } } g_bGotSigusr1 = 0; } static void SaveIndexes () { for ( IndexHashIterator_c it ( g_pIndexes ); it.Next(); ) { const ServedIndex_t & tServed = it.Get(); tServed.ReadLock(); if ( tServed.m_bEnabled ) { if ( !tServed.m_pIndex->SaveAttributes () ) sphWarning ( "index %s: attrs save failed: %s", it.GetKey().cstr(), tServed.m_pIndex->GetLastError().cstr() ); } tServed.Unlock(); } } static void ThdSaveIndexes ( void * ) { SaveIndexes (); // we're no more flushing g_tFlushMutex.Lock(); g_pFlush->m_bFlushing = false; g_tFlushMutex.Unlock(); } #if !USE_WINDOWS int PreforkChild (); #endif void CheckFlush () { if ( g_iAttrFlushPeriod<=0 || g_pFlush->m_bFlushing ) return; // do a periodic check, unless we have a forced check if ( !g_pFlush->m_bForceCheck ) { static int64_t tmLastCheck = -1000; int64_t tmNow = sphMicroTimer(); if ( ( tmLastCheck + int64_t(g_iAttrFlushPeriod)*I64C(1000000) )>=tmNow ) return; tmLastCheck = tmNow; sphLogDebug ( "attrflush: doing periodic check" ); } else { sphLogDebug ( "attrflush: doing forced check" ); } // check if there are dirty indexes bool bDirty = false; for ( IndexHashIterator_c it ( g_pIndexes ); it.Next(); ) { const ServedIndex_t & tServed = it.Get(); if ( tServed.m_bEnabled && tServed.m_pIndex->GetAttributeStatus() ) { bDirty = true; break; } } // need to set this before clearing check flag if ( bDirty ) g_pFlush->m_bFlushing = true; // if there was a forced check in progress, it no longer is if ( g_pFlush->m_bForceCheck ) g_pFlush->m_bForceCheck = false; // nothing to do, no indexes were updated if ( !bDirty ) { sphLogDebug ( "attrflush: no dirty indexes found" ); return; } // launch the flush! g_pFlush->m_iFlushTag++; sphLogDebug ( "attrflush: starting writer, tag ( %d )", g_pFlush->m_iFlushTag ); #if !USE_WINDOWS if ( g_eWorkers==MPM_FORK || g_eWorkers==MPM_PREFORK ) { PreforkChild(); // FIXME! 
gracefully handle fork() failures, Windows, etc if ( g_bHeadDaemon ) { return; } // child process, do the work SaveIndexes (); g_pFlush->m_bFlushing = false; exit ( 0 ); } else #endif { ThdDesc_t tThd; if ( !sphThreadCreate ( &tThd.m_tThd, ThdSaveIndexes, NULL, true ) ) sphWarning ( "failed to create attribute save thread, error[%d] %s", errno, strerror(errno) ); } } #if !USE_WINDOWS #define WINAPI #else SERVICE_STATUS g_ss; SERVICE_STATUS_HANDLE g_ssHandle; void MySetServiceStatus ( DWORD dwCurrentState, DWORD dwWin32ExitCode, DWORD dwWaitHint ) { static DWORD dwCheckPoint = 1; if ( dwCurrentState==SERVICE_START_PENDING ) g_ss.dwControlsAccepted = 0; else g_ss.dwControlsAccepted = SERVICE_ACCEPT_STOP; g_ss.dwCurrentState = dwCurrentState; g_ss.dwWin32ExitCode = dwWin32ExitCode; g_ss.dwWaitHint = dwWaitHint; if ( dwCurrentState==SERVICE_RUNNING || dwCurrentState==SERVICE_STOPPED ) g_ss.dwCheckPoint = 0; else g_ss.dwCheckPoint = dwCheckPoint++; SetServiceStatus ( g_ssHandle, &g_ss ); } void WINAPI ServiceControl ( DWORD dwControlCode ) { switch ( dwControlCode ) { case SERVICE_CONTROL_STOP: MySetServiceStatus ( SERVICE_STOP_PENDING, NO_ERROR, 0 ); g_bServiceStop = true; break; default: MySetServiceStatus ( g_ss.dwCurrentState, NO_ERROR, 0 ); break; } } // warning! static buffer, non-reentrable const char * WinErrorInfo () { static char sBuf[1024]; DWORD uErr = ::GetLastError (); snprintf ( sBuf, sizeof(sBuf), "code=%d, error=", uErr ); int iLen = strlen(sBuf); if ( !FormatMessage ( FORMAT_MESSAGE_FROM_SYSTEM, NULL, uErr, 0, sBuf+iLen, sizeof(sBuf)-iLen, NULL ) ) // FIXME? force US-english langid? snprintf ( sBuf+iLen, sizeof(sBuf)-iLen, "(no message)" ); return sBuf; } SC_HANDLE ServiceOpenManager () { SC_HANDLE hSCM = OpenSCManager ( NULL, // local computer NULL, // ServicesActive database SC_MANAGER_ALL_ACCESS ); // full access rights if ( hSCM==NULL ) sphFatal ( "OpenSCManager() failed: %s", WinErrorInfo() ); return hSCM; } void AppendArg ( char * sBuf, int iBufLimit, const char * sArg ) { char * sBufMax = sBuf + iBufLimit - 2; // reserve place for opening space and trailing zero sBuf += strlen(sBuf); if ( sBuf>=sBufMax ) return; int iArgLen = strlen(sArg); bool bQuote = false; for ( int i=0; i=sBufMax ) return; *sBuf++ = '"'; while ( sBuf\tread configuration from specified file\n" "\t\t\t(default is sphinx.conf)\n" "--stop\t\t\tsend SIGTERM to currently running searchd\n" "--stopwait\t\tsend SIGTERM and wait until actual exit\n" "--status\t\tget ant print status variables\n" "\t\t\t(PID is taken from pid_file specified in config file)\n" "--iostats\t\tlog per-query io stats\n" #ifdef HAVE_CLOCK_GETTIME "--cpustats\t\tlog per-query cpu stats\n" #endif #if USE_WINDOWS "--install\t\tinstall as Windows service\n" "--delete\t\tdelete Windows service\n" "--servicename \tuse given service name (default is 'searchd')\n" #endif "\n" "Debugging options are:\n" "--console\t\trun in console mode (do not fork, do not log to files)\n" "-p, --port \tlisten on given port (overrides config setting)\n" "-l, --listen \tlisten on given address, port or path (overrides\n" "\t\t\tconfig settings)\n" "-i, --index \tonly serve one given index\n" "--logdebug\t\tenable additional debug information logging\n" #if !USE_WINDOWS "--nodetach\t\tdo not detach into background\n" #endif "\n" "Examples:\n" "searchd --config /usr/local/sphinx/etc/sphinx.conf\n" #if USE_WINDOWS "searchd --install --config c:\\sphinx\\sphinx.conf\n" #endif ); } template T * InitSharedBuffer ( CSphSharedBuffer & tBuffer, int iLen ) { 
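// InitSharedBuffer(): allocates a process-shared CSphSharedBuffer of iLen elements, dies via
// sphDie() if the allocation fails, and returns the zero-filled write pointer; it is used for
// small cross-process flags such as g_bDaemonAtShutdown in SetWatchDog() below.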
CSphString sError, sWarning; if ( !tBuffer.Alloc ( iLen, sError, sWarning ) ) sphDie ( "failed to allocate shared buffer (msg=%s)", sError.cstr() ); T * pRes = tBuffer.GetWritePtr(); memset ( pRes, 0, iLen*sizeof(T) ); // reset return pRes; } #if USE_WINDOWS BOOL WINAPI CtrlHandler ( DWORD ) { if ( !g_bService ) { g_bGotSigterm = 1; sphInterruptNow(); } return TRUE; } #endif #if !USE_WINDOWS int PreforkChild () { // next one int iRes = fork(); if ( iRes==-1 ) sphFatal ( "fork() failed during prefork (error=%s)", strerror(errno) ); // child process if ( iRes==0 ) { g_bHeadDaemon = false; sphSetProcessInfo ( false ); return iRes; } // parent process g_dChildren.Add ( iRes ); return iRes; } // returns 'true' only once - at the very start, to show it beatiful way. bool SetWatchDog ( int iDevNull ) { InitSharedBuffer ( g_bDaemonAtShutdown, 1 ); // Fork #1 - detach from controlling terminal switch ( fork() ) { case -1: // error Shutdown (); sphFatal ( "fork() failed (reason: %s)", strerror ( errno ) ); exit ( 1 ); case 0: // daemonized child - or new and free watchdog :) break; default: // tty-controlled parent while ( g_tHaveTTY.ReadValue() ) sphSleepMsec ( 100 ); sphSetProcessInfo ( false ); exit ( 0 ); } // became the session leader if ( setsid()==-1 ) { Shutdown (); sphFatal ( "setsid() failed (reason: %s)", strerror ( errno ) ); exit ( 1 ); } // Fork #2 - detach from session leadership (may be not necessary, however) switch ( fork() ) { case -1: // error Shutdown (); sphFatal ( "fork() failed (reason: %s)", strerror ( errno ) ); exit ( 1 ); case 0: // daemonized child - or new and free watchdog :) break; default: // tty-controlled parent sphSetProcessInfo ( false ); exit ( 0 ); } // now we are the watchdog. Let us fork the actual process int iReincarnate = 1; bool bShutdown = false; bool bStreamsActive = true; int iRes = 0; for ( ;; ) { if ( iReincarnate!=0 ) iRes = fork(); if ( iRes==-1 ) { Shutdown (); sphFatal ( "fork() failed during watchdog setup (error=%s)", strerror(errno) ); } // child process; return true to show that we have to reload everything if ( iRes==0 ) { atexit ( &ReleaseTTYFlag ); return bStreamsActive; } // parent process, watchdog // close the io files if ( bStreamsActive ) { close ( STDIN_FILENO ); close ( STDOUT_FILENO ); close ( STDERR_FILENO ); dup2 ( iDevNull, STDIN_FILENO ); dup2 ( iDevNull, STDOUT_FILENO ); dup2 ( iDevNull, STDERR_FILENO ); bStreamsActive = false; } sphInfo ( "Child process %d has been forked", iRes ); SetSignalHandlers(); iReincarnate = 0; int iPid, iStatus; bool bDaemonAtShutdown = 0; while ( ( iPid = wait ( &iStatus ) )>0 ) { bDaemonAtShutdown = ( g_bDaemonAtShutdown[0]!=0 ); const char * sWillRestart = ( bDaemonAtShutdown ? "will not be restarted ( daemon is shutting down )" : "will be restarted" ); assert ( iPid==iRes ); if ( WIFEXITED ( iStatus ) ) { int iExit = WEXITSTATUS ( iStatus ); if ( iExit==2 ) // really crash { sphInfo ( "Child process %d has been finished by CRASH_EXIT (exit code 2), %s", iPid, sWillRestart ); iReincarnate = -1; } else { sphInfo ( "Child process %d has been finished, exit code %d. Watchdog finishes also. Good bye!", iPid, iExit ); bShutdown = true; } } else if ( WIFSIGNALED ( iStatus ) ) { if ( WTERMSIG ( iStatus )==SIGINT || WTERMSIG ( iStatus )==SIGTERM #if WATCHDOG_SIGKILL || WTERMSIG ( iStatus )==SIGKILL #endif ) { sphInfo ( "Child process %d has been killed with kill or sigterm (%i). Watchdog finishes also. 
Good bye!", iPid, WTERMSIG ( iStatus ) ); bShutdown = true; } else { if ( WCOREDUMP ( iStatus ) ) sphInfo ( "Child process %i has been killed with signal %i, core dumped, %s", iPid, WTERMSIG ( iStatus ), sWillRestart ); else sphInfo ( "Child process %i has been killed with signal %i, %s", iPid, WTERMSIG ( iStatus ), sWillRestart ); iReincarnate = -1; } } else if ( WIFSTOPPED ( iStatus ) ) sphInfo ( "Child %i stopped with signal %i", iPid, WSTOPSIG ( iStatus ) ); #ifdef WIFCONTINUED else if ( WIFCONTINUED ( iStatus ) ) sphInfo ( "Child %i resumed", iPid ); #endif } if ( bShutdown || g_bGotSigterm || bDaemonAtShutdown ) { Shutdown(); exit ( 0 ); } } } #endif // !USE_WINDOWS /// check for incoming signals, and react on them void CheckSignals () { #if USE_WINDOWS if ( g_bService && g_bServiceStop ) { Shutdown (); MySetServiceStatus ( SERVICE_STOPPED, NO_ERROR, 0 ); exit ( 0 ); } #endif if ( g_bGotSighup ) { g_tRotateQueueMutex.Lock(); g_iRotateCount++; g_tRotateQueueMutex.Unlock(); sphInfo ( "rotating indices (seamless=%d)", (int)g_bSeamlessRotate ); // this might hang if performed from SIGHUP g_bGotSighup = 0; } if ( g_bGotSigterm ) { assert ( g_bHeadDaemon ); sphInfo ( "caught SIGTERM, shutting down" ); Shutdown (); exit ( 0 ); } #if !USE_WINDOWS if ( g_bGotSigchld ) { // handle gone children for ( ;; ) { int iChildPid = waitpid ( -1, NULL, WNOHANG ); sphLogDebugvv ( "gone child %d ( %d )", iChildPid, g_dChildren.GetLength() ); // !COMMIT if ( iChildPid<=0 ) break; g_dChildren.RemoveValue ( iChildPid ); // FIXME! OPTIMIZE! can be slow } g_bGotSigchld = 0; // prefork more children, if needed if ( g_eWorkers==MPM_PREFORK ) while ( g_dChildren.GetLength() < g_iPreforkChildren ) if ( PreforkChild()==0 ) // child process? break from here, go work return; } #endif #if USE_WINDOWS BYTE dPipeInBuf [ WIN32_PIPE_BUFSIZE ]; DWORD nBytesRead = 0; BOOL bSuccess = ReadFile ( g_hPipe, dPipeInBuf, WIN32_PIPE_BUFSIZE, &nBytesRead, NULL ); if ( nBytesRead > 0 && bSuccess ) { for ( DWORD i=0; im_pNext ) { ListenerDesc_t tDesc = ParseListener ( v->cstr() ); if ( tDesc.m_eProto!=PROTO_SPHINX ) continue; int iSock = -1; #if !USE_WINDOWS if ( !tDesc.m_sUnix.IsEmpty() ) { // UNIX connection struct sockaddr_un uaddr; size_t len = strlen ( tDesc.m_sUnix.cstr() ); if ( len+1 > sizeof(uaddr.sun_path ) ) sphFatal ( "UNIX socket path is too long (len=%d)", (int)len ); memset ( &uaddr, 0, sizeof(uaddr) ); uaddr.sun_family = AF_UNIX; memcpy ( uaddr.sun_path, tDesc.m_sUnix.cstr(), len+1 ); iSock = socket ( AF_UNIX, SOCK_STREAM, 0 ); if ( iSock<0 ) sphFatal ( "failed to create UNIX socket: %s", sphSockError() ); if ( connect ( iSock, (struct sockaddr*)&uaddr, sizeof(uaddr) )<0 ) { sphWarning ( "failed to connect to unix://%s: %s\n", tDesc.m_sUnix.cstr(), sphSockError() ); continue; } } else #endif { // TCP connection struct sockaddr_in sin; memset ( &sin, 0, sizeof(sin) ); sin.sin_family = AF_INET; sin.sin_addr.s_addr = ( tDesc.m_uIP==htonl ( INADDR_ANY ) ) ? 
htonl ( INADDR_LOOPBACK ) : tDesc.m_uIP; sin.sin_port = htons ( (short)tDesc.m_iPort ); iSock = socket ( AF_INET, SOCK_STREAM, 0 ); if ( iSock<0 ) sphFatal ( "failed to create TCP socket: %s", sphSockError() ); if ( connect ( iSock, (struct sockaddr*)&sin, sizeof(sin) )<0 ) { sphWarning ( "failed to connect to %s:%d: %s\n", sphFormatIP ( sBuf, sizeof(sBuf), tDesc.m_uIP ), tDesc.m_iPort, sphSockError() ); continue; } } // send request NetOutputBuffer_c tOut ( iSock ); tOut.SendDword ( SPHINX_SEARCHD_PROTO ); tOut.SendWord ( SEARCHD_COMMAND_STATUS ); tOut.SendWord ( VER_COMMAND_STATUS ); tOut.SendInt ( 4 ); // request body length tOut.SendInt ( 1 ); // dummy body tOut.Flush (); // get reply NetInputBuffer_c tIn ( iSock ); if ( !tIn.ReadFrom ( 12, 5 ) ) // magic_header_size=12, magic_timeout=5 sphFatal ( "handshake failure (no response)" ); DWORD uVer = tIn.GetDword(); if ( uVer!=SPHINX_SEARCHD_PROTO && uVer!=0x01000000UL ) // workaround for all the revisions that sent it in host order... sphFatal ( "handshake failure (unexpected protocol version=%d)", uVer ); if ( tIn.GetWord()!=SEARCHD_OK ) sphFatal ( "status command failed" ); if ( tIn.GetWord()!=VER_COMMAND_STATUS ) sphFatal ( "status command version mismatch" ); if ( !tIn.ReadFrom ( tIn.GetDword(), 5 ) ) // magic_timeout=5 sphFatal ( "failed to read status reply" ); fprintf ( stdout, "\nsearchd status\n--------------\n" ); int iRows = tIn.GetDword(); int iCols = tIn.GetDword(); for ( int i=0; iBuildMessage() ); } fflush ( stdout ); } void FailClient ( int iSock, SearchdStatus_e eStatus, const char * sMessage ) { assert ( eStatus==SEARCHD_RETRY || eStatus==SEARCHD_ERROR ); int iRespLen = 4 + strlen(sMessage); NetOutputBuffer_c tOut ( iSock ); tOut.SendInt ( SPHINX_SEARCHD_PROTO ); tOut.SendWord ( (WORD)eStatus ); tOut.SendWord ( 0 ); // version doesn't matter tOut.SendInt ( iRespLen ); tOut.SendString ( sMessage ); tOut.Flush (); // FIXME? without some wait, client fails to receive the response on windows sphSockClose ( iSock ); } Listener_t * DoAccept ( int * pClientSock, char * sClientName ) { int iMaxFD = 0; fd_set fdsAccept; FD_ZERO ( &fdsAccept ); ARRAY_FOREACH ( i, g_dListeners ) { sphFDSet ( g_dListeners[i].m_iSock, &fdsAccept ); iMaxFD = Max ( iMaxFD, g_dListeners[i].m_iSock ); } iMaxFD++; struct timeval tvTimeout; tvTimeout.tv_sec = USE_WINDOWS ? 0 : 1; tvTimeout.tv_usec = USE_WINDOWS ? 
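// DoAccept() polling note: select() runs with a 1 second timeout on UNIX and 50 msec on
// Windows; EINTR/EAGAIN/EWOULDBLOCK results are treated as "no client this tick", and any
// other select() error is only warned about once per distinct errno value.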
50000 : 0; int iRes = select ( iMaxFD, &fdsAccept, NULL, NULL, &tvTimeout ); if ( iRes==0 ) return NULL; if ( iRes<0 ) { int iErrno = sphSockGetErrno(); if ( iErrno==EINTR || iErrno==EAGAIN || iErrno==EWOULDBLOCK ) return NULL; static int iLastErrno = -1; if ( iLastErrno!=iErrno ) sphWarning ( "select() failed: %s", sphSockError(iErrno) ); iLastErrno = iErrno; return NULL; } ARRAY_FOREACH ( i, g_dListeners ) { if ( !FD_ISSET ( g_dListeners[i].m_iSock, &fdsAccept ) ) continue; // accept struct sockaddr_storage saStorage; socklen_t uLength = sizeof(saStorage); int iClientSock = accept ( g_dListeners[i].m_iSock, (struct sockaddr *)&saStorage, &uLength ); // handle failures if ( iClientSock<0 ) { const int iErrno = sphSockGetErrno(); if ( iErrno==EINTR || iErrno==ECONNABORTED || iErrno==EAGAIN || iErrno==EWOULDBLOCK ) return NULL; sphFatal ( "accept() failed: %s", sphSockError(iErrno) ); } if ( g_pStats ) { g_tStatsMutex.Lock(); g_pStats->m_iConnections++; g_tStatsMutex.Unlock(); } if ( g_eWorkers==MPM_PREFORK ) { // protected by accept mutex if ( ++*g_pConnID<0 ) *g_pConnID = 0; g_iConnID = *g_pConnID; } else { if ( ++g_iConnID<0 ) g_iConnID = 0; } // format client address if ( sClientName ) { sClientName[0] = '\0'; if ( saStorage.ss_family==AF_INET ) { struct sockaddr_in * pSa = ((struct sockaddr_in *)&saStorage); sphFormatIP ( sClientName, SPH_ADDRESS_SIZE, pSa->sin_addr.s_addr ); char * d = sClientName; while ( *d ) d++; snprintf ( d, 7, ":%d", (int)ntohs ( pSa->sin_port ) ); //NOLINT } if ( saStorage.ss_family==AF_UNIX ) strncpy ( sClientName, "(local)", SPH_ADDRESS_SIZE ); } // accepted! #if !USE_WINDOWS // FIXME!!! either get git of select() or allocate list of FD (with dup2 back instead close for thouse FD) // with threads workers to prevent dup2 closes valid FD if ( SPH_FDSET_OVERFLOW ( iClientSock ) ) { if ( ( g_eWorkers==MPM_FORK || g_eWorkers==MPM_PREFORK ) ) { iClientSock = dup2 ( iClientSock, g_iClientFD ); } else { FailClient ( iClientSock, SEARCHD_RETRY, "server maxed out, retry in a second" ); sphWarning ( "maxed out, dismissing client (socket=%d)", iClientSock ); sphSockClose ( iClientSock ); return NULL; } } #endif *pClientSock = iClientSock; return &g_dListeners[i]; } return NULL; } void TickPreforked ( CSphProcessSharedMutex * pAcceptMutex ) { assert ( !g_bHeadDaemon ); assert ( pAcceptMutex ); if ( g_bGotSigterm ) exit ( 0 ); int iClientSock = -1; char sClientIP[SPH_ADDRPORT_SIZE]; Listener_t * pListener = NULL; for ( ; !g_bGotSigterm && !pListener; ) { if ( pAcceptMutex->TimedLock ( 100 ) ) { if ( !g_bGotSigterm ) pListener = DoAccept ( &iClientSock, sClientIP ); pAcceptMutex->Unlock(); } } if ( g_bGotSigterm ) exit ( 0 ); // clean shutdown (after mutex unlock) if ( pListener ) { HandleClient ( pListener->m_eProto, iClientSock, sClientIP, NULL ); sphSockClose ( iClientSock ); } } void HandlerThread ( void * pArg ) { // setup query guard for threaded mode SphCrashLogger_c tQueryTLS; tQueryTLS.SetupTLS (); // handle that client ThdDesc_t * pThd = (ThdDesc_t*) pArg; sphThreadSet ( g_tConnKey, &pThd->m_iConnID ); HandleClient ( pThd->m_eProto, pThd->m_iClientSock, pThd->m_sClientName.cstr(), pThd ); sphSockClose ( pThd->m_iClientSock ); // done; remove myself from the table g_tThdMutex.Lock (); ARRAY_FOREACH ( i, g_dThd ) if ( g_dThd[i]==pThd ) { #if USE_WINDOWS // FIXME? 
this is sort of automatic on UNIX (pthread_exit() gets implicitly called on return) CloseHandle ( pThd->m_tThd ); #endif SafeDelete ( pThd ); g_dThd.RemoveFast(i); break; } g_tThdMutex.Unlock (); // something went wrong while removing; report if ( pThd ) { sphWarning ( "thread missing from thread table" ); #if USE_WINDOWS // FIXME? this is sort of automatic on UNIX (pthread_exit() gets implicitly called on return) CloseHandle ( pThd->m_tThd ); #endif SafeDelete ( pThd ); } } static void CheckChildrenTerm () { #if !USE_WINDOWS if ( g_eWorkers!=MPM_PREFORK || !g_dTermChildren.GetLength() || g_tmRotateChildren>sphMicroTimer() ) return; sphLogDebugvv ( "killing child %d ( %d )", g_dTermChildren.Last(), g_dTermChildren.GetLength() ); kill ( g_dTermChildren.Last(), SIGTERM ); g_dTermChildren.Resize ( g_dTermChildren.GetLength()-1 ); g_tmRotateChildren = sphMicroTimer() + g_iRotationThrottle*1000; #endif } void TickHead ( CSphProcessSharedMutex * pAcceptMutex ) { CheckSignals (); if ( !g_bHeadDaemon ) return; CheckLeaks (); CheckReopen (); CheckPipes (); CheckDelete (); CheckRotate (); CheckFlush (); CheckChildrenTerm(); sphInfo ( NULL ); // flush dupes if ( pAcceptMutex ) { // FIXME! what if all children are busy; we might want to accept here and temp fork more sphSleepMsec ( 1000 ); return; } int iClientSock; char sClientName[SPH_ADDRPORT_SIZE]; Listener_t * pListener = DoAccept ( &iClientSock, sClientName ); if ( !pListener ) return; if ( ( g_iMaxChildren && ( g_dChildren.GetLength()>=g_iMaxChildren || g_dThd.GetLength()>=g_iMaxChildren ) ) || ( g_iRotateCount && !g_bSeamlessRotate ) ) { FailClient ( iClientSock, SEARCHD_RETRY, "server maxed out, retry in a second" ); sphWarning ( "maxed out, dismissing client" ); if ( g_pStats ) g_pStats->m_iMaxedOut++; return; } // handle the client if ( g_eWorkers==MPM_NONE ) { HandleClient ( pListener->m_eProto, iClientSock, sClientName, NULL ); sphSockClose ( iClientSock ); return; } #if !USE_WINDOWS if ( g_eWorkers==MPM_FORK ) { sphLogDebugv ( "conn %s: accepted, socket %d", sClientName, iClientSock ); int iChildPipe = PipeAndFork ( false, -1 ); SafeClose ( iChildPipe ); if ( !g_bHeadDaemon ) { // child process, handle client sphLogDebugv ( "conn %s: forked handler, socket %d", sClientName, iClientSock ); HandleClient ( pListener->m_eProto, iClientSock, sClientName, NULL ); sphSockClose ( iClientSock ); exit ( 0 ); } else { // parent process, continue accept()ing sphSockClose ( iClientSock ); } } #endif // !USE_WINDOWS if ( g_eWorkers==MPM_THREADS ) { ThdDesc_t * pThd = new ThdDesc_t (); pThd->m_eProto = pListener->m_eProto; pThd->m_iClientSock = iClientSock; pThd->m_sClientName = sClientName; pThd->m_iConnID = g_iConnID; g_tThdMutex.Lock (); g_dThd.Add ( pThd ); if ( !sphThreadCreate ( &pThd->m_tThd, HandlerThread, pThd, true ) ) { g_dThd.Pop(); SafeDelete ( pThd ); FailClient ( iClientSock, SEARCHD_RETRY, "failed to create worker thread" ); sphWarning ( "failed to create worker thread, threads(%d), error[%d] %s", g_dThd.GetLength(), errno, strerror(errno) ); } g_tThdMutex.Unlock (); return; } // default (should not happen) sphSockClose ( iClientSock ); } void ConfigureSearchd ( const CSphConfig & hConf, bool bOptPIDFile ) { if ( !hConf.Exists ( "searchd" ) || !hConf["searchd"].Exists ( "searchd" ) ) sphFatal ( "'searchd' config section not found in '%s'", g_sConfigFile.cstr () ); const CSphConfigSection & hSearchd = hConf["searchd"]["searchd"]; if ( !hConf.Exists ( "index" ) ) sphFatal ( "no indexes found in '%s'", g_sConfigFile.cstr () ); if ( 
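// Illustrative searchd section (example values only) covering keys read below and in ServiceMain:
//   searchd
//   {
//       listen          = 9312
//       pid_file        = /var/run/searchd.pid
//       max_children    = 30
//       seamless_rotate = 1
//       preopen_indexes = 1
//       max_matches     = 1000
//       workers         = threads
//   }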
bOptPIDFile ) if ( !hSearchd ( "pid_file" ) ) sphFatal ( "mandatory option 'pid_file' not found in 'searchd' section" ); if ( hSearchd.Exists ( "read_timeout" ) && hSearchd["read_timeout"].intval()>=0 ) g_iReadTimeout = hSearchd["read_timeout"].intval(); if ( hSearchd.Exists ( "client_timeout" ) && hSearchd["client_timeout"].intval()>=0 ) g_iClientTimeout = hSearchd["client_timeout"].intval(); if ( hSearchd.Exists ( "max_children" ) && hSearchd["max_children"].intval()>=0 ) g_iMaxChildren = hSearchd["max_children"].intval(); g_bPreopenIndexes = hSearchd.GetInt ( "preopen_indexes", (int)g_bPreopenIndexes )!=0; g_bOnDiskDicts = hSearchd.GetInt ( "ondisk_dict_default", (int)g_bOnDiskDicts )!=0; g_bUnlinkOld = hSearchd.GetInt ( "unlink_old", (int)g_bUnlinkOld )!=0; g_iExpansionLimit = hSearchd.GetInt ( "expansion_limit", 0 ); g_bCompatResults = hSearchd.GetInt ( "compat_sphinxql_magics", (int)g_bCompatResults )!=0; if ( g_bCompatResults ) sphWarning ( "compat_sphinxql_magics=1 is deprecated; please update your application and config" ); if ( hSearchd("max_matches") ) { int iMax = hSearchd["max_matches"].intval(); if ( iMax<0 || iMax>10000000 ) { sphWarning ( "max_matches=%d out of bounds; using default 1000", iMax ); } else { g_iMaxMatches = iMax; } } if ( hSearchd("subtree_docs_cache") ) g_iMaxCachedDocs = hSearchd.GetSize ( "subtree_docs_cache", g_iMaxCachedDocs ); if ( hSearchd("subtree_hits_cache") ) g_iMaxCachedHits = hSearchd.GetSize ( "subtree_hits_cache", g_iMaxCachedHits ); if ( hSearchd("seamless_rotate") ) g_bSeamlessRotate = ( hSearchd["seamless_rotate"].intval()!=0 ); if ( !g_bSeamlessRotate && g_bPreopenIndexes ) sphWarning ( "preopen_indexes=1 has no effect with seamless_rotate=0" ); g_iAttrFlushPeriod = hSearchd.GetInt ( "attr_flush_period", g_iAttrFlushPeriod ); g_iMaxPacketSize = hSearchd.GetSize ( "max_packet_size", g_iMaxPacketSize ); g_iMaxFilters = hSearchd.GetInt ( "max_filters", g_iMaxFilters ); g_iMaxFilterValues = hSearchd.GetInt ( "max_filter_values", g_iMaxFilterValues ); g_iMaxBatchQueries = hSearchd.GetInt ( "max_batch_queries", g_iMaxBatchQueries ); g_iDistThreads = hSearchd.GetInt ( "dist_threads", g_iDistThreads ); g_iPreforkChildren = hSearchd.GetInt ( "prefork", g_iPreforkChildren ); if ( hSearchd ( "collation_libc_locale" ) ) { const char * sLocale = hSearchd.GetStr ( "collation_libc_locale" ); if ( !setlocale ( LC_COLLATE, sLocale ) ) sphWarning ( "setlocale failed (locale='%s')", sLocale ); } if ( hSearchd ( "collation_server" ) ) { CSphString sCollation = hSearchd.GetStr ( "collation_server" ); CSphString sError; g_eCollation = sphCollationFromName ( sCollation, &sError ); if ( !sError.IsEmpty() ) sphWarning ( "%s", sError.cstr() ); } if ( hSearchd("thread_stack") ) { int iThreadStackSizeMin = 65536; int iThreadStackSizeMax = 2*1024*1024; int iStackSize = hSearchd.GetSize ( "thread_stack", iThreadStackSizeMin ); if ( iStackSizeiThreadStackSizeMax ) sphWarning ( "thread_stack is %d will be clamped to range ( 65k to 2M )", iStackSize ); iStackSize = Min ( iStackSize, iThreadStackSizeMax ); iStackSize = Max ( iStackSize, iThreadStackSizeMin ); sphSetMyStackSize ( iStackSize ); } char sHandshake1[] = "\x00\x00\x00" // packet length "\x00" // packet id "\x0A"; // protocol version; v.10 char sHandshake2[] = "\x01\x00\x00\x00" // thread id "\x01\x02\x03\x04\x05\x06\x07\x08" // scramble buffer (for auth) "\x00" // filler "\x08\x82" // server capabilities; CLIENT_PROTOCOL_41 | CLIENT_CONNECT_WITH_DB | SECURE_CONNECTION "\x00" // server language "\x02\x00" // 
server status "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" // filler "\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d"; // scramble buffer2 (for auth, 4.1+) const char * sVersion = hSearchd.GetStr ( "mysql_version_string", SPHINX_VERSION ); int iLen = strlen ( sVersion ); g_iMysqlHandshake = sizeof(sHandshake1) + strlen(sVersion) + sizeof(sHandshake2) - 1; if ( g_iMysqlHandshake>=(int)sizeof(g_sMysqlHandshake) ) { sphWarning ( "mysql_version_string too long; using default (version=%s)", SPHINX_VERSION ); g_iMysqlHandshake = sizeof(sHandshake1) + strlen(SPHINX_VERSION) + sizeof(sHandshake2) - 1; assert ( g_iMysqlHandshake < (int)sizeof(g_sMysqlHandshake) ); } char * p = g_sMysqlHandshake; memcpy ( p, sHandshake1, sizeof(sHandshake1)-1 ); memcpy ( p+sizeof(sHandshake1)-1, sVersion, iLen+1 ); memcpy ( p+sizeof(sHandshake1)+iLen, sHandshake2, sizeof(sHandshake2)-1 ); g_sMysqlHandshake[0] = (char)(g_iMysqlHandshake-4); // safe, as long as buffer size is 128 } void ConfigureAndPreload ( const CSphConfig & hConf, const char * sOptIndex ) { int iCounter = 1; int iValidIndexes = 0; int64_t tmLoad = -sphMicroTimer(); hConf["index"].IterateStart (); while ( hConf["index"].IterateNext() ) { const CSphConfigSection & hIndex = hConf["index"].IterateGet(); const char * sIndexName = hConf["index"].IterateGetKey().cstr(); if ( g_bOptNoDetach && sOptIndex && strcasecmp ( sIndexName, sOptIndex )!=0 ) continue; ESphAddIndex eAdd = AddIndex ( sIndexName, hIndex ); if ( eAdd==ADD_LOCAL || eAdd==ADD_RT ) { ServedIndex_t & tIndex = g_pIndexes->GetUnlockedEntry ( sIndexName ); iCounter++; fprintf ( stdout, "precaching index '%s'\n", sIndexName ); fflush ( stdout ); tIndex.m_pIndex->SetProgressCallback ( ShowProgress ); if ( HasFiles ( tIndex, g_dNewExts ) ) { tIndex.m_bOnlyNew = !HasFiles ( tIndex, g_dCurExts ); if ( RotateIndexGreedy ( tIndex, sIndexName ) ) { CSphString sError; if ( !sphFixupIndexSettings ( tIndex.m_pIndex, hIndex, sError ) ) { sphWarning ( "index '%s': %s - NOT SERVING", sIndexName, sError.cstr() ); tIndex.m_bEnabled = false; } } else { if ( PrereadNewIndex ( tIndex, hIndex, sIndexName ) ) tIndex.m_bEnabled = true; } } else { tIndex.m_bOnlyNew = false; if ( PrereadNewIndex ( tIndex, hIndex, sIndexName ) ) tIndex.m_bEnabled = true; } CSphString sError; if ( tIndex.m_bEnabled && !CheckIndex ( tIndex.m_pIndex, sError ) ) { sphWarning ( "index '%s': %s - NOT SERVING", sIndexName, sError.cstr() ); tIndex.m_bEnabled = false; } if ( !tIndex.m_bEnabled ) continue; } if ( eAdd!=ADD_ERROR ) iValidIndexes++; } tmLoad += sphMicroTimer(); if ( !iValidIndexes ) sphFatal ( "no valid indexes to serve" ); else fprintf ( stdout, "precached %d indexes in %0.3f sec\n", iCounter-1, float(tmLoad)/1000000 ); } void OpenDaemonLog ( const CSphConfigSection & hSearchd ) { // create log const char * sLog = "searchd.log"; if ( hSearchd.Exists ( "log" ) ) { if ( hSearchd["log"]=="syslog" ) { #if !USE_SYSLOG if ( g_iLogFile<0 ) { g_iLogFile = STDOUT_FILENO; sphWarning ( "failed to use syslog for logging. You have to reconfigure --with-syslog and rebuild the daemon!" ); sphInfo ( "will use default file 'searchd.log' for logging." 
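// OpenDaemonLog() fallback: "log = syslog" is honoured only when the binary was built with
// syslog support; otherwise logging falls back to stdout with a warning and the default
// 'searchd.log' name, and a regular log file is opened with O_CREAT|O_RDWR|O_APPEND.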
); } #else g_bLogSyslog = true; #endif } else { sLog = hSearchd["log"].cstr(); } } umask ( 066 ); if ( !g_bLogSyslog ) { g_iLogFile = open ( sLog, O_CREAT | O_RDWR | O_APPEND, S_IREAD | S_IWRITE ); if ( g_iLogFile<0 ) { g_iLogFile = STDOUT_FILENO; sphFatal ( "failed to open log file '%s': %s", sLog, strerror(errno) ); } } g_sLogFile = sLog; g_bLogTty = isatty ( g_iLogFile )!=0; } int WINAPI ServiceMain ( int argc, char **argv ) { g_bLogTty = isatty ( g_iLogFile )!=0; #if USE_WINDOWS CSphVector dArgs; if ( g_bService ) { g_ssHandle = RegisterServiceCtrlHandler ( g_sServiceName, ServiceControl ); if ( !g_ssHandle ) sphFatal ( "failed to start service: RegisterServiceCtrlHandler() failed: %s", WinErrorInfo() ); g_ss.dwServiceType = SERVICE_WIN32_OWN_PROCESS; MySetServiceStatus ( SERVICE_START_PENDING, NO_ERROR, 4000 ); if ( argc<=1 ) { dArgs.Resize ( g_dArgs.GetLength() ); ARRAY_FOREACH ( i, g_dArgs ) dArgs[i] = (char*) g_dArgs[i].cstr(); argc = g_dArgs.GetLength(); argv = &dArgs[0]; } } char szPipeName[64]; snprintf ( szPipeName, sizeof(szPipeName), "\\\\.\\pipe\\searchd_%d", getpid() ); g_hPipe = CreateNamedPipe ( szPipeName, PIPE_ACCESS_INBOUND, PIPE_TYPE_BYTE | PIPE_READMODE_BYTE | PIPE_NOWAIT, PIPE_UNLIMITED_INSTANCES, 0, WIN32_PIPE_BUFSIZE, NMPWAIT_NOWAIT, NULL ); ConnectNamedPipe ( g_hPipe, NULL ); #endif if ( !g_bService ) fprintf ( stdout, SPHINX_BANNER ); ////////////////////// // parse command line ////////////////////// CSphConfig conf; bool bOptStop = false; bool bOptStopWait = false; bool bOptStatus = false; bool bOptPIDFile = false; const char * sOptIndex = NULL; int iOptPort = 0; bool bOptPort = false; CSphString sOptListen; bool bOptListen = false; bool bTestMode = false; DWORD uReplayFlags = 0; #define OPT(_a1,_a2) else if ( !strcmp(argv[i],_a1) || !strcmp(argv[i],_a2) ) #define OPT1(_a1) else if ( !strcmp(argv[i],_a1) ) int i; for ( i=1; i=argc ) break; OPT ( "-c", "--config" ) g_sConfigFile = argv[++i]; OPT ( "-p", "--port" ) { bOptPort = true; iOptPort = atoi ( argv[++i] ); } OPT ( "-l", "--listen" ) { bOptListen = true; sOptListen = argv[++i]; } OPT ( "-i", "--index" ) sOptIndex = argv[++i]; #if USE_WINDOWS OPT1 ( "--servicename" ) ++i; // it's valid but handled elsewhere #endif // handle unknown options else break; } if ( i!=argc ) sphFatal ( "malformed or unknown option near '%s'; use '-h' or '--help' to see available options.", argv[i] ); #if USE_WINDOWS // init WSA on Windows // we need to do it this early because otherwise gethostbyname() from config parser could fail WSADATA tWSAData; int iStartupErr = WSAStartup ( WINSOCK_VERSION, &tWSAData ); if ( iStartupErr ) sphFatal ( "failed to initialize WinSock2: %s", sphSockError ( iStartupErr ) ); #ifndef NDEBUG // i want my windows debugging sessions to log onto stdout g_bOptNoDetach = true; g_bOptNoLock = true; #endif #endif if ( !bOptPIDFile ) bOptPIDFile = !g_bOptNoLock; // check port and listen arguments early if ( !g_bOptNoDetach && ( bOptPort || bOptListen ) ) { sphWarning ( "--listen and --port are only allowed in --console debug mode; switch ignored" ); bOptPort = bOptListen = false; } if ( bOptPort ) { if ( bOptListen ) sphFatal ( "please specify either --port or --listen, not both" ); CheckPort ( iOptPort ); } ///////////////////// // parse config file ///////////////////// // fallback to defaults if there was no explicit config specified while ( !g_sConfigFile.cstr() ) { #ifdef SYSCONFDIR g_sConfigFile = SYSCONFDIR "/sphinx.conf"; if ( sphIsReadable ( g_sConfigFile.cstr () ) ) break; #endif g_sConfigFile = 
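// Config lookup order: an explicit -c/--config path wins; otherwise SYSCONFDIR/sphinx.conf is
// tried when the build defines SYSCONFDIR, and finally ./sphinx.conf (assigned just below);
// if none of these is readable the daemon exits via sphFatal().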
"./sphinx.conf"; if ( sphIsReadable ( g_sConfigFile.cstr () ) ) break; g_sConfigFile = NULL; break; } if ( !g_sConfigFile.cstr () ) sphFatal ( "no readable config file (looked in " #ifdef SYSCONFDIR SYSCONFDIR "/sphinx.conf, " #endif "./sphinx.conf)." ); sphInfo ( "using config file '%s'...", g_sConfigFile.cstr () ); CheckConfigChanges (); // do parse CSphConfigParser cp; if ( !cp.Parse ( g_sConfigFile.cstr () ) ) sphFatal ( "failed to parse config file '%s'", g_sConfigFile.cstr () ); const CSphConfig & hConf = cp.m_tConf; if ( !hConf.Exists ( "searchd" ) || !hConf["searchd"].Exists ( "searchd" ) ) sphFatal ( "'searchd' config section not found in '%s'", g_sConfigFile.cstr () ); const CSphConfigSection & hSearchdpre = hConf["searchd"]["searchd"]; //////////////////////// // stop running searchd //////////////////////// if ( bOptStop ) { if ( !hSearchdpre("pid_file") ) sphFatal ( "stop: option 'pid_file' not found in '%s' section 'searchd'", g_sConfigFile.cstr () ); const char * sPid = hSearchdpre["pid_file"].cstr(); // shortcut FILE * fp = fopen ( sPid, "r" ); if ( !fp ) sphFatal ( "stop: pid file '%s' does not exist or is not readable", sPid ); char sBuf[16]; int iLen = (int) fread ( sBuf, 1, sizeof(sBuf)-1, fp ); sBuf[iLen] = '\0'; fclose ( fp ); int iPid = atoi(sBuf); if ( iPid<=0 ) sphFatal ( "stop: failed to read valid pid from '%s'", sPid ); #if USE_WINDOWS bool bTerminatedOk = false; char szPipeName[64]; snprintf ( szPipeName, sizeof(szPipeName), "\\\\.\\pipe\\searchd_%d", iPid ); HANDLE hPipe = INVALID_HANDLE_VALUE; while ( hPipe==INVALID_HANDLE_VALUE ) { hPipe = CreateFile ( szPipeName, GENERIC_WRITE, 0, NULL, OPEN_EXISTING, 0, NULL ); if ( hPipe==INVALID_HANDLE_VALUE ) { if ( GetLastError()!=ERROR_PIPE_BUSY ) { fprintf ( stdout, "WARNING: could not open pipe (GetLastError()=%d)\n", GetLastError () ); break; } if ( !WaitNamedPipe ( szPipeName, 1000 ) ) { fprintf ( stdout, "WARNING: could not open pipe (GetLastError()=%d)\n", GetLastError () ); break; } } } if ( hPipe!=INVALID_HANDLE_VALUE ) { DWORD uWritten = 0; BYTE uWrite = 1; BOOL bResult = WriteFile ( hPipe, &uWrite, 1, &uWritten, NULL ); if ( !bResult ) fprintf ( stdout, "WARNING: failed to send SIGHTERM to searchd (pid=%d, GetLastError()=%d)\n", iPid, GetLastError () ); bTerminatedOk = !!bResult; CloseHandle ( hPipe ); } if ( bTerminatedOk ) { sphInfo ( "stop: succesfully terminated pid %d", iPid ); exit ( 0 ); } else sphFatal ( "stop: error terminating pid %d", iPid ); #else CSphString sPipeName; int iPipeCreated = -1; int hPipe = -1; if ( bOptStopWait ) { sPipeName = GetNamedPipeName ( iPid ); iPipeCreated = mkfifo ( sPipeName.cstr(), 0666 ); if ( iPipeCreated!=-1 ) hPipe = ::open ( sPipeName.cstr(), O_RDONLY | O_NONBLOCK ); if ( iPipeCreated==-1 ) sphWarning ( "mkfifo failed (path=%s, err=%d, msg=%s); will NOT wait", sPipeName.cstr(), errno, strerror(errno) ); else if ( hPipe==-1 ) sphWarning ( "open failed (path=%s, err=%d, msg=%s); will NOT wait", sPipeName.cstr(), errno, strerror(errno) ); } if ( kill ( iPid, SIGTERM ) ) sphFatal ( "stop: kill() on pid %d failed: %s", iPid, strerror(errno) ); else sphInfo ( "stop: successfully sent SIGTERM to pid %d", iPid ); int iExitCode = ( bOptStopWait && ( iPipeCreated==-1 || hPipe==-1 ) ) ? 1 : 0; if ( bOptStopWait && hPipe!=-1 ) { while ( sphIsReadable ( sPid, NULL ) ) sphSleepMsec ( 5 ); DWORD uStatus = 0; if ( ::read ( hPipe, &uStatus, sizeof(DWORD) )!=sizeof(DWORD) ) iExitCode = 3; // stopped demon crashed during stop else iExitCode = uStatus==1 ? 
0 : 2; // uStatus == 1 - AttributeSave - ok, other values - error } if ( hPipe!=-1 ) ::close ( hPipe ); if ( iPipeCreated!=-1 ) ::unlink ( sPipeName.cstr() ); exit ( iExitCode ); #endif } //////////////////////////////// // query running searchd status //////////////////////////////// if ( bOptStatus ) { QueryStatus ( hSearchdpre("listen") ); exit ( 0 ); } ///////////////////// // configure searchd ///////////////////// ConfigureSearchd ( hConf, bOptPIDFile ); g_bWatchdog = hSearchdpre.GetInt ( "watchdog", g_bWatchdog )!=0; if ( hSearchdpre("workers") ) { if ( hSearchdpre["workers"]=="none" ) g_eWorkers = MPM_NONE; else if ( hSearchdpre["workers"]=="fork" ) g_eWorkers = MPM_FORK; else if ( hSearchdpre["workers"]=="prefork" ) g_eWorkers = MPM_PREFORK; else if ( hSearchdpre["workers"]=="threads" ) g_eWorkers = MPM_THREADS; else sphFatal ( "unknown workers=%s value", hSearchdpre["workers"].cstr() ); } #if USE_WINDOWS if ( g_eWorkers==MPM_FORK || g_eWorkers==MPM_PREFORK ) sphFatal ( "workers=fork and workers=prefork are not supported on Windows" ); #endif if ( g_iMaxPacketSize<128*1024 || g_iMaxPacketSize>128*1024*1024 ) sphFatal ( "max_packet_size out of bounds (128K..128M)" ); if ( g_iMaxFilters<1 || g_iMaxFilters>10240 ) sphFatal ( "max_filters out of bounds (1..10240)" ); if ( g_iMaxFilterValues<1 || g_iMaxFilterValues>10485760 ) sphFatal ( "max_filter_values out of bounds (1..10485760)" ); bool bVisualLoad = true; bool bWatched = false; #if !USE_WINDOWS // Let us start watchdog right now, on foreground first. int iDevNull = open ( "/dev/null", O_RDWR ); if ( g_bWatchdog && g_eWorkers==MPM_THREADS && !g_bOptNoDetach ) { bWatched = true; if ( !g_bOptNoLock ) OpenDaemonLog ( hConf["searchd"]["searchd"] ); bVisualLoad = SetWatchDog ( iDevNull ); close ( g_iLogFile ); // just the 'IT Happens' magic - switch off, then on. OpenDaemonLog ( hConf["searchd"]["searchd"] ); } #endif // create the pid if ( bOptPIDFile ) { g_sPidFile = hSearchdpre["pid_file"].cstr(); g_iPidFD = ::open ( g_sPidFile, O_CREAT | O_WRONLY, S_IREAD | S_IWRITE ); if ( g_iPidFD<0 ) sphFatal ( "failed to create pid file '%s': %s", g_sPidFile, strerror(errno) ); } if ( bOptPIDFile && !sphLockEx ( g_iPidFD, false ) ) sphFatal ( "failed to lock pid file '%s': %s (searchd already running?)", g_sPidFile, strerror(errno) ); if ( bWatched && !bVisualLoad && CheckConfigChanges() ) { // reparse the config file sphInfo ( "Reloading the config" ); if ( !cp.ReParse ( g_sConfigFile.cstr () ) ) sphFatal ( "failed to parse config file '%s'", g_sConfigFile.cstr () ); sphInfo ( "Reconfigure the daemon" ); ConfigureSearchd ( hConf, bOptPIDFile ); // reinit the arena const CSphConfigSection & hSearchd = hConf["searchd"]["searchd"]; sphInfo ( "Reload the indexes" ); const char * sArenaError = sphArenaInit ( hSearchd.GetSize ( "mva_updates_pool", MVA_UPDATES_POOL ) ); if ( sArenaError ) sphWarning ( "process shared mutex unsupported, MVA update disabled ( %s )", sArenaError ); // reload all the indexes ConfigureAndPreload ( hConf, sOptIndex ); } // hSearchdpre might be dead if we reloaded the config. 
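// --------------------------------------------------------------------------
// Illustrative sketch, not part of the original searchd.cpp: how a wrapper or
// supervisor might interpret the --stopwait exit codes produced by the stop
// logic above (0 = clean stop with attribute save, 1 = the wait fifo could not
// be created or opened so searchd did not wait, 2 = the daemon reported an
// error while stopping, 3 = the daemon died without writing a status DWORD).
// The helper name below is hypothetical.
// --------------------------------------------------------------------------
static const char * ExplainStopWaitCode ( int iExitCode )
{
	switch ( iExitCode )
	{
		case 0:		return "stopped cleanly, attribute save OK";
		case 1:		return "could not create or open the wait fifo; did not wait for shutdown";
		case 2:		return "daemon reported an error while stopping";
		case 3:		return "daemon crashed during stop, no status was written";
		default:	return "unexpected exit code";
	}
}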
const CSphConfigSection & hSearchd = hConf["searchd"]["searchd"]; ////////////////////////////////////////////////// // shared stuff (perf counters, flushing) startup ////////////////////////////////////////////////// g_pStats = InitSharedBuffer ( g_tStatsBuffer, 1 ); g_pFlush = InitSharedBuffer ( g_tFlushBuffer, 1 ); g_pStats->m_uStarted = (DWORD)time(NULL); if ( g_eWorkers==MPM_PREFORK ) g_pConnID = (int*) InitSharedBuffer ( g_dConnID, sizeof(g_iConnID) ); if ( g_eWorkers==MPM_THREADS ) { if ( !sphThreadKeyCreate ( &g_tConnKey ) ) sphFatal ( "failed to create TLS for connection ID" ); // for simplicity, UDFs are going to be available in threaded mode only for now sphUDFInit ( hSearchd.GetStr ( "plugin_dir" ) ); } //////////////////// // network startup //////////////////// Listener_t tListener; tListener.m_eProto = PROTO_SPHINX; // command line arguments override config (but only in --console) if ( bOptListen ) { AddListener ( sOptListen ); } else if ( bOptPort ) { tListener.m_iSock = sphCreateInetSocket ( htonl ( INADDR_ANY ), iOptPort ); g_dListeners.Add ( tListener ); } else { // listen directives in configuration file for ( CSphVariant * v = hSearchd("listen"); v; v = v->m_pNext ) AddListener ( *v ); // handle deprecated directives if ( hSearchd("port") ) { DWORD uAddr = hSearchd.Exists("address") ? sphGetAddress ( hSearchd["address"].cstr(), GETADDR_STRICT ) : htonl ( INADDR_ANY ); int iPort = hSearchd["port"].intval(); CheckPort(iPort); tListener.m_iSock = sphCreateInetSocket ( uAddr, iPort ); g_dListeners.Add ( tListener ); } // still nothing? default is to listen on our two ports if ( !g_dListeners.GetLength() ) { tListener.m_iSock = sphCreateInetSocket ( htonl ( INADDR_ANY ), SPHINXAPI_PORT ); tListener.m_eProto = PROTO_SPHINX; g_dListeners.Add ( tListener ); tListener.m_iSock = sphCreateInetSocket ( htonl ( INADDR_ANY ), SPHINXQL_PORT ); tListener.m_eProto = PROTO_MYSQL41; g_dListeners.Add ( tListener ); } } #if !USE_WINDOWS // reserve an fd for clients g_iClientFD = dup ( iDevNull ); #endif g_pIndexes = new IndexHash_c(); ////////////////////// // build indexes hash ////////////////////// // setup mva updates arena here, since we could have saved persistent mva updates const char * sArenaError = sphArenaInit ( hSearchd.GetSize ( "mva_updates_pool", MVA_UPDATES_POOL ) ); if ( sArenaError ) sphWarning ( "process shared mutex unsupported, MVA update disabled ( %s )", sArenaError ); // configure and preload ConfigureAndPreload ( hConf, sOptIndex ); /////////// // startup /////////// if ( g_eWorkers==MPM_THREADS ) sphRTInit(); // handle my signals SetSignalHandlers (); // create logs if ( !g_bOptNoLock ) { // create log const char * sLog = "searchd.log"; if ( hSearchd.Exists ( "log" ) ) { if ( hSearchd["log"]=="syslog" ) { #if !USE_SYSLOG if ( g_iLogFile<0 ) { g_iLogFile = STDOUT_FILENO; sphWarning ( "failed to use syslog for logging. You have to reconfigure --with-syslog and rebuild the daemon!" ); sphInfo ( "will use default file 'searchd.log' for logging." 
); } #else g_bLogSyslog = true; #endif } else { sLog = hSearchd["log"].cstr(); } } umask ( 066 ); if ( g_iLogFile!=STDOUT_FILENO ) { close ( g_iLogFile ); g_iLogFile = STDOUT_FILENO; } if ( !g_bLogSyslog ) { g_iLogFile = open ( sLog, O_CREAT | O_RDWR | O_APPEND, S_IREAD | S_IWRITE ); if ( g_iLogFile<0 ) { g_iLogFile = STDOUT_FILENO; sphFatal ( "failed to open log file '%s': %s", sLog, strerror(errno) ); } } g_sLogFile = sLog; g_bLogTty = isatty ( g_iLogFile )!=0; // create query log if required if ( hSearchd.Exists ( "query_log" ) ) { if ( hSearchd["query_log"]=="syslog" ) g_bQuerySyslog = true; else { g_iQueryLogFile = open ( hSearchd["query_log"].cstr(), O_CREAT | O_RDWR | O_APPEND, S_IREAD | S_IWRITE ); if ( g_iQueryLogFile<0 ) sphFatal ( "failed to open query log file '%s': %s", hSearchd["query_log"].cstr(), strerror(errno) ); } g_sQueryLogFile = hSearchd["query_log"].cstr(); } } if ( !strcmp ( hSearchd.GetStr ( "query_log_format", "plain" ), "sphinxql" ) ) g_eLogFormat = LOG_FORMAT_SPHINXQL; // prepare to detach if ( !g_bOptNoDetach ) { #if !USE_WINDOWS if ( !bWatched || bVisualLoad ) { close ( STDIN_FILENO ); close ( STDOUT_FILENO ); close ( STDERR_FILENO ); dup2 ( iDevNull, STDIN_FILENO ); dup2 ( iDevNull, STDOUT_FILENO ); dup2 ( iDevNull, STDERR_FILENO ); } #endif ReleaseTTYFlag(); // explicitly unlock everything in parent immediately before fork // // there's a race in case another instance is started before // child re-acquires all locks; but let's hope that's rare if ( !bWatched ) { for ( IndexHashIterator_c it ( g_pIndexes ); it.Next(); ) { ServedIndex_t & tServed = it.Get(); if ( tServed.m_bEnabled ) tServed.m_pIndex->Unlock(); } } } if ( bOptPIDFile && !bWatched ) sphLockUn ( g_iPidFD ); #if !USE_WINDOWS if ( !g_bOptNoDetach && !bWatched ) { switch ( fork() ) { case -1: // error Shutdown (); sphFatal ( "fork() failed (reason: %s)", strerror ( errno ) ); exit ( 1 ); case 0: // daemonized child break; default: // tty-controlled parent sphSetProcessInfo ( false ); exit ( 0 ); } } #endif if ( g_eWorkers==MPM_THREADS ) sphRTConfigure ( hSearchd, bTestMode ); if ( bOptPIDFile ) { #if !USE_WINDOWS // re-lock pid // FIXME! 
there's a potential race here if ( !sphLockEx ( g_iPidFD, true ) ) sphFatal ( "failed to re-lock pid file '%s': %s", g_sPidFile, strerror(errno) ); #endif char sPid[16]; snprintf ( sPid, sizeof(sPid), "%d\n", (int)getpid() ); int iPidLen = strlen(sPid); lseek ( g_iPidFD, 0, SEEK_SET ); if ( !sphWrite ( g_iPidFD, sPid, iPidLen ) ) sphFatal ( "failed to write to pid file '%s' (errno=%d, msg=%s)", g_sPidFile, errno, strerror(errno) ); if ( ::ftruncate ( g_iPidFD, iPidLen ) ) sphFatal ( "failed to truncate pid file '%s' (errno=%d, msg=%s)", g_sPidFile, errno, strerror(errno) ); } #if USE_WINDOWS SetConsoleCtrlHandler ( CtrlHandler, TRUE ); #endif if ( !g_bOptNoDetach && !bWatched ) { // re-lock indexes for ( IndexHashIterator_c it ( g_pIndexes ); it.Next(); ) { ServedIndex_t & tServed = it.Get(); if ( !tServed.m_bEnabled ) continue; // obtain exclusive lock if ( !tServed.m_pIndex->Lock() ) { sphWarning ( "index '%s': lock: %s; INDEX UNUSABLE", it.GetKey().cstr(), tServed.m_pIndex->GetLastError().cstr() ); tServed.m_bEnabled = false; continue; } // try to mlock again because mlock does not survive over fork if ( !tServed.m_pIndex->Mlock() ) { sphWarning ( "index '%s': %s", it.GetKey().cstr(), tServed.m_pIndex->GetLastError().cstr() ); } } } // if we're running in console mode, dump queries to tty as well if ( g_bOptNoLock && hSearchd ( "query_log" ) ) { g_bQuerySyslog = false; g_bLogSyslog = false; g_iQueryLogFile = g_iLogFile; } #if USE_SYSLOG if ( g_bLogSyslog || g_bQuerySyslog ) { openlog ( "searchd", LOG_PID, LOG_DAEMON ); } #else if ( g_bQuerySyslog ) sphFatal ( "Wrong query_log file! You have to reconfigure --with-syslog and rebuild daemon if you want to use syslog there." ); #endif ///////////////// // serve clients ///////////////// g_bHeadDaemon = true; #if USE_WINDOWS if ( g_bService ) MySetServiceStatus ( SERVICE_RUNNING, NO_ERROR, 0 ); #endif sphSetReadBuffers ( hSearchd.GetSize ( "read_buffer", 0 ), hSearchd.GetSize ( "read_unhinted", 0 ) ); CSphProcessSharedMutex * pAcceptMutex = NULL; #if !USE_WINDOWS if ( g_eWorkers==MPM_PREFORK ) { pAcceptMutex = new CSphProcessSharedMutex(); if ( !pAcceptMutex ) sphFatal ( "failed to create process-shared mutex" ); if ( !pAcceptMutex->GetError() ) { while ( g_dChildren.GetLength() < g_iPreforkChildren ) { if ( PreforkChild()==0 ) // child process? 
break from here, go work break; } g_iRotationThrottle = hSearchd.GetInt ( "prefork_rotation_throttle", 0 ); } else { sphWarning ( "process shared mutex unsupported, switching to 'workers = fork' ( %s )", pAcceptMutex->GetError() ); g_eWorkers = MPM_FORK; SafeDelete ( pAcceptMutex ); } } #endif // in threaded mode, create a dedicated rotation thread if ( g_eWorkers==MPM_THREADS ) { if ( !g_tThdMutex.Init() || !g_tRotateQueueMutex.Init() || !g_tRotateConfigMutex.Init() ) sphDie ( "failed to init mutex" ); if ( g_bSeamlessRotate && !sphThreadCreate ( &g_tRotateThread, RotationThreadFunc , 0 ) ) sphDie ( "failed to create rotation thread" ); // reserving max to keep memory consumption constant between frames g_dThd.Reserve ( g_iMaxChildren*2 ); g_tDistLock.Init(); g_tFlushMutex.Init(); } // replay last binlog SmallStringHash_T hIndexes; for ( IndexHashIterator_c it ( g_pIndexes ); it.Next(); ) if ( it.Get().m_bEnabled ) hIndexes.Add ( it.Get().m_pIndex, it.GetKey() ); if ( g_eWorkers==MPM_THREADS ) sphReplayBinlog ( hIndexes, uReplayFlags, DumpMemStat ); if ( !g_bOptNoDetach ) g_bLogStdout = false; // create flush-rt thread if ( g_eWorkers==MPM_THREADS && !sphThreadCreate ( &g_tRtFlushThread, RtFlushThreadFunc, 0 ) ) sphDie ( "failed to create rt-flush thread" ); // almost ready, time to start listening int iBacklog = hSearchd.GetInt ( "listen_backlog", SEARCHD_BACKLOG ); ARRAY_FOREACH ( i, g_dListeners ) if ( listen ( g_dListeners[i].m_iSock, iBacklog )==-1 ) sphFatal ( "listen() failed: %s", sphSockError() ); sphInfo ( "accepting connections" ); for ( ;; ) { SphCrashLogger_c::SetupTimePID(); if ( !g_bHeadDaemon && pAcceptMutex ) TickPreforked ( pAcceptMutex ); else TickHead ( pAcceptMutex ); } } // NOLINT function length bool DieCallback ( const char * sMessage ) { sphLogFatal ( "%s", sMessage ); return false; // caller should not log } extern UservarIntSet_c * ( *g_pUservarsHook )( const CSphString & sUservar ); UservarIntSet_c * UservarsHook ( const CSphString & sUservar ) { CSphScopedLock tLock ( g_tUservarsMutex ); Uservar_t * pVar = g_hUservars ( sUservar ); if ( !pVar ) return NULL; assert ( pVar->m_eType==USERVAR_INT_SET ); pVar->m_pVal->AddRef(); return pVar->m_pVal; } int main ( int argc, char **argv ) { // threads should be initialized before memory allocations char cTopOfMainStack; sphThreadInit(); MemorizeStack ( &cTopOfMainStack ); sphSetDieCallback ( DieCallback ); sphSetLogger ( sphLog ); g_pUservarsHook = UservarsHook; sphCollationInit (); #if USE_WINDOWS int iNameIndex = -1; for ( int i=1; i=0 ) g_sServiceName = g_dArgs[iNameIndex].cstr (); SERVICE_TABLE_ENTRY dDispatcherTable[] = { { (LPSTR) g_sServiceName, (LPSERVICE_MAIN_FUNCTION)ServiceMain }, { NULL, NULL } }; if ( !StartServiceCtrlDispatcher ( dDispatcherTable ) ) sphFatal ( "StartServiceCtrlDispatcher() failed: %s", WinErrorInfo() ); } else #endif return ServiceMain ( argc, argv ); } // // $Id: searchd.cpp 3127 2012-03-01 00:26:39Z shodan $ // sphinx-2.0.4-release/src/svnxrev.pl0000644000176700017710000000334311216514107016603 0ustar deogardeogar#!/usr/bin/perl # svnxrev, an utility to extract SVN working copy revision information # from svn info --xml output # # svnversion is nice, but lacks options to extract tag/branch name, # which is something i would really like to mention in builds # # usage: svn info --xml WORKING-COPY-ROOT | perl svnxrev.pl [OUTPUT-HEADER-NAME] $UTILITY = "svnxrev"; # that's my name @PROJECTS = ( "sphinx", "sphinxsearh" ); # that's expected project name(s) $PREFIX = "SPH"; # that's the prefix 
for defines $OUTPUT = "sphinxversion.h"; # that's where i will write the result $OUTPUT = $ARGV[0] if ( $#ARGV==0 ); undef $/; $info = ; die ( "$UTILITY: failed to extract repository url" ) if (!( $info =~ /(.*?)<\/url>/ )); $url = $1; die ( "$UTILITY: failed to extract commit revision" ) if (!( $info =~ /; close ( FH ); if ( $current eq $result ) { print "$UTILITY: build tag $tagrev unchanged\n"; exit 0; } } open ( FH, "+>$OUTPUT" ) or die ( "$UTILITY: failed to write output file (name=$OUTPUT)" ); print FH $result; close ( FH ); print "$UTILITY: extracted build tag: $tagrev\n"; sphinx-2.0.4-release/src/md5.h0000644000176700017710000000647610454516162015411 0ustar deogardeogar/* Copyright (C) 1999, 2002 Aladdin Enterprises. All rights reserved. This software is provided 'as-is', without any express or implied warranty. In no event will the authors be held liable for any damages arising from the use of this software. Permission is granted to anyone to use this software for any purpose, including commercial applications, and to alter it and redistribute it freely, subject to the following restrictions: 1. The origin of this software must not be misrepresented; you must not claim that you wrote the original software. If you use this software in a product, an acknowledgment in the product documentation would be appreciated but is not required. 2. Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software. 3. This notice may not be removed or altered from any source distribution. L. Peter Deutsch ghost@aladdin.com */ /* $Id: md5.h 374 2006-07-10 18:29:06Z shodan $ */ /* Independent implementation of MD5 (RFC 1321). This code implements the MD5 Algorithm defined in RFC 1321, whose text is available at http://www.ietf.org/rfc/rfc1321.txt The code is derived from the text of the RFC, including the test suite (section A.5) but excluding the rest of Appendix A. It does not include any code or documentation that is identified in the RFC as being copyrighted. The original and principal author of md5.h is L. Peter Deutsch . Other authors are noted in the change history that follows (in reverse chronological order): 2002-04-13 lpd Removed support for non-ANSI compilers; removed references to Ghostscript; clarified derivation from RFC 1321; now handles byte order either statically or dynamically. 1999-11-04 lpd Edited comments slightly for automatic TOC extraction. 1999-10-18 lpd Fixed typo in header comment (ansi2knr rather than md5); added conditionalization for C++ compilation from Martin Purschke . 1999-05-03 lpd Original version. */ #ifndef md5_INCLUDED # define md5_INCLUDED /* * This package supports both compile-time and run-time determination of CPU * byte order. If ARCH_IS_BIG_ENDIAN is defined as 0, the code will be * compiled to run only on little-endian CPUs; if ARCH_IS_BIG_ENDIAN is * defined as non-zero, the code will be compiled to run only on big-endian * CPUs; if ARCH_IS_BIG_ENDIAN is not defined, the code will be compiled to * run on either big- or little-endian CPUs, but will run slightly less * efficiently on either one than if ARCH_IS_BIG_ENDIAN is defined. */ typedef unsigned char md5_byte_t; /* 8-bit byte */ typedef unsigned int md5_word_t; /* 32-bit word */ /* Define the state of the MD5 Algorithm. 
*/ typedef struct md5_state_s { md5_word_t count[2]; /* message length in bits, lsw first */ md5_word_t abcd[4]; /* digest buffer */ md5_byte_t buf[64]; /* accumulate block */ } md5_state_t; #ifdef __cplusplus extern "C" { #endif /* Initialize the algorithm. */ void md5_init(md5_state_t *pms); /* Append a string to the message. */ void md5_append(md5_state_t *pms, const md5_byte_t *data, int nbytes); /* Finish the message and return the digest. */ void md5_finish(md5_state_t *pms, md5_byte_t digest[16]); #ifdef __cplusplus } /* end extern "C" */ #endif #endif /* md5_INCLUDED */ sphinx-2.0.4-release/src/sphinxstd.h0000644000176700017710000015035211723621334016737 0ustar deogardeogar// // $Id: sphinxstd.h 3129 2012-03-01 07:18:52Z tomat $ // // // Copyright (c) 2001-2012, Andrew Aksyonoff // Copyright (c) 2008-2012, Sphinx Technologies Inc // All rights reserved // // This program is free software; you can redistribute it and/or modify // it under the terms of the GNU General Public License. You should have // received a copy of the GPL license along with this program; if you // did not, you can find it at http://www.gnu.org/ // #ifndef _sphinxstd_ #define _sphinxstd_ #if _MSC_VER>=1400 #define _CRT_SECURE_NO_DEPRECATE 1 #define _CRT_NONSTDC_NO_DEPRECATE 1 #endif #if (_MSC_VER>=1000) && !defined(__midl) && defined(_PREFAST_) typedef int __declspec("SAL_nokernel") __declspec("SAL_nodriver") __prefast_flag_kernel_driver_mode; #endif #if defined(_MSC_VER) && (_MSC_VER<1400) #define vsnprintf _vsnprintf #endif #ifndef __GNUC__ #define __attribute__(x) #endif #if HAVE_CONFIG_H #include "config.h" #endif #include #include #include #include #include #include // for 64-bit types #if HAVE_STDINT_H #include #endif #if HAVE_INTTYPES_H #define __STDC_FORMAT_MACROS #include #endif #if HAVE_SYS_TYPES_H #include #endif #if !USE_WINDOWS #include #include #include #endif ///////////////////////////////////////////////////////////////////////////// // COMPILE-TIME CHECKS ///////////////////////////////////////////////////////////////////////////// #define STATIC_ASSERT(_cond,_name) typedef char STATIC_ASSERT_FAILED_ ## _name [ (_cond) ? 1 : -1 ] #define STATIC_SIZE_ASSERT(_type,_size) STATIC_ASSERT ( sizeof(_type)==_size, _type ## _MUST_BE_ ## _size ## _BYTES ) #ifndef __analysis_assume #define __analysis_assume(_arg) #endif ///////////////////////////////////////////////////////////////////////////// // PORTABILITY ///////////////////////////////////////////////////////////////////////////// #if _WIN32 #define WIN32_LEAN_AND_MEAN #include #define strcasecmp strcmpi #define strncasecmp _strnicmp #define snprintf _snprintf #define strtoll _strtoi64 #define strtoull _strtoui64 #else #if USE_ODBC // UnixODBC compatible DWORD #if defined(__alpha) || defined(__sparcv9) || defined(__LP64__) || (defined(__HOS_AIX__) && defined(_LP64)) || defined(__APPLE__) typedef unsigned int DWORD; #else typedef unsigned long DWORD; #endif #else // default DWORD typedef unsigned int DWORD; #endif // USE_ODBC typedef unsigned short WORD; typedef unsigned char BYTE; #endif // _WIN32 ///////////////////////////////////////////////////////////////////////////// // 64-BIT INTEGER TYPES AND MACROS ///////////////////////////////////////////////////////////////////////////// #if defined(U64C) || defined(I64C) #error "Internal 64-bit integer macros already defined." 
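// --------------------------------------------------------------------------
// Illustrative usage sketch, not part of the original sources: hashing a C
// string with the md5_init()/md5_append()/md5_finish() API declared in md5.h
// above, and rendering the 16-byte digest as hex. Assumes <stdio.h> and
// <string.h>; the function name is hypothetical.
// --------------------------------------------------------------------------
static void Md5HexExample ( const char * sInput, char sHexOut[33] )
{
	md5_state_t tState;
	md5_byte_t dDigest[16];

	md5_init ( &tState );
	md5_append ( &tState, (const md5_byte_t *) sInput, (int) strlen ( sInput ) );
	md5_finish ( &tState, dDigest );

	for ( int i=0; i<16; i++ )
		snprintf ( sHexOut + i*2, 3, "%02x", dDigest[i] );
}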
#endif #if !HAVE_STDINT_H #if defined(_MSC_VER) typedef __int64 int64_t; typedef unsigned __int64 uint64_t; #define U64C(v) v ## UI64 #define I64C(v) v ## I64 #define PRIu64 "I64d" #define PRIi64 "I64d" #else // !defined(_MSC_VER) typedef long long int64_t; typedef unsigned long long uint64_t; #endif // !defined(_MSC_VER) #endif // no stdint.h // if platform-specific macros were not supplied, use common defaults #ifndef U64C #define U64C(v) v ## ULL #endif #ifndef I64C #define I64C(v) v ## LL #endif #ifndef PRIu64 #define PRIu64 "llu" #endif #ifndef PRIi64 #define PRIi64 "lld" #endif #define UINT64_FMT "%" PRIu64 #define INT64_FMT "%" PRIi64 #ifndef UINT64_MAX #define UINT64_MAX U64C(0xffffffffffffffff) #endif #ifndef INT64_MAX #define INT64_MAX I64C(0x7fffffffffffffff) #endif STATIC_SIZE_ASSERT ( uint64_t, 8 ); STATIC_SIZE_ASSERT ( int64_t, 8 ); ///////////////////////////////////////////////////////////////////////////// // MEMORY MANAGEMENT ///////////////////////////////////////////////////////////////////////////// #define SPH_DEBUG_LEAKS 0 #define SPH_ALLOCS_PROFILER 0 #if SPH_DEBUG_LEAKS || SPH_ALLOCS_PROFILER /// debug new that tracks memory leaks void * operator new ( size_t iSize, const char * sFile, int iLine ); /// debug new that tracks memory leaks void * operator new [] ( size_t iSize, const char * sFile, int iLine ); /// get current allocs count int sphAllocsCount (); /// total allocated bytes int64_t sphAllocBytes (); /// get last alloc id int sphAllocsLastID (); /// dump all allocs since given id void sphAllocsDump ( int iFile, int iSinceID ); /// dump stats to stdout void sphAllocsStats (); /// check all existing allocs; raises assertion failure in cases of errors void sphAllocsCheck (); void sphMemStatDump ( int iFD ); void sphMemStatMMapAdd ( int64_t iSize ); void sphMemStatMMapDel ( int64_t iSize ); #undef new #define new new(__FILE__,__LINE__) #endif // SPH_DEBUG_LEAKS || SPH_ALLOCS_PROFILER /// delete for my new void operator delete ( void * pPtr ); /// delete for my new void operator delete [] ( void * pPtr ); ///////////////////////////////////////////////////////////////////////////// // HELPERS ///////////////////////////////////////////////////////////////////////////// inline int sphBitCount ( DWORD n ) { // MIT HACKMEM count // works for 32-bit numbers only // fix last line for 64-bit numbers register DWORD tmp; tmp = n - ((n >> 1) & 033333333333) - ((n >> 2) & 011111111111); return ( (tmp + (tmp >> 3) ) & 030707070707) % 63; } typedef bool ( *SphDieCallback_t ) ( const char * ); /// crash with an error message void sphDie ( const char * sMessage, ... 
) __attribute__ ( ( format ( printf, 1, 2 ) ) ); /// setup a callback function to call from sphDie() before exit /// if callback returns false, sphDie() will not log to stdout void sphSetDieCallback ( SphDieCallback_t pfDieCallback ); /// how much bits do we need for given int inline int sphLog2 ( uint64_t iValue ) { int iBits = 0; while ( iValue ) { iValue >>= 1; iBits++; } return iBits; } /// float vs dword conversion inline DWORD sphF2DW ( float f ) { union { float f; DWORD d; } u; u.f = f; return u.d; } /// dword vs float conversion inline float sphDW2F ( DWORD d ) { union { float f; DWORD d; } u; u.d = d; return u.f; } ////////////////////////////////////////////////////////////////////////// // RANDOM NUMBERS GENERATOR ////////////////////////////////////////////////////////////////////////// /// seed RNG void sphSrand ( DWORD uSeed ); /// auto-seed RNG based on time and PID void sphAutoSrand (); /// generate another random DWORD sphRand (); ///////////////////////////////////////////////////////////////////////////// // DEBUGGING ///////////////////////////////////////////////////////////////////////////// #if USE_WINDOWS #ifndef NDEBUG void sphAssert ( const char * sExpr, const char * sFile, int iLine ); #undef assert #define assert(_expr) (void)( (_expr) || ( sphAssert ( #_expr, __FILE__, __LINE__ ), 0 ) ) #endif // !NDEBUG #endif // USE_WINDOWS #ifndef NDEBUG #define Verify(_expr) assert(_expr) #else #define Verify(_expr) _expr #endif ///////////////////////////////////////////////////////////////////////////// // GENERICS ///////////////////////////////////////////////////////////////////////////// #define Min(a,b) ((a)<(b)?(a):(b)) #define Max(a,b) ((a)>(b)?(a):(b)) #define SafeDelete(_x) { if (_x) { delete (_x); (_x) = NULL; } } #define SafeDeleteArray(_x) { if (_x) { delete [] (_x); (_x) = NULL; } } #define SafeRelease(_x) { if (_x) { (_x)->Release(); (_x) = NULL; } } /// swap template < typename T > inline void Swap ( T & v1, T & v2 ) { T temp = v1; v1 = v2; v2 = temp; } /// prevent copy class ISphNoncopyable { public: ISphNoncopyable () {} private: ISphNoncopyable ( const ISphNoncopyable & ) {} const ISphNoncopyable & operator = ( const ISphNoncopyable & ) { return *this; } }; ////////////////////////////////////////////////////////////////////////////// /// generic comparator template < typename T > struct SphLess_T { inline bool IsLess ( const T & a, const T & b ) const { return a < b; } }; /// generic comparator template < typename T > struct SphGreater_T { inline bool IsLess ( const T & a, const T & b ) const { return b < a; } }; /// generic comparator template < typename T, typename C > struct SphMemberLess_T { const T C::* m_pMember; explicit SphMemberLess_T ( T C::* pMember ) : m_pMember ( pMember ) {} inline bool IsLess ( const C & a, const C & b ) const { return ( (&a)->*m_pMember ) < ( (&b)->*m_pMember ); } }; template < typename T, typename C > inline SphMemberLess_T sphMemberLess ( T C::* pMember ) { return SphMemberLess_T ( pMember ); } /// generic accessor template < typename T > struct SphAccessor_T { typedef T MEDIAN_TYPE; MEDIAN_TYPE & Key ( T * a ) const { return *a; } void CopyKey ( MEDIAN_TYPE * pMed, T * pVal ) const { *pMed = Key(pVal); } void Swap ( T * a, T * b ) const { ::Swap ( *a, *b ); } T * Add ( T * p, int i ) const { return p+i; } int Sub ( T * b, T * a ) const { return (int)(b-a); } }; /// heap sort helper template < typename T, typename U, typename V > void sphSiftDown ( T * pData, int iStart, int iEnd, U COMP, V ACC ) { for ( ;; ) { int 
iChild = iStart*2+1; if ( iChild>iEnd ) break; int iChild1 = iChild+1; if ( iChild1<=iEnd && COMP.IsLess ( ACC.Key ( ACC.Add ( pData, iChild ) ), ACC.Key ( ACC.Add ( pData, iChild1 ) ) ) ) iChild = iChild1; if ( COMP.IsLess ( ACC.Key ( ACC.Add ( pData, iChild ) ), ACC.Key ( ACC.Add ( pData, iStart ) ) ) ) return; ACC.Swap ( ACC.Add ( pData, iChild ), ACC.Add ( pData, iStart ) ); iStart = iChild; } } /// heap sort template < typename T, typename U, typename V > void sphHeapSort ( T * pData, int iCount, U COMP, V ACC ) { if ( !pData || iCount<=1 ) return; // build a max-heap, so that the largest element is root for ( int iStart=( iCount-2 )>>1; iStart>=0; iStart-- ) sphSiftDown ( pData, iStart, iCount-1, COMP, ACC ); // now keep popping root into the end of array for ( int iEnd=iCount-1; iEnd>0; ) { ACC.Swap ( pData, ACC.Add ( pData, iEnd ) ); sphSiftDown ( pData, 0, --iEnd, COMP, ACC ); } } /// generic sort template < typename T, typename U, typename V > void sphSort ( T * pData, int iCount, U COMP, V ACC ) { if ( iCount<2 ) return; typedef T * P; P st0[32], st1[32], a, b, i, j; typename V::MEDIAN_TYPE x; int k; const int SMALL_THRESH = 32; int iDepthLimit = sphLog2 ( iCount ); iDepthLimit = ( ( iDepthLimit<<2 ) + iDepthLimit ) >> 1; // x2.5 k = 1; st0[0] = pData; st1[0] = ACC.Add ( pData, iCount-1 ); while ( k ) { k--; i = a = st0[k]; j = b = st1[k]; // if quicksort fails on this data; switch to heapsort if ( !k ) { if ( !--iDepthLimit ) { sphHeapSort ( a, ACC.Sub ( b, a )+1, COMP, ACC ); return; } } // for tiny arrays, switch to insertion sort int iLen = ACC.Sub ( b, a ); if ( iLen<=SMALL_THRESH ) { for ( i=ACC.Add ( a, 1 ); i<=b; i=ACC.Add ( i, 1 ) ) { for ( j=i; j>a; ) { P j1 = ACC.Add ( j, -1 ); if ( COMP.IsLess ( ACC.Key(j1), ACC.Key(j) ) ) break; ACC.Swap ( j, j1 ); j = j1; } } continue; } ACC.CopyKey ( &x, ACC.Add ( a, iLen/2 ) ); while ( a=ACC.Sub ( b, i ) ) { if ( a void sphSort ( T * pData, int iCount, U COMP ) { sphSort ( pData, iCount, COMP, SphAccessor_T() ); } template < typename T > void sphSort ( T * pData, int iCount ) { sphSort ( pData, iCount, SphLess_T() ); } ////////////////////////////////////////////////////////////////////////// /// member functor, wraps object member access template < typename T, typename CLASS > struct SphMemberFunctor_T { const T CLASS::* m_pMember; explicit SphMemberFunctor_T ( T CLASS::* pMember ) : m_pMember ( pMember ) {} const T & operator () ( const CLASS & arg ) const { return (&arg)->*m_pMember; } inline bool IsLess ( const CLASS & a, const CLASS & b ) const { return (&a)->*m_pMember < (&b)->*m_pMember; } }; /// handy member functor generator template < typename T, typename CLASS > inline SphMemberFunctor_T < T, CLASS > bind ( T CLASS::* ptr ) { return SphMemberFunctor_T < T, CLASS > ( ptr ); } /// identity functor template < typename T > struct SphIdentityFunctor_T { const T & operator () ( const T & arg ) const { return arg; } }; ////////////////////////////////////////////////////////////////////////// /// generic binary search template < typename T, typename U, typename PRED > T * sphBinarySearch ( T * pStart, T * pEnd, const PRED & tPred, U tRef ) { if ( !pStart || pEnd1 ) { if ( tReftPred(*pEnd) ) break; assert ( tRef>tPred(*pStart) ); assert ( tRef T * sphBinarySearch ( T * pStart, T * pEnd, T & tRef ) { return sphBinarySearch ( pStart, pEnd, SphIdentityFunctor_T(), tRef ); } ////////////////////////////////////////////////////////////////////////// /// default vector policy /// grow 2x and copy using assignment operator on resize 
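// --------------------------------------------------------------------------
// Illustrative sketch, not part of the original header: sorting a plain array
// with the generic sphSort() defined above using a member comparator from
// sphMemberLess(), then locating an element with sphBinarySearch() and a
// bind() accessor. Hit_t and the function name are hypothetical.
// --------------------------------------------------------------------------
struct Hit_t
{
	int		m_iDocID;
	int		m_iWeight;
};

static const Hit_t * FindHitExample ( Hit_t * pHits, int iCount, int iDocID )
{
	if ( !pHits || iCount<=0 )
		return NULL;

	// ascending sort by document id
	sphSort ( pHits, iCount, sphMemberLess ( &Hit_t::m_iDocID ) );

	// binary search over the sorted [first..last] range, keyed by the same member
	return sphBinarySearch ( pHits, pHits+iCount-1, bind ( &Hit_t::m_iDocID ), iDocID );
}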
template < typename T > class CSphVectorPolicy { protected: static const int MAGIC_INITIAL_LIMIT = 8; public: static inline void Copy ( T * pNew, T * pData, int iLength ) { for ( int i=0; i > class CSphVector { public: /// ctor CSphVector () : m_iLength ( 0 ) , m_iLimit ( 0 ) , m_pData ( NULL ) { } /// ctor with initial size CSphVector ( int iCount ) : m_iLength ( 0 ) , m_iLimit ( 0 ) , m_pData ( NULL ) { Resize ( iCount ); } /// copy ctor CSphVector ( const CSphVector & rhs ) { m_iLength = 0; m_iLimit = 0; m_pData = NULL; *this = rhs; } /// dtor ~CSphVector () { Reset (); } /// add entry T & Add () { if ( m_iLength>=m_iLimit ) Reserve ( 1+m_iLength ); return m_pData [ m_iLength++ ]; } /// add entry void Add ( const T & tValue ) { if ( m_iLength>=m_iLimit ) Reserve ( 1+m_iLength ); m_pData [ m_iLength++ ] = tValue; } /// add unique entry (ie. do not add if equal to last one) void AddUnique ( const T & tValue ) { if ( m_iLength>=m_iLimit ) Reserve ( 1+m_iLength ); if ( m_iLength==0 || m_pData[m_iLength-1]!=tValue ) m_pData [ m_iLength++ ] = tValue; } /// get first entry ptr T * Begin () { return m_iLength ? m_pData : NULL; } /// get first entry ptr const T * Begin () const { return m_iLength ? m_pData : NULL; } /// get last entry T & Last () { return (*this) [ m_iLength-1 ]; } /// get last entry const T & Last () const { return (*this) [ m_iLength-1 ]; } /// remove element by index void Remove ( int iIndex ) { assert ( iIndex>=0 && iIndex=0 && iIndex0 ); return m_pData[--m_iLength]; } public: /// grow enough to hold that much entries, if needed, but do *not* change the length void Reserve ( int iNewLimit ) { // check that we really need to be called assert ( iNewLimit>=0 ); if ( iNewLimit<=m_iLimit ) return; // calc new limit m_iLimit = POLICY::Relimit ( m_iLimit, iNewLimit ); // realloc // FIXME! 
optimize for POD case T * pNew = new T [ m_iLimit ]; __analysis_assume ( m_iLength<=m_iLimit ); POLICY::Copy ( pNew, m_pData, m_iLength ); delete [] m_pData; m_pData = pNew; } /// resize void Resize ( int iNewLength ) { if ( (unsigned int)iNewLength>=(unsigned int)m_iLength ) Reserve ( iNewLength ); m_iLength = iNewLength; } /// reset void Reset () { m_iLength = 0; m_iLimit = 0; SafeDeleteArray ( m_pData ); } /// query current length inline int GetLength () const { return m_iLength; } /// query current reserved size inline int GetLimit () const { return m_iLimit; } public: /// filter unique void Uniq () { if ( !m_iLength ) return; Sort (); int iSrc = 0, iDst = 0; while ( iSrc0 && m_pData[iDst-1]==m_pData[iSrc] ) iSrc++; else m_pData[iDst++] = m_pData[iSrc++]; } Resize ( iDst ); } /// default sort void Sort ( int iStart=0, int iEnd=-1 ) { Sort ( SphLess_T(), iStart, iEnd ); } /// default reverse sort void RSort ( int iStart=0, int iEnd=-1 ) { Sort ( SphGreater_T(), iStart, iEnd ); } /// generic sort template < typename F > void Sort ( F COMP, int iStart=0, int iEnd=-1 ) { if ( m_iLength<2 ) return; if ( iStart<0 ) iStart = m_iLength+iStart; if ( iEnd<0 ) iEnd = m_iLength+iEnd; assert ( iStart<=iEnd ); sphSort ( m_pData+iStart, iEnd-iStart+1, COMP ); } /// accessor by forward index const T & operator [] ( int iIndex ) const { assert ( iIndex>=0 && iIndex=0 && iIndex & operator = ( const CSphVector & rhs ) { Reset (); m_iLength = rhs.m_iLength; m_iLimit = rhs.m_iLimit; m_pData = new T [ m_iLimit ]; __analysis_assume ( m_iLength<=m_iLimit ); for ( int i=0; i & rhs ) { Swap ( m_iLength, rhs.m_iLength ); Swap ( m_iLimit, rhs.m_iLimit ); Swap ( m_pData, rhs.m_pData ); } /// leak T * LeakData () { T * pData = m_pData; m_pData = NULL; Reset(); return pData; } /// generic binary search /// assumes that the array is sorted in ascending order template < typename U, typename PRED > const T * BinarySearch ( const PRED & tPred, U tRef ) const { return sphBinarySearch ( m_pData, m_pData+m_iLength-1, tPred, tRef ); } /// generic binary search /// assumes that the array is sorted in ascending order const T * BinarySearch ( T tRef ) const { return sphBinarySearch ( m_pData, m_pData+m_iLength-1, tRef ); } /// generic linear search bool Contains ( T tRef ) const { for ( int i=0; i=m_iLimit ) Reserve ( m_iLength+1 ); memmove ( m_pData+iIndex+1, m_pData+iIndex, ( m_iLength++-iIndex ) * sizeof tValue ); memset ( m_pData+iIndex, 0, sizeof tValue ); m_pData[iIndex] = tValue; } protected: int m_iLength; ///< entries actually used int m_iLimit; ///< entries allocated T * m_pData; ///< entries }; #define ARRAY_FOREACH(_index,_array) \ for ( int _index=0; _index<_array.GetLength(); _index++ ) #define ARRAY_FOREACH_COND(_index,_array,_cond) \ for ( int _index=0; _index<_array.GetLength() && (_cond); _index++ ) #define ARRAY_ANY(_res,_array,_cond) \ false; \ for ( int _any=0; _any<_array.GetLength() && !_res; _any++ ) \ _res |= ( _cond ); \ #define ARRAY_ALL(_res,_array,_cond) \ true; \ for ( int _all=0; _all<_array.GetLength() && _res; _all++ ) \ _res &= ( _cond ); \ ////////////////////////////////////////////////////////////////////////// /// swap-vector policy (for non-copyable classes) /// use Swap() instead of assignment on resize template < typename T > class CSphSwapVectorPolicy : public CSphVectorPolicy { public: static inline void Copy ( T * pNew, T * pData, int iLength ) { for ( int i=0; i class CSphTightVectorPolicy : public CSphVectorPolicy { protected: static const int SLOW_GROW_TRESHOLD = 1024; public: 
static inline int Relimit ( int iLimit, int iNewLimit ) { if ( !iLimit ) iLimit = CSphVectorPolicy::MAGIC_INITIAL_LIMIT; while ( iLimit class CSphSwapVector : public CSphVector < T, CSphSwapVectorPolicy > { }; /// tight-vector template < typename T > class CSphTightVector : public CSphVector < T, CSphTightVectorPolicy > { }; ////////////////////////////////////////////////////////////////////////// /// dynamically allocated fixed-size vector template < typename T > class CSphFixedVector : public ISphNoncopyable { protected: T * m_pData; int m_iSize; public: explicit CSphFixedVector ( int iSize ) : m_iSize ( iSize ) { assert ( iSize>=0 ); m_pData = ( iSize>0 ) ? new T [ iSize ] : NULL; } ~CSphFixedVector () { SafeDeleteArray ( m_pData ); } T & operator [] ( int iIndex ) const { assert ( iIndex>=0 && iIndex=0 ); m_pData = ( iSize>0 ) ? new T [ iSize ] : NULL; m_iSize = iSize; } int GetLength() const { return m_iSize; } }; ////////////////////////////////////////////////////////////////////////// /// simple dynamic hash /// keeps the order, so Iterate() return the entries in the order they was inserted template < typename T, typename KEY, typename HASHFUNC, int LENGTH > class CSphOrderedHash { protected: struct HashEntry_t { KEY m_tKey; ///< key, owned by the hash T m_tValue; ///< data, owned by the hash HashEntry_t * m_pNextByHash; ///< next entry in hash list HashEntry_t * m_pPrevByOrder; ///< prev entry in the insertion order HashEntry_t * m_pNextByOrder; ///< next entry in the insertion order }; protected: HashEntry_t * m_dHash [ LENGTH ]; ///< all the hash entries HashEntry_t * m_pFirstByOrder; ///< first entry in the insertion order HashEntry_t * m_pLastByOrder; ///< last entry in the insertion order int m_iLength; ///< entries count protected: /// find entry by key HashEntry_t * FindByKey ( const KEY & tKey ) const { unsigned int uHash = ( (unsigned int) HASHFUNC::Hash ( tKey ) ) % LENGTH; HashEntry_t * pEntry = m_dHash [ uHash ]; while ( pEntry ) { if ( pEntry->m_tKey==tKey ) return pEntry; pEntry = pEntry->m_pNextByHash; } return NULL; } public: /// ctor CSphOrderedHash () : m_pFirstByOrder ( NULL ) , m_pLastByOrder ( NULL ) , m_iLength ( 0 ) , m_pIterator ( NULL ) { for ( int i=0; im_pNextByOrder; SafeDelete ( pKill ); pKill = pNext; } for ( int i=0; im_tKey==tKey ) return false; ppEntry = &pEntry->m_pNextByHash; pEntry = pEntry->m_pNextByHash; } // it's not; let's add the entry assert ( !pEntry ); assert ( !*ppEntry ); pEntry = new HashEntry_t; pEntry->m_tKey = tKey; pEntry->m_tValue = tValue; pEntry->m_pNextByHash = NULL; pEntry->m_pPrevByOrder = NULL; pEntry->m_pNextByOrder = NULL; *ppEntry = pEntry; if ( !m_pFirstByOrder ) m_pFirstByOrder = pEntry; if ( m_pLastByOrder ) { assert ( !m_pLastByOrder->m_pNextByOrder ); assert ( !pEntry->m_pNextByOrder ); m_pLastByOrder->m_pNextByOrder = pEntry; pEntry->m_pPrevByOrder = m_pLastByOrder; } m_pLastByOrder = pEntry; m_iLength++; return true; } /// add new entry /// returns the pointer to just inserted or previously cached (if dupe) value T* AddUnique ( const T & tValue, const KEY & tKey ) { unsigned int uHash = ( (unsigned int) HASHFUNC::Hash ( tKey ) ) % LENGTH; // check if this key is already hashed HashEntry_t * pEntry = m_dHash [ uHash ]; HashEntry_t ** ppEntry = &m_dHash [ uHash ]; while ( pEntry ) { if ( pEntry->m_tKey==tKey ) return &pEntry->m_tValue; ppEntry = &pEntry->m_pNextByHash; pEntry = pEntry->m_pNextByHash; } // it's not; let's add the entry assert ( !pEntry ); assert ( !*ppEntry ); pEntry = new HashEntry_t; 
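// --------------------------------------------------------------------------
// Illustrative sketch, not part of the original header: typical CSphVector
// usage with the ARRAY_FOREACH macro, Add(), and Uniq() declared above.
// Names are hypothetical.
// --------------------------------------------------------------------------
static void CollectEvenExample ( const CSphVector<int> & dSrc, CSphVector<int> & dOut )
{
	dOut.Reset();
	ARRAY_FOREACH ( i, dSrc )
		if ( ( dSrc[i] & 1 )==0 )
			dOut.Add ( dSrc[i] );

	// Uniq() sorts in place and drops adjacent duplicates
	dOut.Uniq();
}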
pEntry->m_tKey = tKey; pEntry->m_tValue = tValue; pEntry->m_pNextByHash = NULL; pEntry->m_pPrevByOrder = NULL; pEntry->m_pNextByOrder = NULL; *ppEntry = pEntry; if ( !m_pFirstByOrder ) m_pFirstByOrder = pEntry; if ( m_pLastByOrder ) { assert ( !m_pLastByOrder->m_pNextByOrder ); assert ( !pEntry->m_pNextByOrder ); m_pLastByOrder->m_pNextByOrder = pEntry; pEntry->m_pPrevByOrder = m_pLastByOrder; } m_pLastByOrder = pEntry; m_iLength++; return &pEntry->m_tValue; } /// delete an entry bool Delete ( const KEY & tKey ) { unsigned int uHash = ( (unsigned int) HASHFUNC::Hash ( tKey ) ) % LENGTH; HashEntry_t * pEntry = m_dHash [ uHash ]; HashEntry_t * pPrevEntry = NULL; HashEntry_t * pToDelete = NULL; while ( pEntry ) { if ( pEntry->m_tKey==tKey ) { pToDelete = pEntry; if ( pPrevEntry ) pPrevEntry->m_pNextByHash = pEntry->m_pNextByHash; else m_dHash [ uHash ] = pEntry->m_pNextByHash; break; } pPrevEntry = pEntry; pEntry = pEntry->m_pNextByHash; } if ( !pToDelete ) return false; if ( pToDelete->m_pPrevByOrder ) pToDelete->m_pPrevByOrder->m_pNextByOrder = pToDelete->m_pNextByOrder; else m_pFirstByOrder = pToDelete->m_pNextByOrder; if ( pToDelete->m_pNextByOrder ) pToDelete->m_pNextByOrder->m_pPrevByOrder = pToDelete->m_pPrevByOrder; else m_pLastByOrder = pToDelete->m_pPrevByOrder; // step the iterator one item back - to gracefully hold deletion in iteration cycle if ( pToDelete==m_pIterator ) m_pIterator = pToDelete->m_pPrevByOrder; SafeDelete ( pToDelete ); --m_iLength; return true; } /// check if key exists bool Exists ( const KEY & tKey ) const { return FindByKey ( tKey )!=NULL; } /// get value pointer by key T * operator () ( const KEY & tKey ) const { HashEntry_t * pEntry = FindByKey ( tKey ); return pEntry ? &pEntry->m_tValue : NULL; } /// get value reference by key, asserting that the key exists in hash T & operator [] ( const KEY & tKey ) const { HashEntry_t * pEntry = FindByKey ( tKey ); assert ( pEntry && "hash missing value in operator []" ); return pEntry->m_tValue; } /// get pointer to key storage const KEY * GetKeyPtr ( const KEY & tKey ) const { HashEntry_t * pEntry = FindByKey ( tKey ); return pEntry ? &pEntry->m_tKey : NULL; } /// copying const CSphOrderedHash & operator = ( const CSphOrderedHash & rhs ) { if ( this!=&rhs ) { Reset (); rhs.IterateStart (); while ( rhs.IterateNext() ) Add ( rhs.IterateGet(), rhs.IterateGetKey() ); } return *this; } /// copyint ctor CSphOrderedHash ( const CSphOrderedHash & rhs ) : m_pFirstByOrder ( NULL ) , m_pLastByOrder ( NULL ) , m_iLength ( 0 ) , m_pIterator ( NULL ) { for ( int i=0; im_pNextByOrder : m_pFirstByOrder; return m_pIterator!=NULL; } /// get entry value T & IterateGet () const { assert ( m_pIterator ); return m_pIterator->m_tValue; } /// get entry key const KEY & IterateGetKey () const { assert ( m_pIterator ); return m_pIterator->m_tKey; } /// go to next existing entry in terms of external independed iterator bool IterateNext ( void ** ppCookie ) const { HashEntry_t ** ppIterator = reinterpret_cast < HashEntry_t** > ( ppCookie ); *ppIterator = ( *ppIterator ) ? 
( *ppIterator )->m_pNextByOrder : m_pFirstByOrder; return ( *ppIterator )!=NULL; } /// get entry value in terms of external independed iterator static T & IterateGet ( void ** ppCookie ) { assert ( ppCookie ); HashEntry_t ** ppIterator = reinterpret_cast < HashEntry_t** > ( ppCookie ); assert ( *ppIterator ); return ( *ppIterator )->m_tValue; } /// get entry key in terms of external independed iterator static const KEY & IterateGetKey ( void ** ppCookie ) { assert ( ppCookie ); HashEntry_t ** ppIterator = reinterpret_cast < HashEntry_t** > ( ppCookie ); assert ( *ppIterator ); return ( *ppIterator )->m_tKey; } private: /// current iterator mutable HashEntry_t * m_pIterator; }; /// very popular and so, moved here struct IdentityHash_fn { template static inline INT Hash ( INT iValue ) { return iValue; } }; ///////////////////////////////////////////////////////////////////////////// /// immutable C string proxy struct CSphString { protected: char * m_sValue; private: /// safety gap after the string end; for instance, UTF-8 Russian stemmer /// which treats strings as 16-bit word sequences needs this in some cases. /// note that this zero-filled gap does NOT include trailing C-string zero, /// and does NOT affect strlen() as well. static const int SAFETY_GAP = 4; public: CSphString () : m_sValue ( NULL ) { } CSphString ( const CSphString & rhs ) : m_sValue ( NULL ) { *this = rhs; } virtual ~CSphString () { SafeDeleteArray ( m_sValue ); } const char * cstr () const { return m_sValue; } inline bool operator == ( const char * t ) const { if ( !t || !m_sValue ) return ( !t && !m_sValue ); return strcmp ( m_sValue, t )==0; } inline bool operator == ( const CSphString & t ) const { return operator==( t.cstr() ); } inline bool operator != ( const CSphString & t ) const { return !operator==( t ); } bool operator != ( const char * t ) const { return !operator==( t ); } CSphString ( const char * sString ) // NOLINT { if ( sString ) { int iLen = 1+strlen(sString); m_sValue = new char [ iLen+SAFETY_GAP ]; strcpy ( m_sValue, sString ); // NOLINT memset ( m_sValue+iLen, 0, SAFETY_GAP ); } else { m_sValue = NULL; } } const CSphString & operator = ( const CSphString & rhs ) { if ( m_sValue==rhs.m_sValue ) return *this; SafeDeleteArray ( m_sValue ); if ( rhs.m_sValue ) { int iLen = 1+strlen(rhs.m_sValue); m_sValue = new char [ iLen+SAFETY_GAP ]; strcpy ( m_sValue, rhs.m_sValue ); // NOLINT memset ( m_sValue+iLen, 0, SAFETY_GAP ); } return *this; } CSphString SubString ( int iStart, int iCount ) const { #ifndef NDEBUG int iLen = strlen(m_sValue); #endif assert ( iStart>=0 && iStart0 ); assert ( (iStart+iCount)>=0 && (iStart+iCount)<=iLen ); CSphString sRes; sRes.m_sValue = new char [ 1+SAFETY_GAP+iCount ]; strncpy ( sRes.m_sValue, m_sValue+iStart, iCount ); memset ( sRes.m_sValue+iCount, 0, 1+SAFETY_GAP ); return sRes; } void SetBinary ( const char * sValue, int iLen ) { SafeDeleteArray ( m_sValue ); if ( sValue ) { m_sValue = new char [ 1+SAFETY_GAP+iLen ]; memcpy ( m_sValue, sValue, iLen ); memset ( m_sValue+iLen, 0, 1+SAFETY_GAP ); } } void Reserve ( int iLen ) { SafeDeleteArray ( m_sValue ); m_sValue = new char [ 1+SAFETY_GAP+iLen ]; memset ( m_sValue, 0, 1+SAFETY_GAP+iLen ); } const CSphString & SetSprintf ( const char * sTemplate, ... 
) __attribute__ ( ( format ( printf, 2, 3 ) ) ) { char sBuf[1024]; va_list ap; va_start ( ap, sTemplate ); vsnprintf ( sBuf, sizeof(sBuf), sTemplate, ap ); va_end ( ap ); (*this) = sBuf; return (*this); } const CSphString & SetSprintfVa ( const char * sTemplate, va_list ap ) { char sBuf[1024]; vsnprintf ( sBuf, sizeof(sBuf), sTemplate, ap ); (*this) = sBuf; return (*this); } bool IsEmpty () const { if ( !m_sValue ) return true; return ( (*m_sValue)=='\0' ); } void ToLower () { if ( m_sValue ) for ( char * s=m_sValue; *s; s++ ) *s = (char) tolower ( *s ); } void ToUpper () { if ( m_sValue ) for ( char * s=m_sValue; *s; s++ ) *s = (char) toupper ( *s ); } void Swap ( CSphString & rhs ) { ::Swap ( m_sValue, rhs.m_sValue ); } bool Begins ( const char * sPrefix ) const { if ( !m_sValue || !sPrefix ) return false; return strncmp ( m_sValue, sPrefix, strlen(sPrefix) )==0; } bool Ends ( const char * sPrefix ) const { if ( !m_sValue || !sPrefix ) return false; int iVal = strlen ( m_sValue ); int iPrefix = strlen ( sPrefix ); if ( iVal class SmallStringHash_T : public CSphOrderedHash < T, CSphString, CSphStrHashFunc, 256 > {}; ////////////////////////////////////////////////////////////////////////// /// pointer with automatic safe deletion when going out of scope template < typename T > class CSphScopedPtr : public ISphNoncopyable { public: explicit CSphScopedPtr ( T * pPtr ) { m_pPtr = pPtr; } ~CSphScopedPtr () { SafeDelete ( m_pPtr ); } T * operator -> () const { return m_pPtr; } T * Ptr () const { return m_pPtr; } CSphScopedPtr & operator = ( T * pPtr ) { SafeDelete ( m_pPtr ); m_pPtr = pPtr; return *this; } T * LeakPtr () { T * pPtr = m_pPtr; m_pPtr = NULL; return pPtr; } protected: T * m_pPtr; }; ////////////////////////////////////////////////////////////////////////// /// refcounted base /// WARNING, FOR SINGLE-THREADED USE ONLY struct ISphRefcounted : public ISphNoncopyable { protected: ISphRefcounted () : m_iRefCount ( 1 ) {} virtual ~ISphRefcounted () {} public: void AddRef () const { m_iRefCount++; } void Release () const { --m_iRefCount; assert ( m_iRefCount>=0 ); if ( m_iRefCount==0 ) delete this; } protected: mutable int m_iRefCount; }; /// automatic pointer wrapper for refcounted objects /// construction from or assignment of a raw pointer takes over (!) the ownership template < typename T > class CSphRefcountedPtr { public: explicit CSphRefcountedPtr () { m_pPtr = NULL; } ///< default NULL wrapper construction (for vectors) explicit CSphRefcountedPtr ( T * pPtr ) { m_pPtr = pPtr; } ///< construction from raw pointer, takes over ownership! ~CSphRefcountedPtr () { if ( m_pPtr ) m_pPtr->Release(); } T * Ptr () const { return m_pPtr; } T * operator -> () const { return m_pPtr; } bool operator ! () const { return m_pPtr==NULL; } public: /// assignment of a raw pointer, takes over ownership! CSphRefcountedPtr & operator = ( T * pPtr ) { if ( m_pPtr && m_pPtr!=pPtr ) m_pPtr->Release(); m_pPtr = pPtr; return *this; } /// wrapper assignment, does automated reference tracking CSphRefcountedPtr & operator = ( const CSphRefcountedPtr & rhs ) { if ( rhs.m_pPtr ) rhs.m_pPtr->AddRef(); if ( m_pPtr ) m_pPtr->Release(); m_pPtr = rhs.m_pPtr; return *this; } protected: T * m_pPtr; }; ////////////////////////////////////////////////////////////////////////// extern bool g_bHeadProcess; void sphWarn ( const char *, ... 
) __attribute__ ( ( format ( printf, 1, 2 ) ) ); /// in-memory buffer shared between processes template < typename T > class CSphSharedBuffer { public: /// ctor CSphSharedBuffer () : m_pData ( NULL ) , m_iLength ( 0 ) , m_iEntries ( 0 ) , m_bMlock ( false ) {} /// dtor ~CSphSharedBuffer () { Reset (); } /// set locking mode for subsequent Alloc()s void SetMlock ( bool bMlock ) { m_bMlock = bMlock; } public: /// allocate storage #if USE_WINDOWS bool Alloc ( int64_t iEntries, CSphString & sError, CSphString & ) #else bool Alloc ( int64_t iEntries, CSphString & sError, CSphString & sWarning ) #endif { assert ( !m_pData ); int64_t uCheck = sizeof(T); uCheck *= iEntries; m_iLength = (size_t)uCheck; if ( uCheck!=(int64_t)m_iLength ) { sError.SetSprintf ( "impossible to mmap() over 4 GB on 32-bit system" ); m_iLength = 0; return false; } #if USE_WINDOWS m_pData = new T [ (size_t)iEntries ]; #else m_pData = (T *) mmap ( NULL, m_iLength, PROT_READ | PROT_WRITE, MAP_SHARED | MAP_ANON, -1, 0 ); if ( m_pData==MAP_FAILED ) { if ( m_iLength>0x7fffffffUL ) sError.SetSprintf ( "mmap() failed: %s (length="INT64_FMT" is over 2GB, impossible on some 32-bit systems)", strerror(errno), (int64_t)m_iLength ); else sError.SetSprintf ( "mmap() failed: %s (length="INT64_FMT")", strerror(errno), (int64_t)m_iLength ); m_iLength = 0; return false; } if ( m_bMlock ) if ( -1==mlock ( m_pData, m_iLength ) ) sWarning.SetSprintf ( "mlock() failed: %s", strerror(errno) ); #if SPH_ALLOCS_PROFILER sphMemStatMMapAdd ( m_iLength ); #endif #endif // USE_WINDOWS assert ( m_pData ); m_iEntries = (size_t)iEntries; return true; } /// relock again (for daemonization only) #if USE_WINDOWS bool Mlock ( const char *, CSphString & ) { return true; } #else bool Mlock ( const char * sPrefix, CSphString & sError ) { if ( !m_bMlock ) return true; if ( mlock ( m_pData, m_iLength )!=-1 ) return true; if ( sError.IsEmpty() ) sError.SetSprintf ( "%s mlock() failed: bytes="INT64_FMT", error=%s", sPrefix, (int64_t)m_iLength, strerror(errno) ); else sError.SetSprintf ( "%s; %s mlock() failed: bytes="INT64_FMT", error=%s", sError.cstr(), sPrefix, (int64_t)m_iLength, strerror(errno) ); return false; } #endif /// deallocate storage void Reset () { if ( !m_pData ) return; #if USE_WINDOWS delete [] m_pData; #else if ( g_bHeadProcess ) { int iRes = munmap ( m_pData, m_iLength ); if ( iRes ) sphWarn ( "munmap() failed: %s", strerror(errno) ); #if SPH_ALLOCS_PROFILER sphMemStatMMapDel ( m_iLength ); #endif } #endif // USE_WINDOWS m_pData = NULL; m_iLength = 0; m_iEntries = 0; } public: /// accessor inline const T & operator [] ( int64_t iIndex ) const { assert ( iIndex>=0 && iIndex<(int64_t)m_iEntries ); return m_pData[iIndex]; } /// get write address T * GetWritePtr () const { return m_pData; } /// check if i'm empty bool IsEmpty () const { return m_pData==NULL; } /// get length in bytes size_t GetLength () const { return m_iLength; } /// get length in entries size_t GetNumEntries () const { return m_iEntries; } protected: T * m_pData; ///< data storage size_t m_iLength; ///< data length, bytes size_t m_iEntries; ///< data length, entries bool m_bMlock; ///< whether to lock data in RAM }; ////////////////////////////////////////////////////////////////////////// /// process-shared mutex that survives fork class CSphProcessSharedMutex { public: explicit CSphProcessSharedMutex ( int iExtraSize=0 ); void Lock () const; void Unlock () const; bool TimedLock ( int tmSpin ) const; // wait at least tmSpin microseconds the lock will available const char * GetError 
() const; protected: #if !USE_WINDOWS CSphSharedBuffer m_pStorage; pthread_mutex_t * m_pMutex; CSphString m_sError; #endif }; #if !USE_WINDOWS /// process-shared mutex variable that survives fork template < typename T > class CSphProcessSharedVariable : protected CSphProcessSharedMutex, public ISphNoncopyable { public: explicit CSphProcessSharedVariable ( const T& tInitValue ) : CSphProcessSharedMutex ( sizeof(T) ) , m_pValue ( NULL ) { if ( m_pMutex ) { m_pValue = reinterpret_cast ( m_pStorage.GetWritePtr () + sizeof ( pthread_mutex_t ) ); *m_pValue = tInitValue; } } T ReadValue() const { if ( !m_pValue ) return 0; Lock(); T val = *m_pValue; Unlock(); return val; } void WriteValue ( const T& tNewValue ) { if ( !m_pValue ) return; Lock(); *m_pValue = tNewValue; Unlock(); } protected: T * m_pValue; }; #endif // #if !USE_WINDOWS ////////////////////////////////////////////////////////////////////////// /// my thread handle and thread func magic #if USE_WINDOWS typedef HANDLE SphThread_t; typedef DWORD SphThreadKey_t; #else typedef pthread_t SphThread_t; typedef pthread_key_t SphThreadKey_t; #endif /// my threading initialize routine void * sphThreadInit ( bool bDetached=false ); /// my threading deinitialize routine void sphThreadDone ( int iFD ); /// my create thread wrapper bool sphThreadCreate ( SphThread_t * pThread, void (*fnThread)(void*), void * pArg, bool bDetached=false ); /// my join thread wrapper bool sphThreadJoin ( SphThread_t * pThread ); /// add (cleanup) callback to run on thread exit void sphThreadOnExit ( void (*fnCleanup)(void*), void * pArg ); /// alloc thread-local key bool sphThreadKeyCreate ( SphThreadKey_t * pKey ); /// free thread-local key void sphThreadKeyDelete ( SphThreadKey_t tKey ); /// get thread-local key value void * sphThreadGet ( SphThreadKey_t tKey ); /// get the pointer to my thread's stack void * sphMyStack (); /// get size of used stack int64_t sphGetStackUsed(); /// get the size of my thread's stack int sphMyStackSize (); /// set the size of my thread's stack void sphSetMyStackSize ( int iStackSize ); /// store the address in the TLS void MemorizeStack ( void* PStack ); /// set thread-local key value bool sphThreadSet ( SphThreadKey_t tKey, void * pValue ); #if !USE_WINDOWS /// what kind of threading lib do we have? 
The number of frames in the stack depends from it bool sphIsLtLib(); #endif ////////////////////////////////////////////////////////////////////////// /// mutex implementation class CSphMutex { public: CSphMutex () : m_bInitialized ( false ) {} ~CSphMutex () { assert ( !m_bInitialized ); } bool Init (); bool Done (); bool Lock (); bool Unlock (); protected: bool m_bInitialized; #if USE_WINDOWS HANDLE m_hMutex; #else pthread_mutex_t m_tMutex; #endif }; /// static mutex (for globals) class CSphStaticMutex : public CSphMutex { public: CSphStaticMutex() { Verify ( Init() ); } ~CSphStaticMutex() { Done(); } }; /// scoped mutex lock template < typename T > class CSphScopedLock : ISphNoncopyable { public: /// lock on creation explicit CSphScopedLock ( T & tMutex ) : m_tMutexRef ( tMutex ) { m_tMutexRef.Lock(); } /// unlock on going out of scope ~CSphScopedLock () { m_tMutexRef.Unlock (); } protected: T & m_tMutexRef; }; /// MT-aware refcounted base /// mutex protected, might be slow struct ISphRefcountedMT : public ISphNoncopyable { protected: ISphRefcountedMT () : m_iRefCount ( 1 ) { m_tLock.Init(); } virtual ~ISphRefcountedMT () { m_tLock.Done(); } public: void AddRef () const { m_tLock.Lock(); m_iRefCount++; m_tLock.Unlock(); } void Release () const { m_tLock.Lock(); int iRefs = --m_iRefCount; assert ( iRefs>=0 ); m_tLock.Unlock(); if ( iRefs==0 ) delete this; } protected: mutable int m_iRefCount; mutable CSphMutex m_tLock; }; /// rwlock implementation class CSphRwlock { public: CSphRwlock (); ~CSphRwlock () {} bool Init (); bool Done (); bool ReadLock (); bool WriteLock (); bool Unlock (); #if USE_WINDOWS private: HANDLE m_hWriteMutex; HANDLE m_hReadEvent; LONG m_iReaders; #else pthread_rwlock_t m_tLock; #endif }; // small bitvector of 256 elements. class CSphSmallBitvec { public: static const int iTOTALBITS = 256; private: static const int iELEMBITS = sizeof(DWORD) * 8; static const int iBYTESIZE = iTOTALBITS / 8; static const int IELEMENTS = iTOTALBITS / iELEMBITS; static const DWORD uALLBITS = (DWORD)(~(0UL)); STATIC_ASSERT ( IELEMENTS>=1, 8_BITS_MINIMAL_SIZE_OF_VECTOR ); public: DWORD m_dFieldsMask[IELEMENTS]; public: // no custom cstr and d-tor - to be usable from inside unions // deep copy for it is ok - so, no explicit copying constructor and operator= // old-fashion layer to work with DWORD (32-bit) mask. // all bits above 32 assumed to be unset. 
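	// --- Illustrative sketch, not part of the original sphinxstd.h ---
	// Shows how the legacy 32-bit layer described above is meant to round-trip:
	// Assign32() clears the whole 256-bit vector and stores the mask into word 0,
	// and GetMask32() reads that word back (bits 32..255 stay unset). The helper
	// below is a hypothetical example added for documentation purposes only.
	static void Example32BitLayerUsage ()
	{
		CSphSmallBitvec tFields;
		tFields.Assign32 ( 0x0000000FUL );	// mark fields 0..3 via the legacy DWORD mask
		DWORD uMask = tFields.GetMask32 ();	// reads back 0x0000000F
		assert ( uMask==0x0000000FUL );
	}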
void Assign32 ( DWORD uMask ) { Unset(); m_dFieldsMask[0] = uMask; } DWORD GetMask32 () const { return (DWORD) ( m_dFieldsMask[0] & 0xFFFFFFFFUL ); } // set n-th bit, or all void Set ( int iIdx=-1 ) { assert ( iIdx < iTOTALBITS ); if ( iIdx<0 ) for ( int i=0; i=0 && iIdx=iTOTALBITS ) return; int iMaskPos = iBits / iELEMBITS; DWORD uMask = ( 1UL << ( iBits % iELEMBITS ) ) - 1; m_dFieldsMask[iMaskPos++] &= uMask; for ( ; iMaskPos < IELEMENTS; iMaskPos++ ) m_dFieldsMask[iMaskPos] = 0UL; } void Negate() { for ( int i=0; i0 ); m_iElements = iElements; if ( iElements > int(sizeof(m_uStatic)*8) ) { int iSize = (m_iElements+31)/32; m_pData = new DWORD [ iSize ]; memset ( m_pData, 0, sizeof(DWORD)*iSize ); } else { m_pData = m_uStatic; for ( int i=0; i=0 ); assert ( iIndex>5 ] & ( 1UL<<( iIndex&31 ) ) )!=0; // NOLINT } void BitSet ( int iIndex ) { assert ( iIndex>=0 ); assert ( iIndex>5 ] |= ( 1UL<<( iIndex&31 ) ); // NOLINT } }; #endif // _sphinxstd_ // // $Id: sphinxstd.h 3129 2012-03-01 07:18:52Z tomat $ // sphinx-2.0.4-release/src/sphinxexcerpt.h0000644000176700017710000000647711711621267017631 0ustar deogardeogar// // $Id: sphinxexcerpt.h 3087 2012-01-30 23:07:35Z shodan $ // // // Copyright (c) 2001-2012, Andrew Aksyonoff // Copyright (c) 2008-2012, Sphinx Technologies Inc // All rights reserved // // This program is free software; you can redistribute it and/or modify // it under the terms of the GNU General Public License. You should have // received a copy of the GPL license along with this program; if you // did not, you can find it at http://www.gnu.org/ // #ifndef _sphinxexcerpt_ #define _sphinxexcerpt_ #include "sphinx.h" /// a query to generate an excerpt /// everything string is expected to be UTF-8 struct ExcerptQuery_t { public: CSphString m_sSource; ///< source text (or file name, see m_bLoadFiles) CSphString m_sWords; ///< words themselves CSphString m_sBeforeMatch; ///< string to insert before each match CSphString m_sAfterMatch; ///< string to insert after each match CSphString m_sChunkSeparator; ///< string to insert between matching chunks (in limited mode only) CSphString m_sStripMode; ///< strip mode int m_iLimit; ///< max chars in snippet (0 if unlimited) int m_iLimitWords; ///< max words in snippet int m_iLimitPassages; ///< max passages in snippet int m_iAround; ///< how much words to highlight around each match int m_iPassageId; ///< current %PASSAGE_ID% counter value (must start at 1) int m_iPassageBoundary; ///< passage boundary mode bool m_bRemoveSpaces; ///< whether to collapse whitespace bool m_bExactPhrase; ///< whether to highlight exact phrase matches only bool m_bUseBoundaries; ///< whether to extract passages by phrase boundaries setup in tokenizer bool m_bWeightOrder; ///< whether to order best passages in document (default) or weight order bool m_bHighlightQuery; ///< whether try to highlight the whole query, or always word-by-word bool m_bForceAllWords; ///< whether to ignore limit until all needed keywords are highlighted (#448) int m_iLoadFiles; ///< whether to interpret source as text (0) or file name (!0) bool m_bAllowEmpty; ///< whether to allow empty snippets (by default, return something from the start) bool m_bEmitZones; ///< whether to emit zone for passage int m_iRawFlags; ///< flags as they received from proto (to avoid coding/decoding to agents) CSphString m_sRawPassageBoundary; ///< boundary as it received from proto (to avoid coding/decoding to agents) public: int64_t m_iSize; ///< file size, to sort to work-queue order int m_iSeq; ///< 
request order, to sort back to request order int m_iNext; ///< the next one in one-link list for batch processing. -1 terminate the list. -2 sign of other (out-of-the-lists) char * m_sRes; ///< snippet result holder (NOT owned) CSphString m_sError; ///< snippet error message bool m_bHasBeforePassageMacro; bool m_bHasAfterPassageMacro; CSphString m_sBeforeMatchPassage; CSphString m_sAfterMatchPassage; public: ExcerptQuery_t (); }; /// an excerpt generator /// returns a newly allocated string in encoding specified by tokenizer on success /// returns NULL on failure char * sphBuildExcerpt ( ExcerptQuery_t &, CSphDict *, ISphTokenizer *, const CSphSchema *, CSphIndex *, CSphString & sError, const CSphHTMLStripper *, ISphTokenizer * ); #endif // _sphinxexcerpt_ // // $Id: sphinxexcerpt.h 3087 2012-01-30 23:07:35Z shodan $ // sphinx-2.0.4-release/src/sphinx.cpp0000644000176700017710000235617611723747477016616 0ustar deogardeogar// // $Id: sphinx.cpp 3134 2012-03-01 19:34:23Z tomat $ // // // Copyright (c) 2001-2012, Andrew Aksyonoff // Copyright (c) 2008-2012, Sphinx Technologies Inc // All rights reserved // // This program is free software; you can redistribute it and/or modify // it under the terms of the GNU General Public License. You should have // received a copy of the GPL license along with this program; if you // did not, you can find it at http://www.gnu.org/ // #include "sphinx.h" #include "sphinxstem.h" #include "sphinxquery.h" #include "sphinxutils.h" #include "sphinxexpr.h" #include "sphinxfilter.h" #include "sphinxint.h" #include "sphinxsearch.h" #include #include #include #include #include #include #include #include #include #include #include #define SPH_UNPACK_BUFFER_SIZE 4096 #define SPH_READ_PROGRESS_CHUNK (8192*1024) #define SPH_READ_NOPROGRESS_CHUNK (32768*1024) #if USE_LIBSTEMMER #include "libstemmer.h" #endif #if USE_LIBEXPAT #define XMLIMPORT #include "expat.h" // workaround for expat versions prior to 1.95.7 #ifndef XMLCALL #define XMLCALL #endif #endif #if USE_LIBXML #include #endif #if USE_LIBICONV #include "iconv.h" #endif #if USE_ZLIB #include #endif #if USE_ODBC #include #endif #if USE_WINDOWS #include // for open() // workaround Windows quirks #define popen _popen #define pclose _pclose #define snprintf _snprintf #define sphSeek _lseeki64 #define stat _stat64 #define fstat _fstat64 #if _MSC_VER<1400 #define struct_stat __stat64 #else #define struct_stat struct _stat64 #endif #define ICONV_INBUF_CONST 1 #else #include #include #define sphSeek lseek #define struct_stat struct stat #endif #if ( USE_WINDOWS && USE_MYSQL ) #pragma comment(linker, "/defaultlib:libmysql.lib") #pragma message("Automatically linking with libmysql.lib") #endif #if ( USE_WINDOWS && USE_PGSQL ) #pragma comment(linker, "/defaultlib:libpq.lib") #pragma message("Automatically linking with libpq.lib") #endif #if ( USE_WINDOWS && USE_LIBSTEMMER ) #pragma comment(linker, "/defaultlib:libstemmer_c.lib") #pragma message("Automatically linking with libstemmer_c.lib") #endif #if ( USE_WINDOWS && USE_LIBEXPAT ) #pragma comment(linker, "/defaultlib:libexpat.lib") #pragma message("Automatically linking with libexpat.lib") #endif #if ( USE_WINDOWS && USE_LIBICONV ) #pragma comment(linker, "/defaultlib:iconv.lib") #pragma message("Automatically linking with iconv.lib") #endif #if ( USE_WINDOWS && USE_LIBXML ) #pragma comment(linker, "/defaultlib:libxml.lib") #pragma message("Automatically linking with libxml.lib") #endif ///////////////////////////////////////////////////////////////////////////// typedef 
Hitman_c<8> HITMAN; // logf() is not there sometimes (eg. Solaris 9) #if !USE_WINDOWS && !HAVE_LOGF static inline float logf ( float v ) { return (float) log ( v ); } #endif // forward decl void sphWarn ( const char * sTemplate, ... ) __attribute__ ( ( format ( printf, 1, 2 ) ) ); size_t sphReadThrottled ( int iFD, void * pBuf, size_t iCount ); static bool sphTruncate ( int iFD ); ///////////////////////////////////////////////////////////////////////////// // GLOBALS ///////////////////////////////////////////////////////////////////////////// const char * SPHINX_DEFAULT_SBCS_TABLE = "0..9, A..Z->a..z, _, a..z, U+A8->U+B8, U+B8, U+C0..U+DF->U+E0..U+FF, U+E0..U+FF"; const char * SPHINX_DEFAULT_UTF8_TABLE = "0..9, A..Z->a..z, _, a..z, U+410..U+42F->U+430..U+44F, U+430..U+44F"; const char * MAGIC_WORD_SENTENCE = "\3sentence"; // emitted from source on sentence boundary, stored in dictionary const char * MAGIC_WORD_PARAGRAPH = "\3paragraph"; // emitted from source on paragraph boundary, stored in dictionary static const int DEFAULT_READ_BUFFER = 262144; static const int DEFAULT_READ_UNHINTED = 32768; static const int MIN_READ_BUFFER = 8192; static const int MIN_READ_UNHINTED = 1024; static bool g_bSphQuiet = false; static int g_iReadBuffer = DEFAULT_READ_BUFFER; static int g_iReadUnhinted = DEFAULT_READ_UNHINTED; // quick hack for indexer crash reporting // one day, these might turn into a callback or something int64_t g_iIndexerCurrentDocID = 0; int64_t g_iIndexerCurrentHits = 0; int64_t g_iIndexerCurrentRangeMin = 0; int64_t g_iIndexerCurrentRangeMax = 0; int64_t g_iIndexerPoolStartDocID = 0; int64_t g_iIndexerPoolStartHit = 0; ///////////////////////////////////////////////////////////////////////////// // COMPILE-TIME CHECKS ///////////////////////////////////////////////////////////////////////////// STATIC_SIZE_ASSERT ( SphOffset_t, 8 ); ///////////////////////////////////////////////////////////////////////////// // INTERNAL PROFILER ///////////////////////////////////////////////////////////////////////////// #define SPH_INTERNAL_PROFILER 0 #if SPH_INTERNAL_PROFILER enum ESphTimer { TIMER_root = 0, #define DECLARE_TIMER(_arg) TIMER_##_arg, #include "sphinxtimers.h" #undef DECLARE_TIMER TIMERS_TOTAL }; static const char * const g_dTimerNames [ TIMERS_TOTAL ] = { "root", #define DECLARE_TIMER(_arg) #_arg, #include "sphinxtimers.h" // NOLINT #undef DECLARE_TIMER }; struct CSphTimer { int64_t m_iMicroSec; ///< time as clocked raw int m_iCalls; ///< number of times this timer was called int m_iChildrenCalls; ///< number of times all subtimers (children, grandchildren etc) of this timer were called int64_t m_iMicroSecAdj; ///< guessed (!) time after timer costs adjustment, including subtimer costs int64_t m_iMicroSecSelf; ///< guessed (!) 
self time ESphTimer m_eTimer; int m_iParent; int m_iChild; int m_iNext; int m_iPrev; CSphTimer () { Alloc ( TIMER_root, -1 ); } void Alloc ( ESphTimer eTimer, int iParent ) { m_iParent = iParent; m_iChild = -1; m_iNext = -1; m_iPrev = -1; m_eTimer = eTimer; m_iMicroSec = 0; m_iMicroSecAdj = 0; m_iCalls = 0; m_iChildrenCalls = 0; } void Start () { m_iMicroSec -= sphMicroTimer (); m_iCalls++; } void Stop () { m_iMicroSec += sphMicroTimer (); } }; static const int SPH_MAX_TIMERS = 128; static const int SPH_TIMER_TRIALS = 16384; static int g_iTimer = -1; static int g_iTimers = 0; static CSphTimer g_dTimers [ SPH_MAX_TIMERS ]; static int64_t g_iTimerTrialsWall = 0; void sphProfilerInit () { assert ( g_iTimers==0 ); assert ( g_iTimer==-1 ); // start root timer g_iTimers = 1; g_iTimer = 0; g_dTimers[g_iTimer].Alloc ( TIMER_root, -1 ); g_dTimers[g_iTimer].Start (); } void sphProfilerPush ( ESphTimer eTimer ) { assert ( g_iTimer>=0 && g_iTimer0; iTimer=g_dTimers[iTimer].m_iNext ) { if ( g_dTimers[iTimer].m_eTimer==eTimer ) break; } // not found? let's alloc if ( iTimer<0 ) { assert ( g_iTimers=0 ) g_dTimers [ g_dTimers[g_iTimer].m_iChild ].m_iPrev = iTimer; g_dTimers[g_iTimer].m_iChild = iTimer; } // make it new current one assert ( iTimer>0 ); g_dTimers[iTimer].Start (); g_iTimer = iTimer; } void sphProfilerPop ( ESphTimer eTimer ) { assert ( g_iTimer>0 && g_iTimer=0 && g_iTimer0; iChild=g_dTimers[iChild].m_iNext ) { sphProfilerAdjust ( iChild ); tTimer.m_iChildrenCalls += g_dTimers[iChild].m_iCalls + g_dTimers[iChild].m_iChildrenCalls; } // adjust my raw time, remove all the timer costs from it // my own costs are 1x sphMicroTimer() call per start/stop cycle // subtimer costs are 2x sphMicroTimer() calls per start/stop cycle tTimer.m_iMicroSecAdj = tTimer.m_iMicroSec - ( ( tTimer.m_iCalls + 2*tTimer.m_iChildrenCalls )*g_iTimerTrialsWall / SPH_TIMER_TRIALS ); // now calculate self time // as adjusted time (all subtimer costs removed) minus all subtimer self time tTimer.m_iMicroSecSelf = tTimer.m_iMicroSecAdj; for ( int iChild=tTimer.m_iChild; iChild>0; iChild=g_dTimers[iChild].m_iNext ) tTimer.m_iMicroSecSelf -= g_dTimers[iChild].m_iMicroSecSelf; } void sphProfilerDone () { assert ( g_iTimers>0 ); assert ( g_iTimer==0 ); // stop root timer g_iTimers = 0; g_iTimer = -1; g_dTimers[0].Stop (); // bench adjustments for ( int iRun=0; iRun<3; iRun++ ) { int64_t iTrial = sphMicroTimer(); for ( int i=0; i0 && g_dTimers[iChild].m_iNext>0 ) iChild = g_dTimers[iChild].m_iNext; while ( iChild>0 ) { sphProfilerShow ( iChild, 1+iLevel ); iChild = g_dTimers[iChild].m_iPrev; } if ( iTimer==0 ) fprintf ( stdout, "---------------\n" ); } class CSphEasyTimer { public: explicit CSphEasyTimer ( ESphTimer eTimer ) : m_eTimer ( eTimer ) { if ( g_iTimer>=0 ) sphProfilerPush ( m_eTimer ); } ~CSphEasyTimer () { if ( g_iTimer>=0 ) sphProfilerPop ( m_eTimer ); } protected: ESphTimer m_eTimer; }; #define PROFILER_INIT() sphProfilerInit() #define PROFILER_DONE() sphProfilerDone() #define PROFILE_BEGIN(_arg) sphProfilerPush(TIMER_##_arg) #define PROFILE_END(_arg) sphProfilerPop(TIMER_##_arg) #define PROFILE_SHOW() sphProfilerShow() #define PROFILE(_arg) CSphEasyTimer __t_##_arg ( TIMER_##_arg ); #else #define PROFILER_INIT() #define PROFILER_DONE() #define PROFILE_BEGIN(_arg) #define PROFILE_END(_arg) #define PROFILE_SHOW() #define PROFILE(_arg) #endif // SPH_INTERNAL_PROFILER ///////////////////////////////////////////////////////////////////////////// #if !USE_WINDOWS bool g_bHeadProcess = true; void sphSetProcessInfo ( bool 
bHead ) { g_bHeadProcess = bHead; } #endif // USE_WINDOWS static bool g_bIOStats = false; static CSphIOStats g_IOStats; void sphStartIOStats () { g_bIOStats = true; memset ( &g_IOStats, 0, sizeof ( g_IOStats ) ); } const CSphIOStats & sphStopIOStats () { g_bIOStats = false; return g_IOStats; } static size_t sphRead ( int iFD, void * pBuf, size_t iCount ) { int64_t tmStart = 0; if ( g_bIOStats ) tmStart = sphMicroTimer(); size_t uRead = (size_t) ::read ( iFD, pBuf, iCount ); if ( g_bIOStats ) { g_IOStats.m_iReadTime += sphMicroTimer() - tmStart; g_IOStats.m_iReadOps++; g_IOStats.m_iReadBytes += iCount; } return uRead; } static void GetFileStats ( const char * szFilename, CSphSavedFile & tInfo ); ///////////////////////////////////////////////////////////////////////////// // INTERNAL SPHINX CLASSES DECLARATIONS ///////////////////////////////////////////////////////////////////////////// CSphAutofile::CSphAutofile () : m_iFD ( -1 ) , m_bTemporary ( false ) , m_bWouldTemporary ( false ) , m_pProgress ( NULL ) , m_pStat ( NULL ) { } CSphAutofile::CSphAutofile ( const CSphString & sName, int iMode, CSphString & sError, bool bTemp ) : m_iFD ( -1 ) , m_bTemporary ( false ) , m_bWouldTemporary ( false ) , m_pProgress ( NULL ) , m_pStat ( NULL ) { Open ( sName, iMode, sError, bTemp ); } CSphAutofile::~CSphAutofile () { Close (); } int CSphAutofile::Open ( const CSphString & sName, int iMode, CSphString & sError, bool bTemp ) { assert ( m_iFD==-1 && m_sFilename.IsEmpty () ); assert ( !sName.IsEmpty() ); m_iFD = ::open ( sName.cstr(), iMode, 0644 ); m_sFilename = sName; // not exactly sure why is this uncoditional. for error reporting later, i suppose if ( m_iFD<0 ) sError.SetSprintf ( "failed to open %s: %s", sName.cstr(), strerror(errno) ); else { m_bTemporary = bTemp; // only if we managed to actually open it m_bWouldTemporary = true; // if a shit happen - we could delete the file. } return m_iFD; } void CSphAutofile::Close () { if ( m_iFD>=0 ) { ::close ( m_iFD ); if ( m_bTemporary ) ::unlink ( m_sFilename.cstr() ); } m_iFD = -1; m_sFilename = ""; m_bTemporary = false; m_bWouldTemporary = false; } void CSphAutofile::SetTemporary() { m_bTemporary = m_bWouldTemporary; } const char * CSphAutofile::GetFilename () const { assert ( m_sFilename.cstr() ); return m_sFilename.cstr(); } SphOffset_t CSphAutofile::GetSize ( SphOffset_t iMinSize, bool bCheckSizeT, CSphString & sError ) { struct_stat st; if ( stat ( GetFilename(), &st )<0 ) { sError.SetSprintf ( "failed to stat %s: %s", GetFilename(), strerror(errno) ); return -1; } if ( st.st_size0 ) { int64_t iToReadOnce = ( m_pProgress && m_pStat ) ? 
Min ( SPH_READ_PROGRESS_CHUNK, iToRead ) : Min ( SPH_READ_NOPROGRESS_CHUNK, iToRead ); int64_t iGot = (int64_t) sphRead ( GetFD(), pCur, (size_t)iToReadOnce ); if ( iGot<=0 ) break; iToRead -= iGot; pCur += iGot; if ( m_pProgress && m_pStat ) { m_pStat->m_iBytes += iGot; m_pProgress ( m_pStat, false ); } } if ( iToRead!=0 ) { sError.SetSprintf ( "read error in %s; "INT64_FMT" of "INT64_FMT" bytes read", GetFilename(), iCount-iToRead, iCount ); return false; } return true; } void CSphAutofile::SetProgressCallback ( CSphIndex::ProgressCallback_t * pfnProgress, CSphIndexProgress * pStat ) { m_pProgress = pfnProgress; m_pStat = pStat; } ///////////////////////////////////////////////////////////////////////////// /// array pointer which self-destructs when going out of scope, or on demand template < typename T > class CSphAutoArray { protected: T * m_pData; #ifndef NDEBUG size_t m_iLength; // for pretty-printers to work #endif public: explicit CSphAutoArray ( int iCount ) #ifndef NDEBUG : m_iLength ( iCount ) #endif { m_pData = ( iCount>0 ) ? new T [ iCount ] : NULL; } ~CSphAutoArray () { Reset (); } void Reset () { SafeDeleteArray ( m_pData ); } const CSphAutoArray & operator = ( const CSphAutoArray & ) { assert(0); return *this; } operator T * () { return m_pData; } }; ///////////////////////////////////////////////////////////////////////////// /// generic stateless priority queue template < typename T, typename COMP > class CSphQueue { protected: T * m_pData; int m_iUsed; int m_iSize; public: /// ctor explicit CSphQueue ( int iSize ) : m_iUsed ( 0 ) , m_iSize ( iSize ) { assert ( iSize>0 ); m_pData = new T [ iSize ]; assert ( m_pData ); } /// dtor virtual ~CSphQueue () { SafeDeleteArray ( m_pData ); } /// add entry to the queue virtual bool Push ( const T & tEntry ) { if ( m_iUsed==m_iSize ) { // if it's worse that current min, reject it, else pop off current min if ( COMP::IsLess ( tEntry, m_pData[0] ) ) return true; else Pop (); } // do add m_pData [ m_iUsed ] = tEntry; int iEntry = m_iUsed++; // sift up if needed, so that worst (lesser) ones float to the top while ( iEntry ) { int iParent = ( iEntry-1 ) >> 1; if ( !COMP::IsLess ( m_pData[iEntry], m_pData[iParent] ) ) break; // entry is less than parent, should float to the top Swap ( m_pData[iEntry], m_pData[iParent] ); iEntry = iParent; } return true; } /// remove root (ie. top priority) entry virtual void Pop () { assert ( m_iUsed ); if ( !(--m_iUsed) ) // empty queue? 
just return return; // make the last entry my new root m_pData[0] = m_pData[m_iUsed]; // sift down if needed int iEntry = 0; for ( ;; ) { // select child int iChild = (iEntry<<1) + 1; if ( iChild>=m_iUsed ) break; // select smallest child if ( iChild+10 ) m_pDictBuf = new BYTE [iDictBufSize]; } virtual ~DiskIndexQwordSetup_c() { SafeDeleteArray ( m_pDictBuf ); } virtual ISphQword * QwordSpawn ( const XQKeyword_t & tWord ) const; virtual bool QwordSetup ( ISphQword * ) const; protected: template < class T > bool Setup ( ISphQword * ) const; }; #if USE_WINDOWS #pragma warning(disable:4127) // conditional expr is const for MSVC #endif /// query word from the searcher's point of view class DiskIndexQwordTraits_c : public ISphQword { static const int MINIBUFFER_LEN = 1024; public: SphOffset_t m_uHitPosition; Hitpos_t m_uInlinedHit; DWORD m_uHitState; bool m_bDupe; ///< whether the word occurs only once in current query CSphMatch m_tDoc; ///< current match (partial) Hitpos_t m_iHitPos; ///< current hit postition, from hitlist BYTE m_dDoclistBuf [ MINIBUFFER_LEN ]; BYTE m_dHitlistBuf [ MINIBUFFER_LEN ]; CSphReader m_rdDoclist; ///< my doclist reader CSphReader m_rdHitlist; ///< my hitlist reader SphDocID_t m_iMinID; ///< min ID to fixup int m_iInlineAttrs; ///< inline attributes count CSphRowitem * m_pInlineFixup; ///< inline attributes fixup (POINTER TO EXTERNAL DATA, NOT MANAGED BY THIS CLASS!) #ifndef NDEBUG bool m_bHitlistOver; #endif public: explicit DiskIndexQwordTraits_c ( bool bUseMini, bool bExcluded ) : m_uHitPosition ( 0 ) , m_uHitState ( 0 ) , m_bDupe ( false ) , m_iHitPos () , m_rdDoclist ( bUseMini ? m_dDoclistBuf : NULL, bUseMini ? MINIBUFFER_LEN : 0 ) , m_rdHitlist ( bUseMini ? m_dHitlistBuf : NULL, bUseMini ? MINIBUFFER_LEN : 0 ) , m_iMinID ( 0 ) , m_iInlineAttrs ( 0 ) , m_pInlineFixup ( NULL ) #ifndef NDEBUG , m_bHitlistOver ( true ) #endif { m_iHitPos = EMPTY_HIT; m_bExcluded = bExcluded; } }; /// query word from the searcher's point of view template < bool INLINE_HITS, bool INLINE_DOCINFO, bool DISABLE_HITLIST_SEEK > class DiskIndexQword_c : public DiskIndexQwordTraits_c { public: explicit DiskIndexQword_c ( bool bUseMinibuffer, bool bExcluded ) : DiskIndexQwordTraits_c ( bUseMinibuffer, bExcluded ) { } virtual void Reset () { m_uHitPosition = 0; m_uHitState = 0; m_rdDoclist.Reset (); m_rdHitlist.Reset (); ISphQword::Reset(); m_iHitPos = EMPTY_HIT; m_iInlineAttrs = 0; } void GetHitlistEntry () { assert ( !m_bHitlistOver ); DWORD iDelta = m_rdHitlist.UnzipInt (); if ( iDelta ) { m_iHitPos += iDelta; } else { m_iHitPos = EMPTY_HIT; #ifndef NDEBUG m_bHitlistOver = true; #endif } } virtual const CSphMatch & GetNextDoc ( DWORD * pDocinfo ) { SphDocID_t iDelta = m_rdDoclist.UnzipDocid(); if ( iDelta ) { m_bAllFieldsKnown = false; m_tDoc.m_iDocID += iDelta; if ( INLINE_DOCINFO ) { assert ( pDocinfo ); for ( int i=0; i> 1 ); m_bAllFieldsKnown = true; } else { m_dQwordFields.Assign32 ( uFirst ); m_uHitPosition += m_rdDoclist.UnzipOffset(); m_iHitlistPos = m_uHitPosition; } } else { SphOffset_t iDeltaPos = m_rdDoclist.UnzipOffset(); assert ( iDeltaPos>=0 ); m_iHitlistPos += iDeltaPos; m_dQwordFields.Assign32 ( m_rdDoclist.UnzipInt() ); m_uMatchHits = m_rdDoclist.UnzipInt(); } } else { m_tDoc.m_iDocID = 0; } return m_tDoc; } virtual void SeekHitlist ( SphOffset_t uOff ) { if ( uOff >> 63 ) { m_uHitState = 1; m_uInlinedHit = (DWORD)uOff; // truncate high dword } else { m_uHitState = 0; m_iHitPos = EMPTY_HIT; if ( DISABLE_HITLIST_SEEK ) assert ( m_rdHitlist.GetPos()==uOff ); // make sure 
we're where caller thinks we are. else m_rdHitlist.SeekTo ( uOff, READ_NO_SIZE_HINT ); } #ifndef NDEBUG m_bHitlistOver = false; #endif } virtual Hitpos_t GetNextHit () { assert ( m_bHasHitlist ); switch ( m_uHitState ) { case 0: // read hit from hitlist GetHitlistEntry (); return m_iHitPos; case 1: // return inlined hit m_uHitState = 2; return m_uInlinedHit; case 2: // return end-of-hitlist marker after inlined hit #ifndef NDEBUG m_bHitlistOver = true; #endif m_uHitState = 0; return EMPTY_HIT; } sphDie ( "INTERNAL ERROR: impossible hit emitter state" ); return EMPTY_HIT; } }; #if USE_WINDOWS #pragma warning(default:4127) // conditional expr is const for MSVC #endif ////////////////////////////////////////////////////////////////////////////// #define WITH_QWORD(INDEX, NO_SEEK, NAME, ACTION) \ { \ CSphIndex_VLN * pIndex = (CSphIndex_VLN *)INDEX; \ DWORD uInlineHits = pIndex->m_tSettings.m_eHitFormat==SPH_HIT_FORMAT_INLINE; \ DWORD uInlineDocinfo = pIndex->m_tSettings.m_eDocinfo==SPH_DOCINFO_INLINE; \ \ switch ( ( uInlineHits<<1 ) | uInlineDocinfo ) \ { \ case 0: { typedef DiskIndexQword_c < false, false, NO_SEEK > NAME; ACTION; break; } \ case 1: { typedef DiskIndexQword_c < false, true, NO_SEEK > NAME; ACTION; break; } \ case 2: { typedef DiskIndexQword_c < true, false, NO_SEEK > NAME; ACTION; break; } \ case 3: { typedef DiskIndexQword_c < true, true, NO_SEEK > NAME; ACTION; break; } \ default: \ sphDie ( "INTERNAL ERROR: impossible qword settings" ); \ } \ } ///////////////////////////////////////////////////////////////////////////// struct CSphWordlistCheckpoint { union { SphWordID_t m_iWordID; const char * m_sWord; }; SphOffset_t m_iWordlistOffset; }; // pre-v11 wordlist checkpoint struct CSphWordlistCheckpoint_v10 { SphWordID_t m_iWordID; DWORD m_iWordlistOffset; }; ///////////////////////////////////////////////////////////////////////////// /// ordinals accumulation and sorting struct Ordinal_t { SphDocID_t m_uDocID; ///< doc id CSphString m_sValue; ///< string value }; struct OrdinalEntry_t : public Ordinal_t { int m_iTag; }; struct OrdinalId_t { SphDocID_t m_uDocID; DWORD m_uId; }; struct OrdinalIdEntry_t : public OrdinalId_t { int m_iTag; }; void Swap ( Ordinal_t & a, Ordinal_t & b ) { Swap ( a.m_uDocID, b.m_uDocID ); Swap ( a.m_sValue, b.m_sValue ); } void Swap ( OrdinalEntry_t & a, OrdinalEntry_t & b ) { Swap ( a.m_uDocID, b.m_uDocID ); Swap ( a.m_sValue, b.m_sValue ); Swap ( a.m_iTag, b.m_iTag ); } ////////////////////////////////////////////////////////////////////////// static void ReadFileInfo ( CSphReader & tReader, const char * szFilename, CSphString & sWarning ) { SphOffset_t uSize = tReader.GetOffset (); SphOffset_t uCTime = tReader.GetOffset (); SphOffset_t uMTime = tReader.GetOffset (); DWORD uCRC32 = tReader.GetDword (); if ( szFilename && *szFilename ) { struct_stat tFileInfo; if ( stat ( szFilename, &tFileInfo ) < 0 ) sWarning.SetSprintf ( "failed to stat %s: %s", szFilename, strerror(errno) ); else { DWORD uMyCRC32 = 0; if ( !sphCalcFileCRC32 ( szFilename, uMyCRC32 ) ) sWarning.SetSprintf ( "failed to calculate CRC32 for %s", szFilename ); else if ( uMyCRC32!=uCRC32 || tFileInfo.st_size!=uSize || tFileInfo.st_ctime!=uCTime || tFileInfo.st_mtime!=uMTime ) sWarning.SetSprintf ( "'%s' differs from the original", szFilename ); } } } static void WriteFileInfo ( CSphWriter & tWriter, const CSphSavedFile & tInfo ) { tWriter.PutOffset ( tInfo.m_uSize ); tWriter.PutOffset ( tInfo.m_uCTime ); tWriter.PutOffset ( tInfo.m_uMTime ); tWriter.PutDword ( tInfo.m_uCRC32 ); } 
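// --- Illustrative sketch, not part of the original sphinx.cpp ---
// ReadFileInfo()/WriteFileInfo() above exchange a fixed per-file record:
// three 64-bit values (size, ctime, mtime) followed by a 32-bit CRC32.
// On load, the referenced file (synonyms, stopwords, wordforms) is re-stat()ed
// and re-checksummed, and a warning is raised if it "differs from the original".
// The struct and helper below are hypothetical stand-ins using plain stdio to
// show the field order only; the real code goes through CSphWriter/CSphReader
// and may buffer or encode the values differently.
struct SavedFileInfoSketch
{
	int64_t		m_iSize;
	int64_t		m_iCTime;
	int64_t		m_iMTime;
	DWORD		m_uCRC32;
};

static bool WriteFileInfoSketch ( FILE * fp, const SavedFileInfoSketch & tInfo )
{
	// same field order as WriteFileInfo(): PutOffset x3, then PutDword
	return fwrite ( &tInfo.m_iSize, sizeof(tInfo.m_iSize), 1, fp )==1
		&& fwrite ( &tInfo.m_iCTime, sizeof(tInfo.m_iCTime), 1, fp )==1
		&& fwrite ( &tInfo.m_iMTime, sizeof(tInfo.m_iMTime), 1, fp )==1
		&& fwrite ( &tInfo.m_uCRC32, sizeof(tInfo.m_uCRC32), 1, fp )==1;
}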
struct WordDictInfo_t { CSphString m_sWord; SphOffset_t m_uOff; int m_iDocs; int m_iHits; int m_iDoclistHint; WordDictInfo_t (); }; struct WordReaderContext_t { BYTE m_sWord [ MAX_KEYWORD_BYTES ]; int m_iLen; WordReaderContext_t(); }; // !COMMIT eliminate this, move it to proper dict impls class CWordlist : public ISphWordlist { public: int64_t m_iCheckpointsPos; ///< checkpoints offset CSphFixedVector m_dCheckpoints; ///< checkpoint offsets CSphAutofile m_tFile; ///< file int64_t m_iSize; ///< file size CSphSharedBuffer m_pBuf; ///< my cache int m_iMaxChunk; ///< max size of entry between checkpoints BYTE * m_pWords; ///< arena for checkpoint's words public: CWordlist (); ~CWordlist (); void Reset (); bool ReadCP ( CSphAutofile & tFile, DWORD uVer, bool bWordDict, CSphString & sError ); const CSphWordlistCheckpoint * FindCheckpoint ( const char * sWord, int iWordLen, SphWordID_t iWordID, bool bStarMode ) const; const BYTE * GetWord ( const BYTE * pBuf, const char * pStr, int iLen, WordDictInfo_t & tWord, bool bStarMode, WordReaderContext_t & tCtx ) const; bool GetWord ( const BYTE * pBuf, SphWordID_t iWordID, WordDictInfo_t & tWord ) const; const BYTE * AcquireDict ( const CSphWordlistCheckpoint * pCheckpoint, int iFD, BYTE * pDictBuf ) const; virtual void GetPrefixedWords ( const char * sWord, int iWordLen, CSphVector & dPrefixedWords, BYTE * pDictBuf, int iFD ) const; private: bool m_bWordDict; }; /// this is my actual VLN-compressed phrase index implementation class CSphIndex_VLN : public CSphIndex { friend class DiskIndexQwordSetup_c; friend class CSphMerger; friend class AttrIndexBuilder_t; public: explicit CSphIndex_VLN ( const char* sIndexName, const char * sFilename ); ~CSphIndex_VLN (); virtual int Build ( const CSphVector & dSources, int iMemoryLimit, int iWriteBuffer ); virtual bool LoadHeader ( const char * sHeaderName, bool bStripPath, CSphString & sWarning ); virtual bool WriteHeader ( CSphWriter & fdInfo, SphOffset_t iCheckpointsPos, DWORD iCheckpointCount ); virtual void DebugDumpHeader ( FILE * fp, const char * sHeaderName, bool bConfig ); virtual void DebugDumpDocids ( FILE * fp ); virtual void DebugDumpHitlist ( FILE * fp, const char * sKeyword, bool bID ); virtual int DebugCheck ( FILE * fp ); template void DumpHitlist ( FILE * fp, const char * sKeyword, bool bID ); virtual bool Prealloc ( bool bMlock, bool bStripPath, CSphString & sWarning ); virtual bool Mlock (); virtual void Dealloc (); virtual bool Preread (); template bool PrereadSharedBuffer ( CSphSharedBuffer & pBuffer, const char * sExt, size_t uExpected=0, DWORD uOffset=0 ); virtual void SetBase ( const char * sNewBase ); virtual bool Rename ( const char * sNewBase ); virtual bool Lock (); virtual void Unlock (); virtual void PostSetup() {} virtual bool MultiQuery ( const CSphQuery * pQuery, CSphQueryResult * pResult, int iSorters, ISphMatchSorter ** ppSorters, const CSphVector * pExtraFilters, int iTag ) const; virtual bool MultiQueryEx ( int iQueries, const CSphQuery * pQueries, CSphQueryResult ** ppResults, ISphMatchSorter ** ppSorters, const CSphVector * pExtraFilters, int iTag ) const; virtual bool GetKeywords ( CSphVector & dKeywords, const char * szQuery, bool bGetStats, CSphString & sError ) const; template bool DoGetKeywords ( CSphVector & dKeywords, const char * szQuery, bool bGetStats, CSphString & sError ) const; virtual bool Merge ( CSphIndex * pSource, CSphVector & dFilters, bool bMergeKillLists ); template bool MergeWords ( CSphIndex_VLN * pSrcIndex, ISphFilter * pFilter ); virtual int 
UpdateAttributes ( const CSphAttrUpdate & tUpd, int iIndex, CSphString & sError ); virtual bool SaveAttributes (); virtual DWORD GetAttributeStatus () const; bool EarlyReject ( CSphQueryContext * pCtx, CSphMatch & tMatch ) const; virtual SphAttr_t * GetKillList () const; virtual int GetKillListSize () const { return m_iKillListSize; } virtual bool HasDocid ( SphDocID_t uDocid ) const; virtual const CSphSourceStats & GetStats () const { return m_tStats; } private: static const int MIN_WRITE_BUFFER = 262144; ///< min write buffer size static const int DEFAULT_WRITE_BUFFER = 1048576; ///< default write buffer size private: // common stuff CSphString m_sFilename; int m_iLockFD; CSphMatch * m_pMin; ///< min attribute values tracker CSphSourceStats m_tStats; ///< my stats SphDocID_t m_iMergeInfinum; ///< minimal docid-1 for merging private: // indexing-only BYTE * m_pWriteBuffer; ///< my write buffer (for temp files) int m_iWriteBuffer; ///< my write buffer size bool m_bWordDict; bool m_bMerging; CSphAggregateHit m_tLastHit; ///< hitlist entry BYTE m_sLastKeyword [ MAX_KEYWORD_BYTES ]; SphOffset_t m_iLastHitlistPos; ///< doclist entry SphOffset_t m_iLastHitlistDelta; ///< doclist entry CSphSmallBitvec m_dLastDocFields; ///< doclist entry DWORD m_uLastDocHits; ///< doclist entry SphOffset_t m_iLastWordDoclist; ///< wordlist entry int m_iLastWordDocs; ///< wordlist entry int m_iLastWordHits; ///< wordlist entry CSphWriter m_wrDoclist; ///< wordlist writer CSphWriter m_wrHitlist; ///< hitlist writer CSphIndexProgress m_tProgress; CSphVector m_dHitlessWords; bool LoadHitlessWords (); private: // searching-only, per-index static const int DOCINFO_HASH_BITS = 18; // FIXME! make this configurable CSphSharedBuffer m_pDocinfo; ///< my docinfo cache DWORD m_uDocinfo; ///< my docinfo cache size CSphSharedBuffer m_pDocinfoHash; ///< hashed ids, to accelerate lookups DWORD m_uDocinfoIndex; ///< docinfo "index" entries count (each entry is 2x docinfo rows, for min/max) DWORD * m_pDocinfoIndex; ///< docinfo "index", to accelerate filtering during full-scan (2x rows for each block, and 2x rows for the whole index, 1+m_uDocinfoIndex entries) CSphSharedBuffer m_pMva; ///< my multi-valued attrs cache CSphSharedBuffer m_pStrings; ///< my in-RAM strings cache CWordlist m_tWordlist; ///< my wordlist CSphSharedBuffer m_pKillList; ///< killlist DWORD m_iKillListSize; ///< killlist size (in elements) DWORD m_uMinMaxIndex; ///< stored min/max cache offset (counted in DWORDs) CSphAutofile m_tDoclistFile; ///< doclist file CSphAutofile m_tHitlistFile; ///< hitlist file #define SPH_SHARED_VARS_COUNT 2 DWORD * m_pPreread; DWORD * m_pAttrsStatus; CSphSharedBuffer m_dShared; ///< are we ready to search bool m_bPreallocated; ///< are we ready to preread DWORD m_uVersion; ///< data files version bool m_bUse64; ///< whether the header is id64 int m_iIndexTag; ///< my ids for MVA updates pool static int m_iIndexTagSeq; ///< static ids sequence bool m_bIsEmpty; ///< do we have actually indexed documents (m_iTotalDocuments is just fetched documents, not indexed!) 
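	// --- Added documentation note, not part of the original class ---
	// Sizing sketch for the min/max "index" declared above: with
	// m_uDocinfoIndex==1000 blocks, m_pDocinfoIndex holds 2*(1000+1)==2002
	// docinfo rows: a min row and a max row per block, plus one extra min/max
	// pair covering the whole index. Full-scan filtering uses these per-block
	// ranges to skip blocks that cannot possibly match, while the id hash
	// (m_pDocinfoHash, sized by DOCINFO_HASH_BITS==18) presumably accelerates
	// the per-document FindDocinfo() lookups.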
private: CSphString GetIndexFileName ( const char * sExt ) const; int cidxWriteRawVLB ( int fd, CSphWordHit * pHit, int iHits, DWORD * pDocinfo, int Docinfos, int iStride ); void cidxFinishDoclistEntry ( Hitpos_t uLastPos ); void cidxHit ( CSphAggregateHit * pHit, CSphRowitem * pDocinfos ); bool cidxDone ( const char * sHeaderExtension, int iMemLimit ); bool ParsedMultiQuery ( const CSphQuery * pQuery, CSphQueryResult * pResult, int iSorters, ISphMatchSorter ** ppSorters, const XQQuery_t & tXQ, CSphDict * pDict, const CSphVector * pExtraFilters, CSphQueryNodeCache * pNodeCache, int iTag ) const; bool MultiScan ( const CSphQuery * pQuery, CSphQueryResult * pResult, int iSorters, ISphMatchSorter ** ppSorters, const CSphVector * pExtraFilters, int iTag ) const; bool MatchExtended ( CSphQueryContext * pCtx, const CSphQuery * pQuery, int iSorters, ISphMatchSorter ** ppSorters, ISphRanker * pRanker, int iTag ) const; const DWORD * FindDocinfo ( SphDocID_t uDocID ) const; void CopyDocinfo ( CSphQueryContext * pCtx, CSphMatch & tMatch, const DWORD * pFound ) const; bool BuildMVA ( const CSphVector & dSources, CSphAutoArray & dHits, int iArenaSize, int iFieldFD, int nFieldMVAs, int iFieldMVAInPool ); CSphDict * SetupStarDict ( CSphScopedPtr & tContainer, CSphDict * pPrevDict, ISphTokenizer & tTokenizer ) const; CSphDict * SetupExactDict ( CSphScopedPtr & tContainer, CSphDict * pPrevDict, ISphTokenizer & tTokenizer ) const; bool RelocateBlock ( int iFile, BYTE * pBuffer, int iRelocationSize, SphOffset_t * pFileSize, CSphBin * pMinBin, SphOffset_t * pSharedOffset ); bool PrecomputeMinMax(); private: static const int MAX_ORDINAL_STR_LEN = 4096; ///< maximum ordinal string length in bytes static const int ORDINAL_READ_SIZE = 262144; ///< sorted ordinal id read buffer size in bytes ESphBinRead ReadOrdinal ( CSphBin & Reader, Ordinal_t & Ordinal ); SphOffset_t DumpOrdinals ( CSphWriter & Writer, CSphVector & dOrdinals ); bool SortOrdinals ( const char * szToFile, int iFromFD, int iArenaSize, int iOrdinalsInPool, CSphVector< CSphVector > & dOrdBlockSize, bool bWarnOfMem ); bool SortOrdinalIds ( const char * szToFile, int iFromFD, int iArenaSize, CSphVector < CSphVector < SphOffset_t > > & dOrdBlockSize, bool bWarnOfMem ); const DWORD * GetMVAPool () const { return m_pMva.GetWritePtr(); } bool LoadPersistentMVA ( CSphString & sError ); bool JuggleFile ( const char* szExt, bool bNeedOrigin=true ); XQNode_t * ExpandPrefix ( XQNode_t * pNode, CSphString & sError, CSphQueryResultMeta * pResult ) const; }; int CSphIndex_VLN::m_iIndexTagSeq = 0; ///////////////////////////////////////////////////////////////////////////// // UTILITY FUNCTIONS ///////////////////////////////////////////////////////////////////////////// /// indexer warning void sphWarn ( const char * sTemplate, ... 
) { va_list ap; va_start ( ap, sTemplate ); fprintf ( stdout, "WARNING: " ); vfprintf ( stdout, sTemplate, ap ); fprintf ( stdout, "\n" ); va_end ( ap ); } ////////////////////////////////////////////////////////////////////////// /// microsecond precision timestamp int64_t sphMicroTimer() { #if USE_WINDOWS // Windows time query static int64_t iBase = 0; static int64_t iStart = 0; static int64_t iFreq = 0; LARGE_INTEGER iLarge; if ( !iBase ) { // get start QPC value QueryPerformanceFrequency ( &iLarge ); iFreq = iLarge.QuadPart; QueryPerformanceCounter ( &iLarge ); iStart = iLarge.QuadPart; // get start UTC timestamp // assuming it's still approximately the same moment as iStart, give or take a msec or three FILETIME ft; GetSystemTimeAsFileTime ( &ft ); iBase = ( int64_t(ft.dwHighDateTime)<<32 ) + int64_t(ft.dwLowDateTime); iBase = ( iBase - 116444736000000000ULL ) / 10; // rebase from 01 Jan 1601 to 01 Jan 1970, and rescale to 1 usec from 100 ns } // we can't easily drag iBase into parens because iBase*iFreq/1000000 overflows 64bit int! QueryPerformanceCounter ( &iLarge ); return iBase + ( iLarge.QuadPart - iStart )*1000000/iFreq; #else // UNIX time query struct timeval tv; gettimeofday ( &tv, NULL ); return int64_t(tv.tv_sec)*int64_t(1000000) + int64_t(tv.tv_usec); #endif // USE_WINDOWS } ////////////////////////////////////////////////////////////////////////// static int g_iMaxIOps = 0; static int g_iMaxIOSize = 0; static int64_t g_tmLastIOTime = 0; void sphSetThrottling ( int iMaxIOps, int iMaxIOSize ) { g_iMaxIOps = iMaxIOps; g_iMaxIOSize = iMaxIOSize; } static inline void sphThrottleSleep () { if ( g_iMaxIOps>0 ) { int64_t tmTimer = sphMicroTimer(); int64_t tmSleep = Max ( 0, g_tmLastIOTime + 1000000/g_iMaxIOps - tmTimer ); sphSleepMsec ( (int)(tmSleep/1000) ); g_tmLastIOTime = tmTimer + tmSleep; } } bool sphWriteThrottled ( int iFD, const void * pBuf, int64_t iCount, const char * sName, CSphString & sError ) { if ( iCount<=0 ) return true; // by default, slice ios by at most 1 GB int iChunkSize = ( 1UL<<30 ); // when there's a sane max_iosize (4K to 1GB), use it if ( g_iMaxIOSize>=4096 ) iChunkSize = Min ( iChunkSize, g_iMaxIOSize ); // while there's data, write it chunk by chunk const BYTE * p = (const BYTE*) pBuf; while ( iCount>0 ) { // wait for a timely occasion sphThrottleSleep (); // write (and maybe time) int64_t tmTimer = 0; if ( g_bIOStats ) tmTimer = sphMicroTimer(); int iToWrite = iChunkSize; if ( iCount g_iMaxIOSize ) { size_t nChunks = iCount / g_iMaxIOSize; size_t nBytesLeft = iCount % g_iMaxIOSize; size_t nBytesRead = 0; size_t iRead = 0; for ( size_t i=0; i 0 ) { iRead = sphReadThrottled ( iFD, (char *)pBuf + nChunks*g_iMaxIOSize, nBytesLeft ); nBytesRead += iRead; if ( iRead!=nBytesLeft ) return nBytesRead; } return nBytesRead; } sphThrottleSleep (); return sphRead ( iFD, pBuf, iCount ); } void SafeClose ( int & iFD ) { if ( iFD>=0 ) ::close ( iFD ); iFD = -1; } ////////////////////////////////////////////////////////////////////////// #if !USE_WINDOWS char * strlwr ( char * s ) { while ( *s ) { *s = tolower ( *s ); s++; } return s; } #endif char * sphStrMacro ( const char * sTemplate, const char * sMacro, SphDocID_t uValue ) { // expand macro char sExp[32]; snprintf ( sExp, sizeof(sExp), DOCID_FMT, uValue ); // calc lengths int iExp = strlen ( sExp ); int iMacro = strlen ( sMacro ); int iDelta = iExp-iMacro; // calc result length int iRes = strlen ( sTemplate ); const char * sCur = sTemplate; while ( ( sCur = strstr ( sCur, sMacro ) )!=NULL ) { iRes += iDelta; 
sCur++; } // build result char * sRes = new char [ iRes+1 ]; char * sOut = sRes; const char * sLast = sTemplate; sCur = sTemplate; while ( ( sCur = strstr ( sCur, sMacro ) )!=NULL ) { strncpy ( sOut, sLast, sCur-sLast ); sOut += sCur-sLast; strcpy ( sOut, sExp ); sOut += iExp; // NOLINT sCur += iMacro; sLast = sCur; } if ( *sLast ) strcpy ( sOut, sLast ); // NOLINT assert ( (int)strlen(sRes)==iRes ); return sRes; } float sphToFloat ( const char * s ) { if ( !s ) return 0.0f; return (float)strtod ( s, NULL ); } DWORD sphToDword ( const char * s ) { if ( !s ) return 0; return strtoul ( s, NULL, 10 ); } uint64_t sphToUint64 ( const char * s ) { if ( !s ) return 0; return strtoull ( s, NULL, 10 ); } int64_t sphToInt64 ( const char * s ) { if ( !s ) return 0; return strtoll ( s, NULL, 10 ); } #if USE_64BIT #define sphToDocid sphToUint64 #else #define sphToDocid sphToDword #endif #if USE_WINDOWS bool sphLockEx ( int iFile, bool bWait ) { HANDLE hHandle = (HANDLE) _get_osfhandle ( iFile ); if ( hHandle!=INVALID_HANDLE_VALUE ) { OVERLAPPED tOverlapped; memset ( &tOverlapped, 0, sizeof ( tOverlapped ) ); return !!LockFileEx ( hHandle, LOCKFILE_EXCLUSIVE_LOCK | ( bWait ? 0 : LOCKFILE_FAIL_IMMEDIATELY ), 0, 1, 0, &tOverlapped ); } return false; } void sphLockUn ( int iFile ) { HANDLE hHandle = (HANDLE) _get_osfhandle ( iFile ); if ( hHandle!=INVALID_HANDLE_VALUE ) { OVERLAPPED tOverlapped; memset ( &tOverlapped, 0, sizeof ( tOverlapped ) ); UnlockFileEx ( hHandle, 0, 1, 0, &tOverlapped ); } } #else bool sphLockEx ( int iFile, bool bWait ) { struct flock tLock; tLock.l_type = F_WRLCK; tLock.l_whence = SEEK_SET; tLock.l_start = 0; tLock.l_len = 0; int iCmd = bWait ? F_SETLKW : F_SETLK; // FIXME! check for HAVE_F_SETLKW? return ( fcntl ( iFile, iCmd, &tLock )!=-1 ); } void sphLockUn ( int iFile ) { struct flock tLock; tLock.l_type = F_UNLCK; tLock.l_whence = SEEK_SET; tLock.l_start = 0; tLock.l_len = 0; fcntl ( iFile, F_SETLK, &tLock ); } #endif void sphSleepMsec ( int iMsec ) { if ( iMsec<=0 ) return; #if USE_WINDOWS Sleep ( iMsec ); #else struct timeval tvTimeout; tvTimeout.tv_sec = iMsec / 1000; // full seconds tvTimeout.tv_usec = ( iMsec % 1000 ) * 1000; // remainder is msec, so *1000 for usec select ( 0, NULL, NULL, NULL, &tvTimeout ); // FIXME? could handle EINTR #endif } bool sphIsReadable ( const char * sPath, CSphString * pError ) { int iFD = ::open ( sPath, O_RDONLY ); if ( iFD<0 ) { if ( pError ) pError->SetSprintf ( "%s unreadable: %s", sPath, strerror(errno) ); return false; } close ( iFD ); return true; } void sphSetReadBuffers ( int iReadBuffer, int iReadUnhinted ) { if ( iReadBuffer<=0 ) iReadBuffer = DEFAULT_READ_BUFFER; g_iReadBuffer = Max ( iReadBuffer, MIN_READ_BUFFER ); if ( iReadUnhinted<=0 ) iReadUnhinted = DEFAULT_READ_UNHINTED; g_iReadUnhinted = Max ( iReadUnhinted, MIN_READ_UNHINTED ); } ////////////////////////////////////////////////////////////////////////// // DOCINFO ////////////////////////////////////////////////////////////////////////// static DWORD * g_pMvaArena = NULL; ///< initialized by sphArenaInit() // OPTIMIZE! 
try to inline or otherwise simplify maybe const DWORD * CSphMatch::GetAttrMVA ( const CSphAttrLocator & tLoc, const DWORD * pPool ) const { DWORD uIndex = MVA_DOWNSIZE ( GetAttr ( tLoc ) ); if ( !uIndex ) return NULL; if ( uIndex & MVA_ARENA_FLAG ) return g_pMvaArena + ( uIndex & MVA_OFFSET_MASK ); assert ( pPool ); return pPool + uIndex; } ///////////////////////////////////////////////////////////////////////////// // TOKENIZERS ///////////////////////////////////////////////////////////////////////////// #if USE_WINDOWS #pragma warning(disable:4127) // conditional expr is const for MSVC #endif inline int sphUTF8Decode ( BYTE * & pBuf ); // forward ref for GCC inline int sphUTF8Encode ( BYTE * pBuf, int iCode ); // forward ref for GCC /// synonym list entry struct CSphSynonym { CSphString m_sFrom; ///< specially packed list of map-from tokens CSphString m_sTo; ///< map-to string int m_iFromLen; ///< cached m_sFrom length int m_iToLen; ///< cached m_sTo length inline bool operator < ( const CSphSynonym & rhs ) const { return strcmp ( m_sFrom.cstr(), rhs.m_sFrom.cstr() ) < 0; } }; /// tokenizer implementation traits template < bool IS_UTF8 > class CSphTokenizerTraits : public ISphTokenizer { public: CSphTokenizerTraits (); virtual bool SetCaseFolding ( const char * sConfig, CSphString & sError ); virtual bool LoadSynonyms ( const char * sFilename, CSphString & sError ); virtual void CloneBase ( const CSphTokenizerTraits * pFrom, bool bEscaped ); virtual const char * GetTokenStart () const { return (const char *) m_pTokenStart; } virtual const char * GetTokenEnd () const { return (const char *) m_pTokenEnd; } virtual const char * GetBufferPtr () const { return (const char *) m_pCur; } virtual const char * GetBufferEnd () const { return (const char *) m_pBufferMax; } virtual void SetBufferPtr ( const char * sNewPtr ); virtual int SkipBlended (); protected: BYTE * GetTokenSyn (); bool BlendAdjust ( BYTE * pPosition ); BYTE * GetBlendedVariant (); int CodepointArbitration ( int iCodepoint, bool bWasEscaped, bool bSpaceAhead ); protected: /// get codepoint inline int GetCodepoint () { if ( IS_UTF8 ) { while ( m_pCur=0 ) return iCode; // succesful decode } return -1; // eof } else { return m_pCur>=m_pBufferMax ? 
-1 : int ( *m_pCur++ ); } } /// accum codepoint inline void AccumCodepoint ( int iCode ) { assert ( iCode>0 ); assert ( m_iAccum>=0 ); // throw away everything which is over the token size if ( m_iAccum=m_sAccum && m_pAccum m_dSynonyms; ///< active synonyms CSphVector m_dSynStart; ///< map 1st byte to candidate range start CSphVector m_dSynEnd; ///< map 1st byte to candidate range end BYTE * m_pBlendStart; BYTE * m_pBlendEnd; }; /// single-byte charset tokenizer class CSphTokenizer_SBCS : public CSphTokenizerTraits { public: CSphTokenizer_SBCS (); virtual void SetBuffer ( BYTE * sBuffer, int iLength ); virtual BYTE * GetToken (); virtual ISphTokenizer * Clone ( bool bEscaped ) const; virtual bool IsUtf8 () const { return false; } virtual int GetCodepointLength ( int ) const { return 1; } }; /// UTF-8 tokenizer class CSphTokenizer_UTF8 : public CSphTokenizerTraits { public: CSphTokenizer_UTF8 (); virtual void SetBuffer ( BYTE * sBuffer, int iLength ); virtual BYTE * GetToken (); virtual ISphTokenizer * Clone ( bool bEscaped ) const; virtual bool IsUtf8 () const { return true; } virtual int GetCodepointLength ( int iCode ) const; protected: void FlushAccum (); }; /// UTF-8 tokenizer with n-grams class CSphTokenizer_UTF8Ngram : public CSphTokenizer_UTF8 { public: CSphTokenizer_UTF8Ngram () : m_iNgramLen ( 1 ) {} public: virtual bool SetNgramChars ( const char * sConfig, CSphString & sError ); virtual void SetNgramLen ( int iLen ); virtual BYTE * GetToken (); protected: int m_iNgramLen; CSphString m_sNgramCharsStr; }; struct CSphMultiform { CSphString m_sNormalForm; int m_iNormalTokenLen; CSphVector m_dTokens; }; struct CSphMultiforms { int m_iMinTokens; int m_iMaxTokens; CSphVector m_dWordforms; }; struct CSphMultiformContainer { CSphMultiformContainer () : m_iMaxTokens ( 0 ) {} int m_iMaxTokens; typedef CSphOrderedHash < CSphMultiforms *, CSphString, CSphStrHashFunc, 131072 > CSphMultiformHash; CSphMultiformHash m_Hash; }; /// Token filter class CSphTokenizer_Filter : public ISphTokenizer { public: CSphTokenizer_Filter ( ISphTokenizer * pTokenizer, const CSphMultiformContainer * pContainer ); ~CSphTokenizer_Filter (); virtual bool SetCaseFolding ( const char * sConfig, CSphString & sError ) { return m_pTokenizer->SetCaseFolding ( sConfig, sError ); } virtual void AddCaseFolding ( CSphRemapRange & tRange ) { m_pTokenizer->AddCaseFolding ( tRange ); } virtual void AddSpecials ( const char * sSpecials ) { m_pTokenizer->AddSpecials ( sSpecials ); } virtual bool SetIgnoreChars ( const char * sIgnored, CSphString & sError ) { return m_pTokenizer->SetIgnoreChars ( sIgnored, sError ); } virtual bool SetNgramChars ( const char * sConfig, CSphString & sError ) { return m_pTokenizer->SetNgramChars ( sConfig, sError ); } virtual void SetNgramLen ( int iLen ) { m_pTokenizer->SetNgramLen ( iLen ); } virtual bool LoadSynonyms ( const char * sFilename, CSphString & sError ) { return m_pTokenizer->LoadSynonyms ( sFilename, sError ); } virtual bool SetBoundary ( const char * sConfig, CSphString & sError ) { return m_pTokenizer->SetBoundary ( sConfig, sError ); } virtual void Setup ( const CSphTokenizerSettings & tSettings ) { m_pTokenizer->Setup ( tSettings ); } virtual const CSphTokenizerSettings & GetSettings () const { return m_pTokenizer->GetSettings (); } virtual const CSphSavedFile & GetSynFileInfo () const { return m_pTokenizer->GetSynFileInfo (); } virtual bool EnableSentenceIndexing ( CSphString & sError ) { return m_pTokenizer->EnableSentenceIndexing ( sError ); } virtual bool EnableZoneIndexing ( 
CSphString & sError ) { return m_pTokenizer->EnableZoneIndexing ( sError ); } public: virtual void SetBuffer ( BYTE * sBuffer, int iLength ); virtual BYTE * GetToken (); virtual int GetCodepointLength ( int iCode ) const { return m_pTokenizer->GetCodepointLength ( iCode ); } virtual void EnableQueryParserMode ( bool bEnable ) { m_pTokenizer->EnableQueryParserMode ( bEnable ); } virtual void EnableTokenizedMultiformTracking () { m_bBuildMultiform = true; } virtual int GetLastTokenLen () const { return m_pLastToken->m_iTokenLen; } virtual bool GetBoundary () { return m_pLastToken->m_bBoundary; } virtual bool WasTokenSpecial () { return m_pLastToken->m_bSpecial; } virtual int GetOvershortCount () { return m_pLastToken->m_iOvershortCount; } virtual BYTE * GetTokenizedMultiform () { return m_sTokenizedMultiform[0] ? m_sTokenizedMultiform : NULL; } public: virtual ISphTokenizer * Clone ( bool bEscaped ) const; virtual bool IsUtf8 () const { return m_pTokenizer->IsUtf8 (); } virtual const char * GetTokenStart () const { return m_pLastToken->m_szTokenStart; } virtual const char * GetTokenEnd () const { return m_pLastToken->m_szTokenEnd; } virtual const char * GetBufferPtr () const { return m_pLastToken ? m_pLastToken->m_pBufferPtr : m_pTokenizer->GetBufferPtr(); } virtual const char * GetBufferEnd () const { return m_pTokenizer->GetBufferEnd (); } virtual void SetBufferPtr ( const char * sNewPtr ); private: ISphTokenizer * m_pTokenizer; const CSphMultiformContainer * m_pMultiWordforms; int m_iStoredStart; int m_iStoredLen; bool m_bBuildMultiform; BYTE m_sTokenizedMultiform [ 3*SPH_MAX_WORD_LEN+4 ]; BYTE m_sOutMultiform [ 3*SPH_MAX_WORD_LEN+4 ]; struct StoredToken_t { BYTE m_sToken [3*SPH_MAX_WORD_LEN+4]; int m_iTokenLen; bool m_bBoundary; bool m_bSpecial; int m_iOvershortCount; const char * m_szTokenStart; const char * m_szTokenEnd; const char * m_pBufferPtr; }; CSphVector m_dStoredTokens; StoredToken_t m_tLastToken; StoredToken_t * m_pLastToken; void FillTokenInfo ( StoredToken_t * pToken ); }; #if USE_WINDOWS #pragma warning(default:4127) // conditional expr is const #endif ///////////////////////////////////////////////////////////////////////////// ISphTokenizer * sphCreateSBCSTokenizer () { return new CSphTokenizer_SBCS (); } ISphTokenizer * sphCreateUTF8Tokenizer () { return new CSphTokenizer_UTF8 (); } ISphTokenizer * sphCreateUTF8NgramTokenizer () { return new CSphTokenizer_UTF8Ngram (); } ///////////////////////////////////////////////////////////////////////////// enum { MASK_CODEPOINT = 0x00ffffffUL, // mask off codepoint flags MASK_FLAGS = 0xff000000UL, // mask off codepoint value FLAG_CODEPOINT_SPECIAL = 0x01000000UL, // this codepoint is special FLAG_CODEPOINT_DUAL = 0x02000000UL, // this codepoint is special but also a valid word part FLAG_CODEPOINT_NGRAM = 0x04000000UL, // this codepoint is n-gram indexed FLAG_CODEPOINT_SYNONYM = 0x08000000UL, // this codepoint is used in synonym tokens only FLAG_CODEPOINT_BOUNDARY = 0x10000000UL, // this codepoint is phrase boundary FLAG_CODEPOINT_IGNORE = 0x20000000UL, // this codepoint is ignored FLAG_CODEPOINT_BLEND = 0x40000000UL // this codepoint is "blended" (indexed both as a character, and as a separator) }; CSphLowercaser::CSphLowercaser () : m_pData ( NULL ) { } void CSphLowercaser::Reset() { m_iChunks = 0; for ( int i=0; im_iChunks; m_pData = new int [ m_iChunks*CHUNK_SIZE ]; memcpy ( m_pData, pLC->m_pData, sizeof(int)*m_iChunks*CHUNK_SIZE ); // NOLINT sizeof(int) for ( int i=0; im_pChunk[i] ? 
pLC->m_pChunk[i] - pLC->m_pData + m_pData : NULL; } void CSphLowercaser::AddRemaps ( const CSphVector & dRemaps, DWORD uFlags ) { if ( !dRemaps.GetLength() ) return; // build new chunks map // 0 means "was unused" // 1 means "was used" // 2 means "is used now" int dUsed [ CHUNK_COUNT ]; for ( int i=0; i=0 && (_a)> CHUNK_BITS ); iChunk<=( tRemap.m_iEnd >> CHUNK_BITS ); iChunk++ ) if ( dUsed[iChunk]==0 ) { dUsed[iChunk] = 2; iNewChunks++; } } // alloc new tables and copy, if necessary if ( iNewChunks>m_iChunks ) { int * pData = new int [ iNewChunks*CHUNK_SIZE ]; memset ( pData, 0, sizeof(int)*iNewChunks*CHUNK_SIZE ); // NOLINT sizeof(int) int * pChunk = pData; for ( int i=0; i> CHUNK_BITS ] ); int & iCodepoint = m_pChunk [ j >> CHUNK_BITS ] [ j & CHUNK_MASK ]; bool bWordPart = ( iCodepoint & MASK_CODEPOINT ) && !( iCodepoint & FLAG_CODEPOINT_SYNONYM ); int iNew = iRemapped | uFlags | ( iCodepoint & MASK_FLAGS ); iCodepoint = bWordPart ? ( iNew | FLAG_CODEPOINT_DUAL ) : iNew; // new code-point flag removes SYNONYM if ( ( iCodepoint & FLAG_CODEPOINT_SYNONYM ) && uFlags==0 && iRemapped!=0 ) iCodepoint &= ~FLAG_CODEPOINT_SYNONYM; } } } void CSphLowercaser::AddSpecials ( const char * sSpecials ) { assert ( sSpecials ); int iSpecials = strlen(sSpecials); CSphVector dRemaps; dRemaps.Resize ( iSpecials ); ARRAY_FOREACH ( i, dRemaps ) dRemaps[i].m_iStart = dRemaps[i].m_iEnd = dRemaps[i].m_iRemapStart = sSpecials[i]; AddRemaps ( dRemaps, FLAG_CODEPOINT_SPECIAL ); } const CSphLowercaser & CSphLowercaser::operator = ( const CSphLowercaser & rhs ) { SetRemap ( &rhs ); return * this; } uint64_t CSphLowercaser::GetFNV () const { int iLen = ( sizeof(int) * m_iChunks * CHUNK_SIZE ) / sizeof(BYTE); // NOLINT return sphFNV64 ( (BYTE *)m_pData, iLen ); } ///////////////////////////////////////////////////////////////////////////// /// parser to build lowercaser from textual config class CSphCharsetDefinitionParser { public: CSphCharsetDefinitionParser () : m_bError ( false ) {} bool Parse ( const char * sConfig, CSphVector & dRanges ); const char * GetLastError (); protected: bool m_bError; char m_sError [ 1024 ]; const char * m_pCurrent; bool Error ( const char * sMessage ); void SkipSpaces (); bool IsEof (); bool CheckEof (); int HexDigit ( int c ); int ParseCharsetCode (); bool AddRange ( const CSphRemapRange & tRange, CSphVector & dRanges ); }; const char * CSphCharsetDefinitionParser::GetLastError () { return m_bError ? 
m_sError : NULL; } bool CSphCharsetDefinitionParser::IsEof () { return ( *m_pCurrent )==0; } bool CSphCharsetDefinitionParser::CheckEof () { if ( IsEof() ) { Error ( "unexpected end of line" ); return true; } else { return false; } } bool CSphCharsetDefinitionParser::Error ( const char * sMessage ) { char sErrorBuffer[32]; strncpy ( sErrorBuffer, m_pCurrent, sizeof(sErrorBuffer) ); sErrorBuffer [ sizeof(sErrorBuffer)-1 ] = '\0'; snprintf ( m_sError, sizeof(m_sError), "%s near '%s'", sMessage, sErrorBuffer ); m_sError [ sizeof(m_sError)-1 ] = '\0'; m_bError = true; return false; } int CSphCharsetDefinitionParser::HexDigit ( int c ) { if ( c>='0' && c<='9' ) return c-'0'; if ( c>='a' && c<='f' ) return c-'a'+10; if ( c>='A' && c<='F' ) return c-'A'+10; return 0; } void CSphCharsetDefinitionParser::SkipSpaces () { while ( ( *m_pCurrent ) && isspace ( (BYTE)*m_pCurrent ) ) m_pCurrent++; } int CSphCharsetDefinitionParser::ParseCharsetCode () { const char * p = m_pCurrent; int iCode = 0; if ( p[0]=='U' && p[1]=='+' ) { p += 2; while ( isxdigit(*p) ) { iCode = iCode*16 + HexDigit ( *p++ ); } while ( isspace(*p) ) p++; } else { if ( (*(BYTE*)p)<32 || (*(BYTE*)p)>127 ) { Error ( "non-ASCII characters not allowed, use 'U+00AB' syntax" ); return -1; } iCode = *p++; while ( isspace(*p) ) p++; } m_pCurrent = p; return iCode; } bool CSphCharsetDefinitionParser::AddRange ( const CSphRemapRange & tRange, CSphVector & dRanges ) { if ( tRange.m_iRemapStart>=0x20 ) { dRanges.Add ( tRange ); return true; } CSphString sError; sError.SetSprintf ( "dest range (U+0x%x) below U+0x20, not allowed", tRange.m_iRemapStart ); Error ( sError.cstr() ); return false; } bool CSphCharsetDefinitionParser::Parse ( const char * sConfig, CSphVector & dRanges ) { m_pCurrent = sConfig; dRanges.Reset (); // do parse while ( *m_pCurrent ) { SkipSpaces (); if ( IsEof () ) break; // check for stray comma if ( *m_pCurrent==',' ) return Error ( "stray ',' not allowed, use 'U+002C' instead" ); // parse char code const char * pStart = m_pCurrent; int iStart = ParseCharsetCode(); if ( iStart<0 ) return false; // stray char? if ( !*m_pCurrent || *m_pCurrent==',' ) { // stray char if ( !AddRange ( CSphRemapRange ( iStart, iStart, iStart ), dRanges ) ) return false; if ( IsEof () ) break; m_pCurrent++; continue; } // stray remap? if ( m_pCurrent[0]=='-' && m_pCurrent[1]=='>' ) { // parse and add m_pCurrent += 2; int iDest = ParseCharsetCode (); if ( iDest<0 ) return false; if ( !AddRange ( CSphRemapRange ( iStart, iStart, iDest ), dRanges ) ) return false; // it's either end of line now, or must be followed by comma if ( *m_pCurrent ) if ( *m_pCurrent++!=',' ) return Error ( "syntax error" ); continue; } // range start? if (!( m_pCurrent[0]=='.' && m_pCurrent[1]=='.' )) return Error ( "syntax error" ); m_pCurrent += 2; SkipSpaces (); if ( CheckEof () ) return false; // parse range end char code int iEnd = ParseCharsetCode (); if ( iEnd<0 ) return false; if ( iStart>iEnd ) { m_pCurrent = pStart; return Error ( "range end less than range start" ); } // stray range? if ( !*m_pCurrent || *m_pCurrent==',' ) { if ( !AddRange ( CSphRemapRange ( iStart, iEnd, iStart ), dRanges ) ) return false; if ( IsEof () ) break; m_pCurrent++; continue; } // "checkerboard" range? 
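		// --- Added documentation note, not part of the original parser ---
		// The "/2" suffix handled below is the checkerboard range form of a
		// charset_table definition, e.g. (illustrative config fragment only):
		//     charset_table = 0..9, a..z, _, U+100..U+17F/2
		// It steps through the source range in pairs and folds each (c, c+1)
		// pair onto a single codepoint, which suits Unicode blocks such as
		// Latin Extended-A where upper- and lowercase letters interleave and
		// a plain "A..Z->a..z" style range mapping cannot express the folding.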
if ( m_pCurrent[0]=='/' && m_pCurrent[1]=='2' ) { for ( int i=iStart; i' )) return Error ( "expected end of line, ',' or '->'" ); m_pCurrent += 2; SkipSpaces (); if ( CheckEof () ) return false; // parse dest start const char * pRemapStart = m_pCurrent; int iRemapStart = ParseCharsetCode (); if ( iRemapStart<0 ) return false; // expect '..' if ( CheckEof () ) return false; if (!( m_pCurrent[0]=='.' && m_pCurrent[1]=='.' )) return Error ( "expected '..'" ); m_pCurrent += 2; // parse dest end int iRemapEnd = ParseCharsetCode (); if ( iRemapEnd<0 ) return false; // check dest range if ( iRemapStart>iRemapEnd ) { m_pCurrent = pRemapStart; return Error ( "dest range end less than dest range start" ); } // check for length mismatch if ( ( iRemapEnd-iRemapStart )!=( iEnd-iStart ) ) { m_pCurrent = pStart; return Error ( "dest range length must match src range length" ); } // remapped ok if ( !AddRange ( CSphRemapRange ( iStart, iEnd, iRemapStart ), dRanges ) ) return false; if ( IsEof () ) break; if ( *m_pCurrent!=',' ) return Error ( "expected ','" ); m_pCurrent++; } dRanges.Sort (); for ( int i=0; i=dRanges[i+1].m_iStart ) { // FIXME! add an ambiguity check dRanges[i].m_iEnd = Max ( dRanges[i].m_iEnd, dRanges[i+1].m_iEnd ); dRanges.Remove ( i+1 ); i--; } } return true; } ////////////////////////////////////////////////////////////////////////// bool sphParseCharset ( const char * sCharset, CSphVector & dRemaps ) { CSphCharsetDefinitionParser tParser; return tParser.Parse ( sCharset, dRemaps ); } ///////////////////////////////////////////////////////////////////////////// CSphSavedFile::CSphSavedFile () : m_uSize ( 0 ) , m_uCTime ( 0 ) , m_uMTime ( 0 ) , m_uCRC32 ( 0 ) { } CSphTokenizerSettings::CSphTokenizerSettings () : m_iType ( TOKENIZER_SBCS ) , m_iMinWordLen ( 1 ) , m_iNgramLen ( 0 ) { } void LoadTokenizerSettings ( CSphReader & tReader, CSphTokenizerSettings & tSettings, DWORD uVersion, CSphString & sWarning ) { if ( uVersion<9 ) return; tSettings.m_iType = tReader.GetByte (); tSettings.m_sCaseFolding = tReader.GetString (); tSettings.m_iMinWordLen = tReader.GetDword (); tSettings.m_sSynonymsFile = tReader.GetString (); ReadFileInfo ( tReader, tSettings.m_sSynonymsFile.cstr (), sWarning ); tSettings.m_sBoundary = tReader.GetString (); tSettings.m_sIgnoreChars = tReader.GetString (); tSettings.m_iNgramLen = tReader.GetDword (); tSettings.m_sNgramChars = tReader.GetString (); if ( uVersion>=15 ) tSettings.m_sBlendChars = tReader.GetString (); if ( uVersion>=24 ) tSettings.m_sBlendMode = tReader.GetString(); } void SaveTokenizerSettings ( CSphWriter & tWriter, ISphTokenizer * pTokenizer ) { assert ( pTokenizer ); const CSphTokenizerSettings & tSettings = pTokenizer->GetSettings (); tWriter.PutByte ( tSettings.m_iType ); tWriter.PutString ( tSettings.m_sCaseFolding.cstr () ); tWriter.PutDword ( tSettings.m_iMinWordLen ); tWriter.PutString ( tSettings.m_sSynonymsFile.cstr () ); WriteFileInfo ( tWriter, pTokenizer->GetSynFileInfo () ); tWriter.PutString ( tSettings.m_sBoundary.cstr () ); tWriter.PutString ( tSettings.m_sIgnoreChars.cstr () ); tWriter.PutDword ( tSettings.m_iNgramLen ); tWriter.PutString ( tSettings.m_sNgramChars.cstr () ); tWriter.PutString ( tSettings.m_sBlendChars.cstr () ); tWriter.PutString ( tSettings.m_sBlendMode.cstr () ); } void LoadDictionarySettings ( CSphReader & tReader, CSphDictSettings & tSettings, DWORD uVersion, CSphString & sWarning ) { if ( uVersion<9 ) return; tSettings.m_sMorphology = tReader.GetString (); tSettings.m_sStopwords = tReader.GetString (); int 
nFiles = tReader.GetDword (); CSphString sFile; for ( int i = 0; i < nFiles; i++ ) { sFile = tReader.GetString (); ReadFileInfo ( tReader, sFile.cstr (), sWarning ); } tSettings.m_sWordforms = tReader.GetString (); ReadFileInfo ( tReader, tSettings.m_sWordforms.cstr (), sWarning ); if ( uVersion>=13 ) tSettings.m_iMinStemmingLen = tReader.GetDword (); tSettings.m_bWordDict = false; // default to crc for old indexes if ( uVersion>=21 ) tSettings.m_bWordDict = ( tReader.GetByte()!=0 ); } void SaveDictionarySettings ( CSphWriter & tWriter, CSphDict * pDict, bool bForceWordDict ) { assert ( pDict ); const CSphDictSettings & tSettings = pDict->GetSettings (); tWriter.PutString ( tSettings.m_sMorphology.cstr () ); tWriter.PutString ( tSettings.m_sStopwords.cstr () ); const CSphVector & dSWFileInfos = pDict->GetStopwordsFileInfos (); tWriter.PutDword ( dSWFileInfos.GetLength () ); ARRAY_FOREACH ( i, dSWFileInfos ) { tWriter.PutString ( dSWFileInfos[i].m_sFilename.cstr () ); WriteFileInfo ( tWriter, dSWFileInfos[i] ); } const CSphSavedFile & tWFFileInfo = pDict->GetWordformsFileInfo (); tWriter.PutString ( tSettings.m_sWordforms.cstr () ); WriteFileInfo ( tWriter, tWFFileInfo ); tWriter.PutDword ( tSettings.m_iMinStemmingLen ); tWriter.PutByte ( tSettings.m_bWordDict || bForceWordDict ); } static inline bool ShortTokenFilter ( BYTE * pToken, int iLen ) { return pToken[0]=='*' || ( iLen > 0 && pToken[iLen-1]=='*' ); } ///////////////////////////////////////////////////////////////////////////// ISphTokenizer::ISphTokenizer () : m_iLastTokenLen ( 0 ) , m_bTokenBoundary ( false ) , m_bBoundary ( false ) , m_bWasSpecial ( false ) , m_bEscaped ( false ) , m_iOvershortCount ( 0 ) , m_bBlended ( false ) , m_bNonBlended ( true ) , m_bBlendedPart ( false ) , m_bBlendAdd ( false ) , m_uBlendVariants ( BLEND_TRIM_NONE ) , m_uBlendVariantsPending ( 0 ) , m_bBlendSkipPure ( false ) , m_bShortTokenFilter ( false ) , m_bQueryMode ( false ) , m_bDetectSentences ( false ) , m_bPhrase ( false ) {} bool ISphTokenizer::SetCaseFolding ( const char * sConfig, CSphString & sError ) { CSphVector dRemaps; CSphCharsetDefinitionParser tParser; if ( !tParser.Parse ( sConfig, dRemaps ) ) { sError = tParser.GetLastError(); return false; } m_tLC.Reset (); m_tLC.AddRemaps ( dRemaps, 0 ); return true; } void ISphTokenizer::AddCaseFolding ( CSphRemapRange & tRange ) { CSphVector dTmp; dTmp.Add ( tRange ); m_tLC.AddRemaps ( dTmp, 0 ); } void ISphTokenizer::AddSpecials ( const char * sSpecials ) { m_tLC.AddSpecials ( sSpecials ); } static int TokenizeOnWhitespace ( CSphVector & dTokens, BYTE * sFrom, bool bUtf8 ) { BYTE sAccum [ 3*SPH_MAX_WORD_LEN+16 ]; BYTE * pAccum = sAccum; int iAccum = 0; for ( ;; ) { int iCode = bUtf8 ? sphUTF8Decode(sFrom) : *sFrom++; // eof or whitespace? 
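// (added, descriptive) TokenizeOnWhitespace() is a plain accumulate-and-flush
// split: codepoints are appended to sAccum until end-of-buffer or whitespace
// is hit, at which point the accumulated token (if any) is pushed into
// dTokens and the accumulator is reset. For example, the map-from part
// "core  2 duo" of a synonym line yields the tokens { "core", "2", "duo" };
// runs of whitespace just flush an empty accumulator and are skipped.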
if ( !iCode || sphIsSpace(iCode) ) { // flush accum if ( iAccum ) { *pAccum = '\0'; dTokens.Add ( (char*)sAccum ); pAccum = sAccum; iAccum = 0; } // break on eof if ( !iCode ) break; } else { // accumulate everything else if ( iAccums && sphIsSpace(*sEnd) ) sEnd--; *++sEnd = '\0'; return s; } void ISphTokenizer::Setup ( const CSphTokenizerSettings & tSettings ) { m_tSettings = tSettings; m_tSettings.m_iMinWordLen = Max ( tSettings.m_iMinWordLen, 1 ); } ISphTokenizer * ISphTokenizer::Create ( const CSphTokenizerSettings & tSettings, CSphString & sError ) { CSphScopedPtr pTokenizer ( NULL ); switch ( tSettings.m_iType ) { case TOKENIZER_SBCS: pTokenizer = sphCreateSBCSTokenizer (); break; case TOKENIZER_UTF8: pTokenizer = sphCreateUTF8Tokenizer (); break; case TOKENIZER_NGRAM: pTokenizer = sphCreateUTF8NgramTokenizer (); break; default: sError.SetSprintf ( "failed to create tokenizer (unknown charset type '%d')", tSettings.m_iType ); return NULL; } pTokenizer->Setup ( tSettings ); if ( !tSettings.m_sCaseFolding.IsEmpty () && !pTokenizer->SetCaseFolding ( tSettings.m_sCaseFolding.cstr (), sError ) ) { sError.SetSprintf ( "'charset_table': %s", sError.cstr() ); return NULL; } if ( !tSettings.m_sSynonymsFile.IsEmpty () && !pTokenizer->LoadSynonyms ( tSettings.m_sSynonymsFile.cstr (), sError ) ) { sError.SetSprintf ( "'synonyms': %s", sError.cstr() ); return NULL; } if ( !tSettings.m_sBoundary.IsEmpty () && !pTokenizer->SetBoundary ( tSettings.m_sBoundary.cstr (), sError ) ) { sError.SetSprintf ( "'phrase_boundary': %s", sError.cstr() ); return NULL; } if ( !tSettings.m_sIgnoreChars.IsEmpty () && !pTokenizer->SetIgnoreChars ( tSettings.m_sIgnoreChars.cstr (), sError ) ) { sError.SetSprintf ( "'ignore_chars': %s", sError.cstr() ); return NULL; } if ( !tSettings.m_sBlendChars.IsEmpty () && !pTokenizer->SetBlendChars ( tSettings.m_sBlendChars.cstr (), sError ) ) { sError.SetSprintf ( "'blend_chars': %s", sError.cstr() ); return NULL; } if ( !pTokenizer->SetBlendMode ( tSettings.m_sBlendMode.cstr (), sError ) ) { sError.SetSprintf ( "'blend_mode': %s", sError.cstr() ); return NULL; } pTokenizer->SetNgramLen ( tSettings.m_iNgramLen ); if ( !tSettings.m_sNgramChars.IsEmpty () && !pTokenizer->SetNgramChars ( tSettings.m_sNgramChars.cstr (), sError ) ) { sError.SetSprintf ( "'ngram_chars': %s", sError.cstr() ); return NULL; } return pTokenizer.LeakPtr (); } ISphTokenizer * ISphTokenizer::CreateTokenFilter ( ISphTokenizer * pTokenizer, const CSphMultiformContainer * pContainer ) { if ( !pContainer ) return NULL; return new CSphTokenizer_Filter ( pTokenizer, pContainer ); } bool ISphTokenizer::AddSpecialsSPZ ( const char * sSpecials, const char * sDirective, CSphString & sError ) { for ( int i=0; sSpecials[i]; i++ ) { int iCode = m_tLC.ToLower ( sSpecials[i] ); if ( iCode & ( FLAG_CODEPOINT_NGRAM | FLAG_CODEPOINT_BOUNDARY | FLAG_CODEPOINT_IGNORE ) ) { sError.SetSprintf ( "%s requires that character '%c' is not in ngram_chars, phrase_boundary, or ignore_chars", sDirective, sSpecials[i] ); return false; } } AddSpecials ( sSpecials ); return true; } bool ISphTokenizer::EnableSentenceIndexing ( CSphString & sError ) { const char sSpecials[] = { '.', '?', '!', MAGIC_CODE_PARAGRAPH, 0 }; if ( !AddSpecialsSPZ ( sSpecials, "index_sp", sError ) ) return false; m_bDetectSentences = true; return true; } bool ISphTokenizer::EnableZoneIndexing ( CSphString & sError ) { const char sSpecials[] = { MAGIC_CODE_ZONE, 0 }; return AddSpecialsSPZ ( sSpecials, "index_zones", sError ); } 
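// --------------------------------------------------------------------------
// (added) Hedged usage sketch, not part of the original source: how the
// Create() factory above is typically driven from a settings struct. The
// identifiers are taken from the declarations in this file; the concrete
// values and the helper name are made up for illustration, hence the #if 0.
// --------------------------------------------------------------------------
#if 0
static ISphTokenizer * SketchCreateUtf8Tokenizer ( CSphString & sError )
{
	CSphTokenizerSettings tSettings;
	tSettings.m_iType = TOKENIZER_UTF8;							// utf-8 tokenizer
	tSettings.m_iMinWordLen = 3;								// drop 1-2 char tokens
	tSettings.m_sCaseFolding = "0..9, A..Z->a..z, _, a..z";		// charset_table
	tSettings.m_sBlendChars = "&, U+23";						// '&' and '#' are blended
	tSettings.m_sBlendMode = "trim_none, skip_pure";

	// Create() applies case folding, synonyms, phrase boundaries, ignore/blend
	// chars and n-gram settings in that order, and returns NULL with sError
	// set if any of them fails
	return ISphTokenizer::Create ( tSettings, sError );
}
#endif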
////////////////////////////////////////////////////////////////////////// template < bool IS_UTF8 > CSphTokenizerTraits::CSphTokenizerTraits () : m_pBuffer ( NULL ) , m_pBufferMax ( NULL ) , m_pCur ( NULL ) , m_pTokenStart ( NULL ) , m_pTokenEnd ( NULL ) , m_iAccum ( 0 ) , m_pBlendStart ( NULL ) , m_pBlendEnd ( NULL ) { m_pAccum = m_sAccum; } template < bool IS_UTF8 > bool CSphTokenizerTraits::SetCaseFolding ( const char * sConfig, CSphString & sError ) { if ( m_dSynonyms.GetLength() ) { sError = "SetCaseFolding() must not be called after LoadSynonyms()"; return false; } return ISphTokenizer::SetCaseFolding ( sConfig, sError ); } template < bool IS_UTF8 > bool CSphTokenizerTraits::LoadSynonyms ( const char * sFilename, CSphString & sError ) { m_dSynonyms.Reset (); if ( !sFilename || !*sFilename ) return true; GetFileStats ( sFilename, m_tSynFileInfo ); FILE * fp = fopen ( sFilename, "r" ); if ( !fp ) { sError.SetSprintf ( "failed to open '%s'", sFilename ); return false; } int iLine = 0; char sBuffer[1024]; CSphOrderedHash < int, int, IdentityHash_fn, 4096 > hSynonymOnly; CSphVector dFrom; bool bOK = false; for ( ;; ) { char * sGot = fgets ( sBuffer, sizeof(sBuffer), fp ); if ( !sGot ) { if ( feof(fp) ) bOK = true; break; } iLine++; dFrom.Resize ( 0 ); // extract map-from and map-to parts char * sSplit = strstr ( sBuffer, "=>" ); if ( !sSplit ) { sError.SetSprintf ( "%s line %d: mapping token (=>) not found", sFilename, iLine ); break; } BYTE * sFrom = (BYTE *) sBuffer; BYTE * sTo = (BYTE *)( sSplit + strlen ( "=>" ) ); *sSplit = '\0'; // tokenize map-from if ( !TokenizeOnWhitespace ( dFrom, sFrom, IsUtf8() ) ) { sError.SetSprintf ( "%s line %d: empty map-from part", sFilename, iLine ); break; } // trim map-to sTo = sphTrim ( sTo ); if ( !*sTo ) { sError.SetSprintf ( "%s line %d: empty map-to part", sFilename, iLine ); break; } // check lengths ARRAY_FOREACH ( i, dFrom ) { int iFromLen = IsUtf8() ? sphUTF8Len ( dFrom[i].cstr() ) : strlen ( dFrom[i].cstr() ); if ( iFromLen>SPH_MAX_WORD_LEN ) { sError.SetSprintf ( "%s line %d: map-from token too long (over %d bytes)", sFilename, iLine, SPH_MAX_WORD_LEN ); break; } } int iToLen = IsUtf8() ? sphUTF8Len ( (const char*)sTo ) : strlen ( (const char*)sTo ); if ( iToLen>SPH_MAX_WORD_LEN ) { sError.SetSprintf ( "%s line %d: map-to token too long (over %d bytes)", sFilename, iLine, SPH_MAX_WORD_LEN ); break; } // pack and store it int iFromLen = 1; ARRAY_FOREACH ( i, dFrom ) iFromLen += strlen ( dFrom[i].cstr() ) + 1; if ( iFromLen>MAX_SYNONYM_LEN ) { sError.SetSprintf ( "%s line %d: map-from part too long (over %d bytes)", sFilename, iLine, MAX_SYNONYM_LEN ); break; } CSphSynonym & tSyn = m_dSynonyms.Add (); tSyn.m_sFrom.Reserve ( iFromLen ); tSyn.m_iFromLen = iFromLen; tSyn.m_sTo = (char*)sTo; tSyn.m_iToLen = iToLen; char * sCur = const_cast ( tSyn.m_sFrom.cstr() ); ARRAY_FOREACH ( i, dFrom ) { int iLen = strlen ( dFrom[i].cstr() ); memcpy ( sCur, dFrom[i].cstr(), iLen ); sCur[iLen] = MAGIC_SYNONYM_WHITESPACE; sCur += iLen+1; } *sCur++ = '\0'; assert ( sCur-tSyn.m_sFrom.cstr()==iFromLen ); // track synonym-only codepoints in map-from for ( ;; ) { int iCode = IsUtf8() ? 
sphUTF8Decode(sFrom) : *sFrom++; if ( !iCode ) break; if ( iCode>0 && !sphIsSpace(iCode) && !m_tLC.ToLower(iCode) ) hSynonymOnly.Add ( 1, iCode ); } } fclose ( fp ); if ( !bOK ) { m_dSynonyms.Reset (); return false; } // sort the list m_dSynonyms.Sort (); // build simple lookup table m_dSynStart.Resize ( 256 ); m_dSynEnd.Resize ( 256 ); for ( int i=0; i<256; i++ ) { m_dSynStart[i] = INT_MAX; m_dSynEnd[i] = -INT_MAX; } ARRAY_FOREACH ( i, m_dSynonyms ) { int iCh = *(BYTE*)( m_dSynonyms[i].m_sFrom.cstr() ); m_dSynStart[iCh] = Min ( m_dSynStart[iCh], i ); m_dSynEnd[iCh] = Max ( m_dSynEnd[iCh], i ); } // add synonym-only remaps CSphVector dRemaps; dRemaps.Reserve ( hSynonymOnly.GetLength() ); hSynonymOnly.IterateStart (); while ( hSynonymOnly.IterateNext() ) { CSphRemapRange & tRange = dRemaps.Add (); tRange.m_iStart = tRange.m_iEnd = tRange.m_iRemapStart = hSynonymOnly.IterateGetKey(); } m_tLC.AddRemaps ( dRemaps, FLAG_CODEPOINT_SYNONYM ); return true; } template < bool IS_UTF8 > void CSphTokenizerTraits::CloneBase ( const CSphTokenizerTraits * pFrom, bool bEscaped ) { m_tLC = pFrom->m_tLC; m_dSynonyms = pFrom->m_dSynonyms; m_dSynStart = pFrom->m_dSynStart; m_dSynEnd = pFrom->m_dSynEnd; m_tSettings = pFrom->m_tSettings; m_bEscaped = bEscaped; m_uBlendVariants = pFrom->m_uBlendVariants; m_bBlendSkipPure = pFrom->m_bBlendSkipPure; if ( bEscaped ) { CSphVector dRemaps; CSphRemapRange Range; Range.m_iStart = Range.m_iEnd = Range.m_iRemapStart = '\\'; dRemaps.Add ( Range ); m_tLC.AddRemaps ( dRemaps, FLAG_CODEPOINT_SPECIAL ); } } template < bool IS_UTF8 > void CSphTokenizerTraits::SetBufferPtr ( const char * sNewPtr ) { assert ( (BYTE*)sNewPtr>=m_pBuffer && (BYTE*)sNewPtr<=m_pBufferMax ); m_pCur = Min ( m_pBufferMax, Max ( m_pBuffer, (BYTE*)sNewPtr ) ); m_iAccum = 0; m_pAccum = m_sAccum; m_pTokenStart = m_pTokenEnd = NULL; m_pBlendStart = m_pBlendEnd = NULL; } template < bool IS_UTF8 > int CSphTokenizerTraits::SkipBlended() { if ( !m_pBlendEnd ) return 0; bool bQuery = m_bQueryMode; BYTE * pMax = m_pBufferMax; m_bQueryMode = false; m_pBufferMax = m_pBlendEnd; int iBlended = 0; while ( GetToken() ) iBlended++; m_bQueryMode = bQuery; m_pBufferMax = pMax; return iBlended; } /// adjusts blending magic when we're about to return a token (any token) /// returns false if current token should be skipped, true otherwise template < bool IS_UTF8 > bool CSphTokenizerTraits::BlendAdjust ( BYTE * pCur ) { // check if all we got is a bunch of blended characters (pure-blended case) if ( m_bBlended && !m_bNonBlended ) { // we either skip this token, or pretend it was normal // in both cases, clear the flag m_bBlended = false; // do we need to skip it? 
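// --------------------------------------------------------------------------
// (added, descriptive) LoadSynonyms() above expects one mapping per line in
// the form "map-from => map-to", where map-from may be several whitespace
// separated tokens and map-to is a single token, e.g.
//
//   c++ => cplusplus
//   ms dos => msdos
//   AT&T => at_and_t
//
// Codepoints that only ever occur inside map-from parts (such as '+' or '&'
// above, when they are not in charset_table) are added to the lowercaser
// with FLAG_CODEPOINT_SYNONYM so that GetTokenSyn() can still match them.
// The sorted synonym list is bucketed by first byte via m_dSynStart/m_dSynEnd
// to narrow the initial candidate range cheaply.
// --------------------------------------------------------------------------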
if ( m_bBlendSkipPure ) { m_pBlendStart = NULL; return false; } } m_bNonBlended = false; // adjust buffer pointers if ( m_bBlended && m_pBlendStart ) { // called once per blended token, on processing start // at this point, full blended token is in the accumulator // and we're about to return it m_pCur = m_pBlendStart; m_pBlendEnd = pCur; m_pBlendStart = NULL; m_bBlendedPart = true; } else if ( pCur>=m_pBlendEnd ) { // tricky bit, as at this point, token we're about to return // can either be a blended subtoken, or the next one m_bBlendedPart = ( m_pTokenStart!=NULL ) && ( m_pTokenStart0 && *pSrc ) *pDst++ = *pSrc++; *pDst++ = '\0'; } template < bool IS_UTF8 > BYTE * CSphTokenizerTraits::GetBlendedVariant () { // we can get called on several occasions // case 1, a new blended token was just accumulated if ( m_bBlended && !m_bBlendAdd ) { // fast path for the default case (trim_none) if ( m_uBlendVariants==BLEND_TRIM_NONE ) return m_sAccum; // analyze the full token, find non-blended bounds m_iBlendNormalStart = -1; m_iBlendNormalEnd = -1; // OPTIMIZE? we can skip this based on non-blended flag from adjust BYTE * p = m_sAccum; while ( *p ) { int iLast = (int)( p-m_sAccum ); int iCode = IS_UTF8 ? sphUTF8Decode ( p ) : *p++; if (!( m_tLC.ToLower ( iCode ) & FLAG_CODEPOINT_BLEND )) { m_iBlendNormalEnd = (int)( p-m_sAccum ); if ( m_iBlendNormalStart<0 ) m_iBlendNormalStart = iLast; } } // build todo mask // check and revert a few degenerate cases m_uBlendVariantsPending = m_uBlendVariants; if ( m_uBlendVariantsPending & BLEND_TRIM_BOTH ) { if ( m_iBlendNormalStart<0 ) { // no heading blended; revert BOTH to TAIL m_uBlendVariantsPending &= ~BLEND_TRIM_BOTH; m_uBlendVariantsPending |= BLEND_TRIM_TAIL; } else if ( m_iBlendNormalEnd<0 ) { // no trailing blended; revert BOTH to HEAD m_uBlendVariantsPending &= ~BLEND_TRIM_BOTH; m_uBlendVariantsPending |= BLEND_TRIM_HEAD; } } if ( m_uBlendVariantsPending & BLEND_TRIM_HEAD ) { // either no heading blended, or pure blended; revert HEAD to NONE if ( m_iBlendNormalStart<=0 ) { m_uBlendVariantsPending &= ~BLEND_TRIM_HEAD; m_uBlendVariantsPending |= BLEND_TRIM_NONE; } } if ( m_uBlendVariantsPending & BLEND_TRIM_TAIL ) { // either no trailing blended, or pure blended; revert TAIL to NONE if ( m_iBlendNormalEnd<=0 || m_sAccum[m_iBlendNormalEnd]==0 ) { m_uBlendVariantsPending &= ~BLEND_TRIM_TAIL; m_uBlendVariantsPending |= BLEND_TRIM_NONE; } } // ok, we are going to return a few variants after all, flag that // OPTIMIZE? add fast path for "single" variants? m_bBlendAdd = true; assert ( m_uBlendVariantsPending ); // we also have to stash the original blended token // because accumulator contents may get trashed by caller (say, when stemming) strncpy ( (char*)m_sAccumBlend, (char*)m_sAccum, sizeof(m_sAccumBlend) ); } // case 2, caller is checking for pending variants, have we even got any? 
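// (added, illustrative) Example of what the variant machinery above produces,
// assuming blend_chars = "@, !" and blend_mode = "trim_none, trim_head,
// trim_tail, trim_both": for the source token "@dude!" the blended variants
// returned are "@dude!" (trim_none), "dude!" (trim_head), "@dude" (trim_tail)
// and "dude" (trim_both), while the plain token "dude" is still emitted by
// the normal pass, since blended characters also act as separators. With the
// default blend_mode (trim_none only) the fast path above simply returns the
// accumulator as-is.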
if ( !m_bBlendAdd ) return false; // handle trim_none // this MUST be the first handler, so that we could avoid copying below, and just return the original accumulator if ( m_uBlendVariantsPending & BLEND_TRIM_NONE ) { m_uBlendVariantsPending &= ~BLEND_TRIM_NONE; m_bBlended = true; return m_sAccum; } // handle trim_both if ( m_uBlendVariantsPending & BLEND_TRIM_BOTH ) { m_uBlendVariantsPending &= ~BLEND_TRIM_BOTH; if ( m_iBlendNormalStart<0 ) m_uBlendVariantsPending |= BLEND_TRIM_TAIL; // no heading blended; revert BOTH to TAIL else if ( m_iBlendNormalEnd<0 ) m_uBlendVariantsPending |= BLEND_TRIM_HEAD; // no trailing blended; revert BOTH to HEAD else { assert ( m_iBlendNormalStart=0 ) { // FIXME! need we check for overshorts? CopySubstring ( m_sAccum, m_sAccumBlend+m_iBlendNormalStart, sizeof(m_sAccum) ); m_bBlended = true; return m_sAccum; } } // handle TRIM_TAIL if ( m_uBlendVariantsPending & BLEND_TRIM_TAIL ) { m_uBlendVariantsPending &= ~BLEND_TRIM_TAIL; if ( m_iBlendNormalEnd>0 ) { // FIXME! need we check for overshorts? CopySubstring ( m_sAccum, m_sAccumBlend, m_iBlendNormalEnd ); m_bBlended = true; return m_sAccum; } } // all clear, no more variants to go m_bBlendAdd = false; return NULL; } static inline bool IsModifier ( int iSymbol ) { return iSymbol=='^' || iSymbol=='$' || iSymbol=='=' || iSymbol=='*'; } static inline bool IsCapital ( int iCh ) { return iCh>='A' && iCh<='Z'; } template < bool IS_UTF8 > int CSphTokenizerTraits::CodepointArbitration ( int iCode, bool bWasEscaped, bool bSpaceAhead ) { ///////////////////////////// // indexing time arbitration ///////////////////////////// if ( !m_bQueryMode ) { int iSymbol = iCode & MASK_CODEPOINT; // detect sentence boundaries // FIXME! should use charset_table (or add a new directive) and support languages other than English if ( m_bDetectSentences ) { if ( iSymbol=='?' || iSymbol=='!' ) { // definitely a sentence boundary return MAGIC_CODE_SENTENCE | FLAG_CODEPOINT_SPECIAL; } if ( iSymbol=='.' ) { // inline dot ("in the U.K and"), not a boundary bool bInwordDot = ( sphIsAlpha ( m_pCur[0] ) || m_pCur[0]==',' ); // followed by a small letter or an opening paren, not a boundary // FIXME? might want to scan for more than one space // Yoyodine Inc. exists ... // Yoyodine Inc. (the company) .. bool bInphraseDot = ( sphIsSpace ( m_pCur[0] ) && ( ( 'a'<=m_pCur[1] && m_pCur[1]<='z' ) || ( m_pCur[1]=='(' && 'a'<=m_pCur[2] && m_pCur[2]<='z' ) ) ); // preceded by something that looks like a middle name, opening first name, salutation bool bMiddleName = false; switch ( m_iAccum ) { case 1: // 1-char capital letter // example: J. R. R. Tolkien, who wrote Hobbit ... // example: John D. Doe ... bMiddleName = IsCapital ( m_pCur[-2] ); break; case 2: // 2-char token starting with a capital if ( IsCapital ( m_pCur[-3] ) ) { // capital+small // example: Known as Mr. Doe ... if ( !IsCapital ( m_pCur[-2] ) ) bMiddleName = true; // known capital+capital (MR, DR, MS) if ( ( m_pCur[-3]=='M' && m_pCur[-2]=='R' ) || ( m_pCur[-3]=='M' && m_pCur[-2]=='S' ) || ( m_pCur[-3]=='D' && m_pCur[-2]=='R' ) ) bMiddleName = true; } break; case 3: // preceded by a known 3-byte token (MRS, DRS) // example: Survived by Mrs. Doe ... 
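// (added, descriptive) Summary of the '.' disambiguation in this switch: a dot
// only becomes a sentence boundary (MAGIC_CODE_SENTENCE) when it is not an
// in-word dot ("U.K."), is not followed by a lowercase continuation
// ("Yoyodine Inc. exists"), and is not preceded by something that looks like
// an initial or a salutation ("J. R. R. Tolkien", "Mr. Doe", "Mrs. Doe");
// '?' and '!' always terminate the sentence once index_sp is enabled.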
if ( ( m_sAccum[0]=='m' || m_sAccum[0]=='d' ) && m_sAccum[1]=='r' && m_sAccum[2]=='s' ) bMiddleName = true; break; } if ( !bInwordDot && !bInphraseDot && !bMiddleName ) { // sentence boundary return MAGIC_CODE_SENTENCE | FLAG_CODEPOINT_SPECIAL; } else { // just a character if ( ( iCode & MASK_FLAGS )==FLAG_CODEPOINT_SPECIAL ) return 0; // special only, not dual? then in this context, it is a separator else return iCode & ~( FLAG_CODEPOINT_SPECIAL | FLAG_CODEPOINT_DUAL ); // perhaps it was blended, so return the original code } } } // pass-through return iCode; } ////////////////////////// // query time arbitration ////////////////////////// if ( iCode & FLAG_CODEPOINT_NGRAM ) return iCode; // ngrams are handled elsewhere int iSymbol = iCode & MASK_CODEPOINT; // codepoints can't be blended and special at the same time if ( ( iCode & FLAG_CODEPOINT_BLEND ) && ( iCode & FLAG_CODEPOINT_SPECIAL ) ) { bool bBlend = bWasEscaped || // escaped characters should always act as blended ( m_bPhrase && !IsModifier ( iSymbol ) ) || // non-modifier special inside phrase ( m_iAccum && ( iSymbol=='@' || iSymbol=='/' || iSymbol=='-' ) ); // some specials in the middle of a token // clear special or blend flags iCode &= bBlend ? ~( FLAG_CODEPOINT_DUAL | FLAG_CODEPOINT_SPECIAL ) : ~( FLAG_CODEPOINT_DUAL | FLAG_CODEPOINT_BLEND ); } // escaped specials are not special // dash and dollar inside the word are not special (however, single opening modifier is not a word!) // non-modifier specials within phrase are not special bool bDashInside = ( m_iAccum && iSymbol=='-' && !( m_iAccum==1 && IsModifier ( m_sAccum[0] ) )); if ( iCode & FLAG_CODEPOINT_SPECIAL ) if ( bWasEscaped || bDashInside || ( m_iAccum && iSymbol=='$' && !bSpaceAhead ) || ( m_bPhrase && iSymbol!='"' && !IsModifier ( iSymbol ) ) ) { if ( iCode & FLAG_CODEPOINT_DUAL ) iCode &= ~( FLAG_CODEPOINT_SPECIAL | FLAG_CODEPOINT_DUAL ); else if ( bDashInside && ( iCode & FLAG_CODEPOINT_SYNONYM ) ) // if we return zero here, we will break the tokens like 'Ms-Dos' iCode &= ~( FLAG_CODEPOINT_SPECIAL ); else iCode = 0; } // if we didn't remove special by now, it must win if ( iCode & FLAG_CODEPOINT_DUAL ) { assert ( iCode & FLAG_CODEPOINT_SPECIAL ); iCode = iSymbol | FLAG_CODEPOINT_SPECIAL; } // ideally, all conflicts must be resolved here // well, at least most assert ( sphBitCount ( iCode & MASK_FLAGS )<=1 || ( iCode & FLAG_CODEPOINT_SYNONYM ) ); return iCode; } enum SynCheck_e { SYNCHECK_LESS, SYNCHECK_PARTIAL, SYNCHECK_EXACT, SYNCHECK_GREATER }; static inline SynCheck_e SynCheckPrefix ( const CSphSynonym & tCandidate, int iOff, const BYTE * sCur, int iBytes, bool bMaybeSeparator ) { const BYTE * sCand = ( (const BYTE*)tCandidate.m_sFrom.cstr() ) + iOff; while ( iBytes-->0 ) { if ( *sCand!=*sCur ) { // incoming synonym-only char vs. ending sequence (eg. 2nd slash in "OS/2/3"); we actually have a match if ( bMaybeSeparator && sCand[0]==MAGIC_SYNONYM_WHITESPACE && sCand[1]=='\0' ) return SYNCHECK_EXACT; // otherwise, it is a mismatch return ( *sCand<*sCur ) ? 
SYNCHECK_LESS : SYNCHECK_GREATER; } sCand++; sCur++; } // full match after a full separator if ( sCand[0]=='\0' ) return SYNCHECK_EXACT; // full match after my last synonym-only char if ( bMaybeSeparator && sCand[0]==MAGIC_SYNONYM_WHITESPACE && sCand[1]=='\0' ) return SYNCHECK_EXACT; // otherwise, partial match so far return SYNCHECK_PARTIAL; } static inline bool IsSeparator ( int iFolded, bool bFirst ) { // eternal separator if ( iFolded<0 || ( iFolded & MASK_CODEPOINT )==0 ) return true; // just a codepoint if (!( iFolded & MASK_FLAGS )) return false; // any magic flag, besides dual if (!( iFolded & FLAG_CODEPOINT_DUAL )) return true; // FIXME? n-grams currently also set dual if ( iFolded & FLAG_CODEPOINT_NGRAM ) return true; // dual depends on position return bFirst; } // handles escaped specials that are not in the character set // returns true if the codepoint should be processed as a simple codepoint, // returns false if it should be processed as a whitespace // for example: aaa\!bbb => aaa bbb static inline bool Special2Simple ( int & iCodepoint ) { if ( ( iCodepoint & FLAG_CODEPOINT_DUAL ) || !( iCodepoint & FLAG_CODEPOINT_SPECIAL ) ) { iCodepoint &= ~( FLAG_CODEPOINT_SPECIAL | FLAG_CODEPOINT_DUAL ); return true; } return false; } static inline bool IsWhitespace ( BYTE c ) { return ( c=='\0' || c==' ' || c=='\t' || c=='\r' || c=='\n' ); } static inline bool IsWhitespace ( int c ) { return ( c=='\0' || c==' ' || c=='\t' || c=='\r' || c=='\n' ); } template < bool IS_UTF8 > BYTE * CSphTokenizerTraits::GetTokenSyn () { assert ( m_dSynonyms.GetLength() ); bool bEscaped = m_bEscaped; BYTE * pCur; m_bTokenBoundary = false; for ( ;; ) { // initialize accumulators and range BYTE * pFirstSeparator = NULL; m_iAccum = 0; m_pAccum = m_sAccum; int iSynStart = 0; int iSynEnd = m_dSynonyms.GetLength()-1; int iSynOff = 0; int iLastCodepoint = 0; int iLastFolded = 0; BYTE * pRescan = NULL; int iExact = -1; BYTE * pExact = NULL; // main refinement loop for ( ;; ) { // store current position (to be able to restart from it on folded boundary) pCur = m_pCur; // get next codepoint int iCode = GetCodepoint(); // handle early-out if ( iCode<0 ) { // eof at token start? we're done if ( iSynOff==0 ) return NULL; // eof after whitespace? we already checked the candidate last time, so break if ( iLastFolded==0 ) break; } // fold codepoint (and lookup flags!) int iFolded = m_tLC.ToLower ( iCode ); // handle boundaries if ( m_bBoundary && ( iFolded==0 ) ) m_bTokenBoundary = true; m_bBoundary = ( iFolded & FLAG_CODEPOINT_BOUNDARY )!=0; // skip continuous whitespace if ( iLastFolded==0 && iFolded==0 ) continue; if ( bEscaped ) { if ( iCode=='\\' && iLastCodepoint!='\\' ) { iLastCodepoint = iCode; continue; } else if ( iLastCodepoint=='\\' && !Special2Simple ( iFolded ) ) { iLastCodepoint = 0; continue; } iLastCodepoint = iCode; } iFolded = CodepointArbitration ( iFolded, false, IsWhitespace ( *m_pCur ) ); iLastFolded = iFolded; if ( m_iAccum==0 ) m_pTokenStart = pCur; // handle specials at the very word start if ( ( iFolded & FLAG_CODEPOINT_SPECIAL ) && m_iAccum==0 ) { m_bWasSpecial = !( iFolded & FLAG_CODEPOINT_NGRAM ); AccumCodepoint ( iFolded & MASK_CODEPOINT ); *m_pAccum = '\0'; m_iLastTokenLen = 1; m_pTokenStart = pCur; m_pTokenEnd = m_pCur; return m_sAccum; } // handle specials bool bJustSpecial = ( iFolded & FLAG_CODEPOINT_SPECIAL ) && !( iFolded & FLAG_CODEPOINT_DUAL ) // OPTIMIZE? && !( iFolded & FLAG_CODEPOINT_SYNONYM ); // OPTIMIZE? 
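// (added, descriptive) The refinement below keeps a candidate window
// [iSynStart..iSynEnd] over the sorted synonym list and shrinks it by binary
// search as each folded codepoint arrives, comparing the accumulated prefix
// against the window ends with SynCheckPrefix(). An exact hit that might
// still be extended (say, "ms dos" when "ms dos 622" is also a synonym) is
// remembered in iExact/pExact, so the scan can return the longest match and
// fall back to the shorter one via LOC_REFINE_BREAK() if the longer
// candidate eventually fails.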
// if candidate starts with something special, and turns out to be not a synonym, // we will need to rescan from current position later if ( iSynOff==0 ) pRescan = IsSeparator ( iFolded, true ) ? m_pCur : NULL; // accumulate folded token if ( !pFirstSeparator ) { if ( IsSeparator ( iFolded, m_iAccum==0 ) ) { if ( m_iAccum ) pFirstSeparator = pCur; } else { if ( m_iAccum==0 ) m_pTokenStart = pCur; AccumCodepoint ( iFolded & MASK_CODEPOINT ); } } // accumulate next raw synonym symbol to refine // note that we need a special check for whitespace here, to avoid "MS*DOS" being treated as "MS DOS" synonym BYTE sTest[4]; int iTest; int iMasked = ( iCode & MASK_CODEPOINT ); if ( iFolded<=0 || bJustSpecial ) { sTest[0] = MAGIC_SYNONYM_WHITESPACE; iTest = 1; if (!( iMasked==' ' || iMasked=='\t' )) { sTest[1] = '\0'; iTest = 2; } } else { if ( IsUtf8() ) { iTest = sphUTF8Encode ( sTest, iMasked ); } else { iTest = 1; sTest[0] = BYTE(iMasked); } } // refine synonyms range #define LOC_RETURN_SYNONYM(_idx) \ { \ m_pTokenEnd = m_iAccum ? pCur : m_pCur; \ if ( bJustSpecial || ( iFolded & FLAG_CODEPOINT_SPECIAL )!=0 ) m_pCur = pCur; \ strncpy ( (char*)m_sAccum, m_dSynonyms[_idx].m_sTo.cstr(), sizeof(m_sAccum) ); \ m_iLastTokenLen = m_dSynonyms[_idx].m_iToLen; \ return m_sAccum; \ } #define LOC_REFINE_BREAK() \ { \ if ( iExact>=0 ) { m_pCur = pCur = pExact; LOC_RETURN_SYNONYM ( iExact ); } \ break; \ } // if this is the first symbol, use prebuilt lookup table to speedup initial range search if ( iSynOff==0 ) { iSynStart = m_dSynStart[sTest[0]]; iSynEnd = m_dSynEnd[sTest[0]]; if ( iSynStart>iSynEnd ) break; } // this is to catch intermediate separators (eg. "OS/2/3") bool bMaybeSeparator = ( iFolded & FLAG_CODEPOINT_SYNONYM )!=0 || ( iFolded<0 ); SynCheck_e eStart = SynCheckPrefix ( m_dSynonyms[iSynStart], iSynOff, sTest, iTest, bMaybeSeparator ); if ( eStart==SYNCHECK_EXACT ) { if ( iSynStart==iSynEnd ) LOC_RETURN_SYNONYM ( iSynStart ); iExact = iSynStart; pExact = pCur; } if ( eStart==SYNCHECK_GREATER || ( iSynStart==iSynEnd && eStart!=SYNCHECK_PARTIAL ) ) LOC_REFINE_BREAK(); SynCheck_e eEnd = SynCheckPrefix ( m_dSynonyms[iSynEnd], iSynOff, sTest, iTest, bMaybeSeparator ); if ( eEnd==SYNCHECK_LESS ) LOC_REFINE_BREAK(); if ( eEnd==SYNCHECK_EXACT ) { iExact = iSynEnd; pExact = pCur; } // refine left boundary if ( eStart!=SYNCHECK_PARTIAL && eStart!=SYNCHECK_EXACT ) { assert ( eStart==SYNCHECK_LESS ); int iL = iSynStart; int iR = iSynEnd; SynCheck_e eL = eStart; SynCheck_e eR = eEnd; while ( iR-iL>1 ) { int iM = iL + (iR-iL)/2; SynCheck_e eMid = SynCheckPrefix ( m_dSynonyms[iM], iSynOff, sTest, iTest, bMaybeSeparator ); if ( eMid==SYNCHECK_LESS ) { iL = iM; eL = eMid; } else { iR = iM; eR = eMid; } } assert ( eL==SYNCHECK_LESS ); assert ( eR!=SYNCHECK_LESS ); assert ( iR-iL==1 ); if ( eR==SYNCHECK_GREATER ) LOC_REFINE_BREAK(); if ( eR==SYNCHECK_EXACT && iR==iSynEnd ) LOC_RETURN_SYNONYM ( iR ); assert ( eR==SYNCHECK_PARTIAL || eR==SYNCHECK_EXACT ); iSynStart = iR; eStart = eR; } // refine right boundary if ( eEnd!=SYNCHECK_PARTIAL && eEnd!=SYNCHECK_EXACT ) { assert ( eEnd==SYNCHECK_GREATER ); int iL = iSynStart; int iR = iSynEnd; SynCheck_e eL = eStart; SynCheck_e eR = eEnd; while ( iR-iL>1 ) { int iM = iL + (iR-iL)/2; SynCheck_e eMid = SynCheckPrefix ( m_dSynonyms[iM], iSynOff, sTest, iTest, bMaybeSeparator ); if ( eMid==SYNCHECK_GREATER ) { iR = iM; eR = eMid; } else { iL = iM; eL = eMid; } } assert ( eR==SYNCHECK_GREATER ); assert ( eL!=SYNCHECK_GREATER ); assert ( iR-iL==1 ); if ( eL==SYNCHECK_LESS ) 
LOC_REFINE_BREAK(); if ( eL==SYNCHECK_EXACT && iL==iSynStart ) LOC_RETURN_SYNONYM ( iL ); assert ( eL==SYNCHECK_PARTIAL || eL==SYNCHECK_EXACT ); iSynEnd = iL; eEnd = eL; } // handle eof if ( iCode<0 ) break; // we still have a partial synonym match, continue; iSynOff += iTest; } // at this point, that was not a synonym if ( pRescan ) { m_pCur = pRescan; continue; } // at this point, it also started with a valid char assert ( m_iAccum>0 ); // find the proper separator if ( !pFirstSeparator ) { int iLast = 0; // if there was none, scan until found for ( ;; ) { pCur = m_pCur; int iCode = *pCur; int iFolded = m_tLC.ToLower ( GetCodepoint() ); if ( iFolded<0 ) break; // eof if ( bEscaped ) { if ( iCode=='\\' && iLast!='\\' ) { iLast = iCode; continue; } if ( iLast=='\\' && !Special2Simple ( iFolded ) ) break; iLast = iCode; } iFolded = CodepointArbitration ( iFolded, false, IsWhitespace ( *m_pCur ) ); if ( IsSeparator ( iFolded, false ) ) { if ( iFolded!=0 ) m_pCur = pCur; // force rescan break; } AccumCodepoint ( iFolded & MASK_CODEPOINT ); } } else { // if there was, token is ready but we should restart from that separator m_pCur = pFirstSeparator; pCur = m_pCur; } // return accumulated token if ( m_iAccum dRemaps; CSphCharsetDefinitionParser tParser; if ( !tParser.Parse ( sConfig, dRemaps ) ) { sError = tParser.GetLastError(); return false; } // check ARRAY_FOREACH ( i, dRemaps ) { const CSphRemapRange & r = dRemaps[i]; if ( !bCanRemap && r.m_iStart!=r.m_iRemapStart ) { sError.SetSprintf ( "%s characters must not be remapped (map-from=U+%x, map-to=U+%x)", sSource, r.m_iStart, r.m_iRemapStart ); return false; } for ( int j=r.m_iStart; j<=r.m_iEnd; j++ ) if ( m_tLC.ToLower(j) ) { sError.SetSprintf ( "%s characters must not be referenced anywhere else (code=U+%x)", sSource, j ); return false; } if ( bCanRemap ) for ( int j=r.m_iRemapStart; j<=r.m_iRemapStart + r.m_iEnd - r.m_iStart; j++ ) if ( m_tLC.ToLower(j) ) { sError.SetSprintf ( "%s characters must not be referenced anywhere else (code=U+%x)", sSource, j ); return false; } } // add mapping m_tLC.AddRemaps ( dRemaps, uFlags ); return true; } bool ISphTokenizer::SetBoundary ( const char * sConfig, CSphString & sError ) { return RemapCharacters ( sConfig, FLAG_CODEPOINT_BOUNDARY, "phrase boundary", false, sError ); } bool ISphTokenizer::SetIgnoreChars ( const char * sConfig, CSphString & sError ) { return RemapCharacters ( sConfig, FLAG_CODEPOINT_IGNORE, "ignored", false, sError ); } bool ISphTokenizer::SetBlendChars ( const char * sConfig, CSphString & sError ) { return RemapCharacters ( sConfig, FLAG_CODEPOINT_BLEND, "blend", true, sError ); } static bool sphStrncmp ( const char * sCheck, int iCheck, const char * sRef ) { return ( iCheck==(int)strlen(sRef) && memcmp ( sCheck, sRef, iCheck )==0 ); } bool ISphTokenizer::SetBlendMode ( const char * sMode, CSphString & sError ) { if ( !sMode || !*sMode ) { m_uBlendVariants = BLEND_TRIM_NONE; m_bBlendSkipPure = false; return true; } m_uBlendVariants = 0; const char * p = sMode; while ( *p ) { while ( !sphIsAlpha(*p) ) p++; if ( !*p ) break; const char * sTok = p; while ( sphIsAlpha(*p) ) p++; if ( sphStrncmp ( sTok, p-sTok, "trim_none" ) ) m_uBlendVariants |= BLEND_TRIM_NONE; else if ( sphStrncmp ( sTok, p-sTok, "trim_head" ) ) m_uBlendVariants |= BLEND_TRIM_HEAD; else if ( sphStrncmp ( sTok, p-sTok, "trim_tail" ) ) m_uBlendVariants |= BLEND_TRIM_TAIL; else if ( sphStrncmp ( sTok, p-sTok, "trim_both" ) ) m_uBlendVariants |= BLEND_TRIM_BOTH; else if ( sphStrncmp ( sTok, p-sTok, "skip_pure" ) ) 
m_bBlendSkipPure = true; else { sError.SetSprintf ( "unknown blend_mode option near '%s'", sTok ); return false; } } if ( !m_uBlendVariants ) { sError.SetSprintf ( "blend_mode must define at least one variant to index" ); m_uBlendVariants = BLEND_TRIM_NONE; m_bBlendSkipPure = false; return false; } return true; } ///////////////////////////////////////////////////////////////////////////// CSphTokenizer_SBCS::CSphTokenizer_SBCS () { CSphString sTmp; SetCaseFolding ( SPHINX_DEFAULT_SBCS_TABLE, sTmp ); } void CSphTokenizer_SBCS::SetBuffer ( BYTE * sBuffer, int iLength ) { // check that old one is over and that new length is sane assert ( iLength>=0 ); // set buffer m_pBuffer = sBuffer; m_pBufferMax = sBuffer + iLength; m_pCur = sBuffer; m_pTokenStart = m_pTokenEnd = NULL; m_pBlendStart = m_pBlendEnd = NULL; m_iOvershortCount = 0; m_bBoundary = m_bTokenBoundary = false; } BYTE * CSphTokenizer_SBCS::GetToken () { m_bWasSpecial = false; m_bBlended = false; m_iOvershortCount = 0; m_bTokenBoundary = false; if ( m_dSynonyms.GetLength() ) return GetTokenSyn (); // return pending blending variants BYTE * pVar = GetBlendedVariant (); if ( pVar ) return pVar; m_bBlendedPart = ( m_pBlendEnd!=NULL ); const bool bUseEscape = m_bEscaped; for ( ;; ) { // memorize buffer start BYTE * pCur = m_pCur; // get next codepoint, real or virtual int iCodepoint = 0; int iCode = 0; bool bWasEscaped = false; // whether current char was escaped if ( m_pCur=m_pBufferMax ? m_pCur : pCur; if ( !BlendAdjust ( pCur ) ) continue; if ( m_bBlended ) return GetBlendedVariant(); return m_sAccum; } // handle specials bool bSpecial = ( iCode & FLAG_CODEPOINT_SPECIAL )!=0; bool bNoBlend = !( iCode & FLAG_CODEPOINT_BLEND ); iCode &= MASK_CODEPOINT; if ( bSpecial ) { // skip short words if ( m_iAccum0 ); if ( m_iAccumCloneBase ( this, bEscaped ); return pClone; } ///////////////////////////////////////////////////////////////////////////// CSphTokenizer_UTF8::CSphTokenizer_UTF8 () { CSphString sTmp; SetCaseFolding ( SPHINX_DEFAULT_UTF8_TABLE, sTmp ); } void CSphTokenizer_UTF8::SetBuffer ( BYTE * sBuffer, int iLength ) { // check that old one is over and that new length is sane assert ( iLength>=0 ); // set buffer m_pBuffer = sBuffer; m_pBufferMax = sBuffer + iLength; m_pCur = sBuffer; m_pTokenStart = m_pTokenEnd = NULL; m_pBlendStart = m_pBlendEnd = NULL; // fixup embedded zeroes with spaces for ( BYTE * p = m_pBuffer; p < m_pBufferMax; p++ ) if ( !*p ) *p = ' '; m_iOvershortCount = 0; m_bBoundary = m_bTokenBoundary = false; } BYTE * CSphTokenizer_UTF8::GetToken () { m_bWasSpecial = false; m_bBlended = false; m_iOvershortCount = 0; m_bTokenBoundary = false; if ( m_dSynonyms.GetLength() ) return GetTokenSyn (); // return pending blending variants BYTE * pVar = GetBlendedVariant (); if ( pVar ) return pVar; m_bBlendedPart = ( m_pBlendEnd!=NULL ); // whether this tokenizer supports escaping const bool bUseEscape = m_bEscaped; // in query mode, lets capture (soft-whitespace hard-whitespace) sequences and adjust overshort counter // sample queries would be (one NEAR $$$) or (one | $$$ two) where $ is not a valid character bool bGotNonToken = ( !m_bQueryMode || m_bPhrase ); // only do this in query mode, never in indexing mode, never within phrases bool bGotSoft = false; // hey Beavis he said soft huh huhhuh for ( ;; ) { // get next codepoint BYTE * pCur = m_pCur; // to redo special char, if there's a token already int iCodePoint = GetCodepoint(); // advances m_pCur int iCode = m_tLC.ToLower ( iCodePoint ); // handle escaping bool 
bWasEscaped = ( bUseEscape && iCodePoint=='\\' ); // whether current codepoint was escaped if ( bWasEscaped ) { iCodePoint = GetCodepoint(); iCode = m_tLC.ToLower ( iCodePoint ); if ( !Special2Simple ( iCode ) ) iCode = 0; } // handle eof if ( iCode<0 ) { // skip trailing short word FlushAccum (); if ( m_iLastTokenLenCloneBase ( this, bEscaped ); return pClone; } int CSphTokenizer_UTF8::GetCodepointLength ( int iCode ) const { if ( iCode<128 ) return 1; int iBytes = 0; while ( iCode & 0x80 ) { iBytes++; iCode <<= 1; } assert ( iBytes>=2 && iBytes<=4 ); return iBytes; } ///////////////////////////////////////////////////////////////////////////// bool CSphTokenizer_UTF8Ngram::SetNgramChars ( const char * sConfig, CSphString & sError ) { CSphVector dRemaps; CSphCharsetDefinitionParser tParser; if ( !tParser.Parse ( sConfig, dRemaps ) ) { sError = tParser.GetLastError(); return false; } m_tLC.AddRemaps ( dRemaps, FLAG_CODEPOINT_NGRAM | FLAG_CODEPOINT_SPECIAL ); // !COMMIT support other n-gram lengths than 1 m_sNgramCharsStr = sConfig; return true; } void CSphTokenizer_UTF8Ngram::SetNgramLen ( int iLen ) { assert ( iLen>0 ); m_iNgramLen = iLen; } BYTE * CSphTokenizer_UTF8Ngram::GetToken () { // !COMMIT support other n-gram lengths than 1 assert ( m_iNgramLen==1 ); return CSphTokenizer_UTF8::GetToken (); } ////////////////////////////////////////////////////////////////////////// CSphTokenizer_Filter::CSphTokenizer_Filter ( ISphTokenizer * pTokenizer, const CSphMultiformContainer * pContainer ) : m_pTokenizer ( pTokenizer ) , m_pMultiWordforms ( pContainer ) , m_iStoredStart ( 0 ) , m_iStoredLen ( 0 ) , m_bBuildMultiform ( false ) , m_pLastToken ( NULL ) { assert ( pTokenizer && pContainer ); m_dStoredTokens.Resize ( pContainer->m_iMaxTokens + 1 ); m_sTokenizedMultiform[0] = '\0'; } CSphTokenizer_Filter::~CSphTokenizer_Filter () { SafeDelete ( m_pTokenizer ); } void CSphTokenizer_Filter::FillTokenInfo ( StoredToken_t * pToken ) { pToken->m_bBoundary = m_pTokenizer->GetBoundary (); pToken->m_bSpecial = m_pTokenizer->WasTokenSpecial (); pToken->m_iOvershortCount = m_pTokenizer->GetOvershortCount (); pToken->m_iTokenLen = m_pTokenizer->GetLastTokenLen (); pToken->m_szTokenStart = m_pTokenizer->GetTokenStart (); pToken->m_szTokenEnd = m_pTokenizer->GetTokenEnd (); pToken->m_pBufferPtr = m_pTokenizer->GetBufferPtr (); } BYTE * CSphTokenizer_Filter::GetToken () { m_sTokenizedMultiform[0] = '\0'; BYTE * pToken = ( m_iStoredLen>0 ) ? 
m_dStoredTokens [m_iStoredStart].m_sToken : m_pTokenizer->GetToken (); if ( !pToken ) { memset ( &m_tLastToken, 0, sizeof ( m_tLastToken ) ); m_pLastToken = &m_tLastToken; return NULL; } int iSize = m_dStoredTokens.GetLength (); CSphMultiforms ** pWordforms = m_pMultiWordforms->m_Hash ( (const char *)pToken ); if ( !pWordforms ) { if ( m_iStoredLen ) { m_pLastToken = &(m_dStoredTokens[m_iStoredStart]); m_iStoredLen--; m_iStoredStart = (m_iStoredStart + 1) % iSize; } else { FillTokenInfo ( &m_tLastToken ); m_pLastToken = &m_tLastToken; bool bBlended = m_pTokenizer->TokenIsBlended(); m_bBlended = bBlended; m_bNonBlended = !bBlended; } return pToken; } if ( !m_iStoredLen ) { FillTokenInfo ( &m_dStoredTokens[m_iStoredStart] ); strcpy ( (char *)m_dStoredTokens[m_iStoredStart].m_sToken, (const char *)pToken ); // NOLINT m_iStoredLen++; } int iTokensNeeded = (*pWordforms)->m_iMaxTokens - m_iStoredLen + 1; for ( int i = 0; i < iTokensNeeded; i++ ) { pToken = m_pTokenizer->GetToken (); if ( !pToken ) break; int iIndex = (m_iStoredStart+m_iStoredLen) % iSize; FillTokenInfo ( &(m_dStoredTokens[iIndex]) ); strcpy ( (char *)m_dStoredTokens[iIndex].m_sToken, (const char *)pToken ); // NOLINT m_iStoredLen++; } if ( !m_iStoredLen ) return NULL; if ( m_iStoredLen<=(*pWordforms)->m_iMinTokens ) { m_pLastToken = &(m_dStoredTokens [m_iStoredStart]); m_iStoredLen--; m_iStoredStart = (m_iStoredStart + 1) % iSize; return m_pLastToken->m_sToken; } ARRAY_FOREACH ( i, (*pWordforms)->m_dWordforms ) { CSphMultiform * pCurForm = (*pWordforms)->m_dWordforms[i]; if ( m_iStoredLen<=pCurForm->m_dTokens.GetLength () ) continue; bool bFound = true; for ( int j = 0; j < pCurForm->m_dTokens.GetLength (); j++ ) { int iIndex = ( m_iStoredStart + j + 1 ) % iSize; const char * szStored = (const char*)m_dStoredTokens[iIndex].m_sToken; const char * szNormal = pCurForm->m_dTokens[j].cstr (); if ( *szNormal!=*szStored || strcasecmp ( szNormal, szStored ) ) { bFound = false; break; } } if ( bFound ) { int iTokensPerForm = 1+pCurForm->m_dTokens.GetLength(); m_tLastToken.m_bBoundary = false; m_tLastToken.m_bSpecial = false; m_tLastToken.m_iOvershortCount = m_dStoredTokens[m_iStoredStart].m_iOvershortCount; m_tLastToken.m_iTokenLen = pCurForm->m_iNormalTokenLen; m_tLastToken.m_szTokenStart = m_dStoredTokens[m_iStoredStart].m_szTokenStart; m_tLastToken.m_szTokenEnd = m_dStoredTokens[ ( m_iStoredStart+iTokensPerForm-1 ) % iSize ].m_szTokenEnd; m_tLastToken.m_pBufferPtr = m_dStoredTokens[ ( m_iStoredStart+iTokensPerForm-1 ) % iSize ].m_pBufferPtr; m_pLastToken = &m_tLastToken; if ( m_bBuildMultiform ) { BYTE * pOut = m_sTokenizedMultiform; BYTE * pMax = pOut + sizeof(m_sTokenizedMultiform); for ( int i=0; i=0 ); strcpy ( (char *)m_sOutMultiform, pCurForm->m_sNormalForm.cstr () ); // NOLINT return m_sOutMultiform; } } pToken = m_dStoredTokens[m_iStoredStart].m_sToken; m_pLastToken = &(m_dStoredTokens[m_iStoredStart]); m_iStoredStart = (m_iStoredStart + 1) % iSize; m_iStoredLen--; return pToken; } ISphTokenizer * CSphTokenizer_Filter::Clone ( bool bEscaped ) const { ISphTokenizer * pClone = m_pTokenizer->Clone ( bEscaped ); return CreateTokenFilter ( pClone, m_pMultiWordforms ); } void CSphTokenizer_Filter::SetBufferPtr ( const char * sNewPtr ) { m_pLastToken = NULL; m_iStoredLen = 0; m_iStoredStart = 0; m_pTokenizer->SetBufferPtr ( sNewPtr ); } void CSphTokenizer_Filter::SetBuffer ( BYTE * sBuffer, int iLength ) { m_pTokenizer->SetBuffer ( sBuffer, iLength ); SetBufferPtr ( (const char *)sBuffer ); } 
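// --------------------------------------------------------------------------
// (added) Hedged standalone sketch, not part of the original source: the same
// "buffer a few tokens ahead, then either emit a multi-word form or replay the
// buffered tokens one by one" idea that CSphTokenizer_Filter implements above,
// reduced to std::string/std::deque (instead of the fixed ring buffer with
// modulo indexing) so it can be read and compiled on its own. Names and the
// trivial matching rule are invented for illustration, hence the #if 0.
// --------------------------------------------------------------------------
#if 0
#include <deque>
#include <functional>
#include <map>
#include <string>
#include <vector>

struct MultiformSketch
{
	// first token of a form -> ( remaining form tokens, normalized replacement )
	std::multimap< std::string, std::pair< std::vector<std::string>, std::string > > m_hForms;
	std::deque<std::string> m_dStored;             // lookahead buffer of pending tokens
	std::function<bool(std::string &)> m_fnSource; // underlying tokenizer

	bool GetToken ( std::string & sOut )
	{
		if ( m_dStored.empty() )
		{
			std::string sTok;
			if ( !m_fnSource || !m_fnSource ( sTok ) )
				return false;
			m_dStored.push_back ( sTok );
		}

		auto tRange = m_hForms.equal_range ( m_dStored.front() );
		for ( auto it = tRange.first; it!=tRange.second; ++it )
		{
			const std::vector<std::string> & dRest = it->second.first;

			// pull more tokens until the buffer is long enough to compare
			std::string sTok;
			while ( m_dStored.size() < dRest.size()+1 && m_fnSource ( sTok ) )
				m_dStored.push_back ( sTok );
			if ( m_dStored.size() < dRest.size()+1 )
				continue;

			bool bMatch = true;
			for ( size_t i = 0; i < dRest.size() && bMatch; i++ )
				bMatch = ( m_dStored[i+1]==dRest[i] );

			if ( bMatch )
			{
				// collapse the whole form into its normalized token
				m_dStored.erase ( m_dStored.begin(), m_dStored.begin() + (long)( dRest.size()+1 ) );
				sOut = it->second.second;
				return true;
			}
		}

		// no form matched; replay the oldest buffered token as-is
		sOut = m_dStored.front();
		m_dStored.pop_front();
		return true;
	}
};
#endif
// (added) A real filter must also carry per-token metadata (boundary flags,
// overshort counters, buffer positions), which is what FillTokenInfo() above
// copies for every stored token.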
///////////////////////////////////////////////////////////////////////////// // FILTER ///////////////////////////////////////////////////////////////////////////// CSphFilterSettings::CSphFilterSettings () : m_sAttrName ( "" ) , m_bExclude ( false ) , m_uMinValue ( 0 ) , m_uMaxValue ( UINT_MAX ) , m_pValues ( NULL ) , m_nValues ( 0 ) {} CSphFilterSettings::CSphFilterSettings ( const CSphFilterSettings & rhs ) { assert ( 0 ); (*this) = rhs; } void CSphFilterSettings::SetExternalValues ( const SphAttr_t * pValues, int nValues ) { m_pValues = pValues; m_nValues = nValues; } bool CSphFilterSettings::operator == ( const CSphFilterSettings & rhs ) const { // check name, mode, type if ( m_sAttrName!=rhs.m_sAttrName || m_bExclude!=rhs.m_bExclude || m_eType!=rhs.m_eType ) return false; switch ( m_eType ) { case SPH_FILTER_RANGE: return m_uMinValue==rhs.m_uMinValue && m_uMaxValue==rhs.m_uMaxValue; case SPH_FILTER_VALUES: if ( m_dValues.GetLength()!=rhs.m_dValues.GetLength() ) return false; ARRAY_FOREACH ( i, m_dValues ) if ( m_dValues[i]!=rhs.m_dValues[i] ) return false; return true; default: assert ( 0 && "internal error: unhandled filter type in comparison" ); return false; } } ///////////////////////////////////////////////////////////////////////////// // QUERY ///////////////////////////////////////////////////////////////////////////// CSphQuery::CSphQuery () : m_sIndexes ( "*" ) , m_sQuery ( "" ) , m_sRawQuery ( "" ) , m_iOffset ( 0 ) , m_iLimit ( 20 ) , m_pWeights ( NULL ) , m_iWeights ( 0 ) , m_eMode ( SPH_MATCH_ALL ) , m_eRanker ( SPH_RANK_DEFAULT ) , m_eSort ( SPH_SORT_RELEVANCE ) , m_iMaxMatches ( 1000 ) , m_eGroupFunc ( SPH_GROUPBY_ATTR ) , m_sGroupSortBy ( "@groupby desc" ) , m_sGroupDistinct ( "" ) , m_iCutoff ( 0 ) , m_iRetryCount ( 0 ) , m_iRetryDelay ( 0 ) , m_bGeoAnchor ( false ) , m_fGeoLatitude ( 0.0f ) , m_fGeoLongitude ( 0.0f ) , m_uMaxQueryMsec ( 0 ) , m_sComment ( "" ) , m_sSelect ( "" ) , m_bReverseScan ( false ) , m_iSQLSelectStart ( -1 ) , m_iSQLSelectEnd ( -1 ) , m_iOldVersion ( 0 ) , m_iOldGroups ( 0 ) , m_pOldGroups ( NULL ) , m_iOldMinTS ( 0 ) , m_iOldMaxTS ( UINT_MAX ) , m_iOldMinGID ( 0 ) , m_iOldMaxGID ( UINT_MAX ) , m_eCollation ( SPH_COLLATION_DEFAULT ) , m_bAgent ( false ) {} CSphQuery::~CSphQuery () { } int CSphQuery::GetIndexWeight ( const char * sName ) const { ARRAY_FOREACH ( i, m_dIndexWeights ) if ( m_dIndexWeights[i].m_sName==sName ) return m_dIndexWeights[i].m_iValue; return 1; } ////////////////////////////////////////////////////////////////////////// struct SelectBounds_t { int m_iStart; int m_iEnd; }; #define YYSTYPE SelectBounds_t #include "yysphinxselect.h" class SelectParser_t { public: int GetToken ( YYSTYPE * lvalp ); void AddItem ( YYSTYPE * pExpr, ESphAggrFunc eAggrFunc=SPH_AGGR_NONE, YYSTYPE * pStart=NULL, YYSTYPE * pEnd=NULL ); void AddItem ( const char * pToken, YYSTYPE * pStart=NULL, YYSTYPE * pEnd=NULL ); void AliasLastItem ( YYSTYPE * pAlias ); private: void AutoAlias ( CSphQueryItem & tItem, YYSTYPE * pStart, YYSTYPE * pEnd ); public: CSphString m_sParserError; const char * m_pLastTokenStart; const char * m_pStart; const char * m_pCur; CSphQuery * m_pQuery; }; int yylex ( YYSTYPE * lvalp, SelectParser_t * pParser ) { return pParser->GetToken ( lvalp );} void yyerror ( SelectParser_t * pParser, const char * sMessage ) { pParser->m_sParserError.SetSprintf ( "%s near '%s'", sMessage, pParser->m_pLastTokenStart ); } #include "yysphinxselect.c" int SelectParser_t::GetToken ( YYSTYPE * lvalp ) { // skip whitespace, check eof while ( isspace 
( *m_pCur ) ) m_pCur++; if ( !*m_pCur ) return 0; m_pLastTokenStart = m_pCur; lvalp->m_iStart = m_pCur-m_pStart; // check for constant if ( isdigit ( *m_pCur ) ) { char * pEnd = NULL; double fDummy; // to avoid gcc unused result warning fDummy = strtod ( m_pCur, &pEnd ); m_pCur = pEnd; lvalp->m_iEnd = m_pCur-m_pStart; return SEL_TOKEN; } // check for token if ( sphIsAttr ( m_pCur[0] ) || ( m_pCur[0]=='@' && sphIsAttr ( m_pCur[1] ) && !isdigit ( m_pCur[1] ) ) ) { m_pCur++; while ( sphIsAttr ( *m_pCur ) ) m_pCur++; lvalp->m_iEnd = m_pCur-m_pStart; #define LOC_CHECK(_str,_len,_ret) \ if ( lvalp->m_iEnd==_len+lvalp->m_iStart && strncasecmp ( m_pStart+lvalp->m_iStart, _str, _len )==0 ) return _ret; LOC_CHECK ( "ID", 2, SEL_ID ); LOC_CHECK ( "AS", 2, SEL_AS ); LOC_CHECK ( "OR", 2, TOK_OR ); LOC_CHECK ( "AND", 3, TOK_AND ); LOC_CHECK ( "NOT", 3, TOK_NOT ); LOC_CHECK ( "AVG", 3, SEL_AVG ); LOC_CHECK ( "MIN", 3, SEL_MIN ); LOC_CHECK ( "MAX", 3, SEL_MAX ); LOC_CHECK ( "SUM", 3, SEL_SUM ); LOC_CHECK ( "COUNT", 5, SEL_COUNT ); LOC_CHECK ( "DISTINCT", 8, SEL_DISTINCT ); LOC_CHECK ( "WEIGHT", 6, SEL_WEIGHT ); #undef LOC_CHECK return SEL_TOKEN; } // check for equality checks lvalp->m_iEnd = 1+lvalp->m_iStart; switch ( *m_pCur ) { case '<': m_pCur++; if ( *m_pCur=='>' ) { m_pCur++; lvalp->m_iEnd++; return TOK_NE; } if ( *m_pCur=='=' ) { m_pCur++; lvalp->m_iEnd++; return TOK_LTE; } return '<'; case '>': m_pCur++; if ( *m_pCur=='=' ) { m_pCur++; lvalp->m_iEnd++; return TOK_GTE; } return '>'; case '=': m_pCur++; if ( *m_pCur=='=' ) { m_pCur++; lvalp->m_iEnd++; } return TOK_EQ; case '\'': { const char cEnd = *m_pCur; for ( const char * s = m_pCur+1; *s; s++ ) { if ( *s==cEnd ) { m_pCur = s+1; return TOK_CONST_STRING; } } return -1; } } // return char as a token return *m_pCur++; } void SelectParser_t::AutoAlias ( CSphQueryItem & tItem, YYSTYPE * pStart, YYSTYPE * pEnd ) { if ( pStart && pEnd ) { tItem.m_sAlias.SetBinary ( m_pStart + pStart->m_iStart, pEnd->m_iEnd - pStart->m_iStart ); tItem.m_sAlias.ToLower(); } else tItem.m_sAlias = tItem.m_sExpr; } void SelectParser_t::AddItem ( YYSTYPE * pExpr, ESphAggrFunc eAggrFunc, YYSTYPE * pStart, YYSTYPE * pEnd ) { CSphQueryItem & tItem = m_pQuery->m_dItems.Add(); tItem.m_sExpr.SetBinary ( m_pStart + pExpr->m_iStart, pExpr->m_iEnd - pExpr->m_iStart ); tItem.m_sExpr.ToLower(); tItem.m_eAggrFunc = eAggrFunc; AutoAlias ( tItem, pStart, pEnd ); } void SelectParser_t::AddItem ( const char * pToken, YYSTYPE * pStart, YYSTYPE * pEnd ) { CSphQueryItem & tItem = m_pQuery->m_dItems.Add(); tItem.m_sExpr = pToken; tItem.m_eAggrFunc = SPH_AGGR_NONE; tItem.m_sExpr.ToLower(); AutoAlias ( tItem, pStart, pEnd ); } void SelectParser_t::AliasLastItem ( YYSTYPE * pAlias ) { if ( pAlias ) { CSphQueryItem & tItem = m_pQuery->m_dItems.Last(); tItem.m_sAlias.SetBinary ( m_pStart + pAlias->m_iStart, pAlias->m_iEnd - pAlias->m_iStart ); tItem.m_sAlias.ToLower(); } } bool CSphQuery::ParseSelectList ( CSphString & sError ) { m_dItems.Reset (); if ( m_sSelect.IsEmpty() ) return true; // empty is ok; will just return everything SelectParser_t tParser; tParser.m_pStart = m_sSelect.cstr(); tParser.m_pCur = m_sSelect.cstr(); tParser.m_pQuery = this; yyparse ( &tParser ); sError = tParser.m_sParserError; return sError.IsEmpty (); } ///////////////////////////////////////////////////////////////////////////// // SCHEMA ///////////////////////////////////////////////////////////////////////////// static CSphString sphDumpAttr ( const CSphColumnInfo & tAttr ) { CSphString sRes; sRes.SetSprintf ( "%s 
%s:%d@%d", sphTypeName ( tAttr.m_eAttrType ), tAttr.m_sName.cstr(), tAttr.m_tLocator.m_iBitCount, tAttr.m_tLocator.m_iBitOffset ); return sRes; } bool CSphSchema::CompareTo ( const CSphSchema & rhs, CSphString & sError ) const { // check attr count if ( GetAttrsCount()!=rhs.GetAttrsCount() ) { sError.SetSprintf ( "attribute count mismatch (me=%s, in=%s, myattrs=%d, inattrs=%d)", m_sName.cstr(), rhs.m_sName.cstr(), GetAttrsCount(), rhs.GetAttrsCount() ); return false; } // check attrs ARRAY_FOREACH ( i, m_dAttrs ) if (!( rhs.m_dAttrs[i]==m_dAttrs[i] )) { sError.SetSprintf ( "attribute mismatch (me=%s, in=%s, idx=%d, myattr=%s, inattr=%s)", m_sName.cstr(), rhs.m_sName.cstr(), i, sphDumpAttr ( m_dAttrs[i] ).cstr(), sphDumpAttr ( rhs.m_dAttrs[i] ).cstr() ); return false; } // check field count if ( rhs.m_dFields.GetLength()!=m_dFields.GetLength() ) { sError.SetSprintf ( "fulltext fields count mismatch (me=%s, in=%s, myfields=%d, infields=%d)", m_sName.cstr(), rhs.m_sName.cstr(), m_dFields.GetLength(), rhs.m_dFields.GetLength() ); return false; } // check fulltext field names ARRAY_FOREACH ( i, rhs.m_dFields ) if ( rhs.m_dFields[i].m_sName!=m_dFields[i].m_sName ) { sError.SetSprintf ( "fulltext field mismatch (me=%s, myfield=%s, idx=%d, in=%s, infield=%s)", m_sName.cstr(), rhs.m_sName.cstr(), i, m_dFields[i].m_sName.cstr(), rhs.m_dFields[i].m_sName.cstr() ); return false; } return true; } int CSphSchema::GetFieldIndex ( const char * sName ) const { if ( !sName ) return -1; ARRAY_FOREACH ( i, m_dFields ) if ( strcasecmp ( m_dFields[i].m_sName.cstr(), sName )==0 ) return i; return -1; } int CSphSchema::GetAttrIndex ( const char * sName ) const { if ( !sName ) return -1; ARRAY_FOREACH ( i, m_dAttrs ) if ( m_dAttrs[i].m_sName==sName ) return i; return -1; } const CSphColumnInfo * CSphSchema::GetAttr ( const char * sName ) const { int iIndex = GetAttrIndex ( sName ); if ( iIndex>=0 ) return &m_dAttrs[iIndex]; return NULL; } void CSphSchema::Reset () { m_dFields.Reset(); ResetAttrs (); } void CSphSchema::ResetAttrs () { m_dAttrs.Reset(); m_dStaticUsed.Reset(); m_dDynamicUsed.Reset(); m_iStaticSize = 0; } void CSphSchema::AddAttr ( const CSphColumnInfo & tCol, bool bDynamic ) { assert ( tCol.m_eAttrType!=SPH_ATTR_NONE ); if ( tCol.m_eAttrType==SPH_ATTR_NONE ) return; m_dAttrs.Add ( tCol ); CSphAttrLocator & tLoc = m_dAttrs.Last().m_tLocator; if ( tLoc.IsID() ) return; int iBits = ROWITEM_BITS; if ( tCol.m_tLocator.m_iBitCount>0 ) iBits = tCol.m_tLocator.m_iBitCount; if ( tCol.m_eAttrType==SPH_ATTR_BOOL ) iBits = 1; if ( tCol.m_eAttrType==SPH_ATTR_BIGINT ) iBits = 64; tLoc.m_iBitCount = iBits; tLoc.m_bDynamic = bDynamic; CSphVector & dUsed = bDynamic ? 
m_dDynamicUsed : m_dStaticUsed; if ( iBits>=ROWITEM_BITS ) { tLoc.m_iBitOffset = dUsed.GetLength()*ROWITEM_BITS; int iItems = (iBits+ROWITEM_BITS-1) / ROWITEM_BITS; for ( int i=0; i=ROWITEM_BITS ) { for ( int i=0; i=0 ); if ( m_dStaticUsed[iItem]<=0 ) m_iStaticSize--; } // do remove m_dAttrs.Remove ( iIndex ); } /////////////////////////////////////////////////////////////////////////////// // BIT-ENCODED FILE OUTPUT /////////////////////////////////////////////////////////////////////////////// CSphWriter::CSphWriter () : m_sName ( "" ) , m_iPos ( -1 ) , m_iWritten ( 0 ) , m_iFD ( -1 ) , m_iPoolUsed ( 0 ) , m_pBuffer ( NULL ) , m_pPool ( NULL ) , m_bOwnFile ( false ) , m_pSharedOffset ( NULL ) , m_iBufferSize ( 262144 ) , m_bError ( false ) , m_pError ( NULL ) { } void CSphWriter::SetBufferSize ( int iBufferSize ) { if ( iBufferSize!=m_iBufferSize ) { m_iBufferSize = Max ( iBufferSize, 262144 ); if ( m_pBuffer ) SafeDeleteArray ( m_pBuffer ); } } bool CSphWriter::OpenFile ( const CSphString & sName, CSphString & sErrorBuffer ) { assert ( !sName.IsEmpty() ); assert ( m_iFD<0 && "already open" ); m_bOwnFile = true; m_sName = sName; m_pError = &sErrorBuffer; if ( !m_pBuffer ) m_pBuffer = new BYTE [ m_iBufferSize ]; m_iFD = ::open ( m_sName.cstr(), SPH_O_NEW, 0644 ); m_pPool = m_pBuffer; m_iPoolUsed = 0; m_iPos = 0; m_iWritten = 0; m_bError = ( m_iFD<0 ); if ( m_bError ) m_pError->SetSprintf ( "failed to create %s: %s" , sName.cstr(), strerror(errno) ); return !m_bError; } void CSphWriter::SetFile ( int iFD, SphOffset_t * pSharedOffset ) { assert ( m_iFD<0 && "already open" ); m_bOwnFile = false; if ( !m_pBuffer ) m_pBuffer = new BYTE [ m_iBufferSize ]; m_iFD = iFD; m_pPool = m_pBuffer; m_iPoolUsed = 0; m_iPos = 0; m_iWritten = 0; m_pSharedOffset = pSharedOffset; } CSphWriter::~CSphWriter () { CloseFile (); SafeDeleteArray ( m_pBuffer ); } void CSphWriter::CloseFile ( bool bTruncate ) { if ( m_iFD>=0 ) { Flush (); if ( bTruncate ) sphTruncate ( m_iFD ); if ( m_bOwnFile ) ::close ( m_iFD ); m_iFD = -1; } } void CSphWriter::UnlinkFile() { if ( m_bOwnFile ) { if ( m_iFD>=0 ) ::close ( m_iFD ); m_iFD = -1; ::unlink ( m_sName.cstr() ); m_sName = ""; } SafeDeleteArray ( m_pBuffer ); } void CSphWriter::PutByte ( int data ) { if ( m_iPoolUsed==m_iBufferSize ) Flush (); *m_pPool++ = BYTE ( data & 0xff ); m_iPoolUsed++; m_iPos++; } void CSphWriter::PutBytes ( const void * pData, int iSize ) { const BYTE * pBuf = (const BYTE *) pData; while ( iSize>0 ) { int iPut = Min ( iSize, m_iBufferSize ); if ( m_iPoolUsed+iPut>m_iBufferSize ) Flush (); assert ( m_iPoolUsed+iPut<=m_iBufferSize ); memcpy ( m_pPool, pBuf, iPut ); m_pPool += iPut; m_iPoolUsed += iPut; m_iPos += iPut; pBuf += iPut; iSize -= iPut; } } void CSphWriter::ZipInt ( DWORD uValue ) { int iBytes = 1; DWORD u = ( uValue>>7 ); while ( u ) { u >>= 7; iBytes++; } while ( iBytes-- ) PutByte ( ( 0x7f & ( uValue >> (7*iBytes) ) ) | ( iBytes ? 0x80 : 0 ) ); } void CSphWriter::ZipOffset ( SphOffset_t uValue ) { int iBytes = 1; uint64_t u = ((uint64_t)uValue)>>7; while ( u ) { u >>= 7; iBytes++; } while ( iBytes-- ) PutByte ( ( 0x7f & (DWORD)( uValue >> (7*iBytes) ) ) | ( iBytes ? 0x80 : 0 ) ); } void CSphWriter::ZipOffsets ( CSphVector * pData ) { assert ( pData ); SphOffset_t * pValue = &((*pData)[0]); int n = pData->GetLength (); while ( n-->0 ) { SphOffset_t uValue = *pValue++; int iBytes = 1; uint64_t u = ((uint64_t)uValue)>>7; while ( u ) { u >>= 7; iBytes++; } while ( iBytes-- ) PutByte ( ( 0x7f & (DWORD)( uValue >> (7*iBytes) ) ) | ( iBytes ? 
0x80 : 0 ) ); } } void CSphWriter::Flush () { PROFILE ( write_hits ); if ( m_pSharedOffset && *m_pSharedOffset!=m_iWritten ) sphSeek ( m_iFD, m_iWritten, SEEK_SET ); if ( !sphWriteThrottled ( m_iFD, m_pBuffer, m_iPoolUsed, m_sName.cstr(), *m_pError ) ) m_bError = true; m_iWritten += m_iPoolUsed; m_iPoolUsed = 0; m_pPool = m_pBuffer; if ( m_pSharedOffset ) *m_pSharedOffset = m_iWritten; } void CSphWriter::PutString ( const char * szString ) { int iLen = szString ? strlen ( szString ) : 0; PutDword ( iLen ); if ( iLen ) PutBytes ( szString, iLen ); } void CSphWriter::PutString ( const CSphString & sString ) { int iLen = sString.Length(); PutDword ( iLen ); if ( iLen ) PutBytes ( sString.cstr(), iLen ); } void CSphWriter::SeekTo ( SphOffset_t iPos ) { assert ( iPos>=0 ); if ( iPos>=m_iWritten && iPos<=( m_iWritten + m_iPoolUsed ) ) { // seeking inside the buffer m_iPoolUsed = (int)( iPos - m_iWritten ); m_pPool = m_pBuffer + m_iPoolUsed; } else { assert ( iPos0 ); } CSphReader::~CSphReader () { if ( m_bBufOwned ) SafeDeleteArray ( m_pBuff ); } void CSphReader::SetBuffers ( int iReadBuffer, int iReadUnhinted ) { if ( !m_pBuff ) m_iBufSize = iReadBuffer; m_iReadUnhinted = iReadUnhinted; } void CSphReader::SetFile ( int iFD, const char * sFilename ) { m_iFD = iFD; m_iPos = 0; m_iBuffPos = 0; m_iBuffUsed = 0; m_sFilename = sFilename; } void CSphReader::SetFile ( const CSphAutofile & tFile ) { SetFile ( tFile.GetFD(), tFile.GetFilename() ); } void CSphReader::Reset () { SetFile ( -1, "" ); } void CSphReader::SeekTo ( SphOffset_t iPos, int iSizeHint ) { assert ( iPos>=0 ); #ifndef NDEBUG #if PARANOID struct_stat tStat; fstat ( m_iFD, &tStat ); if ( iPos > tStat.st_size ) sphDie ( "INTERNAL ERROR: seeking past the end of file" ); #endif #endif if ( iPos>=m_iPos && iPos>32 ); DWORD uRes; if ( !ReadFile ( hFile, pBuf, iBytes, &uRes, &tOverlapped ) ) { DWORD uErr = GetLastError(); if ( uErr==ERROR_HANDLE_EOF ) return 0; errno = uErr; // FIXME! should remap from Win to POSIX return -1; } if ( g_bIOStats ) { g_IOStats.m_iReadTime += sphMicroTimer() - tmStart; g_IOStats.m_iReadOps++; g_IOStats.m_iReadBytes += iBytes; } return uRes; } #else #if HAVE_PREAD // atomic seek+read for non-Windows systems with pread() call int sphPread ( int iFD, void * pBuf, int iBytes, SphOffset_t iOffset ) { if ( !g_bIOStats ) return ::pread ( iFD, pBuf, iBytes, iOffset ); int64_t tmStart = sphMicroTimer(); int iRes = (int) ::pread ( iFD, pBuf, iBytes, iOffset ); g_IOStats.m_iReadTime += sphMicroTimer() - tmStart; g_IOStats.m_iReadOps++; g_IOStats.m_iReadBytes += iBytes; return iRes; } #else // generic fallback; prone to races between seek and read int sphPread ( int iFD, void * pBuf, int iBytes, SphOffset_t iOffset ) { if ( sphSeek ( iFD, iOffset, SEEK_SET )==-1 ) return -1; return sphReadThrottled ( iFD, pBuf, iBytes ); } #endif // HAVE_PREAD #endif // USE_WINDOWS void CSphReader::UpdateCache () { PROFILE ( read_hits ); assert ( m_iFD>=0 ); // alloc buf on first actual read if ( !m_pBuff ) { if ( m_iBufSize<=0 ) m_iBufSize = DEFAULT_READ_BUFFER; m_bBufOwned = true; m_pBuff = new BYTE [ m_iBufSize ]; } // stream position could be changed externally // so let's just hope that the OS optimizes redundant seeks SphOffset_t iNewPos = m_iPos + Min ( m_iBuffPos, m_iBuffUsed ); if ( m_iSizeHint<=0 ) m_iSizeHint = ( m_iReadUnhinted>0 ) ? m_iReadUnhinted : DEFAULT_READ_UNHINTED; int iReadLen = Min ( m_iSizeHint, m_iBufSize ); m_iBuffPos = 0; m_iBuffUsed = sphPread ( m_iFD, m_pBuff, iReadLen, iNewPos ); // FIXME! what about throttling? 
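// Illustrative sketch, kept out of the build with #if 0: a minimal standalone
// restatement of the 7-bit "zip" integer coding that ZipInt()/ZipOffset()
// write and SPH_UNZIP_IMPL/UnzipInt() read back, i.e. 7-bit groups emitted
// most-significant group first with bit 0x80 set on every byte except the
// last. The names (SketchZipInt, SketchUnzipInt) are invented for this
// example only and are not part of the Sphinx API.
#if 0
#include <vector>
#include <stdint.h>
#include <assert.h>

static void SketchZipInt ( std::vector<uint8_t> & dOut, uint32_t uValue )
{
	// count how many 7-bit groups the value needs (same shape as ZipInt)
	int iBytes = 1;
	for ( uint32_t u = uValue>>7; u; u >>= 7 )
		iBytes++;

	// emit groups high-to-low; every byte but the last carries the 0x80 flag
	while ( iBytes-- )
		dOut.push_back ( (uint8_t)( ( 0x7f & ( uValue >> (7*iBytes) ) ) | ( iBytes ? 0x80 : 0 ) ) );
}

static uint32_t SketchUnzipInt ( const uint8_t * & pBuf )
{
	// mirrors SPH_UNZIP_IMPL: accumulate groups while the continuation bit is set
	uint32_t uValue = 0;
	uint8_t uByte = 0;
	do
	{
		uByte = *pBuf++;
		uValue = ( uValue<<7 ) + ( uByte & 0x7f );
	} while ( uByte & 0x80 );
	return uValue;
}

// quick round-trip self-check of the sketch
static void SketchZipSelfTest ()
{
	std::vector<uint8_t> dBuf;
	SketchZipInt ( dBuf, 0 );
	SketchZipInt ( dBuf, 127 );
	SketchZipInt ( dBuf, 128 );
	SketchZipInt ( dBuf, 0xDEADBEEFUL );

	const uint8_t * p = &dBuf[0];
	assert ( SketchUnzipInt(p)==0 );
	assert ( SketchUnzipInt(p)==127 );
	assert ( SketchUnzipInt(p)==128 );
	assert ( SketchUnzipInt(p)==0xDEADBEEFUL );
}
#endif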
if ( m_iBuffUsed<0 ) { m_iBuffUsed = m_iBuffPos = 0; m_bError = true; m_sError.SetSprintf ( "pread error in %s: pos="INT64_FMT", len=%d, code=%d, msg=%s", m_sFilename.cstr(), (int64_t)iNewPos, iReadLen, errno, strerror(errno) ); return; } // all fine, adjust offset and hint m_iSizeHint -= m_iBuffUsed; m_iPos = iNewPos; } int CSphReader::GetByte () { if ( m_iBuffPos>=m_iBuffUsed ) { UpdateCache (); if ( m_iBuffPos>=m_iBuffUsed ) return 0; // unexpected io failure } assert ( m_iBuffPosm_iBufSize ) { int iLen = m_iBuffUsed - m_iBuffPos; assert ( iLen<=m_iBufSize ); memcpy ( pOut, m_pBuff+m_iBuffPos, iLen ); m_iBuffPos += iLen; pOut += iLen; iSize -= iLen; m_iSizeHint = iSize; // FIXME! if ( iSize>0 ) { UpdateCache (); if ( !m_iBuffUsed ) { memset ( pData, 0, iSize ); return; // unexpected io failure } } } if ( m_iBuffPos+iSize>m_iBuffUsed ) { // move old buffer tail to buffer head to avoid losing the data const int iLen = m_iBuffUsed - m_iBuffPos; if ( iLen>0 ) { memcpy ( pOut, m_pBuff+m_iBuffPos, iLen ); m_iBuffPos += iLen; pOut += iLen; iSize -= iLen; } m_iSizeHint = iSize - m_iBuffUsed + m_iBuffPos; // FIXME! UpdateCache (); if ( m_iBuffPos+iSize>m_iBuffUsed ) { memset ( pData, 0, iSize ); // unexpected io failure return; } } assert ( (m_iBuffPos+iSize)<=m_iBuffUsed ); memcpy ( pOut, m_pBuff+m_iBuffPos, iSize ); m_iBuffPos += iSize; } int CSphReader::GetBytesZerocopy ( const BYTE ** ppData, int iMax ) { if ( m_iBuffPos>=m_iBuffUsed ) { UpdateCache (); if ( m_iBuffPos>=m_iBuffUsed ) return 0; // unexpected io failure } int iChunk = Min ( m_iBuffUsed-m_iBuffPos, iMax ); *ppData = m_pBuff + m_iBuffPos; m_iBuffPos += iChunk; return iChunk; } int CSphReader::GetLine ( char * sBuffer, int iMaxLen ) { int iOutPos = 0; iMaxLen--; // reserve space for trailing '\0' // grab as many chars as we can while ( iOutPos=m_iBuffUsed ) { UpdateCache (); if ( m_iBuffPos>=m_iBuffUsed ) { if ( iOutPos==0 ) return -1; // current line is empty; indicate eof break; // return current line; will return eof next time } } // break on CR or LF if ( m_pBuff[m_iBuffPos]=='\r' || m_pBuff[m_iBuffPos]=='\n' ) break; // one more valid char sBuffer[iOutPos++] = m_pBuff[m_iBuffPos++]; } // skip everything until the newline or eof for ( ;; ) { // read next chunk if necessary if ( m_iBuffPos>=m_iBuffUsed ) UpdateCache (); // eof? if ( m_iBuffPos>=m_iBuffUsed ) break; // newline? 
if ( m_pBuff[m_iBuffPos++]=='\n' ) break; } // finalize sBuffer[iOutPos] = '\0'; return iOutPos; } ///////////////////////////////////////////////////////////////////////////// #if PARANOID #define SPH_UNZIP_IMPL(_type,_getexpr) \ register DWORD b = 0; \ register _type v = 0; \ int it = 0; \ do { b = _getexpr; v = ( v<<7 ) + ( b&0x7f ); it++; } while ( b&0x80 ); \ assert ( (it-1)*7<=sizeof(_type)*8 ); \ return v; #else #define SPH_UNZIP_IMPL(_type,_getexpr) \ register DWORD b = 0; \ register _type v = 0; \ do { b = _getexpr; v = ( v<<7 ) + ( b&0x7f ); } while ( b&0x80 ); \ return v; #endif // PARANOID DWORD sphUnzipInt ( const BYTE * & pBuf ) { SPH_UNZIP_IMPL ( DWORD, *pBuf++ ); } SphOffset_t sphUnzipOffset ( const BYTE * & pBuf ) { SPH_UNZIP_IMPL ( SphOffset_t, *pBuf++ ); } DWORD CSphReader::UnzipInt () { SPH_UNZIP_IMPL ( DWORD, GetByte() ); } SphOffset_t CSphReader::UnzipOffset () { SPH_UNZIP_IMPL ( SphOffset_t, GetByte() ); } #if USE_64BIT #define sphUnzipWordid sphUnzipOffset #else #define sphUnzipWordid sphUnzipInt #endif ///////////////////////////////////////////////////////////////////////////// const CSphReader & CSphReader::operator = ( const CSphReader & rhs ) { SetFile ( rhs.m_iFD, rhs.m_sFilename.cstr() ); SeekTo ( rhs.m_iPos + rhs.m_iBuffPos, rhs.m_iSizeHint ); return *this; } DWORD CSphReader::GetDword () { DWORD uRes = 0; GetBytes ( &uRes, sizeof(DWORD) ); return uRes; } SphOffset_t CSphReader::GetOffset () { SphOffset_t uRes = 0; GetBytes ( &uRes, sizeof(SphOffset_t) ); return uRes; } CSphString CSphReader::GetString () { CSphString sRes; DWORD iLen = GetDword (); if ( iLen ) { char * sBuf = new char [ iLen ]; GetBytes ( sBuf, iLen ); sRes.SetBinary ( sBuf, iLen ); SafeDeleteArray ( sBuf ); } return sRes; } ////////////////////////////////////////////////////////////////////////// CSphAutoreader::~CSphAutoreader () { Close (); } bool CSphAutoreader::Open ( const CSphString & sFilename, CSphString & sError ) { assert ( m_iFD<0 ); assert ( !sFilename.IsEmpty() ); m_iFD = ::open ( sFilename.cstr(), SPH_O_READ, 0644 ); m_iPos = 0; m_iBuffPos = 0; m_iBuffUsed = 0; m_sFilename = sFilename; if ( m_iFD<0 ) sError.SetSprintf ( "failed to open %s: %s", sFilename.cstr(), strerror(errno) ); return ( m_iFD>=0 ); } void CSphAutoreader::Close () { if ( m_iFD>=0 ) ::close ( m_iFD ); m_iFD = -1; } SphOffset_t CSphAutoreader::GetFilesize () { assert ( m_iFD>=0 ); struct_stat st; if ( m_iFD<0 || fstat ( m_iFD, &st )<0 ) return -1; return st.st_size; } ///////////////////////////////////////////////////////////////////////////// // QUERY RESULT ///////////////////////////////////////////////////////////////////////////// CSphQueryResult::CSphQueryResult () : m_tSchema ( "query_result" ) { m_iQueryTime = 0; m_iCpuTime = 0; m_iMultiplier = 1; m_iTotalMatches = 0; m_pMva = NULL; m_pStrings = NULL; m_iOffset = 0; m_iCount = 0; m_iSuccesses = 0; } CSphQueryResult::~CSphQueryResult () { ARRAY_FOREACH ( i, m_dStorage2Free ) { SafeDeleteArray ( m_dStorage2Free[i] ); } } void CSphQueryResult::LeakStorages ( CSphQueryResult & tDst ) { ARRAY_FOREACH ( i, m_dStorage2Free ) tDst.m_dStorage2Free.Add ( m_dStorage2Free[i] ); m_dStorage2Free.Reset(); } ///////////////////////////////////////////////////////////////////////////// // CHUNK READER ///////////////////////////////////////////////////////////////////////////// CSphBin::CSphBin ( ESphHitless eMode, bool bWordDict ) : m_eMode ( eMode ) , m_dBuffer ( NULL ) , m_pCurrent ( NULL ) , m_iLeft ( 0 ) , m_iDone ( 0 ) , m_eState ( BIN_POS ) , m_bWordDict ( 
bWordDict ) , m_bError ( false ) , m_iFile ( -1 ) , m_pFilePos ( NULL ) , m_iFilePos ( 0 ) , m_iFileLeft ( 0 ) { m_tHit.m_sKeyword = bWordDict ? m_sKeyword : NULL; m_sKeyword[0] = '\0'; #ifndef NDEBUG m_iLastWordID = 0; m_sLastKeyword[0] = '\0'; #endif } int CSphBin::CalcBinSize ( int iMemoryLimit, int iBlocks, const char * sPhase, bool bWarn ) { if ( iBlocks<=0 ) return CSphBin::MIN_SIZE; int iBinSize = ( ( iMemoryLimit/iBlocks + 2048 ) >> 12 ) << 12; // round to 4k if ( iBinSize=MIN_SIZE ); assert ( pSharedOffset ); m_iFile = iFD; m_pFilePos = pSharedOffset; m_iSize = iBinSize; m_dBuffer = new BYTE [ iBinSize ]; m_pCurrent = m_dBuffer; m_tHit.m_iDocID = 0; m_tHit.m_iWordID = 0; m_tHit.m_iWordPos = EMPTY_HIT; m_tHit.m_dFieldMask.Unset(); m_bError = false; } CSphBin::~CSphBin () { SafeDeleteArray ( m_dBuffer ); } int CSphBin::ReadByte () { BYTE r; if ( !m_iLeft ) { PROFILE ( read_hits ); if ( *m_pFilePos!=m_iFilePos ) { sphSeek ( m_iFile, m_iFilePos, SEEK_SET ); *m_pFilePos = m_iFilePos; } int n = m_iFileLeft > m_iSize ? m_iSize : (int)m_iFileLeft; if ( n==0 ) { m_iDone = 1; m_iLeft = 1; } else { assert ( m_dBuffer ); if ( sphReadThrottled ( m_iFile, m_dBuffer, n )!=(size_t)n ) { m_bError = true; return -2; } m_iLeft = n; m_iFilePos += n; m_iFileLeft -= n; m_pCurrent = m_dBuffer; *m_pFilePos += n; } } if ( m_iDone ) { m_bError = true; // unexpected (!) eof return -1; } m_iLeft--; r = *(m_pCurrent); m_pCurrent++; return r; } ESphBinRead CSphBin::ReadBytes ( void * pDest, int iBytes ) { assert ( iBytes>0 ); assert ( iBytes<=m_iSize ); if ( m_iDone ) return BIN_READ_EOF; if ( m_iLeft=iBytes ); m_iLeft -= iBytes; memcpy ( pDest, m_pCurrent, iBytes ); m_pCurrent += iBytes; return BIN_READ_OK; } SphWordID_t CSphBin::ReadVLB () { SphWordID_t uValue = 0; int iByte, iOffset = 0; do { if ( ( iByte = ReadByte() )<0 ) return 0; uValue += ( ( SphWordID_t ( iByte & 0x7f ) ) << iOffset ); iOffset += 7; } while ( iByte & 0x80 ); return uValue; } DWORD CSphBin::UnzipInt () { register int b = 0; register DWORD v = 0; do { b = ReadByte(); if ( b<0 ) b = 0; v = ( v<<7 ) + ( b & 0x7f ); } while ( b & 0x80 ); return v; } SphOffset_t CSphBin::UnzipOffset () { register int b = 0; register SphOffset_t v = 0; do { b = ReadByte(); if ( b<0 ) b = 0; v = ( v<<7 ) + ( b & 0x7f ); } while ( b & 0x80 ); return v; } int CSphBin::ReadHit ( CSphAggregateHit * pOut, int iRowitems, CSphRowitem * pRowitems ) { // expected EOB if ( m_iDone ) { pOut->m_iWordID = 0; return 1; } CSphAggregateHit & tHit = m_tHit; // shortcut for ( ;; ) { // SPH_MAX_WORD_LEN is now 42 only to keep ReadVLB() below // technically, we can just use different functions on different paths, if ever needed STATIC_ASSERT ( SPH_MAX_WORD_LEN*3<=127, KEYWORD_TOO_LONG ); SphWordID_t uDelta = ReadVLB(); if ( uDelta ) { switch ( m_eState ) { case BIN_WORD: if ( m_bWordDict ) { #ifdef NDEBUG // FIXME?! move this under PARANOID or something? // or just introduce an assert() checked release build? 
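// Illustrative sketch, kept out of the build with #if 0: CSphBin readers all
// share one temporary-file descriptor, each owning a contiguous run of it,
// and the shared *m_pFilePos lets a bin skip the lseek() whenever the kernel
// file position is already where that bin needs it. The toy class below
// (SketchRunReader, an invented name, not the real CSphBin API) restates just
// that refill logic under plain POSIX assumptions.
#if 0
#include <unistd.h>
#include <sys/types.h>
#include <vector>
#include <algorithm>
#include <stdint.h>

class SketchRunReader
{
	int					m_iFD;			// shared file descriptor
	off_t *				m_pShared;		// file position as last set by *any* reader
	off_t				m_iRunPos;		// where my unread run data starts
	int64_t				m_iRunLeft;		// unread bytes left in my run
	std::vector<char>	m_dBuf;			// refill buffer
	size_t				m_iCursor;		// read cursor inside m_dBuf
	size_t				m_iBuffered;	// valid bytes inside m_dBuf

public:
	SketchRunReader ( int iFD, off_t * pShared, off_t iRunStart, int64_t iRunLen, size_t iBufSize )
		: m_iFD ( iFD ), m_pShared ( pShared ), m_iRunPos ( iRunStart )
		, m_iRunLeft ( iRunLen ), m_dBuf ( iBufSize ), m_iCursor ( 0 ), m_iBuffered ( 0 )
	{}

	// returns the next byte of the run, or -1 on end-of-run / error
	int ReadByte ()
	{
		if ( m_iCursor==m_iBuffered )
		{
			if ( m_iRunLeft<=0 )
				return -1;

			// seek only if some other reader moved the shared position
			if ( *m_pShared!=m_iRunPos )
				if ( ::lseek ( m_iFD, m_iRunPos, SEEK_SET )==(off_t)-1 )
					return -1;

			size_t iWant = (size_t) std::min<int64_t> ( (int64_t)m_dBuf.size(), m_iRunLeft );
			ssize_t iGot = ::read ( m_iFD, &m_dBuf[0], iWant );
			if ( iGot<=0 )
				return -1;

			m_iRunPos += iGot;
			m_iRunLeft -= iGot;
			*m_pShared = m_iRunPos;		// publish the new kernel file position
			m_iBuffered = (size_t)iGot;
			m_iCursor = 0;
		}
		return (unsigned char) m_dBuf[m_iCursor++];
	}
};
#endif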
if ( uDelta>=sizeof(m_sKeyword) ) sphDie ( "INTERNAL ERROR: corrupted keyword length (len="UINT64_FMT", deltapos="UINT64_FMT")", (uint64_t)uDelta, (uint64_t)(m_iFilePos-m_iLeft) ); #else assert ( uDelta>0 && uDelta>= 1; } tHit.m_iWordPos += (DWORD)uDelta; *pOut = tHit; return 1; default: sphDie ( "INTERNAL ERROR: unknown bin state (state=%d)", m_eState ); } } else { switch ( m_eState ) { case BIN_POS: m_eState = BIN_DOC; break; case BIN_DOC: m_eState = BIN_WORD; break; case BIN_WORD: m_iDone = 1; pOut->m_iWordID = 0; return 1; default: sphDie ( "INTERNAL ERROR: unknown bin state (state=%d)", m_eState ); } } } } bool CSphBin::IsEOF () const { return m_iDone!=0 || m_iFileLeft<=0; } bool CSphBin::IsDone () const { return m_iDone!=0 || ( m_iFileLeft<=0 && m_iLeft<=0 ); } ESphBinRead CSphBin::Precache () { if ( m_iFileLeft > m_iSize-m_iLeft ) { m_bError = true; return BIN_PRECACHE_ERROR; } if ( !m_iFileLeft ) return BIN_PRECACHE_OK; if ( *m_pFilePos!=m_iFilePos ) { sphSeek ( m_iFile, m_iFilePos, SEEK_SET ); *m_pFilePos = m_iFilePos; } assert ( m_dBuffer ); memmove ( m_dBuffer, m_pCurrent, m_iLeft ); if ( sphReadThrottled ( m_iFile, m_dBuffer+m_iLeft, m_iFileLeft )!=(size_t)m_iFileLeft ) { m_bError = true; return BIN_READ_ERROR; } m_iLeft += m_iFileLeft; m_iFilePos += m_iFileLeft; m_iFileLeft -= m_iFileLeft; m_pCurrent = m_dBuffer; *m_pFilePos += m_iFileLeft; return BIN_PRECACHE_OK; } ////////////////////////////////////////////////////////////////////////// // INDEX SETTINGS ////////////////////////////////////////////////////////////////////////// CSphIndexSettings::CSphIndexSettings () : m_eDocinfo ( SPH_DOCINFO_NONE ) , m_eHitFormat ( SPH_HIT_FORMAT_PLAIN ) , m_bHtmlStrip ( false ) , m_eHitless ( SPH_HITLESS_NONE ) { } ////////////////////////////////////////////////////////////////////////// // GLOBAL MVA STORAGE ARENA ////////////////////////////////////////////////////////////////////////// class tTester : public ISphNoncopyable { public: virtual void Reset() = 0; virtual void TestData ( int iData ) = 0; virtual ~tTester() {} }; /// shared-memory arena allocator /// manages small tagged dword strings, upto 4096 bytes in size class CSphArena { public: CSphArena (); ~CSphArena (); DWORD * ReInit ( int uMaxBytes ); const char * GetError () const { return m_sError.cstr(); } int TaggedAlloc ( int iTag, int iBytes ); void TaggedFreeIndex ( int iTag, int iIndex ); void TaggedFreeTag ( int iTag ); void ExamineTag ( tTester* pTest, int iTag ); protected: static const int MIN_BITS = 4; static const int MAX_BITS = 12; static const int NUM_SIZES = MAX_BITS-MIN_BITS+2; ///< one for 0 (empty pages), and one for each size from min to max static const int PAGE_SIZE = 1< m_pArena; ///< arena that stores everything (all other pointers point here) PageDesc_t * m_pPages; ///< page descriptors int * m_pFreelistHeads; ///< free-list heads int * m_pTagCount; TagDesc_t * m_pTags; DWORD * m_pBasePtr; ///< base data storage pointer CSphString m_sError; #if ARENADEBUG protected: int * m_pTotalAllocs; int * m_pTotalBytes; public: void CheckFreelists (); #else inline void CheckFreelists () {} #endif // ARENADEBUG }; class tDocCollector : public tTester { CSphVector * m_dCollection; public: explicit tDocCollector ( CSphVector & dCollection ) : m_dCollection ( &dCollection ) {} virtual void Reset() { m_dCollection->Reset(); } virtual void TestData ( int iData ) { if ( !g_pMvaArena ) return; m_dCollection->Add ( *(SphDocID_t*)(g_pMvaArena + iData) ); } }; 
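// Illustrative sketch, kept out of the build with #if 0: inside each
// CSphArena page, allocations of a single power-of-two size class are
// tracked with a per-page bitmap; RawAlloc() scans for a zero bit, sets it,
// and returns the matching slot, and RawFree() clears it again. The toy page
// below shows only that bitmap bookkeeping for one 4 KB page, assuming
// 16-byte minimum slots as in the real arena; the per-size freelists of
// pages and the tag directory are left out, and all names here are invented
// for the example.
#if 0
#include <stdint.h>
#include <string.h>
#include <assert.h>

struct SketchBitmapPage
{
	static const int PAGE_BYTES = 4096;

	int			m_iSlotBytes;						// slot size, a power of two, >=16
	int			m_iSlots;							// PAGE_BYTES / m_iSlotBytes
	uint32_t	m_uBitmap [ PAGE_BYTES/16/32 ];		// 1 bit per slot, sized for 16-byte slots

	explicit SketchBitmapPage ( int iSlotBytes )
		: m_iSlotBytes ( iSlotBytes )
		, m_iSlots ( PAGE_BYTES/iSlotBytes )
	{
		assert ( iSlotBytes>=16 && iSlotBytes<=PAGE_BYTES );
		memset ( m_uBitmap, 0, sizeof(m_uBitmap) );
	}

	// returns a free slot index within the page, or -1 if the page is full
	int Alloc ()
	{
		for ( int iWord=0; iWord<(m_iSlots+31)/32; iWord++ )
		{
			if ( m_uBitmap[iWord]==0xffffffffUL )
				continue;							// this word is fully used
			for ( int iBit=0; iBit<32; iBit++ )
			{
				int iSlot = iWord*32 + iBit;
				if ( iSlot>=m_iSlots )
					return -1;						// ran past the last real slot
				if ( !( m_uBitmap[iWord] & ( 1u<<iBit ) ) )
				{
					m_uBitmap[iWord] |= ( 1u<<iBit );
					return iSlot;
				}
			}
		}
		return -1;
	}

	void Free ( int iSlot )
	{
		assert ( iSlot>=0 && iSlot<m_iSlots );
		assert ( m_uBitmap[iSlot>>5] & ( 1u<<(iSlot&31) ) );	// must be currently allocated
		m_uBitmap[iSlot>>5] &= ~( 1u<<(iSlot&31) );
	}
};
#endif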
////////////////////////////////////////////////////////////////////////// CSphArena::CSphArena () : m_iPages ( 0 ) { m_tThdMutex.Init(); } CSphArena::~CSphArena () { // notify callers that arena no longer exists g_pMvaArena = NULL; m_tThdMutex.Done(); } DWORD * CSphArena::ReInit ( int uMaxBytes ) { if ( m_iPages!=0 ) { m_pArena.Reset(); m_iPages = 0; } return Init ( uMaxBytes ); } DWORD * CSphArena::Init ( int uMaxBytes ) { m_iPages = ( uMaxBytes+PAGE_SIZE-1 ) / PAGE_SIZE; int iData = m_iPages*PAGE_SIZE; // data size, bytes int iMyTaglist = sizeof(int) + MAX_TAGS*sizeof(TagDesc_t); // int length, TagDesc_t[] tags; NOLINT int iMy = m_iPages*sizeof(PageDesc_t) + NUM_SIZES*sizeof(int) + iMyTaglist; // my internal structures size, bytes; NOLINT #if ARENADEBUG iMy += 2*sizeof(int); // debugging counters; NOLINT #endif assert ( iData%sizeof(DWORD)==0 ); assert ( iMy%sizeof(DWORD)==0 ); CSphString sError, sWarning; if ( m_tProcMutex.GetError() || !m_pArena.Alloc ( (iData+iMy)/sizeof(DWORD), sError, sWarning ) ) { m_iPages = 0; if ( m_tProcMutex.GetError() ) m_sError = m_tProcMutex.GetError(); else m_sError.SetSprintf ( "alloc, error='%s', warning='%s'", sError.cstr(), sWarning.cstr() ); return NULL; } // setup internal pointers DWORD * pCur = m_pArena.GetWritePtr(); m_pPages = (PageDesc_t*) pCur; pCur += sizeof(PageDesc_t)*m_iPages/sizeof(DWORD); m_pFreelistHeads = (int*) pCur; pCur += NUM_SIZES; // one for each size, and one extra for zero m_pTagCount = (int*) pCur++; m_pTags = (TagDesc_t*) pCur; pCur += sizeof(TagDesc_t)*MAX_TAGS/sizeof(DWORD); #if ARENADEBUG m_pTotalAllocs = (int*) pCur++; m_pTotalBytes = (int*) pCur++; *m_pTotalAllocs = 0; *m_pTotalBytes = 0; #endif m_pBasePtr = m_pArena.GetWritePtr() + iMy/sizeof(DWORD); assert ( m_pBasePtr==pCur ); // setup initial state for ( int i=0; i0 ) ? i-1 : -1; m_pPages[i].m_iNext = ( i( ( 1 << MAX_BITS ) - (int)sizeof(int) ) ) return -1; int iSizeBits = sphLog2 ( iBytes+sizeof(int)-1 ); // always reserve sizeof(int) for the tag; NOLINT iSizeBits = Max ( iSizeBits, MIN_BITS ); assert ( iSizeBits>=MIN_BITS && iSizeBits<=MAX_BITS ); int iSizeSlot = iSizeBits-MIN_BITS+1; assert ( iSizeSlot>=1 && iSizeSlot=0 ) { // got something in the free-list pPage = m_pPages + m_pFreelistHeads[iSizeSlot]; } else { // nothing in free-list, alloc next empty one if ( m_pFreelistHeads[0]<0 ) return -1; // out of memory // update the page pPage = m_pPages + m_pFreelistHeads[0]; assert ( pPage->m_iPrev==-1 ); m_pFreelistHeads[iSizeSlot] = m_pFreelistHeads[0]; m_pFreelistHeads[0] = pPage->m_iNext; if ( pPage->m_iNext>=0 ) m_pPages[pPage->m_iNext].m_iPrev = -1; pPage->m_iSizeBits = iSizeBits; pPage->m_iUsed = 0; pPage->m_iNext = -1; CheckFreelists (); // setup bitmap int iUsedBits = ( 1<<(MAX_BITS-iSizeBits) ); // max-used-bits = page-size/alloc-size = ( 1<0 && iUsedBits<=(PAGE_BITMAP<<5) ); for ( int i=0; im_uBitmap[i] = ( ( i<<5 )>=iUsedBits ) ? 
0xffffffffUL : 0; if ( iUsedBits<32 ) pPage->m_uBitmap[0] = ( 0xffffffffUL<m_iSizeBits==iSizeBits ); for ( int i=0; im_uBitmap[i]==0xffffffffUL ) continue; int iFree = FindBit ( pPage->m_uBitmap[i] ); pPage->m_uBitmap[i] |= ( 1<m_iUsed++; if ( pPage->m_iUsed==( PAGE_SIZE >> pPage->m_iSizeBits ) ) { // this page is full now, unchain from the free-list assert ( m_pFreelistHeads[iSizeSlot]==pPage-m_pPages ); m_pFreelistHeads[iSizeSlot] = pPage->m_iNext; if ( pPage->m_iNext>=0 ) { assert ( m_pPages[pPage->m_iNext].m_iPrev==pPage-m_pPages ); m_pPages[pPage->m_iNext].m_iPrev = -1; } pPage->m_iNext = -1; } #if ARENADEBUG (*m_pTotalAllocs)++; (*m_pTotalBytes) += ( 1<m_iPages ) { assert ( 0 && "internal error, freed index out of arena" ); return; } PageDesc_t * pPage = m_pPages + iPage; int iBit = ( iOffset % PAGE_SIZE ) >> pPage->m_iSizeBits; assert ( ( iOffset % PAGE_SIZE )==( iBit << pPage->m_iSizeBits ) && "internal error, freed offset is unaligned" ); if (!( pPage->m_uBitmap[iBit>>5] & ( 1UL<<(iBit & 31) ) )) { assert ( 0 && "internal error, freed index already freed" ); return; } pPage->m_uBitmap[iBit>>5] &= ~( 1UL << ( iBit & 31 ) ); pPage->m_iUsed--; #if ARENADEBUG (*m_pTotalAllocs)--; (*m_pTotalBytes) -= ( 1<m_iSizeBits ); #endif CheckFreelists (); int iSizeSlot = pPage->m_iSizeBits-MIN_BITS+1; if ( pPage->m_iUsed==( PAGE_SIZE >> pPage->m_iSizeBits )-1 ) { // this page was full, but it's semi-free now // chain to free-list assert ( pPage->m_iPrev==-1 ); // full pages must not be in any list assert ( pPage->m_iNext==-1 ); pPage->m_iNext = m_pFreelistHeads[iSizeSlot]; if ( pPage->m_iNext>=0 ) { assert ( m_pPages[pPage->m_iNext].m_iPrev==-1 ); assert ( m_pPages[pPage->m_iNext].m_iSizeBits==pPage->m_iSizeBits ); m_pPages[pPage->m_iNext].m_iPrev = iPage; } m_pFreelistHeads[iSizeSlot] = iPage; } if ( pPage->m_iUsed==0 ) { // this page is empty now // unchain from free-list if ( pPage->m_iPrev>=0 ) { // non-head page assert ( m_pPages[pPage->m_iPrev].m_iNext==iPage ); m_pPages[pPage->m_iPrev].m_iNext = pPage->m_iNext; if ( pPage->m_iNext>=0 ) { assert ( m_pPages[pPage->m_iNext].m_iPrev==iPage ); m_pPages[pPage->m_iNext].m_iPrev = pPage->m_iPrev; } } else { // head page assert ( m_pFreelistHeads[iSizeSlot]==iPage ); assert ( pPage->m_iPrev==-1 ); if ( pPage->m_iNext>=0 ) { assert ( m_pPages[pPage->m_iNext].m_iPrev==iPage ); m_pPages[pPage->m_iNext].m_iPrev = -1; } m_pFreelistHeads[iSizeSlot] = pPage->m_iNext; } pPage->m_iSizeBits = 0; pPage->m_iPrev = -1; pPage->m_iNext = m_pFreelistHeads[0]; if ( pPage->m_iNext>=0 ) { assert ( m_pPages[pPage->m_iNext].m_iPrev==-1 ); assert ( m_pPages[pPage->m_iNext].m_iSizeBits==0 ); m_pPages[pPage->m_iNext].m_iPrev = iPage; } m_pFreelistHeads[0] = iPage; } CheckFreelists (); } int CSphArena::TaggedAlloc ( int iTag, int iBytes ) { if ( !m_iPages ) return -1; // uninitialized assert ( iTag>=0 ); CSphScopedLock tProcLock ( m_tProcMutex ); CSphScopedLock tThdLock ( m_tThdMutex ); // find that tag first TagDesc_t * pTag = sphBinarySearch ( m_pTags, m_pTags+(*m_pTagCount)-1, bind ( &TagDesc_t::m_iTag ), iTag ); if ( !pTag ) { if ( *m_pTagCount==MAX_TAGS ) return -1; // out of tags int iLogHead = RawAlloc ( sizeof(AllocsLogEntry_t) ); if ( iLogHead<0 ) return -1; // out of memory AllocsLogEntry_t * pLog = (AllocsLogEntry_t*) ( m_pBasePtr + iLogHead ); pLog->m_iUsed = 0; pLog->m_iNext = -1; // add new tag pTag = m_pTags + (*m_pTagCount)++; pTag->m_iTag = iTag; pTag->m_iAllocs = 0; pTag->m_iLogHead = iLogHead; // re-sort // OPTIMIZE! 
full-blown sort is overkill here sphSort ( m_pTags, *m_pTagCount, sphMemberLess ( &TagDesc_t::m_iTag ) ); // we must be able to find it now pTag = sphBinarySearch ( m_pTags, m_pTags+(*m_pTagCount)-1, bind ( &TagDesc_t::m_iTag ), iTag ); assert ( pTag && "internal error, fresh tag not found in TaggedAlloc()" ); if ( !pTag ) return -1; // internal error } // grow the log if needed AllocsLogEntry_t * pLog = (AllocsLogEntry_t*) ( m_pBasePtr + pTag->m_iLogHead ); if ( pLog->m_iUsed==MAX_LOGENTRIES ) { int iNewEntry = RawAlloc ( sizeof(AllocsLogEntry_t) ); if ( iNewEntry<0 ) return -1; // out of memory AllocsLogEntry_t * pNew = (AllocsLogEntry_t*) ( m_pBasePtr + iNewEntry ); pNew->m_iUsed = 0; pNew->m_iNext = pTag->m_iLogHead; pTag->m_iLogHead = iNewEntry; pLog = pNew; } // do the alloc itself int iIndex = RawAlloc ( iBytes ); if ( iIndex<0 ) return -1; // out of memory // tag it m_pBasePtr[iIndex-1] = iTag; // log it assert ( pLog->m_iUsedm_dEntries [ pLog->m_iUsed++ ] = iIndex; pTag->m_iAllocs++; // and we're done return iIndex; } void CSphArena::TaggedFreeIndex ( int iTag, int iIndex ) { if ( !m_iPages ) return; // uninitialized assert ( iTag>=0 ); CSphScopedLock tProcLock ( m_tProcMutex ); CSphScopedLock tThdLock ( m_tThdMutex ); // find that tag TagDesc_t * pTag = sphBinarySearch ( m_pTags, m_pTags+(*m_pTagCount)-1, bind ( &TagDesc_t::m_iTag ), iTag ); assert ( pTag && "internal error, unknown tag in TaggedFreeIndex()" ); assert ( m_pBasePtr[iIndex-1]==DWORD(iTag) && "internal error, tag mismatch in TaggedFreeIndex()" ); // defence against internal errors if ( !pTag ) return; // untag it m_pBasePtr[iIndex-1] = DWORD(-1); // free it RawFree ( iIndex ); // update the tag decsriptor pTag->m_iAllocs--; assert ( pTag->m_iAllocs>=0 ); // remove the descriptor if its empty now if ( pTag->m_iAllocs==0 ) RemoveTag ( pTag ); } void CSphArena::TaggedFreeTag ( int iTag ) { if ( !m_iPages ) return; // uninitialized assert ( iTag>=0 ); CSphScopedLock tProcLock ( m_tProcMutex ); CSphScopedLock tThdLock ( m_tThdMutex ); // find that tag TagDesc_t * pTag = sphBinarySearch ( m_pTags, m_pTags+(*m_pTagCount)-1, bind ( &TagDesc_t::m_iTag ), iTag ); if ( !pTag ) return; // walk the log and free it int iLog = pTag->m_iLogHead; while ( iLog>=0 ) { AllocsLogEntry_t * pLog = (AllocsLogEntry_t*) ( m_pBasePtr + iLog ); iLog = pLog->m_iNext; // free each alloc if tag still matches for ( int i=0; im_iUsed; i++ ) { int iIndex = pLog->m_dEntries[i]; if ( m_pBasePtr[iIndex-1]==DWORD(iTag) ) { m_pBasePtr[iIndex-1] = DWORD(-1); // avoid double free RawFree ( iIndex ); pTag->m_iAllocs--; } } } // check for mismatches assert ( pTag->m_iAllocs==0 ); // remove the descriptor RemoveTag ( pTag ); } void CSphArena::ExamineTag ( tTester* pTest, int iTag ) { if ( !pTest ) return; pTest->Reset(); if ( !m_iPages ) return; // uninitialized assert ( iTag>=0 ); CSphScopedLock tProcLock ( m_tProcMutex ); CSphScopedLock tThdLock ( m_tThdMutex ); // find that tag TagDesc_t * pTag = sphBinarySearch ( m_pTags, m_pTags+(*m_pTagCount)-1, bind ( &TagDesc_t::m_iTag ), iTag ); if ( !pTag ) return; // walk the log and tick it's chunks int iLog = pTag->m_iLogHead; while ( iLog>=0 ) { AllocsLogEntry_t * pLog = (AllocsLogEntry_t*) ( m_pBasePtr + iLog ); iLog = pLog->m_iNext; // tick each alloc for ( int i=0; im_iUsed; i++ ) pTest->TestData ( pLog->m_dEntries[i] ); } } void CSphArena::RemoveTag ( TagDesc_t * pTag ) { assert ( pTag ); assert ( pTag->m_iAllocs==0 ); // dealloc log chain int iLog = pTag->m_iLogHead; while ( iLog>=0 ) { AllocsLogEntry_t * 
pLog = (AllocsLogEntry_t*) ( m_pBasePtr + iLog ); int iNext = pLog->m_iNext; RawFree ( iLog ); iLog = iNext; } // remove tag from the list int iTail = m_pTags + (*m_pTagCount) - pTag - 1; memmove ( pTag, pTag+1, iTail*sizeof(TagDesc_t) ); (*m_pTagCount)--; } #if ARENADEBUG void CSphArena::CheckFreelists () { assert ( m_pFreelistHeads[0]==-1 || m_pPages[m_pFreelistHeads[0]].m_iSizeBits==0 ); for ( int iSizeSlot=1; iSizeSlot=0 && g_pMvaArena ) #else if ( m_iIndexTag>=0 && g_bHeadProcess && g_pMvaArena ) #endif g_MvaArena.TaggedFreeTag ( m_iIndexTag ); #if !USE_WINDOWS if ( g_bHeadProcess ) #endif Unlock(); } ///////////////////////////////////////////////////////////////////////////// int CSphIndex_VLN::UpdateAttributes ( const CSphAttrUpdate & tUpd, int iIndex, CSphString & sError ) { // check if we can if ( m_tSettings.m_eDocinfo!=SPH_DOCINFO_EXTERN ) { sError.SetSprintf ( "docinfo=extern required for updates" ); return -1; } assert ( tUpd.m_dDocids.GetLength()==0 || tUpd.m_dRows.GetLength()==0 ); DWORD uRows = Max ( tUpd.m_dDocids.GetLength(), tUpd.m_dRows.GetLength() ); bool bRaw = tUpd.m_dDocids.GetLength()==0; // check if we have to assert ( (int)uRows==tUpd.m_dRowOffset.GetLength() ); if ( !m_uDocinfo || !uRows ) return 0; if ( g_pBinlog ) g_pBinlog->BinlogUpdateAttributes ( &m_iTID, m_sIndexName.cstr(), tUpd ); // remap update schema to index schema CSphVector dLocators; CSphVector dIndexes; CSphVector dFloats; CSphVector dBigints; dLocators.Reserve ( tUpd.m_dAttrs.GetLength() ); dIndexes.Reserve ( tUpd.m_dAttrs.GetLength() ); dFloats.Reserve ( tUpd.m_dAttrs.GetLength() ); dBigints.Reserve ( tUpd.m_dAttrs.GetLength() ); // bigint flags for *source* schema. uint64_t uDst64 = 0; ARRAY_FOREACH ( i, tUpd.m_dAttrs ) { int iIndex = m_tSchema.GetAttrIndex ( tUpd.m_dAttrs[i].m_sName.cstr() ); if ( iIndex<0 ) { sError.SetSprintf ( "attribute '%s' not found", tUpd.m_dAttrs[i].m_sName.cstr() ); return -1; } dBigints.Add ( tUpd.m_dAttrs[i].m_eAttrType==SPH_ATTR_BIGINT ); // forbid updates on non-int columns const CSphColumnInfo & tCol = m_tSchema.GetAttr(iIndex); if (!( tCol.m_eAttrType==SPH_ATTR_BOOL || tCol.m_eAttrType==SPH_ATTR_INTEGER || tCol.m_eAttrType==SPH_ATTR_TIMESTAMP || tCol.m_eAttrType==SPH_ATTR_UINT32SET || tCol.m_eAttrType==SPH_ATTR_UINT64SET || tCol.m_eAttrType==SPH_ATTR_BIGINT || tCol.m_eAttrType==SPH_ATTR_FLOAT )) { sError.SetSprintf ( "attribute '%s' can not be updated (must be boolean, integer, bigint, float, timestamp, or MVA)", tUpd.m_dAttrs[i].m_sName.cstr() ); return -1; } // forbid updates on MVA columns if there's no arena if ( ( tCol.m_eAttrType==SPH_ATTR_UINT32SET || tCol.m_eAttrType==SPH_ATTR_UINT64SET ) && !g_pMvaArena ) { sError.SetSprintf ( "MVA attribute '%s' can not be updated (MVA arena not initialized)", tCol.m_sName.cstr() ); return -1; } bool bSrcMva = ( tCol.m_eAttrType==SPH_ATTR_UINT32SET || tCol.m_eAttrType==SPH_ATTR_UINT64SET ); bool bDstMva = ( tUpd.m_dAttrs[i].m_eAttrType==SPH_ATTR_UINT32SET || tUpd.m_dAttrs[i].m_eAttrType==SPH_ATTR_UINT64SET ); if ( bSrcMva!=bDstMva ) { sError.SetSprintf ( "attribute '%s' MVA flag mismatch", tUpd.m_dAttrs[i].m_sName.cstr() ); return -1; } if ( tCol.m_eAttrType==SPH_ATTR_UINT32SET && tUpd.m_dAttrs[i].m_eAttrType==SPH_ATTR_UINT64SET ) { sError.SetSprintf ( "attribute '%s' MVA bits (dst=%d, src=%d) mismatch", tUpd.m_dAttrs[i].m_sName.cstr(), tCol.m_eAttrType, tUpd.m_dAttrs[i].m_eAttrType ); return -1; } if ( tCol.m_eAttrType==SPH_ATTR_UINT64SET ) uDst64 |= ( U64C(1)<=0 && ( tUpd.m_dAttrs[i].m_eAttrType==SPH_ATTR_UINT32SET 
|| tUpd.m_dAttrs[i].m_eAttrType==SPH_ATTR_UINT64SET ) ) iNumMVA++; // OPTIMIZE! execute the code below conditionally CSphVector dRowPtrs; CSphVector dMvaPtrs; dRowPtrs.Resize ( uRows ); dMvaPtrs.Resize ( uRows*iNumMVA ); dMvaPtrs.Fill ( -1 ); // preallocate bool bFailed = false; for ( int iUpd=iFirst; iUpd ( bRaw ? tUpd.m_dRows[iUpd] : FindDocinfo ( tUpd.m_dDocids[iUpd] ) ); if ( !dRowPtrs[iUpd] ) continue; // no such id int iPoolPos = tUpd.m_dRowOffset[iUpd]; int iMvaPtr = iUpd*iNumMVA; ARRAY_FOREACH_COND ( iCol, tUpd.m_dAttrs, !bFailed ) { bool bSrcMva32 = ( tUpd.m_dAttrs[iCol].m_eAttrType==SPH_ATTR_UINT32SET ); bool bSrcMva64 = ( tUpd.m_dAttrs[iCol].m_eAttrType==SPH_ATTR_UINT64SET ); if (!( bSrcMva32 || bSrcMva64 )) // FIXME! optimize using a prebuilt dword mask? { iPoolPos++; if ( dBigints[iCol] ) iPoolPos++; continue; } // get the requested new count int iNewCount = (int)tUpd.m_dPool[iPoolPos++]; iPoolPos += iNewCount; // try to alloc if ( dIndexes[iCol]>=0 ) { int iAlloc = -1; if ( iNewCount ) { bool bDst64 = ( uDst64 & ( U64C(1) << iCol ) )!=0; assert ( (iNewCount%2)==0 ); int iLen = ( bDst64 ? iNewCount : iNewCount/2 ); iAlloc = g_MvaArena.TaggedAlloc ( m_iIndexTag, (1+iLen)*sizeof(DWORD)+sizeof(SphDocID_t) ); if ( iAlloc<0 ) bFailed = true; } // whatever the outcome, move the pointer dMvaPtrs[iMvaPtr++] = iAlloc; } } } // if there were any allocation failures, rollback everything if ( bFailed ) { ARRAY_FOREACH ( i, dMvaPtrs ) if ( dMvaPtrs[i]>=0 ) g_MvaArena.TaggedFreeIndex ( m_iIndexTag, dMvaPtrs[i] ); sError.SetSprintf ( "out of pool memory on MVA update" ); return -1; } // preallocation went OK; do the actual update int iRowStride = DOCINFO_IDSIZE + m_tSchema.GetRowSize(); int iUpdated = 0; DWORD uUpdateMask = 0; for ( int iUpd=iFirst; iUpd ( &m_pDocinfoIndex[2*iBlock*iRowStride] ); DWORD * pIndexRanges = const_cast < DWORD * > ( &m_pDocinfoIndex[2*m_uDocinfoIndex*iRowStride] ); assert ( iBlock>=0 && iBlock<(int)m_uDocinfoIndex ); assert ( bRaw || ( DOCINFO2ID(pEntry)==tUpd.m_dDocids[iUpd] ) ); pEntry = DOCINFO2ATTRS(pEntry); int iPos = tUpd.m_dRowOffset[iUpd]; int iMvaPtr = iUpd*iNumMVA; ARRAY_FOREACH ( iCol, tUpd.m_dAttrs ) { bool bSrcMva32 = ( tUpd.m_dAttrs[iCol].m_eAttrType==SPH_ATTR_UINT32SET ); bool bSrcMva64 = ( tUpd.m_dAttrs[iCol].m_eAttrType==SPH_ATTR_UINT64SET ); if (!( bSrcMva32 || bSrcMva64 )) // FIXME! optimize using a prebuilt dword mask? { // plain update if ( dIndexes[iCol]>=0 ) { SphAttr_t uValue = dBigints[iCol] ? MVA_UPSIZE ( &tUpd.m_dPool[iPos] ) : tUpd.m_dPool[iPos]; sphSetRowAttr ( pEntry, dLocators[iCol], uValue ); // update block and index ranges for ( int i=0; i<2; i++ ) { DWORD * pBlock = i ? 
pBlockRanges : pIndexRanges; SphAttr_t uMin = sphGetRowAttr ( DOCINFO2ATTRS ( pBlock ), dLocators[iCol] ); SphAttr_t uMax = sphGetRowAttr ( DOCINFO2ATTRS ( pBlock+iRowStride ) , dLocators[iCol] ); if ( dFloats[iCol] ) // update float's indexes assumes float comparision { float fValue = sphDW2F ( (DWORD) uValue ); float fMin = sphDW2F ( (DWORD) uMin ); float fMax = sphDW2F ( (DWORD) uMax ); if ( fValuefMax ) sphSetRowAttr ( DOCINFO2ATTRS ( pBlock+iRowStride ), dLocators[iCol], sphF2DW ( fValue ) ); } else // update usual integers { if ( uValueuMax ) sphSetRowAttr ( DOCINFO2ATTRS ( pBlock+iRowStride ), dLocators[iCol], uValue ); } } uUpdateMask |= ATTRS_UPDATED; } iPos += dBigints[iCol]?2:1; continue; } // MVA update DWORD uOldIndex = MVA_DOWNSIZE ( sphGetRowAttr ( pEntry, dLocators[iCol] ) ); // get new count, store new data if needed DWORD uNew = tUpd.m_dPool[iPos++]; const DWORD * pSrc = tUpd.m_dPool.Begin() + iPos; iPos += uNew; if ( dIndexes[iCol]>=0 ) { uint64_t uNewMin = LLONG_MAX, uNewMax = 0; int iNewIndex = dMvaPtrs[iMvaPtr++]; SphDocID_t* pDocid = (SphDocID_t*)(g_pMvaArena + iNewIndex); *pDocid++ = bRaw ? DOCINFO2ID ( tUpd.m_dRows[iUpd] ) : tUpd.m_dDocids[iUpd]; iNewIndex = (DWORD*)pDocid - g_pMvaArena; if ( uNew ) { assert ( iNewIndex>=0 ); DWORD * pDst = g_pMvaArena + iNewIndex; bool bDst64 = ( uDst64 & ( U64C(1) << iCol ) )!=0; assert ( ( uNew%2 )==0 ); int iLen = ( bDst64 ? uNew : uNew/2 ); // setup new value (flagged index) to store within row uNew = DWORD(iNewIndex) | MVA_ARENA_FLAG; // MVA values counter first *pDst++ = iLen; if ( bDst64 ) { while ( iLen ) { uint64_t uValue = MVA_UPSIZE ( pSrc ); uNewMin = Min ( uNewMin, uValue ); uNewMax = Max ( uNewMax, uValue ); *pDst++ = *pSrc++; *pDst++ = *pSrc++; iLen -= 2; } } else { while ( iLen-- ) { DWORD uValue = *pSrc; pSrc += 2; *pDst++ = uValue; uNewMin = Min ( uNewMin, uValue ); uNewMax = Max ( uNewMax, uValue ); } } } // store new value sphSetRowAttr ( pEntry, dLocators[iCol], uNew ); // update block and index ranges if ( uNew ) for ( int i=0; i<2; i++ ) { DWORD * pBlock = i ? pBlockRanges : pIndexRanges; uint64_t uMin = sphGetRowAttr ( DOCINFO2ATTRS ( pBlock ), dLocators[iCol] ); uint64_t uMax = sphGetRowAttr ( DOCINFO2ATTRS ( pBlock+iRowStride ), dLocators[iCol] ); if ( uNewMinuMax ) { sphSetRowAttr ( DOCINFO2ATTRS ( pBlock ), dLocators[iCol], Min ( uMin, uNewMin ) ); sphSetRowAttr ( DOCINFO2ATTRS ( pBlock+iRowStride ), dLocators[iCol], Max ( uMax, uNewMax ) ); } } // free old storage if needed if ( uOldIndex & MVA_ARENA_FLAG ) { uOldIndex = ((DWORD*)((SphDocID_t*)(g_pMvaArena + (uOldIndex & MVA_OFFSET_MASK))-1))-g_pMvaArena; g_MvaArena.TaggedFreeIndex ( m_iIndexTag, uOldIndex ); } uUpdateMask |= ATTRS_MVA_UPDATED; } } iUpdated++; } *m_pAttrsStatus |= uUpdateMask; // FIXME! add lock/atomic? return iUpdated; } bool CSphIndex_VLN::LoadPersistentMVA ( CSphString & sError ) { // prepare the file to load CSphAutoreader fdReader; if ( !fdReader.Open ( GetIndexFileName("mvp"), m_sLastError ) ) { // no mvp means no saved attributes. 
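// Illustrative sketch, kept out of the build with #if 0: an updated MVA
// value lives in the shared arena as a docid followed by a count-prefixed
// DWORD list ( [docid][count][count x values] ), and the attribute row keeps
// the arena offset of the count word with a flag bit set so later readers
// can tell an updated, arena-resident value from the original on-disk pool.
// The helper below just walks such a count-prefixed 32-bit list and reports
// its min/max, the same bookkeeping UpdateAttributes() does to maintain
// block/index ranges; 64-bit MVAs store each value as two DWORDs instead.
// The function name is invented for the example.
#if 0
#include <stdint.h>
#include <algorithm>

// pList points at the count word of a 32-bit MVA list inside some DWORD pool
static bool SketchMvaMinMax ( const uint32_t * pList, uint32_t & uMin, uint32_t & uMax )
{
	uint32_t uCount = *pList++;
	if ( !uCount )
		return false;				// empty list, nothing to report

	uMin = uMax = pList[0];
	for ( uint32_t i=1; i<uCount; i++ )
	{
		uMin = std::min ( uMin, pList[i] );
		uMax = std::max ( uMax, pList[i] );
	}
	return true;
}
#endif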
m_sLastError = ""; return true; } // check if we can if ( m_tSettings.m_eDocinfo!=SPH_DOCINFO_EXTERN ) { sError.SetSprintf ( "docinfo=extern required for updates" ); return false; } DWORD uDocs = fdReader.GetDword(); // if we have docs to update if ( !uDocs ) return false; CSphVector dAffected ( uDocs ); fdReader.GetBytes ( &dAffected[0], uDocs*sizeof(SphDocID_t) ); // collect the indexes of MVA schema attributes CSphVector dMvaLocators; for ( int i=0; i ( FindDocinfo ( dAffected[iDoc] ) ); assert ( pDocinfo ); DWORD * pAttrs = DOCINFO2ATTRS ( pDocinfo ); ARRAY_FOREACH ( iMva, dMvaLocators ) { // reset MVA from arena if ( MVA_DOWNSIZE ( sphGetRowAttr ( pAttrs, dMvaLocators[iMva] ) ) & MVA_ARENA_FLAG ) sphSetRowAttr ( pAttrs, dMvaLocators[iMva], 0 ); } } sphWarning ( "index '%s' forced to reset persistent MVAs ( %s )", m_sIndexName.cstr(), g_MvaArena.GetError() ); fdReader.Close(); return true; } CSphVector dRowPtrs ( uDocs ); CSphVector dAllocs; dAllocs.Reserve ( uDocs ); // prealloc values (and also preload) bool bFailed = false; ARRAY_FOREACH ( i, dAffected ) { DWORD* pDocinfo = const_cast ( FindDocinfo ( dAffected[i] ) ); assert ( pDocinfo ); pDocinfo = DOCINFO2ATTRS ( pDocinfo ); ARRAY_FOREACH_COND ( j, dMvaLocators, !bFailed ) { // if this MVA was updated if ( MVA_DOWNSIZE ( sphGetRowAttr ( pDocinfo, dMvaLocators[j] ) ) & MVA_ARENA_FLAG ) { DWORD uCount = fdReader.GetDword(); if ( uCount ) { assert ( j dMvaLocators; for ( int i=0; i dAffected; { tDocCollector dCollect ( dAffected ); g_MvaArena.ExamineTag ( &dCollect, m_iIndexTag ); } dAffected.Uniq(); if ( !dAffected.GetLength() ) break; // prepare the file to save into; CSphWriter fdFlushMVA; fdFlushMVA.OpenFile ( GetIndexFileName("mvp.tmpnew"), m_sLastError ); if ( fdFlushMVA.IsError() ) return false; // save the vector of affected docids DWORD uPos = dAffected.GetLength(); fdFlushMVA.PutDword ( uPos ); fdFlushMVA.PutBytes ( &dAffected[0], uPos*sizeof(SphDocID_t) ); // save the updated MVA vectors ARRAY_FOREACH ( i, dAffected ) { DWORD* pDocinfo = const_cast ( FindDocinfo ( dAffected[i] ) ); assert ( pDocinfo ); pDocinfo = DOCINFO2ATTRS ( pDocinfo ); ARRAY_FOREACH ( j, dMvaLocators ) { DWORD uOldIndex = MVA_DOWNSIZE ( sphGetRowAttr ( pDocinfo, dMvaLocators[j] ) ); // if this MVA was updated if ( uOldIndex & MVA_ARENA_FLAG ) { DWORD * pMva = g_pMvaArena + ( uOldIndex & MVA_OFFSET_MASK ); DWORD uCount = *pMva; assert ( j=20 ) uSize += 2*(1+m_uDocinfoIndex)*uStride*sizeof(CSphRowitem); if ( !sphWriteThrottled ( fdTmpnew.GetFD(), m_pDocinfo.GetWritePtr(), uSize, "docinfo", m_sLastError ) ) return false; fdTmpnew.Close (); if ( !JuggleFile("spa") ) return false; if ( g_pBinlog ) g_pBinlog->NotifyIndexFlush ( m_sIndexName.cstr(), m_iTID, false ); if ( *m_pAttrsStatus==uAttrStatus ) *m_pAttrsStatus = 0; sphLogDebugvv ( "index '%s' attrs (%d) saved", m_sIndexName.cstr(), *m_pAttrsStatus ); return true; } DWORD CSphIndex_VLN::GetAttributeStatus () const { assert ( m_pAttrsStatus ); return *m_pAttrsStatus; } ///////////////////////////////////////////////////////////////////////////// #define SPH_CMPHIT_LESS(a,b) \ ( a.m_iWordID> 23 ); m_iLastHitlistPos -= m_iLastHitlistDelta; assert ( m_iLastHitlistPos>=0 ); } else { m_wrDoclist.ZipInt ( m_dLastDocFields.GetMask32() ); m_wrDoclist.ZipOffset ( m_iLastHitlistDelta ); } } else // plain format - finish doclist entry { assert ( m_tSettings.m_eHitFormat==SPH_HIT_FORMAT_PLAIN ); m_wrDoclist.ZipOffset ( m_iLastHitlistDelta ); m_wrDoclist.ZipInt ( m_dLastDocFields.GetMask32() ); m_wrDoclist.ZipInt ( 
m_uLastDocHits ); } m_dLastDocFields.Unset(); m_uLastDocHits = 0; } void CSphIndex_VLN::cidxHit ( CSphAggregateHit * hit, CSphRowitem * pAttrs ) { assert ( ( hit->m_iWordID!=0 && hit->m_iWordPos!=EMPTY_HIT && hit->m_iDocID!=0 ) || // it's either ok hit ( hit->m_iWordID==0 && hit->m_iWordPos==EMPTY_HIT ) ); // or "flush-hit" ///////////// // next word ///////////// bool bNextWord = ( m_tLastHit.m_iWordID!=hit->m_iWordID || ( m_bWordDict && strcmp ( (char*)m_tLastHit.m_sKeyword, (char*)hit->m_sKeyword ) ) ); // OPTIMIZE? bool bNextDoc = bNextWord || ( m_tLastHit.m_iDocID!=hit->m_iDocID ); if ( bNextDoc ) { // finish hitlist, if any Hitpos_t uLastPos = m_tLastHit.m_iWordPos; if ( m_tLastHit.m_iWordPos!=EMPTY_HIT ) { m_wrHitlist.ZipInt ( 0 ); m_tLastHit.m_iWordPos = EMPTY_HIT; } // finish doclist entry, if any if ( m_tLastHit.m_iDocID ) cidxFinishDoclistEntry ( uLastPos ); } if ( bNextWord ) { // finish doclist, if any if ( m_tLastHit.m_iDocID ) { // emit end-of-doclist marker m_wrDoclist.ZipInt ( 0 ); // emit dict entry m_pDict->DictEntry ( m_tLastHit.m_iWordID, m_tLastHit.m_sKeyword, m_iLastWordDocs, m_iLastWordHits, m_iLastWordDoclist, m_wrDoclist.GetPos()-m_iLastWordDoclist ); // reset trackers m_iLastWordDocs = 0; m_iLastWordHits = 0; m_tLastHit.m_iDocID = 0; m_iLastHitlistPos = 0; } // flush wordlist, if this is the end if ( hit->m_iWordPos==EMPTY_HIT ) { m_pDict->DictEndEntries ( m_wrDoclist.GetPos() ); return; } assert ( hit->m_iWordID > m_tLastHit.m_iWordID || ( m_bWordDict && hit->m_iWordID==m_tLastHit.m_iWordID && strcmp ( (char*)hit->m_sKeyword, (char*)m_tLastHit.m_sKeyword )>0 ) || m_bMerging ); m_iLastWordDoclist = m_wrDoclist.GetPos(); m_tLastHit.m_iWordID = hit->m_iWordID; if ( m_bWordDict ) { assert ( strlen ( (char *)hit->m_sKeyword )m_sKeyword, sizeof(m_sLastKeyword) ); // OPTIMIZE? } } if ( bNextDoc ) { // begin new doclist entry for new doc id assert ( hit->m_iDocID>m_tLastHit.m_iDocID ); assert ( m_wrHitlist.GetPos()>=m_iLastHitlistPos ); m_wrDoclist.ZipOffset ( hit->m_iDocID - m_tLastHit.m_iDocID ); if ( pAttrs ) { for ( int i=0; im_pDynamic[i] ); } m_iLastHitlistDelta = m_wrHitlist.GetPos() - m_iLastHitlistPos; m_tLastHit.m_iDocID = hit->m_iDocID; m_iLastHitlistPos = m_wrHitlist.GetPos(); // update per-word stats m_iLastWordDocs++; } /////////// // the hit /////////// if ( !hit->m_dFieldMask.TestAll(false) ) // merge aggregate hits into the current hit { int iHitCount = hit->GetAggrCount(); assert ( m_tSettings.m_eHitless ); assert ( iHitCount ); assert ( !hit->m_dFieldMask.TestAll(false) ); m_uLastDocHits += iHitCount; m_dLastDocFields |= hit->m_dFieldMask; m_iLastWordHits += iHitCount; if ( m_tSettings.m_eHitless==SPH_HITLESS_SOME ) m_iLastWordDocs |= 0x80000000; } else // handle normal hits { // add hit delta if ( hit->m_iWordPos==m_tLastHit.m_iWordPos ) return; assert ( m_tLastHit.m_iWordPos < hit->m_iWordPos ); m_wrHitlist.ZipInt ( hit->m_iWordPos - m_tLastHit.m_iWordPos ); m_tLastHit.m_iWordPos = hit->m_iWordPos; m_iLastWordHits++; // update matched fields mask m_dLastDocFields.Set ( HITMAN::GetField ( hit->m_iWordPos ) ); m_uLastDocHits++; } } static void ReadSchemaColumn ( CSphReader & rdInfo, CSphColumnInfo & tCol, DWORD uVersion ) { tCol.m_sName = rdInfo.GetString (); if ( tCol.m_sName.IsEmpty () ) tCol.m_sName = "@emptyname"; tCol.m_sName.ToLower (); tCol.m_eAttrType = (ESphAttr) rdInfo.GetDword (); // FIXME? check/fixup? 
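// Illustrative sketch, kept out of the build with #if 0: cidxHit() writes
// doclists and hitlists as deltas against the previous entry, relying on the
// fact that docids and in-document positions are strictly increasing, so a
// real delta is never zero and a plain 0 can serve as the end-of-list
// marker. The toy pair below shows that delta+terminator framing on a sorted
// docid list; the real writer additionally packs every delta through
// ZipOffset()/ZipInt() before it reaches disk. Names are invented for the
// example.
#if 0
#include <vector>
#include <stdint.h>
#include <assert.h>

static void SketchEncodeDoclist ( const std::vector<uint64_t> & dDocids, std::vector<uint64_t> & dOut )
{
	uint64_t uLast = 0;
	for ( size_t i=0; i<dDocids.size(); i++ )
	{
		assert ( dDocids[i]>uLast );		// strictly increasing, so the delta is never 0
		dOut.push_back ( dDocids[i]-uLast );
		uLast = dDocids[i];
	}
	dOut.push_back ( 0 );					// end-of-doclist marker
}

static void SketchDecodeDoclist ( const uint64_t * pIn, std::vector<uint64_t> & dDocids )
{
	uint64_t uDocid = 0;
	for ( ;; )
	{
		uint64_t uDelta = *pIn++;
		if ( !uDelta )
			break;							// hit the 0 terminator
		uDocid += uDelta;
		dDocids.push_back ( uDocid );
	}
}
#endif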
if ( uVersion>=5 ) // m_uVersion for searching { rdInfo.GetDword (); // ignore rowitem tCol.m_tLocator.m_iBitOffset = rdInfo.GetDword (); tCol.m_tLocator.m_iBitCount = rdInfo.GetDword (); } else { tCol.m_tLocator.m_iBitOffset = -1; tCol.m_tLocator.m_iBitCount = -1; } if ( uVersion>=16 ) // m_uVersion for searching tCol.m_bPayload = ( rdInfo.GetByte()!=0 ); // WARNING! max version used here must be in sync with RtIndex_t::Prealloc } void ReadSchema ( CSphReader & rdInfo, CSphSchema & m_tSchema, DWORD uVersion, bool bDynamic ) { m_tSchema.Reset (); m_tSchema.m_dFields.Resize ( rdInfo.GetDword() ); ARRAY_FOREACH ( i, m_tSchema.m_dFields ) ReadSchemaColumn ( rdInfo, m_tSchema.m_dFields[i], uVersion ); int iNumAttrs = rdInfo.GetDword(); for ( int i=0; im_iDocID ); // was dword in v.1 if ( m_tSettings.m_eDocinfo==SPH_DOCINFO_INLINE ) fdInfo.PutBytes ( m_pMin->m_pDynamic, m_tSchema.GetRowSize()*sizeof(CSphRowitem) ); // wordlist checkpoints fdInfo.PutOffset ( iCheckpointsPos ); fdInfo.PutDword ( iCheckpointCount ); // index stats fdInfo.PutDword ( m_tStats.m_iTotalDocuments ); fdInfo.PutOffset ( m_tStats.m_iTotalBytes ); // index settings SaveIndexSettings ( fdInfo, m_tSettings ); // tokenizer info assert ( m_pTokenizer ); SaveTokenizerSettings ( fdInfo, m_pTokenizer ); // dictionary info assert ( m_pDict ); SaveDictionarySettings ( fdInfo, m_pDict, false ); fdInfo.PutDword ( m_iKillListSize ); fdInfo.PutDword ( m_uMinMaxIndex ); return true; } bool CSphIndex_VLN::cidxDone ( const char * sHeaderExtension, int iMemLimit ) { // flush wordlist checkpoints SphOffset_t iCheckpointsPos; int iCheckpointsCount; if ( !m_pDict->DictEnd ( &iCheckpointsPos, &iCheckpointsCount, iMemLimit, m_sLastError ) ) return false; ///////////////// // create header ///////////////// CSphWriter fdInfo; fdInfo.OpenFile ( GetIndexFileName ( sHeaderExtension ), m_sLastError ); if ( fdInfo.IsError() ) return false; if ( !WriteHeader ( fdInfo, iCheckpointsPos, iCheckpointsCount ) ) return false; //////////////////////// // close all data files //////////////////////// fdInfo.CloseFile (); m_wrDoclist.CloseFile (); m_wrHitlist.CloseFile ( true ); if ( fdInfo.IsError() || m_pDict->DictIsError() || m_wrDoclist.IsError() || m_wrHitlist.IsError() ) return false; return true; } inline int encodeVLB ( BYTE * buf, DWORD v ) { register BYTE b; register int n = 0; do { b = (BYTE)(v & 0x7f); v >>= 7; if ( v ) b |= 0x80; *buf++ = b; n++; } while ( v ); return n; } inline int encodeVLB8 ( BYTE * buf, uint64_t v ) { register BYTE b; register int n = 0; do { b = (BYTE)(v & 0x7f); v >>= 7; if ( v ) b |= 0x80; *buf++ = b; n++; } while ( v ); return n; } inline int encodeKeyword ( BYTE * pBuf, const char * pKeyword ) { int iLen = strlen ( pKeyword ); // OPTIMIZE! 
remove this and memcpy and check if thats faster assert ( iLen>0 && iLen<128 ); // so that ReadVLB() *pBuf = (BYTE) iLen; memcpy ( pBuf+1, pKeyword, iLen ); return 1+iLen; } int CSphIndex_VLN::cidxWriteRawVLB ( int fd, CSphWordHit * pHit, int iHits, DWORD * pDocinfo, int iDocinfos, int iStride ) { PROFILE ( write_hits ); assert ( pHit ); assert ( iHits>0 ); ///////////////////////////// // do simple bitwise hashing ///////////////////////////// static const int HBITS = 11; static const int HSIZE = ( 1 << HBITS ); SphDocID_t iStartID = 0; int dHash [ HSIZE+1 ]; int iShift = 0; if ( pDocinfo ) { iStartID = DOCINFO2ID ( pDocinfo ); int iBits = sphLog2 ( DOCINFO2ID ( pDocinfo + (iDocinfos-1)*iStride ) - iStartID ); iShift = ( iBits> iShift ); assert ( iHash>=0 && iHashiHashed ) { dHash [ iHashed+1 ] = i-1; // right boundary for prev hash value dHash [ iHash ] = i; // left boundary for next hash value iHashed = iHash; } } dHash [ iHashed+1 ] = iDocinfos-1; // right boundary for last hash value } /////////////////////////////////////// // encode through a small write buffer /////////////////////////////////////// BYTE *pBuf, *maxP; int n = 0, w; SphWordID_t d1, l1 = 0; SphDocID_t d2, l2 = 0; DWORD d3, l3 = 0; // !COMMIT must be wide enough bool bWordDict = m_pDict->GetSettings().m_bWordDict; int iGap = Max ( 128, 16*sizeof(DWORD) + iStride*sizeof(DWORD) + ( bWordDict ? MAX_KEYWORD_BYTES : 0 ) ); pBuf = m_pWriteBuffer; maxP = m_pWriteBuffer + m_iWriteBuffer - iGap; SphDocID_t iAttrID = 0; // current doc id DWORD * pAttrs = NULL; // current doc attrs // hit aggregation state DWORD uHitCount = 0; DWORD uHitFieldMask = 0; const int iPositionShift = m_tSettings.m_eHitless==SPH_HITLESS_SOME ? 1 : 0; while ( iHits-- ) { // find attributes by id if ( pDocinfo && iAttrID!=pHit->m_iDocID ) { int iHash = (int)( ( pHit->m_iDocID - iStartID ) >> iShift ); assert ( iHash>=0 && iHashm_iDocID==DOCINFO2ID ( pDocinfo + iStart*iStride ) ) { pAttrs = DOCINFO2ATTRS ( pDocinfo + iStart*iStride ); } else if ( pHit->m_iDocID==DOCINFO2ID ( pDocinfo + iEnd*iStride ) ) { pAttrs = DOCINFO2ATTRS ( pDocinfo + iEnd*iStride ); } else { pAttrs = NULL; while ( iEnd-iStart>1 ) { // check if nothing found if ( pHit->m_iDocID < DOCINFO2ID ( pDocinfo + iStart*iStride ) || pHit->m_iDocID > DOCINFO2ID ( pDocinfo + iEnd*iStride ) ) break; assert ( pHit->m_iDocID > DOCINFO2ID ( pDocinfo + iStart*iStride ) ); assert ( pHit->m_iDocID < DOCINFO2ID ( pDocinfo + iEnd*iStride ) ); int iMid = iStart + (iEnd-iStart)/2; if ( pHit->m_iDocID==DOCINFO2ID ( pDocinfo + iMid*iStride ) ) { pAttrs = DOCINFO2ATTRS ( pDocinfo + iMid*iStride ); break; } if ( pHit->m_iDocIDm_iDocID ); iAttrID = pHit->m_iDocID; } // calc deltas d1 = pHit->m_iWordID - l1; d2 = pHit->m_iDocID - l2; d3 = pHit->m_iWordPos - l3; // ignore duplicate hits if ( d1==0 && d2==0 && d3==0 ) // OPTIMIZE? 
check if ( 0==(d1|d2|d3) ) is faster { pHit++; continue; } // non-zero delta restarts all the fields after it // because their deltas might now be negative if ( d1 ) d2 = pHit->m_iDocID; if ( d2 ) d3 = pHit->m_iWordPos; // when we moved to the next word or document bool bFlushed = false; if ( d1 || d2 ) { // flush previous aggregate hit if ( uHitCount ) { // we either skip all hits or the high bit must be available for marking // failing that, we can't produce a consistent index assert ( m_tSettings.m_eHitless!=SPH_HITLESS_NONE ); assert ( m_tSettings.m_eHitless==SPH_HITLESS_ALL || !( uHitCount & 0x80000000UL ) ); if ( m_tSettings.m_eHitless!=SPH_HITLESS_ALL ) uHitCount = ( uHitCount << 1 ) | 1; pBuf += encodeVLB ( pBuf, uHitCount ); pBuf += encodeVLB ( pBuf, uHitFieldMask ); assert ( pBufm_iWordID ) ) ) { uHitCount = 1; uHitFieldMask |= 1 << HITMAN::GetField ( pHit->m_iWordPos ); } } else if ( uHitCount ) // next hit for the same word/doc pair, update state if we need it { uHitCount++; uHitFieldMask |= 1 << HITMAN::GetField ( pHit->m_iWordPos ); } // encode enough restart markers if ( d1 ) pBuf += encodeVLB ( pBuf, 0 ); if ( d2 && !bFlushed ) pBuf += encodeVLB ( pBuf, 0 ); assert ( pBufHitblockGetKeyword ( pHit->m_iWordID ) ); // keyword itself in case of keywords dict else pBuf += LOC_ENCODE ( pBuf, d1 ); // delta in case of CRC dict assert ( pBufm_iWordID; l2 = pHit->m_iDocID; l3 = pHit->m_iWordPos; pHit++; if ( pBuf>maxP ) { w = (int)(pBuf - m_pWriteBuffer); assert ( w b.m_iWordID ) return false; if ( a.m_sKeyword ) { int iCmp = strcmp ( (char*)a.m_sKeyword, (char*)b.m_sKeyword ); // OPTIMIZE? if ( iCmp!=0 ) return ( iCmp<0 ); } return ( a.m_iDocID < b.m_iDocID ) || ( a.m_iDocID==b.m_iDocID && a.m_iWordPos0 ); m_iSize = iSize; m_iUsed = 0; m_pData = new CSphHitQueueEntry [ iSize ]; } /// destroy queue ~CSphHitQueue () { SafeDeleteArray ( m_pData ); } /// add entry to the queue void Push ( CSphAggregateHit & tHit, int iBin ) { // check for overflow and do add assert ( m_iUsed> 1; if ( SPH_CMPAGGRHIT_LESS ( m_pData[iEntry], m_pData[iParent] ) ) { // entry is less than parent, should float to the top Swap ( m_pData[iEntry], m_pData[iParent] ); iEntry = iParent; } else { break; } } } /// remove root (ie. top priority) entry void Pop () { assert ( m_iUsed ); if ( !(--m_iUsed) ) // empty queue? 
just return return; // make the last entry my new root m_pData[0] = m_pData[m_iUsed]; // sift down if needed int iEntry = 0; for ( ;; ) { // select child int iChild = (iEntry<<1) + 1; if ( iChild>=m_iUsed ) break; // select smallest child if ( iChild+1 & dSources, CSphAutoArray & dHits, int iArenaSize, int iFieldFD, int nFieldMVAs, int iFieldMVAInPool ) { // initialize writer (data file must always exist) CSphWriter wrMva; if ( !wrMva.OpenFile ( GetIndexFileName("spm"), m_sLastError ) ) return false; // calcs and checks bool bOnlyFieldMVAs = true; CSphVector dMvaIndexes; for ( int i=0; i dBlockLens; dBlockLens.Reserve ( 1024 ); m_tProgress.m_ePhase = CSphIndexProgress::PHASE_COLLECT_MVA; if ( !bOnlyFieldMVAs ) { ARRAY_FOREACH ( iSource, dSources ) { CSphSource * pSource = dSources[iSource]; if ( !pSource->Connect ( m_sLastError ) ) return false; ARRAY_FOREACH ( i, dMvaIndexes ) { int iAttr = dMvaIndexes[i]; const CSphColumnInfo & tAttr = m_tSchema.GetAttr(iAttr); if ( tAttr.m_eSrc==SPH_ATTRSRC_FIELD ) continue; if ( !pSource->IterateMultivaluedStart ( iAttr, m_sLastError ) ) return false; while ( pSource->IterateMultivaluedNext () ) { pMva->m_uDocID = pSource->m_tDocInfo.m_iDocID; pMva->m_iAttr = i; if ( tAttr.m_eAttrType==SPH_ATTR_UINT32SET ) { pMva->m_uValue = pSource->m_dMva[0]; } else { pMva->m_uValue = MVA_UPSIZE ( pSource->m_dMva.Begin() ); } if ( ++pMva>=pMvaMax ) { sphSort ( pMvaPool, pMva-pMvaPool ); if ( !sphWriteThrottled ( fdTmpMva.GetFD(), pMvaPool, (pMva-pMvaPool)*sizeof(MvaEntry_t), "temp_mva", m_sLastError ) ) return false; dBlockLens.Add ( pMva-pMvaPool ); m_tProgress.m_iAttrs += pMva-pMvaPool; pMva = pMvaPool; if ( m_pProgress ) m_pProgress ( &m_tProgress, false ); } } } pSource->Disconnect (); } if ( pMva>pMvaPool ) { sphSort ( pMvaPool, pMva-pMvaPool ); if ( !sphWriteThrottled ( fdTmpMva.GetFD(), pMvaPool, (pMva-pMvaPool)*sizeof(MvaEntry_t), "temp_mva", m_sLastError ) ) return false; dBlockLens.Add ( pMva-pMvaPool ); m_tProgress.m_iAttrs += pMva-pMvaPool; pMva = pMvaPool; } } if ( m_pProgress ) m_pProgress ( &m_tProgress, true ); /////////////////////////// // free memory for sorting /////////////////////////// dHits.Reset (); ////////////// // fully sort ////////////// if ( m_pProgress ) { m_tProgress.m_ePhase = CSphIndexProgress::PHASE_SORT_MVA; m_tProgress.m_iAttrs = m_tProgress.m_iAttrs + nFieldMVAs; m_tProgress.m_iAttrsTotal = m_tProgress.m_iAttrs; m_pProgress ( &m_tProgress, false ); } int nLastBlockFieldMVAs = iFieldMVAInPool ? ( nFieldMVAs % iFieldMVAInPool ) : 0; int nFieldBlocks = iFieldMVAInPool ? ( nFieldMVAs / iFieldMVAInPool + ( nLastBlockFieldMVAs ? 1 : 0 ) ) : 0; // initialize readers CSphVector dBins; dBins.Reserve ( dBlockLens.GetLength() + nFieldBlocks ); int iBinSize = CSphBin::CalcBinSize ( iArenaSize, dBlockLens.GetLength() + nFieldBlocks, "sort_mva" ); SphOffset_t iSharedOffset = -1; ARRAY_FOREACH ( i, dBlockLens ) { dBins.Add ( new CSphBin() ); dBins[i]->m_iFileLeft = dBlockLens[i]*sizeof(MvaEntry_t); dBins[i]->m_iFilePos = ( i==0 ) ? 0 : dBins[i-1]->m_iFilePos + dBins[i-1]->m_iFileLeft; dBins[i]->Init ( fdTmpMva.GetFD(), &iSharedOffset, iBinSize ); } SphOffset_t iSharedFieldOffset = -1; SphOffset_t uStart = 0; for ( int i = 0; i < nFieldBlocks; i++ ) { dBins.Add ( new CSphBin() ); int iBin = dBins.GetLength () - 1; dBins[iBin]->m_iFileLeft = ( i==nFieldBlocks-1 ? ( nLastBlockFieldMVAs ? 
nLastBlockFieldMVAs : iFieldMVAInPool ): iFieldMVAInPool ) * sizeof(MvaEntry_t); dBins[iBin]->m_iFilePos = uStart; dBins[iBin]->Init ( iFieldFD, &iSharedFieldOffset, iBinSize ); uStart += dBins [iBin]->m_iFileLeft; } // do the sort CSphQueue < MvaEntryTag_t, MvaEntryCmp_fn > qMva ( Max ( 1, dBins.GetLength() ) ); ARRAY_FOREACH ( i, dBins ) { MvaEntryTag_t tEntry; if ( dBins[i]->ReadBytes ( (MvaEntry_t*) &tEntry, sizeof(MvaEntry_t) )!=BIN_READ_OK ) { m_sLastError.SetSprintf ( "sort_mva: warmup failed (io error?)" ); return false; } tEntry.m_iTag = i; qMva.Push ( tEntry ); } // spm-file := info-list [ 0+ ] // info-list := docid, values-list [ index.schema.mva-count ] // values-list := values-count, value [ values-count ] // note that mva32 come first then mva64 SphDocID_t uCurID = 0; CSphVector < CSphVector > dCurInfo; dCurInfo.Resize ( dMvaIndexes.GetLength() ); for ( ;; ) { // flush previous per-document info-list if ( !qMva.GetLength() || qMva.Root().m_uDocID!=uCurID ) { if ( uCurID ) { wrMva.PutDocid ( uCurID ); ARRAY_FOREACH ( i, dCurInfo ) { int iLen = dCurInfo[i].GetLength(); if ( i>=iMva64 ) { wrMva.PutDword ( iLen*2 ); wrMva.PutBytes ( dCurInfo[i].Begin(), sizeof(uint64_t)*iLen ); } else { wrMva.PutDword ( iLen ); ARRAY_FOREACH ( iVal, dCurInfo[i] ) { wrMva.PutDword ( (DWORD)dCurInfo[i][iVal] ); } } } } if ( !qMva.GetLength() ) break; uCurID = qMva.Root().m_uDocID; ARRAY_FOREACH ( i, dCurInfo ) dCurInfo[i].Resize ( 0 ); } // accumulate this entry #if PARANOID assert ( dCurInfo [ qMva.Root().m_iAttr ].GetLength()==0 || dCurInfo [ qMva.Root().m_iAttr ].Last()<=qMva.Root().m_uValue ); #endif dCurInfo [ qMva.Root().m_iAttr ].AddUnique ( qMva.Root().m_uValue ); // get next entry int iBin = qMva.Root().m_iTag; qMva.Pop (); MvaEntryTag_t tEntry; ESphBinRead iRes = dBins[iBin]->ReadBytes ( (MvaEntry_t*)&tEntry, sizeof(MvaEntry_t) ); tEntry.m_iTag = iBin; if ( iRes==BIN_READ_OK ) qMva.Push ( tEntry ); if ( iRes==BIN_READ_ERROR ) { m_sLastError.SetSprintf ( "sort_mva: read error" ); return false; } } // clean up readers ARRAY_FOREACH ( i, dBins ) SafeDelete ( dBins[i] ); wrMva.CloseFile (); if ( wrMva.IsError() ) return false; if ( m_pProgress ) m_pProgress ( &m_tProgress, true ); return true; } struct CmpOrdinalsValue_fn { inline bool IsLess ( const Ordinal_t & a, const Ordinal_t & b ) const { return strcmp ( a.m_sValue.cstr(), b.m_sValue.cstr() )<0; } }; struct CmpOrdinalsEntry_fn { static inline bool IsLess ( const OrdinalEntry_t & a, const OrdinalEntry_t & b ) { return strcmp ( a.m_sValue.cstr(), b.m_sValue.cstr() )<0; } }; struct CmpOrdinalsDocid_fn { inline bool IsLess ( const OrdinalId_t & a, const OrdinalId_t & b ) const { return a.m_uDocID < b.m_uDocID; } }; struct CmpMvaEntries_fn { inline bool IsLess ( const MvaEntry_t & a, const MvaEntry_t & b ) const { return a & dOrdinals ) { SphOffset_t uSize = ( sizeof ( SphDocID_t ) + sizeof ( DWORD ) ) * dOrdinals.GetLength (); ARRAY_FOREACH ( i, dOrdinals ) { Ordinal_t & Ord = dOrdinals[i]; DWORD uValueLen = Ord.m_sValue.cstr () ? 
strlen ( Ord.m_sValue.cstr () ) : 0; Writer.PutBytes ( &(Ord.m_uDocID), sizeof ( Ord.m_uDocID ) ); Writer.PutBytes ( &uValueLen, sizeof ( uValueLen ) ); Writer.PutBytes ( Ord.m_sValue.cstr (), uValueLen ); uSize += uValueLen; if ( Writer.IsError () ) return 0; } return uSize; } ESphBinRead CSphIndex_VLN::ReadOrdinal ( CSphBin & Reader, Ordinal_t & Ordinal ) { ESphBinRead eRes = Reader.ReadBytes ( &Ordinal.m_uDocID, sizeof ( Ordinal.m_uDocID ) ); if ( eRes!=BIN_READ_OK ) return eRes; DWORD uStrLen; eRes = Reader.ReadBytes ( &uStrLen, sizeof ( DWORD ) ); if ( eRes!=BIN_READ_OK ) return eRes; if ( uStrLen>=(DWORD)MAX_ORDINAL_STR_LEN ) return BIN_READ_ERROR; char dBuffer [MAX_ORDINAL_STR_LEN]; if ( uStrLen > 0 ) { eRes = Reader.ReadBytes ( dBuffer, uStrLen ); if ( eRes!=BIN_READ_OK ) return eRes; } dBuffer [uStrLen] = '\0'; Ordinal.m_sValue = dBuffer; return BIN_READ_OK; } bool CSphIndex_VLN::SortOrdinals ( const char * szToFile, int iFromFD, int iArenaSize, int iOrdinalsInPool, CSphVector < CSphVector < SphOffset_t > > & dOrdBlockSize, bool bWarnOfMem ) { int nAttrs = dOrdBlockSize.GetLength (); int nBlocks = dOrdBlockSize[0].GetLength (); CSphWriter Writer; if ( !Writer.OpenFile ( szToFile, m_sLastError ) ) return false; int iBinSize = CSphBin::CalcBinSize ( iArenaSize, nBlocks, "ordinals", bWarnOfMem ); SphOffset_t iSharedOffset = -1; CSphQueue < OrdinalEntry_t, CmpOrdinalsEntry_fn > qOrdinals ( Max ( 1, nBlocks ) ); OrdinalEntry_t tOrdinalEntry; DWORD uOrdinalId = 0; CSphVector < OrdinalId_t > dOrdinalIdPool; dOrdinalIdPool.Reserve ( nBlocks ); CSphVector < CSphVector < SphOffset_t > > dStarts; dStarts.Resize ( nAttrs ); ARRAY_FOREACH ( i, dStarts ) dStarts[i].Resize ( nBlocks ); SphOffset_t uStart = 0; for ( int iBlock = 0; iBlock < nBlocks; iBlock++ ) for ( int iAttr = 0; iAttr < nAttrs; iAttr++ ) { dStarts [iAttr][iBlock] = uStart; uStart += dOrdBlockSize [iAttr][iBlock]; } for ( int iAttr = 0; iAttr < nAttrs; iAttr++ ) { CSphVector < CSphBin > dBins; dBins.Resize ( nBlocks ); ARRAY_FOREACH ( i, dBins ) { dBins[i].m_iFileLeft = (int)dOrdBlockSize[iAttr][i]; dBins[i].m_iFilePos = dStarts[iAttr][i]; dBins[i].Init ( iFromFD, &iSharedOffset, iBinSize ); } dOrdBlockSize [iAttr].Resize ( 0 ); for ( int iBlock = 0; iBlock < nBlocks; iBlock++ ) { if ( ReadOrdinal ( dBins [iBlock], tOrdinalEntry )!=BIN_READ_OK ) { m_sLastError = "sort_ordinals: warmup failed (io error?)"; return false; } tOrdinalEntry.m_iTag = iBlock; qOrdinals.Push ( tOrdinalEntry ); } SphDocID_t uCurID = 0; CSphString sLastOrdValue; int iMyBlock = 0; for ( ;; ) { if ( !qOrdinals.GetLength () || qOrdinals.Root ().m_uDocID!=uCurID ) { if ( uCurID ) { OrdinalId_t tId; tId.m_uDocID = uCurID; tId.m_uId = uOrdinalId; dOrdinalIdPool.Add ( tId ); if ( qOrdinals.GetLength () > 0 ) { if ( sLastOrdValue.cstr()[0]!=qOrdinals.Root ().m_sValue.cstr()[0] ) uOrdinalId++; else if ( strcmp ( sLastOrdValue.cstr (), qOrdinals.Root ().m_sValue.cstr () ) ) uOrdinalId++; } if ( dOrdinalIdPool.GetLength()==iOrdinalsInPool ) { dOrdinalIdPool.Sort ( CmpOrdinalsDocid_fn () ); Writer.PutBytes ( &dOrdinalIdPool[0], sizeof(OrdinalId_t)*dOrdinalIdPool.GetLength() ); if ( Writer.IsError () ) { m_sLastError = "sort_ordinals: io error"; return false; } dOrdBlockSize [iAttr].Add ( dOrdinalIdPool.GetLength () * sizeof ( OrdinalId_t ) ); dOrdinalIdPool.Resize ( 0 ); } } if ( !qOrdinals.GetLength () ) break; uCurID = qOrdinals.Root().m_uDocID; const_cast < CSphString & > ( qOrdinals.Root ().m_sValue ).Swap ( sLastOrdValue ); } // get next entry iMyBlock = 
qOrdinals.Root().m_iTag; qOrdinals.Pop (); ESphBinRead eRes = ReadOrdinal ( dBins [iMyBlock], tOrdinalEntry ); tOrdinalEntry.m_iTag = iMyBlock; if ( eRes==BIN_READ_OK ) qOrdinals.Push ( tOrdinalEntry ); if ( eRes==BIN_READ_ERROR ) { m_sLastError = "sort_ordinals: read error"; return false; } } // flush last ordinal ids if ( dOrdinalIdPool.GetLength () ) { dOrdinalIdPool.Sort ( CmpOrdinalsDocid_fn () ); Writer.PutBytes ( &dOrdinalIdPool[0], sizeof(OrdinalId_t)*dOrdinalIdPool.GetLength () ); if ( Writer.IsError () ) { m_sLastError = "sort_ordinals: io error"; return false; } dOrdBlockSize [iAttr].Add ( dOrdinalIdPool.GetLength()*sizeof(OrdinalId_t) ); dOrdinalIdPool.Resize ( 0 ); } } Writer.CloseFile (); if ( Writer.IsError () ) return false; return true; } bool CSphIndex_VLN::SortOrdinalIds ( const char * szToFile, int iFromFD, int iArenaSize, CSphVector < CSphVector < SphOffset_t > > & dOrdBlockSize, bool bWarnOfMem ) { int nAttrs = dOrdBlockSize.GetLength (); int nMaxBlocks = 0; ARRAY_FOREACH ( i, dOrdBlockSize ) if ( dOrdBlockSize[i].GetLength () > nMaxBlocks ) nMaxBlocks = dOrdBlockSize[i].GetLength (); CSphWriter Writer; if ( !Writer.OpenFile ( szToFile, m_sLastError ) ) return false; int iBinSize = CSphBin::CalcBinSize ( iArenaSize, nMaxBlocks, "ordinals", bWarnOfMem ); SphOffset_t uStart = 0; OrdinalIdEntry_t tOrdinalIdEntry; OrdinalId_t tOrdinalId; for ( int iAttr = 0; iAttr < nAttrs; ++iAttr ) { int nBlocks = dOrdBlockSize [iAttr].GetLength (); CSphQueue < OrdinalIdEntry_t, CmpOrdinalIdEntry_fn > qOrdinalIds ( Max ( 1, nBlocks ) ); CSphVector < CSphBin > dBins; dBins.Resize ( nBlocks ); SphOffset_t iSharedOffset = -1; ARRAY_FOREACH ( i, dBins ) { dBins[i].m_iFileLeft = (int)dOrdBlockSize [iAttr][i]; dBins[i].m_iFilePos = uStart; dBins[i].Init ( iFromFD, &iSharedOffset, iBinSize ); uStart += dBins[i].m_iFileLeft; } for ( int iBlock = 0; iBlock < nBlocks; iBlock++ ) { if ( dBins[iBlock].ReadBytes ( &tOrdinalId, sizeof ( tOrdinalId ) )!=BIN_READ_OK ) { m_sLastError = "sort_ordinals: warmup failed (io error?)"; return false; } tOrdinalIdEntry.m_uDocID = tOrdinalId.m_uDocID; tOrdinalIdEntry.m_uId = tOrdinalId.m_uId; tOrdinalIdEntry.m_iTag = iBlock; qOrdinalIds.Push ( tOrdinalIdEntry ); } OrdinalId_t tCachedId; tCachedId.m_uDocID = 0; SphOffset_t uResultSize = 0; for ( ;; ) { if ( !qOrdinalIds.GetLength () || qOrdinalIds.Root ().m_uDocID!=tCachedId.m_uDocID ) { if ( tCachedId.m_uDocID ) { uResultSize += sizeof ( OrdinalId_t ); Writer.PutBytes ( &tCachedId, sizeof ( OrdinalId_t ) ); if ( Writer.IsError () ) { m_sLastError = "sort_ordinals: io error"; return false; } } if ( !qOrdinalIds.GetLength () ) break; tCachedId.m_uDocID = qOrdinalIds.Root().m_uDocID; tCachedId.m_uId = qOrdinalIds.Root ().m_uId; } // get next entry int iBlock = qOrdinalIds.Root().m_iTag; qOrdinalIds.Pop (); ESphBinRead eRes = dBins [iBlock].ReadBytes ( &tOrdinalId, sizeof ( tOrdinalId ) ); tOrdinalIdEntry.m_uDocID = tOrdinalId.m_uDocID; tOrdinalIdEntry.m_uId = tOrdinalId.m_uId; tOrdinalIdEntry.m_iTag = iBlock; if ( eRes==BIN_READ_OK ) qOrdinalIds.Push ( tOrdinalIdEntry ); if ( eRes==BIN_READ_ERROR ) { m_sLastError = "sort_ordinals: read error"; return false; } } dOrdBlockSize [iAttr].Resize ( 0 ); dOrdBlockSize [iAttr].Add ( uResultSize ); } return true; } struct FieldMVARedirect_t { CSphAttrLocator m_tLocator; int m_iAttr; int m_iMVAAttr; bool m_bMva64; }; bool CSphIndex_VLN::RelocateBlock ( int iFile, BYTE * pBuffer, int iRelocationSize, SphOffset_t * pFileSize, CSphBin * pMinBin, SphOffset_t * pSharedOffset ) { 
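	// RelocateBlock is used only by in-place indexing. The final pass writes the
	// sorted data back into the very file it is still reading temporary blocks
	// from; when the write position is about to overrun the lowest not-yet-consumed
	// block, that block is copied chunk by chunk (through pBuffer, iRelocationSize
	// bytes at a time) to the current end of file, the bin is repointed at the
	// relocated copy, and the file size / shared offset are advanced so the writer
	// may safely reuse the vacated region.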
	assert ( pBuffer && pFileSize && pMinBin && pSharedOffset );

	SphOffset_t iBlockStart = pMinBin->m_iFilePos;
	SphOffset_t iBlockLeft = pMinBin->m_iFileLeft;

	ESphBinRead eRes = pMinBin->Precache ();
	switch ( eRes )
	{
		case BIN_PRECACHE_OK:
			return true;
		case BIN_READ_ERROR:
			m_sLastError = "block relocation: preread error";
			return false;
		default:
			break;
	}

	int nTransfers = (int)( ( iBlockLeft+iRelocationSize-1 ) / iRelocationSize );

	SphOffset_t uTotalRead = 0;
	SphOffset_t uNewBlockStart = *pFileSize;

	for ( int i = 0; i < nTransfers; i++ )
	{
		sphSeek ( iFile, iBlockStart + uTotalRead, SEEK_SET );

		int iToRead = i==nTransfers-1 ? (int)( iBlockLeft % iRelocationSize ) : iRelocationSize;
		size_t iRead = sphReadThrottled ( iFile, pBuffer, iToRead );
		if ( iRead!=size_t(iToRead) )
		{
			m_sLastError.SetSprintf ( "block relocation: read error (%d of %d bytes read): %s", (int)iRead, iToRead, strerror(errno) );
			return false;
		}

		sphSeek ( iFile, *pFileSize, SEEK_SET );
		uTotalRead += iToRead;

		if ( !sphWriteThrottled ( iFile, pBuffer, iToRead, "block relocation", m_sLastError ) )
			return false;

		*pFileSize += iToRead;
	}

	assert ( uTotalRead==iBlockLeft );

	// update block pointers
	pMinBin->m_iFilePos = uNewBlockStart;
	*pSharedOffset = *pFileSize;

	return true;
}


static int CountWords ( const CSphString & sData, ISphTokenizer * pTokenizer )
{
	BYTE * sField = (BYTE*) sData.cstr();
	if ( !sField )
		return 0;

	int iCount = 0;
	pTokenizer->SetBuffer ( sField, (int)strlen ( (char*)sField ) );
	while ( pTokenizer->GetToken() )
		iCount++;
	return iCount;
}

bool CSphIndex_VLN::LoadHitlessWords ()
{
	assert ( m_dHitlessWords.GetLength()==0 );

	if ( m_tSettings.m_sHitlessFile.IsEmpty() )
		return true;

	CSphAutofile tFile ( m_tSettings.m_sHitlessFile.cstr(), SPH_O_READ, m_sLastError );
	if ( tFile.GetFD()==-1 )
		return false;

	CSphVector<BYTE> dBuffer ( (int)tFile.GetSize() );
	if ( !tFile.Read ( &dBuffer[0], dBuffer.GetLength(), m_sLastError ) )
		return false;

	m_pTokenizer->SetBuffer ( &dBuffer[0], dBuffer.GetLength() );
	while ( BYTE * sToken = m_pTokenizer->GetToken() )
		m_dHitlessWords.Add ( m_pDict->GetWordID ( sToken ) );

	m_dHitlessWords.Sort();
	return true;
}

static bool sphTruncate ( int iFD )
{
#if USE_WINDOWS
	return SetEndOfFile ( (HANDLE) _get_osfhandle(iFD) )!=0;
#else
	return ::ftruncate ( iFD, ::lseek ( iFD, 0, SEEK_CUR ) )==0;
#endif
}

class DeleteOnFail : public ISphNoncopyable
{
public:
	DeleteOnFail() : m_bShitHappened ( true )
	{}
	inline ~DeleteOnFail()
	{
		if ( m_bShitHappened )
		{
			ARRAY_FOREACH ( i, m_dWriters )
				m_dWriters[i]->UnlinkFile();

			ARRAY_FOREACH ( i, m_dAutofiles )
				m_dAutofiles[i]->SetTemporary();
		}
	}
	inline void AddWriter ( CSphWriter * pWr )
	{
		if ( pWr )
			m_dWriters.Add ( pWr );
	}
	inline void AddAutofile ( CSphAutofile * pAf )
	{
		if ( pAf )
			m_dAutofiles.Add ( pAf );
	}
	inline void AllIsDone()
	{
		m_bShitHappened = false;
	}
private:
	bool						m_bShitHappened;
	CSphVector<CSphWriter*>		m_dWriters;
	CSphVector<CSphAutofile*>	m_dAutofiles;
};


int CSphIndex_VLN::Build ( const CSphVector<CSphSource*> & dSources, int iMemoryLimit, int iWriteBuffer )
{
	PROFILER_INIT ();

	assert ( dSources.GetLength() );

	if ( !LoadHitlessWords() )
		return 0;

	m_iWriteBuffer = ( iWriteBuffer>0 ) ?
Max ( iWriteBuffer, MIN_WRITE_BUFFER ) : DEFAULT_WRITE_BUFFER; if ( !m_pWriteBuffer ) m_pWriteBuffer = new BYTE [ m_iWriteBuffer ]; m_bWordDict = m_pDict->GetSettings().m_bWordDict; // vars shared between phases CSphVector dBins; SphOffset_t iSharedOffset = -1; m_pDict->HitblockBegin(); // setup sources ARRAY_FOREACH ( iSource, dSources ) { CSphSource * pSource = dSources[iSource]; assert ( pSource ); pSource->SetDict ( m_pDict ); pSource->Setup ( m_tSettings ); } // connect 1st source and fetch its schema if ( !dSources[0]->Connect ( m_sLastError ) || !dSources[0]->IterateStart ( m_sLastError ) || !dSources[0]->UpdateSchema ( &m_tSchema, m_sLastError ) ) { return 0; } // check docinfo if ( m_tSchema.GetAttrsCount()==0 && m_tSettings.m_eDocinfo!=SPH_DOCINFO_NONE ) { sphWarning ( "Attribute count is 0: switching to none docinfo" ); m_tSettings.m_eDocinfo = SPH_DOCINFO_NONE; } if ( m_tSchema.GetAttrsCount()>0 && m_tSettings.m_eDocinfo==SPH_DOCINFO_NONE ) { m_sLastError.SetSprintf ( "got attributes, but docinfo is 'none' (fix your config file)" ); return 0; } bool bHaveFieldMVAs = false; CSphVector dMvaIndexes; CSphVector dMvaLocators; // ordinals and strings storage CSphVector dOrdinalAttrs; CSphVector dStringAttrs; CSphVector dWordcountAttrs; for ( int i=0; i 0 ); if ( bHaveOrdinals && m_tSettings.m_eDocinfo!=SPH_DOCINFO_EXTERN ) { m_sLastError.SetSprintf ( "ordinal string attributes require docinfo=extern (fix your config file)" ); return 0; } if ( dStringAttrs.GetLength() && m_tSettings.m_eDocinfo!=SPH_DOCINFO_EXTERN ) { m_sLastError.SetSprintf ( "string attributes require docinfo=extern (fix your config file)" ); return 0; } //////////////////////////////////////////////// // collect and partially sort hits and docinfos //////////////////////////////////////////////// // killlist storage CSphVector dKillList; // adjust memory requirements int iOldLimit = iMemoryLimit; // book memory to store at least 64K attribute rows const int iDocinfoStride = DOCINFO_IDSIZE + m_tSchema.GetRowSize(); int iDocinfoMax = Max ( 65536, iMemoryLimit/16/iDocinfoStride/sizeof(DWORD) ); if ( m_tSettings.m_eDocinfo==SPH_DOCINFO_NONE ) iDocinfoMax = 1; // book at least 32 KB for ordinals, if needed int iOrdinalPoolSize = Max ( 32768, iMemoryLimit/8 ); if ( !bHaveOrdinals ) iOrdinalPoolSize = 0; // book at least 32 KB for field MVAs, if needed int iFieldMVAPoolSize = Max ( 32768, iMemoryLimit/16 ); if ( bHaveFieldMVAs==0 ) iFieldMVAPoolSize = 0; // book at least 2 MB for keywords dict, if needed int iDictSize = 0; if ( m_bWordDict ) iDictSize = Max ( MIN_KEYWORDS_DICT, iMemoryLimit/8 ); // do we have enough left for hits? 
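// The arithmetic above carves the indexer's mem_limit into the docinfo pool,
// ordinal pool, field-MVA pool and keyword-dict block, each with its own floor;
// whatever is left becomes the hit accumulator (checked just below, with a
// warning when the remainder is too small). A standalone sketch of that
// budgeting shape follows; it is not the exact formula used here, the constants
// are examples only, and the names are illustrative.
#include <algorithm>
#include <cstdio>

struct IndexBudget_t
{
	int m_iDocinfoPool;
	int m_iOrdinalPool;
	int m_iFieldMvaPool;
	int m_iDictPool;
	int m_iHitPool;
};

// Split a memory limit across pools with per-pool floors; the hit pool takes
// the remainder, with a fallback minimum when the limit is set too low.
static IndexBudget_t SplitMemLimit ( int iMemLimit, bool bHaveOrdinals, bool bHaveFieldMVAs, bool bWordDict )
{
	IndexBudget_t tRes;
	tRes.m_iDocinfoPool = std::max ( 65536, iMemLimit/16 );
	tRes.m_iOrdinalPool = bHaveOrdinals ? std::max ( 32768, iMemLimit/8 ) : 0;
	tRes.m_iFieldMvaPool = bHaveFieldMVAs ? std::max ( 32768, iMemLimit/16 ) : 0;
	tRes.m_iDictPool = bWordDict ? std::max ( 2*1024*1024, iMemLimit/8 ) : 0;

	int iLeft = iMemLimit - tRes.m_iDocinfoPool - tRes.m_iOrdinalPool
		- tRes.m_iFieldMvaPool - tRes.m_iDictPool;
	if ( iLeft<=0 )
	{
		// same spirit as the "mem_limit too low" warning below: fall back to a
		// minimum hit pool instead of failing outright
		std::printf ( "warning: mem_limit too low, raising hit pool to a minimum\n" );
		iLeft = 1024*1024;
	}
	tRes.m_iHitPool = iLeft;
	return tRes;
}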
int iHitsMax = 1048576; iMemoryLimit -= iDocinfoMax*iDocinfoStride*sizeof(DWORD) + iOrdinalPoolSize + iFieldMVAPoolSize + iDictSize; if ( iMemoryLimit < iHitsMax*(int)sizeof(CSphWordHit) ) { iMemoryLimit = iOldLimit + iHitsMax*sizeof(CSphWordHit) - iMemoryLimit; sphWarn ( "collect_hits: mem_limit=%d kb too low, increasing to %d kb", iOldLimit/1024, iMemoryLimit/1024 ); } else { iHitsMax = iMemoryLimit / sizeof(CSphWordHit); } // allocate raw hits block CSphAutoArray dHits ( iHitsMax + MAX_SOURCE_HITS ); CSphWordHit * pHits = dHits; CSphWordHit * pHitsMax = dHits + iHitsMax; // allocate docinfos buffer CSphAutoArray dDocinfos ( iDocinfoMax*iDocinfoStride ); DWORD * pDocinfo = dDocinfos; const DWORD * pDocinfoMax = dDocinfos + iDocinfoMax*iDocinfoStride; if ( m_tSettings.m_eDocinfo==SPH_DOCINFO_NONE ) { pDocinfo = NULL; pDocinfoMax = NULL; } int nOrdinals = 0; SphOffset_t uMaxOrdinalAttrBlockSize = 0; int iCurrentBlockSize = 0; CSphVector < CSphVector < Ordinal_t > > dOrdinals; dOrdinals.Resize ( dOrdinalAttrs.GetLength() ); ARRAY_FOREACH ( i, dOrdinals ) dOrdinals[i].Reserve ( 65536 ); CSphVector < CSphVector > dOrdBlockSize; dOrdBlockSize.Resize ( dOrdinalAttrs.GetLength () ); ARRAY_FOREACH ( i, dOrdBlockSize ) dOrdBlockSize[i].Reserve ( 8192 ); int iMaxOrdLen = 0; CSphVector < MvaEntry_t > dFieldMVAs; dFieldMVAs.Reserve ( 16384 ); CSphVector < SphOffset_t > dFieldMVABlocks; dFieldMVABlocks.Reserve ( 4096 ); CSphVector < FieldMVARedirect_t > dFieldMvaIndexes; if ( bHaveFieldMVAs ) dFieldMvaIndexes.Reserve ( 8 ); int iMaxPoolFieldMVAs = iFieldMVAPoolSize / sizeof ( MvaEntry_t ); int nFieldMVAs = 0; // create temp files CSphAutofile fdLock ( GetIndexFileName("tmp0"), SPH_O_NEW, m_sLastError, true ); CSphAutofile fdHits ( GetIndexFileName ( m_bInplaceSettings ? "spp" : "tmp1" ), SPH_O_NEW, m_sLastError, !m_bInplaceSettings ); CSphAutofile fdDocinfos ( GetIndexFileName ( m_bInplaceSettings ? 
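	// With inplace_enable the raw hit and docinfo blocks go straight into the
	// final .spp/.spa files (after seeking past the preallocated iHitsGap /
	// iDocinfosGap regions set up below) instead of the tmp1/tmp2 scratch files,
	// trading some extra seeking and block relocation for a roughly halved peak
	// disk footprint while indexing.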
"spa" : "tmp2" ), SPH_O_NEW, m_sLastError, !m_bInplaceSettings ); CSphAutofile fdTmpFieldMVAs ( GetIndexFileName("tmp7"), SPH_O_NEW, m_sLastError, true ); CSphWriter tOrdWriter; CSphWriter tStrWriter; CSphString sRawOrdinalsFile = GetIndexFileName("tmp4"); if ( bHaveOrdinals && !tOrdWriter.OpenFile ( sRawOrdinalsFile.cstr (), m_sLastError ) ) return 0; if ( !tStrWriter.OpenFile ( GetIndexFileName("sps"), m_sLastError ) ) return 0; tStrWriter.PutByte ( 0 ); // dummy byte, to reserve magic zero offset DeleteOnFail dFileWatchdog; if ( m_bInplaceSettings ) { dFileWatchdog.AddAutofile ( &fdHits ); dFileWatchdog.AddAutofile ( &fdDocinfos ); } dFileWatchdog.AddWriter ( &tStrWriter ); if ( fdLock.GetFD()<0 || fdHits.GetFD()<0 || fdDocinfos.GetFD()<0 || fdTmpFieldMVAs.GetFD ()<0 ) return 0; SphOffset_t iHitsGap = 0; SphOffset_t iDocinfosGap = 0; if ( m_bInplaceSettings ) { const int HIT_SIZE_AVG = 4; const float HIT_BLOCK_FACTOR = 1.0f; const float DOCINFO_BLOCK_FACTOR = 1.0f; if ( m_iHitGap ) iHitsGap = (SphOffset_t) m_iHitGap; else iHitsGap = (SphOffset_t)( iHitsMax*HIT_BLOCK_FACTOR*HIT_SIZE_AVG ); iHitsGap = Max ( iHitsGap, 1 ); sphSeek ( fdHits.GetFD (), iHitsGap, SEEK_SET ); if ( m_iDocinfoGap ) iDocinfosGap = (SphOffset_t) m_iDocinfoGap; else iDocinfosGap = (SphOffset_t)( iDocinfoMax*DOCINFO_BLOCK_FACTOR*iDocinfoStride*sizeof(DWORD) ); iDocinfosGap = Max ( iDocinfosGap, 1 ); sphSeek ( fdDocinfos.GetFD (), iDocinfosGap, SEEK_SET ); } if ( !sphLockEx ( fdLock.GetFD(), false ) ) { m_sLastError.SetSprintf ( "failed to lock '%s': another indexer running?", fdLock.GetFilename() ); return 0; } // setup accumulating docinfo IDs range m_pMin->Reset ( m_tSchema.GetRowSize() ); for ( int i=0; im_pDynamic[i] = ROWITEM_MAX; m_pMin->m_iDocID = DOCID_MAX; // build raw log PROFILE_BEGIN ( collect_hits ); m_tStats.Reset (); m_tProgress.m_ePhase = CSphIndexProgress::PHASE_COLLECT; m_tProgress.m_iAttrs = 0; CSphVector dHitBlocks; dHitBlocks.Reserve ( 1024 ); int iDocinfoBlocks = 0; ARRAY_FOREACH ( iSource, dSources ) { // connect and check schema, if it's not the first one CSphSource * pSource = dSources[iSource]; if ( iSource ) if ( !pSource->Connect ( m_sLastError ) || !pSource->IterateStart ( m_sLastError ) || !pSource->UpdateSchema ( &m_tSchema, m_sLastError ) ) { return 0; } dFieldMvaIndexes.Resize ( 0 ); ARRAY_FOREACH ( i, dMvaIndexes ) { int iAttr = dMvaIndexes[i]; const CSphColumnInfo & tCol = m_tSchema.GetAttr ( iAttr ); if ( tCol.m_eSrc==SPH_ATTRSRC_FIELD ) { FieldMVARedirect_t & tRedirect = dFieldMvaIndexes.Add(); tRedirect.m_tLocator = tCol.m_tLocator; tRedirect.m_iAttr = iAttr; tRedirect.m_iMVAAttr = i; tRedirect.m_bMva64 = ( tCol.m_eAttrType==SPH_ATTR_UINT64SET ); } } // joined filter bool bGotJoined = ( m_tSettings.m_eDocinfo!=SPH_DOCINFO_INLINE ) && pSource->HasJoinedFields(); CSphVector dAllIds; // FIXME! unlimited RAM use.. 
// fetch documents for ( ;; ) { // get next doc, and handle errors bool bGotDoc = pSource->IterateDocument ( m_sLastError ); if ( !bGotDoc ) return 0; // ensure docid is sane if ( pSource->m_tDocInfo.m_iDocID==DOCID_MAX ) { m_sLastError.SetSprintf ( "docid==DOCID_MAX (source broken?)" ); return 0; } // check for eof if ( !pSource->m_tDocInfo.m_iDocID ) break; if ( bGotJoined ) dAllIds.Add ( pSource->m_tDocInfo.m_iDocID ); // show progress bar if ( m_pProgress && ( ( pSource->GetStats().m_iTotalDocuments % 1000 )==0 ) ) { m_tProgress.m_iDocuments = m_tStats.m_iTotalDocuments + pSource->GetStats().m_iTotalDocuments; m_tProgress.m_iBytes = m_tStats.m_iTotalBytes + pSource->GetStats().m_iTotalBytes; m_pProgress ( &m_tProgress, false ); } // update crashdump g_iIndexerCurrentDocID = pSource->m_tDocInfo.m_iDocID; g_iIndexerCurrentHits = pHits-dHits; // store field MVAs if ( bHaveFieldMVAs ) { ARRAY_FOREACH ( i, dFieldMvaIndexes ) { CSphAttrLocator tLoc = dFieldMvaIndexes[i].m_tLocator; int iAttr = dFieldMvaIndexes[i].m_iAttr; int iMVA = dFieldMvaIndexes[i].m_iMVAAttr; bool bMva64 = dFieldMvaIndexes[i].m_bMva64; int iStep = ( bMva64 ? 2 : 1 ); // store per-document MVAs SphRange_t tFieldMva = pSource->IterateFieldMVAStart ( iAttr ); m_tProgress.m_iAttrs += ( tFieldMva.m_iLength / iStep ); assert ( ( tFieldMva.m_iStart + tFieldMva.m_iLength )<=pSource->m_dMva.GetLength() ); for ( int i=tFieldMva.m_iStart; i<( tFieldMva.m_iStart+tFieldMva.m_iLength); i+=iStep ) { MvaEntry_t & tMva = dFieldMVAs.Add(); tMva.m_uDocID = pSource->m_tDocInfo.m_iDocID; tMva.m_iAttr = iMVA; if ( bMva64 ) { tMva.m_uValue = MVA_UPSIZE ( pSource->m_dMva.Begin() + i ); } else { tMva.m_uValue = pSource->m_dMva[i]; } int iLength = dFieldMVAs.GetLength (); if ( iLength==iMaxPoolFieldMVAs ) { dFieldMVAs.Sort ( CmpMvaEntries_fn () ); if ( !sphWriteThrottled ( fdTmpFieldMVAs.GetFD (), &dFieldMVAs[0], iLength*sizeof(MvaEntry_t), "temp_field_mva", m_sLastError ) ) return 0; dFieldMVAs.Resize ( 0 ); nFieldMVAs += iMaxPoolFieldMVAs; } } } } // store ordinals iCurrentBlockSize += ( sizeof ( SphOffset_t ) + sizeof ( DWORD ) ) * dOrdinalAttrs.GetLength (); ARRAY_FOREACH ( i, dOrdinalAttrs ) { CSphVector & dCol = dOrdinals[i]; dCol.Add(); Ordinal_t & tLastOrd = dCol.Last(); tLastOrd.m_uDocID = pSource->m_tDocInfo.m_iDocID; Swap ( tLastOrd.m_sValue, pSource->m_dStrAttrs[dOrdinalAttrs[i]] ); int iOrdStrLen = strlen ( tLastOrd.m_sValue.cstr () ); if ( iOrdStrLen > MAX_ORDINAL_STR_LEN ) { iMaxOrdLen = iOrdStrLen; // truncate iOrdStrLen = MAX_ORDINAL_STR_LEN; tLastOrd.m_sValue = tLastOrd.m_sValue.SubString ( 0, iOrdStrLen - 1 ); } iCurrentBlockSize += iOrdStrLen; } if ( bHaveOrdinals ) { if ( iCurrentBlockSize>=iOrdinalPoolSize ) { iCurrentBlockSize = 0; nOrdinals += dOrdinals[0].GetLength (); ARRAY_FOREACH ( i, dOrdinalAttrs ) { CSphVector & dCol = dOrdinals[i]; dCol.Sort ( CmpOrdinalsValue_fn() ); SphOffset_t uSize = DumpOrdinals ( tOrdWriter, dCol ); if ( !uSize ) { m_sLastError = "dump ordinals: io error"; return 0; } if ( uSize > uMaxOrdinalAttrBlockSize ) uMaxOrdinalAttrBlockSize = uSize; dOrdBlockSize[i].Add ( uSize ); dCol.Resize ( 0 ); } } } // store strings ARRAY_FOREACH ( i, dStringAttrs ) { // FIXME! optimize locators etc? // FIXME! support binary strings w/embedded zeroes? // get data, calc length const char * sData = pSource->m_dStrAttrs[dStringAttrs[i]].cstr(); int iLen = sData ? 
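// The field-MVA handling above is the run-generation half of an external sort:
// MvaEntry_t rows accumulate in a fixed-size pool, and whenever the pool fills
// it is sorted and flushed to the temp file as one sorted run; the queue-based
// merge shown earlier then combines those runs. A standalone sketch of the
// pattern follows, writing runs of plain integers; it is not part of the
// original source and the names/types are illustrative.
#include <algorithm>
#include <cstdint>
#include <fstream>
#include <vector>

// Accumulate values in a bounded pool; each time the pool fills, emit one sorted
// run. The merge phase (not shown) only ever needs one element per run in memory.
class RunWriter_c
{
public:
	RunWriter_c ( std::ofstream & tOut, size_t iPoolSize )
		: m_tOut ( tOut ), m_iPoolSize ( iPoolSize )
	{
		m_dPool.reserve ( iPoolSize );
	}

	void Add ( uint32_t uValue )
	{
		m_dPool.push_back ( uValue );
		if ( m_dPool.size()==m_iPoolSize )
			Flush();
	}

	void Flush ()
	{
		if ( m_dPool.empty() )
			return;
		std::sort ( m_dPool.begin(), m_dPool.end() );
		m_tOut.write ( (const char*)m_dPool.data(), m_dPool.size()*sizeof(uint32_t) );
		m_dRunLengths.push_back ( m_dPool.size() );	// remembered for the merge phase
		m_dPool.clear();
	}

	const std::vector<size_t> & GetRunLengths() const { return m_dRunLengths; }

private:
	std::ofstream &			m_tOut;
	size_t					m_iPoolSize;
	std::vector<uint32_t>	m_dPool;
	std::vector<size_t>		m_dRunLengths;
};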
strlen ( sData ) : 0; if ( iLen ) { // calc offset, do sanity checks SphOffset_t uOff = tStrWriter.GetPos(); if ( uint64_t(uOff)>>32 ) { m_sLastError.SetSprintf ( "too many string attributes (current index format allows up to 4 GB)" ); return 0; } pSource->m_tDocInfo.SetAttr ( m_tSchema.GetAttr ( dStringAttrs[i] ).m_tLocator, DWORD(uOff) ); // pack length, emit it, emit data BYTE dPackedLen[4]; int iLenLen = sphPackStrlen ( dPackedLen, iLen ); tStrWriter.PutBytes ( &dPackedLen, iLenLen ); tStrWriter.PutBytes ( sData, iLen ); } else { // no data pSource->m_tDocInfo.SetAttr ( m_tSchema.GetAttr ( dStringAttrs[i] ).m_tLocator, 0 ); } } // count words ARRAY_FOREACH ( i, dWordcountAttrs ) { int iAttr = dWordcountAttrs[i]; int iNumWords = CountWords ( pSource->m_dStrAttrs[iAttr], m_pTokenizer ); pSource->m_tDocInfo.SetAttr ( m_tSchema.GetAttr(iAttr).m_tLocator, iNumWords ); } // update min docinfo assert ( pSource->m_tDocInfo.m_iDocID ); m_pMin->m_iDocID = Min ( m_pMin->m_iDocID, pSource->m_tDocInfo.m_iDocID ); if ( m_tSettings.m_eDocinfo==SPH_DOCINFO_INLINE ) for ( int i=0; im_pDynamic[i] = Min ( m_pMin->m_pDynamic[i], pSource->m_tDocInfo.m_pDynamic[i] ); // store docinfo if ( m_tSettings.m_eDocinfo!=SPH_DOCINFO_NONE ) { // store next entry DOCINFOSETID ( pDocinfo, pSource->m_tDocInfo.m_iDocID ); memcpy ( DOCINFO2ATTRS ( pDocinfo ), pSource->m_tDocInfo.m_pDynamic, sizeof(CSphRowitem)*m_tSchema.GetRowSize() ); pDocinfo += iDocinfoStride; // if not inlining, flush buffer if it's full // (if inlining, it will flushed later, along with the hits) if ( m_tSettings.m_eDocinfo==SPH_DOCINFO_EXTERN && pDocinfo>=pDocinfoMax ) { assert ( pDocinfo==pDocinfoMax ); int iLen = iDocinfoMax*iDocinfoStride*sizeof(DWORD); sphSortDocinfos ( dDocinfos, iDocinfoMax, iDocinfoStride ); if ( !sphWriteThrottled ( fdDocinfos.GetFD(), dDocinfos, iLen, "raw_docinfos", m_sLastError ) ) return 0; pDocinfo = dDocinfos; iDocinfoBlocks++; } } // store hits while ( const ISphHits * pDocHits = pSource->IterateHits ( m_sLastWarning ) ) { int iDocHits = pDocHits->Length(); #if PARANOID for ( int i=0; im_dData[i].m_iDocID==pSource->m_tDocInfo.m_iDocID ); assert ( pDocHits->m_dData[i].m_iWordID ); assert ( pDocHits->m_dData[i].m_iWordPos ); } #endif assert ( ( pHits+iDocHits )<=( pHitsMax+MAX_SOURCE_HITS ) ); memcpy ( pHits, pDocHits->First(), iDocHits*sizeof(CSphWordHit) ); pHits += iDocHits; // check if we need to flush if ( pHits=pDocinfoMax ) && !( iDictSize && m_pDict->HitblockGetMemUse() > iDictSize ) ) { continue; } // update crashdump g_iIndexerPoolStartDocID = pSource->m_tDocInfo.m_iDocID; g_iIndexerPoolStartHit = pHits-dHits; // sort hits int iHits = pHits - dHits; { PROFILE ( sort_hits ); sphSort ( &dHits[0], iHits, CmpHit_fn() ); m_pDict->HitblockPatch ( &dHits[0], iHits ); } pHits = dHits; if ( m_tSettings.m_eDocinfo==SPH_DOCINFO_INLINE ) { // we're inlining, so let's flush both hits and docs int iDocs = ( pDocinfo - dDocinfos ) / iDocinfoStride; pDocinfo = dDocinfos; sphSortDocinfos ( pDocinfo, iDocs, iDocinfoStride ); dHitBlocks.Add ( cidxWriteRawVLB ( fdHits.GetFD(), dHits, iHits, dDocinfos, iDocs, iDocinfoStride ) ); // we are inlining, so if there are more hits in this document, // we'll need to know it's info next flush if ( iDocHits ) { DOCINFOSETID ( pDocinfo, pSource->m_tDocInfo.m_iDocID ); memcpy ( DOCINFO2ATTRS ( pDocinfo ), pSource->m_tDocInfo.m_pDynamic, sizeof(CSphRowitem)*m_tSchema.GetRowSize() ); pDocinfo += iDocinfoStride; } } else { // we're not inlining, so only flush hits, docs are flushed independently 
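// The string attribute storage above keeps only a 32-bit offset into the .sps
// file in the docinfo row (hence the 4 GB cap checked there) and writes the
// string itself as a 1..3 byte length prefix followed by raw bytes. The prefix
// layout below mirrors the decode logic visible in CopyStringAttr further down;
// this is a standalone sketch with illustrative names, not the library's own
// sphPackStrlen/sphUnpackStr.
#include <cassert>

// Pack a string length into 1..3 bytes: lengths below 0x80 take one byte,
// otherwise the top bits of the first byte say how many extra bytes follow.
static int PackStrlen ( unsigned char * pOut, int iLen )
{
	assert ( iLen>=0 && iLen<0x400000 ); // 22 bits max in this scheme
	if ( iLen<0x80 )
	{
		pOut[0] = (unsigned char)iLen;
		return 1;
	}
	if ( iLen<0x4000 )
	{
		pOut[0] = (unsigned char)( 0x80 | ( iLen>>8 ) );
		pOut[1] = (unsigned char)( iLen & 0xff );
		return 2;
	}
	pOut[0] = (unsigned char)( 0x80 | 0x40 | ( iLen>>16 ) );
	pOut[1] = (unsigned char)( ( iLen>>8 ) & 0xff );
	pOut[2] = (unsigned char)( iLen & 0xff );
	return 3;
}

// Inverse of PackStrlen; matches the bit tests in CopyStringAttr below.
static int UnpackStrlen ( const unsigned char * pIn, int * pBytesRead )
{
	int iLen = pIn[0];
	if ( !( iLen & 0x80 ) )	{ *pBytesRead = 1; return iLen; }
	if ( !( iLen & 0x40 ) )	{ *pBytesRead = 2; return ( ( iLen & 0x3f )<<8 ) + pIn[1]; }
	*pBytesRead = 3;
	return ( ( iLen & 0x3f )<<16 ) + ( pIn[1]<<8 ) + pIn[2];
}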
dHitBlocks.Add ( cidxWriteRawVLB ( fdHits.GetFD(), dHits, iHits, NULL, 0, 0 ) ); } m_pDict->HitblockReset (); if ( dHitBlocks.Last()<0 ) return 0; // progress bar m_tProgress.m_iHitsTotal += iHits; if ( m_pProgress ) { m_tProgress.m_iDocuments = m_tStats.m_iTotalDocuments + pSource->GetStats().m_iTotalDocuments; m_tProgress.m_iBytes = m_tStats.m_iTotalBytes + pSource->GetStats().m_iTotalBytes; m_pProgress ( &m_tProgress, false ); } } } // FIXME! uncontrolled memory usage; add checks and/or diskbased sort in the future? if ( pSource->IterateKillListStart ( m_sLastError ) ) { SphDocID_t tDocId; while ( pSource->IterateKillListNext ( tDocId ) ) dKillList.Add ( tDocId ); } // fetch joined fields if ( bGotJoined ) { dAllIds.Uniq(); SphDocID_t uLastID = 0; bool bLastFound = 0; for ( ;; ) { // get next doc, and handle errors ISphHits * pJoinedHits = pSource->IterateJoinedHits ( m_sLastError ); if ( !pJoinedHits ) return 0; // ensure docid is sane if ( pSource->m_tDocInfo.m_iDocID==DOCID_MAX ) { m_sLastError.SetSprintf ( "joined_docid==DOCID_MAX (source broken?)" ); return 0; } // check for eof if ( !pSource->m_tDocInfo.m_iDocID ) break; // filter and store hits for ( const CSphWordHit * pHit = pJoinedHits->First(); pHit<=pJoinedHits->Last(); pHit++ ) { // flush if needed if ( pHits>=pHitsMax ) { // sort hits int iHits = pHits - dHits; { PROFILE ( sort_hits ); sphSort ( &dHits[0], iHits, CmpHit_fn() ); m_pDict->HitblockPatch ( &dHits[0], iHits ); } pHits = dHits; // we're not inlining, so only flush hits, docs are flushed independently dHitBlocks.Add ( cidxWriteRawVLB ( fdHits.GetFD(), dHits, iHits, NULL, 0, 0 ) ); m_pDict->HitblockReset (); if ( dHitBlocks.Last()<0 ) return 0; } // filter SphDocID_t uHitID = pHit->m_iDocID; if ( uHitID!=uLastID ) { uLastID = uHitID; bLastFound = ( dAllIds.BinarySearch ( uHitID )!=NULL ); } // copy next hit if ( bLastFound ) *pHits++ = *pHit; } } } // this source is over, disconnect and update stats pSource->Disconnect (); m_tStats.m_iTotalDocuments += pSource->GetStats().m_iTotalDocuments; m_tStats.m_iTotalBytes += pSource->GetStats().m_iTotalBytes; } // flush last docinfo block int iDocinfoLastBlockSize = 0; if ( m_tSettings.m_eDocinfo==SPH_DOCINFO_EXTERN && pDocinfo>dDocinfos ) { iDocinfoLastBlockSize = ( pDocinfo - dDocinfos ) / iDocinfoStride; assert ( pDocinfo==( dDocinfos + iDocinfoLastBlockSize*iDocinfoStride ) ); int iLen = iDocinfoLastBlockSize*iDocinfoStride*sizeof(DWORD); sphSortDocinfos ( dDocinfos, iDocinfoLastBlockSize, iDocinfoStride ); if ( !sphWriteThrottled ( fdDocinfos.GetFD(), dDocinfos, iLen, "raw_docinfos", m_sLastError ) ) return 0; iDocinfoBlocks++; } // flush last hit block if ( pHits>dHits ) { int iHits = pHits - dHits; { PROFILE ( sort_hits ); sphSort ( &dHits[0], iHits, CmpHit_fn() ); m_pDict->HitblockPatch ( &dHits[0], iHits ); } m_tProgress.m_iHitsTotal += iHits; if ( m_tSettings.m_eDocinfo==SPH_DOCINFO_INLINE ) { int iDocs = ( pDocinfo - dDocinfos ) / iDocinfoStride; sphSortDocinfos ( dDocinfos, iDocs, iDocinfoStride ); dHitBlocks.Add ( cidxWriteRawVLB ( fdHits.GetFD(), dHits, iHits, dDocinfos, iDocs, iDocinfoStride ) ); } else { dHitBlocks.Add ( cidxWriteRawVLB ( fdHits.GetFD(), dHits, iHits, NULL, 0, 0 ) ); } m_pDict->HitblockReset (); if ( dHitBlocks.Last()<0 ) return 0; } // flush last field MVA block if ( bHaveFieldMVAs && dFieldMVAs.GetLength () ) { int iLength = dFieldMVAs.GetLength (); nFieldMVAs += iLength; dFieldMVAs.Sort ( CmpMvaEntries_fn () ); if ( !sphWriteThrottled ( fdTmpFieldMVAs.GetFD (), &dFieldMVAs[0], 
iLength*sizeof(MvaEntry_t), "temp_field_mva", m_sLastError ) ) return 0; dFieldMVAs.Reset (); } // flush last ordinals block if ( bHaveOrdinals && dOrdinals[0].GetLength () ) { nOrdinals += dOrdinals[0].GetLength (); ARRAY_FOREACH ( i, dOrdinalAttrs ) { CSphVector & dCol = dOrdinals[i]; dCol.Sort ( CmpOrdinalsValue_fn() ); SphOffset_t uSize = DumpOrdinals ( tOrdWriter, dCol ); if ( !uSize ) { m_sLastError = "dump ordinals: io error"; return 0; } if ( uSize > uMaxOrdinalAttrBlockSize ) uMaxOrdinalAttrBlockSize = uSize; dOrdBlockSize[i].Add ( uSize ); dCol.Reset (); } } if ( m_pProgress ) { m_tProgress.m_iDocuments = m_tStats.m_iTotalDocuments; m_tProgress.m_iBytes = m_tStats.m_iTotalBytes; m_pProgress ( &m_tProgress, true ); } PROFILE_END ( collect_hits ); /////////////////////////////////////// // collect and sort multi-valued attrs /////////////////////////////////////// if ( !BuildMVA ( dSources, dHits, iHitsMax*sizeof(CSphWordHit), fdTmpFieldMVAs.GetFD (), nFieldMVAs, iMaxPoolFieldMVAs ) ) return 0; // reset persistent mva update pool ::unlink ( GetIndexFileName("mvp").cstr() ); // reset hits pool dHits.Reset (); CSphString sFieldMVAFile = fdTmpFieldMVAs.GetFilename (); fdTmpFieldMVAs.Close (); ::unlink ( sFieldMVAFile.cstr () ); ///////////////// // sort docinfos ///////////////// tOrdWriter.CloseFile (); if ( tOrdWriter.IsError () ) return 0; CSphString sSortedOrdinalIdFile = GetIndexFileName("tmp6"); // sort ordinals if ( bHaveOrdinals && !dOrdBlockSize[0].GetLength () ) { bHaveOrdinals = false; ::unlink ( sRawOrdinalsFile.cstr () ); } if ( bHaveOrdinals ) { if ( iMaxOrdLen > MAX_ORDINAL_STR_LEN ) sphWarn ( "some ordinal attributes are too long (len=%d,max=%d)", iMaxOrdLen, MAX_ORDINAL_STR_LEN ); CSphString sUnsortedIdFile = GetIndexFileName("tmp5"); CSphAutofile fdRawOrdinals ( sRawOrdinalsFile.cstr (), SPH_O_READ, m_sLastError, true ); if ( fdRawOrdinals.GetFD () < 0 ) return 0; const float ARENA_PERCENT = 0.5f; int nBlocks = dOrdBlockSize[0].GetLength (); SphOffset_t uMemNeededForReaders = SphOffset_t ( nBlocks ) * uMaxOrdinalAttrBlockSize; SphOffset_t uMemNeededForSorting = sizeof ( OrdinalId_t ) * nOrdinals; int iArenaSize = (int) Min ( SphOffset_t ( iMemoryLimit * ARENA_PERCENT ), uMemNeededForReaders ); iArenaSize = Max ( CSphBin::MIN_SIZE * nBlocks, iArenaSize ); int iOrdinalsInPool = (int) Min ( SphOffset_t ( iMemoryLimit * ( 1.0f - ARENA_PERCENT ) ), uMemNeededForSorting ) / sizeof ( OrdinalId_t ); if ( !SortOrdinals ( sUnsortedIdFile.cstr (), fdRawOrdinals.GetFD (), iArenaSize, iOrdinalsInPool, dOrdBlockSize, iArenaSize < uMemNeededForReaders ) ) return 0; CSphAutofile fdUnsortedId ( sUnsortedIdFile.cstr (), SPH_O_READ, m_sLastError, true ); if ( fdUnsortedId.GetFD () < 0 ) return 0; iArenaSize = Min ( iMemoryLimit, (int)uMemNeededForSorting ); iArenaSize = Max ( CSphBin::MIN_SIZE * ( nOrdinals / iOrdinalsInPool + 1 ), iArenaSize ); if ( !SortOrdinalIds ( sSortedOrdinalIdFile.cstr (), fdUnsortedId.GetFD (), iArenaSize, dOrdBlockSize, iArenaSize < uMemNeededForSorting ) ) return 0; } // initialize MVA reader CSphAutoreader rdMva; if ( !rdMva.Open ( GetIndexFileName("spm"), m_sLastError ) ) return 0; SphDocID_t uMvaID = rdMva.GetDocid(); // initialize writer int iDocinfoFD = -1; SphOffset_t iDocinfoWritePos = 0; CSphScopedPtr pfdDocinfoFinal ( NULL ); if ( m_bInplaceSettings ) iDocinfoFD = fdDocinfos.GetFD (); else { pfdDocinfoFinal = new CSphAutofile ( GetIndexFileName("spa"), SPH_O_NEW, m_sLastError ); iDocinfoFD = pfdDocinfoFinal->GetFD(); if ( iDocinfoFD < 0 ) return 0; 
} int iDupes = 0; int iMinBlock = -1; if ( m_tSettings.m_eDocinfo==SPH_DOCINFO_EXTERN && dHitBlocks.GetLength() ) { // initialize readers assert ( dBins.GetLength()==0 ); dBins.Reserve ( iDocinfoBlocks ); float fReadFactor = 1.0f; float fRelocFactor = 0.0f; if ( m_bInplaceSettings ) { assert ( m_fRelocFactor > 0.005f && m_fRelocFactor < 0.95f ); fRelocFactor = m_fRelocFactor; fReadFactor -= fRelocFactor; } int iBinSize = CSphBin::CalcBinSize ( int ( iMemoryLimit * fReadFactor ), iDocinfoBlocks, "sort_docinfos" ); int iRelocationSize = m_bInplaceSettings ? int ( iMemoryLimit * fRelocFactor ) : 0; CSphAutoArray pRelocationBuffer ( iRelocationSize ); iSharedOffset = -1; for ( int i=0; im_iFileLeft = ( ( i==iDocinfoBlocks-1 ) ? iDocinfoLastBlockSize : iDocinfoMax )*iDocinfoStride*sizeof(DWORD); dBins[i]->m_iFilePos = ( i==0 ) ? iDocinfosGap : dBins[i-1]->m_iFilePos + dBins[i-1]->m_iFileLeft; dBins[i]->Init ( fdDocinfos.GetFD(), &iSharedOffset, iBinSize ); } SphOffset_t iDocinfoFileSize = 0; if ( iDocinfoBlocks ) iDocinfoFileSize = dBins [iDocinfoBlocks-1]->m_iFilePos + dBins [iDocinfoBlocks-1]->m_iFileLeft; // docinfo queue CSphAutoArray dDocinfoQueue ( iDocinfoBlocks*iDocinfoStride ); CSphQueue < int, CmpQueuedDocinfo_fn > qDocinfo ( iDocinfoBlocks ); CmpQueuedDocinfo_fn::m_pStorage = dDocinfoQueue; CmpQueuedDocinfo_fn::m_iStride = iDocinfoStride; pDocinfo = dDocinfoQueue; for ( int i=0; iReadBytes ( pDocinfo, iDocinfoStride*sizeof(DWORD) )!=BIN_READ_OK ) { m_sLastError.SetSprintf ( "sort_docinfos: warmup failed (io error?)" ); return 0; } pDocinfo += iDocinfoStride; qDocinfo.Push ( i ); } CSphVector < CSphBin > dOrdReaders; SphOffset_t iSharedOrdOffset = -1; CSphAutofile fdTmpSortedIds ( sSortedOrdinalIdFile.cstr (), SPH_O_READ, m_sLastError, true ); if ( bHaveOrdinals ) { if ( fdTmpSortedIds.GetFD () < 0 ) return 0; dOrdReaders.Resize ( dOrdinalAttrs.GetLength () ); SphOffset_t uStart = 0; ARRAY_FOREACH ( i, dOrdReaders ) { dOrdReaders[i].m_iFileLeft = (int)dOrdBlockSize [i][0]; dOrdReaders[i].m_iFilePos = uStart; dOrdReaders[i].Init ( fdTmpSortedIds.GetFD(), &iSharedOrdOffset, ORDINAL_READ_SIZE ); uStart += dOrdReaders[i].m_iFileLeft; } } // while the queue has data for us int iOrd = 0; pDocinfo = dDocinfos; SphDocID_t uLastId = 0; m_uMinMaxIndex = 0; // prepare the collector for min/max of attributes AttrIndexBuilder_c tMinMax ( m_tSchema ); CSphVector dMinMaxBuffer ( tMinMax.GetExpectedSize ( m_tStats.m_iTotalDocuments ) ); CSphDocMVA tCurInfo ( dMvaIndexes.GetLength() ); tMinMax.Prepare ( dMinMaxBuffer.Begin(), dMinMaxBuffer.Begin() + dMinMaxBuffer.GetLength() ); SphDocID_t uLastDupe = 0; while ( qDocinfo.GetLength() ) { // obtain bin index and next entry int iBin = qDocinfo.Root(); DWORD * pEntry = dDocinfoQueue + iBin*iDocinfoStride; if ( DOCINFO2ID ( pEntry )=DOCINFO2ID(pEntry) ); if ( uMvaID==DOCINFO2ID(pEntry) ) { ARRAY_FOREACH ( i, dMvaIndexes ) { sphSetRowAttr ( DOCINFO2ATTRS(pEntry), dMvaLocators[i], SphAttr_t(rdMva.GetPos()/sizeof(DWORD)) ); // intentional clamp; we'll check for 32bit overflow later DWORD iMvaCount = rdMva.GetDword(); tCurInfo.m_dMVA[i].Reserve ( iMvaCount ); while ( iMvaCount-- ) { tCurInfo.m_dMVA[i].Add ( rdMva.GetDword() ); } } uMvaID = rdMva.GetDocid(); if ( !uMvaID ) uMvaID = DOCID_MAX; } } tMinMax.Collect ( pEntry, tCurInfo ); ARRAY_FOREACH ( i, tCurInfo.m_dMVA ) tCurInfo.m_dMVA[i].Resize ( 0 ); // emit it memcpy ( pDocinfo, pEntry, iDocinfoStride*sizeof(DWORD) ); pDocinfo += iDocinfoStride; uLastId = DOCINFO2ID(pEntry); if ( pDocinfo>=pDocinfoMax ) { int 
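// While the sorted docinfo rows stream out, AttrIndexBuilder_c (tMinMax above)
// aggregates per-block minimums and maximums of every attribute; that block
// index is appended to the .spa data (the "minmax_docinfo" write below) so range
// filters can reject whole blocks without reading the rows. A standalone sketch
// of the idea for a single integer column follows; block size and names are
// illustrative, not the real AttrIndexBuilder_c layout.
#include <algorithm>
#include <cstdint>
#include <limits>
#include <vector>

struct BlockMinMax_t
{
	uint32_t m_uMin;
	uint32_t m_uMax;
};

// Build a block-level min/max index over a column of values.
static std::vector<BlockMinMax_t> BuildMinMaxIndex ( const std::vector<uint32_t> & dColumn, size_t iBlockSize )
{
	std::vector<BlockMinMax_t> dIndex;
	for ( size_t i=0; i<dColumn.size(); i+=iBlockSize )
	{
		BlockMinMax_t tBlock;
		tBlock.m_uMin = std::numeric_limits<uint32_t>::max();
		tBlock.m_uMax = 0;
		size_t iEnd = std::min ( i+iBlockSize, dColumn.size() );
		for ( size_t j=i; j<iEnd; j++ )
		{
			tBlock.m_uMin = std::min ( tBlock.m_uMin, dColumn[j] );
			tBlock.m_uMax = std::max ( tBlock.m_uMax, dColumn[j] );
		}
		dIndex.push_back ( tBlock );
	}
	return dIndex;
}

// At query time a range filter [uLo, uHi] can skip any block whose
// [m_uMin, m_uMax] interval does not intersect the requested range.
static bool BlockMayMatch ( const BlockMinMax_t & tBlock, uint32_t uLo, uint32_t uHi )
{
	return tBlock.m_uMax>=uLo && tBlock.m_uMin<=uHi;
}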
iLen = iDocinfoMax*iDocinfoStride*sizeof(DWORD); if ( m_bInplaceSettings ) { if ( iMinBlock==-1 || dBins[iMinBlock]->IsEOF () ) { iMinBlock = -1; ARRAY_FOREACH ( i, dBins ) if ( !dBins[i]->IsEOF () && ( iMinBlock==-1 || dBins [i]->m_iFilePosm_iFilePos ) ) iMinBlock = i; } if ( iMinBlock!=-1 && ( iDocinfoWritePos + iLen ) > dBins[iMinBlock]->m_iFilePos ) { if ( !RelocateBlock ( iDocinfoFD, (BYTE*)pRelocationBuffer, iRelocationSize, &iDocinfoFileSize, dBins[iMinBlock], &iSharedOffset ) ) return 0; iMinBlock = (iMinBlock+1) % dBins.GetLength (); } sphSeek ( iDocinfoFD, iDocinfoWritePos, SEEK_SET ); iSharedOffset = iDocinfoWritePos; } if ( !sphWriteThrottled ( iDocinfoFD, dDocinfos, iLen, "sort_docinfo", m_sLastError ) ) return 0; iDocinfoWritePos += iLen; pDocinfo = dDocinfos; } } // pop its index, update it, push its index again qDocinfo.Pop (); ESphBinRead eRes = dBins[iBin]->ReadBytes ( pEntry, iDocinfoStride*sizeof(DWORD) ); if ( eRes==BIN_READ_ERROR ) { m_sLastError.SetSprintf ( "sort_docinfo: failed to read entry" ); return 0; } if ( eRes==BIN_READ_OK ) qDocinfo.Push ( iBin ); } if ( pDocinfo>dDocinfos ) { assert ( 0==( pDocinfo-dDocinfos ) % iDocinfoStride ); int iLen = ( pDocinfo - dDocinfos )*sizeof(DWORD); if ( m_bInplaceSettings ) sphSeek ( iDocinfoFD, iDocinfoWritePos, SEEK_SET ); if ( !sphWriteThrottled ( iDocinfoFD, dDocinfos, iLen, "sort_docinfo", m_sLastError ) ) return 0; if ( m_bInplaceSettings ) if ( !sphTruncate ( iDocinfoFD ) ) sphWarn ( "failed to truncate %s", fdDocinfos.GetFilename() ); } tMinMax.FinishCollect(); sphWriteThrottled ( iDocinfoFD, &dMinMaxBuffer[0], sizeof(DWORD) * tMinMax.GetActualSize(), "minmax_docinfo", m_sLastError ); // clean up readers ARRAY_FOREACH ( i, dBins ) SafeDelete ( dBins[i] ); dBins.Reset (); } dDocinfos.Reset (); pDocinfo = NULL; // it might be zero-length, but it must exist if ( m_bInplaceSettings ) fdDocinfos.Close (); else { assert ( pfdDocinfoFinal.Ptr () ); pfdDocinfoFinal->Close (); } // dump killlist CSphAutofile fdKillList ( GetIndexFileName("spk"), SPH_O_NEW, m_sLastError ); if ( fdKillList.GetFD()<0 ) return 0; if ( dKillList.GetLength () ) { dKillList.Uniq (); m_iKillListSize = dKillList.GetLength (); if ( !sphWriteThrottled ( fdKillList.GetFD (), &dKillList[0], m_iKillListSize*sizeof(SphAttr_t), "kill list", m_sLastError ) ) return 0; } fdKillList.Close (); /////////////////////////////////// // sort and write compressed index /////////////////////////////////// PROFILE_BEGIN ( invert_hits ); // initialize readers assert ( dBins.GetLength()==0 ); dBins.Reserve ( dHitBlocks.GetLength() ); iSharedOffset = -1; float fReadFactor = 1.0f; int iRelocationSize = 0; iWriteBuffer = m_iWriteBuffer; if ( m_bInplaceSettings ) { assert ( m_fRelocFactor > 0.005f && m_fRelocFactor < 0.95f ); assert ( m_fWriteFactor > 0.005f && m_fWriteFactor < 0.95f ); assert ( m_fWriteFactor+m_fRelocFactor < 1.0f ); fReadFactor -= m_fRelocFactor + m_fWriteFactor; iRelocationSize = int ( iMemoryLimit * m_fRelocFactor ); iWriteBuffer = int ( iMemoryLimit * m_fWriteFactor ); } int iBinSize = CSphBin::CalcBinSize ( int ( iMemoryLimit * fReadFactor ), dHitBlocks.GetLength() + m_bWordDict, "sort_hits" ); CSphAutoArray pRelocationBuffer ( iRelocationSize ); iSharedOffset = -1; ARRAY_FOREACH ( i, dHitBlocks ) { dBins.Add ( new CSphBin ( m_tSettings.m_eHitless, m_pDict->GetSettings().m_bWordDict ) ); dBins[i]->m_iFileLeft = dHitBlocks[i]; dBins[i]->m_iFilePos = ( i==0 ) ? 
iHitsGap : dBins[i-1]->m_iFilePos + dBins[i-1]->m_iFileLeft; dBins[i]->Init ( fdHits.GetFD(), &iSharedOffset, iBinSize ); } // if there were no hits, create zero-length index files int iRawBlocks = dBins.GetLength(); ////////////////////////////// // create new index files set ////////////////////////////// // doclist and hitlist files m_wrDoclist.CloseFile (); m_wrHitlist.CloseFile (); m_wrDoclist.SetBufferSize ( m_iWriteBuffer ); m_wrHitlist.SetBufferSize ( m_bInplaceSettings ? iWriteBuffer : m_iWriteBuffer ); if ( !m_wrDoclist.OpenFile ( GetIndexFileName("spd"), m_sLastError ) ) return 0; if ( m_bInplaceSettings ) { sphSeek ( fdHits.GetFD(), 0, SEEK_SET ); m_wrHitlist.SetFile ( fdHits.GetFD(), &iSharedOffset ); } else if ( !m_wrHitlist.OpenFile ( GetIndexFileName("spp"), m_sLastError ) ) return 0; // put dummy byte (otherwise offset would start from 0, first delta would be 0 // and VLB encoding of offsets would fuckup) BYTE bDummy = 1; m_wrDoclist.PutBytes ( &bDummy, 1 ); m_wrHitlist.PutBytes ( &bDummy, 1 ); // dict files CSphAutofile fdTmpDict ( GetIndexFileName("tmp8"), SPH_O_NEW, m_sLastError, true ); CSphAutofile fdDict ( GetIndexFileName("spi"), SPH_O_NEW, m_sLastError, false ); if ( fdTmpDict.GetFD()<0 || fdDict.GetFD()<0 ) return 0; m_pDict->DictBegin ( fdTmpDict.GetFD(), fdDict.GetFD(), iBinSize ); // adjust min IDs, and fill header assert ( m_pMin->m_iDocID>0 ); m_pMin->m_iDocID--; if ( m_tSettings.m_eDocinfo==SPH_DOCINFO_INLINE ) for ( int i=0; im_pDynamic[i]--; ////////////// // final sort ////////////// if ( iRawBlocks ) { int iLastBin = dBins.GetLength () - 1; SphOffset_t iHitFileSize = dBins[iLastBin]->m_iFilePos + dBins [iLastBin]->m_iFileLeft; CSphHitQueue tQueue ( iRawBlocks ); CSphAggregateHit tHit; // initialize hitlist encoder state m_tLastHit.m_iDocID = 0; m_tLastHit.m_iWordID = 0; m_tLastHit.m_iWordPos = EMPTY_HIT; m_tLastHit.m_sKeyword = m_sLastKeyword; // initial fill int iRowitems = ( m_tSettings.m_eDocinfo==SPH_DOCINFO_INLINE ) ? m_tSchema.GetRowSize() : 0; CSphAutoArray dInlineAttrs ( iRawBlocks*iRowitems ); int * bActive = new int [ iRawBlocks ]; for ( int i=0; iReadHit ( &tHit, iRowitems, dInlineAttrs+i*iRowitems ) ) { m_sLastError.SetSprintf ( "sort_hits: warmup failed (io error?)" ); return 0; } bActive[i] = ( tHit.m_iWordID!=0 ); if ( bActive[i] ) tQueue.Push ( tHit, i ); } // init progress meter m_tProgress.m_ePhase = CSphIndexProgress::PHASE_SORT; m_tProgress.m_iHits = 0; // while the queue has data for us // FIXME! analyze binsRead return code int iHitsSorted = 0; iMinBlock = -1; while ( tQueue.m_iUsed ) { int iBin = tQueue.m_pData->m_iBin; // pack and emit queue root tQueue.m_pData->m_iDocID -= m_pMin->m_iDocID; if ( m_bInplaceSettings ) { if ( iMinBlock==-1 || dBins[iMinBlock]->IsEOF () || !bActive[iMinBlock] ) { iMinBlock = -1; ARRAY_FOREACH ( i, dBins ) if ( !dBins[i]->IsEOF () && bActive[i] && ( iMinBlock==-1 || dBins[i]->m_iFilePos < dBins[iMinBlock]->m_iFilePos ) ) iMinBlock = i; } int iToWriteMax = 3*sizeof(DWORD); if ( iMinBlock!=-1 && ( m_wrHitlist.GetPos () + iToWriteMax ) > dBins[iMinBlock]->m_iFilePos ) { if ( !RelocateBlock ( fdHits.GetFD (), (BYTE*)pRelocationBuffer, iRelocationSize, &iHitFileSize, dBins[iMinBlock], &iSharedOffset ) ) return 0; iMinBlock = (iMinBlock+1) % dBins.GetLength (); } } cidxHit ( tQueue.m_pData, iRowitems ? 
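// Doclists and hitlists are written delta-first: the minimum docid is subtracted
// up front (m_pMin->m_iDocID above), consecutive values are stored as differences,
// and the differences go out as variable-length bytes -- which is also why both
// writers emit a dummy leading byte, so no real offset is ever zero. Below is a
// standalone sketch of the delta plus variable-length-byte idea; the real on-disk
// format carries more (field masks, hint bytes, inline attrs), and the names here
// are illustrative.
#include <cstdint>
#include <vector>

// Append one value as variable-length bytes, 7 bits per byte, with the high bit
// set on every byte except the last.
static void ZipInt ( std::vector<uint8_t> & dOut, uint64_t uValue )
{
	do
	{
		uint8_t uByte = uValue & 0x7f;
		uValue >>= 7;
		if ( uValue )
			uByte |= 0x80;
		dOut.push_back ( uByte );
	} while ( uValue );
}

// Encode a sorted docid list as zipped deltas against a base docid.
static std::vector<uint8_t> ZipDocids ( const std::vector<uint64_t> & dDocids, uint64_t uBase )
{
	std::vector<uint8_t> dOut;
	uint64_t uLast = uBase;
	for ( size_t i=0; i<dDocids.size(); i++ )
	{
		ZipInt ( dOut, dDocids[i] - uLast );	// sorted input, so deltas are non-negative
		uLast = dDocids[i];
	}
	return dOut;
}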
dInlineAttrs+iBin*iRowitems : NULL ); if ( m_pDict->DictIsError() || m_wrDoclist.IsError() || m_wrHitlist.IsError() ) return 0; // pop queue root and push next hit from popped bin tQueue.Pop (); if ( bActive[iBin] ) { dBins[iBin]->ReadHit ( &tHit, iRowitems, dInlineAttrs+iBin*iRowitems ); bActive[iBin] = ( tHit.m_iWordID!=0 ); if ( bActive[iBin] ) tQueue.Push ( tHit, iBin ); } // progress if ( m_pProgress && ++iHitsSorted==1000000 ) { m_tProgress.m_iHits += iHitsSorted; m_pProgress ( &m_tProgress, false ); iHitsSorted = 0; } } if ( m_pProgress ) { m_tProgress.m_iHits = m_tProgress.m_iHitsTotal; // sum might be less than total because of dupes! m_pProgress ( &m_tProgress, true ); } // cleanup SafeDeleteArray ( bActive ); ARRAY_FOREACH ( i, dBins ) SafeDelete ( dBins[i] ); dBins.Reset (); CSphAggregateHit tFlush; tFlush.m_iDocID = 0; tFlush.m_iWordID = 0; tFlush.m_sKeyword = NULL; tFlush.m_iWordPos = EMPTY_HIT; tFlush.m_dFieldMask.Unset(); cidxHit ( &tFlush, NULL ); if ( m_bInplaceSettings ) { m_wrHitlist.CloseFile (); if ( !sphTruncate ( fdHits.GetFD () ) ) sphWarn ( "failed to truncate %s", fdHits.GetFilename() ); } } if ( iDupes ) sphWarn ( "%d duplicate document id pairs found", iDupes ); PROFILE_END ( invert_hits ); // we're done if ( !cidxDone ( "sph", iMemoryLimit ) ) return 0; // when the party's over.. ARRAY_FOREACH ( i, dSources ) dSources[i]->PostIndex (); PROFILER_DONE (); PROFILE_SHOW (); dFileWatchdog.AllIsDone(); return 1; } // NOLINT function length static bool CopyFile ( const char * sSrc, const char * sDst, CSphString & sErrStr ) { assert ( sSrc ); assert ( sDst ); const DWORD iMaxBufSize = 1024 * 1024; CSphAutofile tSrcFile ( sSrc, SPH_O_READ, sErrStr ); CSphAutofile tDstFile ( sDst, SPH_O_NEW, sErrStr ); if ( tSrcFile.GetFD()<0 || tDstFile.GetFD()<0 ) return false; SphOffset_t iFileSize = tSrcFile.GetSize(); DWORD iBufSize = (DWORD) Min ( iFileSize, (SphOffset_t)iMaxBufSize ); if ( iFileSize ) { BYTE * pData = new BYTE[iBufSize]; if ( !pData ) { sErrStr.SetSprintf ( "memory allocation error" ); return false; } bool bError = true; while ( iFileSize > 0 ) { DWORD iSize = (DWORD) Min ( iFileSize, (SphOffset_t)iBufSize ); if ( !tSrcFile.Read ( pData, iSize, sErrStr ) ) break; if ( !sphWriteThrottled ( tDstFile.GetFD(), pData, iSize, "CopyFile", sErrStr ) ) break; iFileSize -= iSize; if ( !iFileSize ) bError = false; } SafeDeleteArray ( pData ); return ( bError==false ); } return true; } SphAttr_t CopyStringAttr ( CSphWriter & wrTo, CSphReader & rdFrom, SphAttr_t uOffset ) { // magic offset? do nothing if ( !uOffset ) return 0; // aim rdFrom.SeekTo ( uOffset, 0 ); // read and decode length // MUST be in sync with sphUnpackStr int iLen = rdFrom.GetByte (); if ( iLen & 0x80 ) { if ( iLen & 0x40 ) { iLen = ( (int)( iLen & 0x3f )<<16 ) + ( rdFrom.GetByte()<<8 ); iLen += rdFrom.GetByte(); // MUST be separate statement; cf. sequence point } else { iLen = ( (int)( iLen & 0x3f )<<8 ) + rdFrom.GetByte(); } } // no data? do nothing if ( !iLen ) return 0; // copy bytes uOffset = (SphAttr_t) wrTo.GetPos(); // FIXME! check bounds? BYTE dLen[4]; wrTo.PutBytes ( dLen, sphPackStrlen ( dLen, iLen ) ); while ( iLen>0 ) { const BYTE * pBuf = NULL; int iChunk = rdFrom.GetBytesZerocopy ( &pBuf, iLen ); wrTo.PutBytes ( pBuf, iChunk ); iLen -= iChunk; } return uOffset; } static const int DOCLIST_HINT_THRESH = 256; static int DoclistHintUnpack ( int iDocs, BYTE uHint ) { if ( iDocs=m_iMaxPos ) return false; // get leading value SphWordID_t iWord0 = m_pDict ? 
m_tReader.GetByte() : m_tReader.UnzipWordid(); if ( !iWord0 ) { // handle checkpoint m_tReader.UnzipOffset(); m_iWordID = 0; m_iDoclistOffset = 0; m_sWord[0] = '\0'; if ( m_tReader.GetPos()>=m_iMaxPos ) return false; iWord0 = m_pDict ? m_tReader.GetByte() : m_tReader.UnzipWordid(); // get next word } if ( !iWord0 ) return false; // some failure // get word entry if ( m_pDict ) { // unpack next word // must be in sync with DictEnd()! assert ( iWord0<=255 ); BYTE uPack = (BYTE) iWord0; int iMatch, iDelta; if ( uPack & 0x80 ) { iDelta = ( ( uPack>>4 ) & 7 ) + 1; iMatch = uPack & 15; } else { iDelta = uPack & 127; iMatch = m_tReader.GetByte(); } assert ( iMatch+iDelta<(int)sizeof(m_sWord)-1 ); assert ( iMatch<=(int)strlen(m_sWord) ); m_tReader.GetBytes ( m_sWord + iMatch, iDelta ); m_sWord [ iMatch+iDelta ] = '\0'; m_iDoclistOffset = m_tReader.UnzipOffset(); m_iDocs = m_tReader.UnzipInt(); m_iHits = m_tReader.UnzipInt(); m_iHint = 0; if ( m_iDocs>=DOCLIST_HINT_THRESH ) m_iHint = m_tReader.GetByte(); DoclistHintUnpack ( m_iDocs, (BYTE) m_iHint ); m_iWordID = (SphWordID_t) sphCRC32 ( GetWord() ); // set wordID for indexing } else { m_iWordID += iWord0; m_iDoclistOffset += m_tReader.UnzipOffset(); m_iDocs = m_tReader.UnzipInt(); m_iHits = m_tReader.UnzipInt(); } m_bHasHitlist = ( m_eHitless==SPH_HITLESS_NONE ) || ( m_eHitless==SPH_HITLESS_SOME && !( m_iDocs & 0x80000000 ) ); m_iDocs = m_eHitless==SPH_HITLESS_SOME ? ( m_iDocs & 0x7FFFFFFF ) : m_iDocs; return true; // FIXME? errorflag? } int CmpWord ( const CSphDictReader & tOther ) const { if ( m_pDict ) return strcmp ( m_sWord, tOther.m_sWord ); int iRes = 0; iRes = m_iWordIDtOther.m_iWordID ? 1 : iRes; return iRes; } BYTE * GetWord () const { return (BYTE *)m_sWord; } }; static ISphFilter * CreateMergeFilters ( CSphVector & dSettings, const CSphSchema & tSchema, const DWORD * pMvaPool ) { CSphString sError; ISphFilter * pResult = NULL; ARRAY_FOREACH ( i, dSettings ) { ISphFilter * pFilter = sphCreateFilter ( dSettings[i], tSchema, pMvaPool, sError ); if ( pFilter ) pResult = sphJoinFilters ( pResult, pFilter ); } return pResult; } class CSphMerger { private: CSphIndex_VLN * m_pOutputIndex; public: explicit CSphMerger ( CSphIndex_VLN * pOutputIndex ) : m_pOutputIndex ( pOutputIndex ) {} template < typename QWORD > static inline void PrepareQword ( QWORD & tQword, const CSphDictReader & tReader, int iDynamic, SphDocID_t iMinID, bool bWordDict ) //NOLINT { tQword.m_tDoc.Reset ( iDynamic ); tQword.m_iMinID = iMinID; tQword.m_tDoc.m_iDocID = iMinID; tQword.m_iDocs = tReader.m_iDocs; tQword.m_iHits = tReader.m_iHits; tQword.m_bHasHitlist = tReader.m_bHasHitlist; tQword.m_uHitPosition = 0; tQword.m_iHitlistPos = 0; if ( bWordDict ) tQword.m_rdDoclist.SeekTo ( tReader.m_iDoclistOffset, tReader.m_iHint ); } template < typename QWORD > static inline bool NextDocument ( QWORD & tQword, CSphIndex_VLN * pSourceIndex, CSphRowitem * pInline, ISphFilter * pFilter ) { for ( ;; ) { tQword.GetNextDoc ( pInline ); if ( tQword.m_tDoc.m_iDocID ) { tQword.SeekHitlist ( tQword.m_iHitlistPos ); if ( pFilter ) { CSphMatch tMatch; tMatch.m_iDocID = tQword.m_tDoc.m_iDocID; if ( pFilter->UsesAttrs() ) { if ( pInline ) tMatch.m_pDynamic = pInline; else { const DWORD * pInfo = pSourceIndex->FindDocinfo ( tQword.m_tDoc.m_iDocID ); tMatch.m_pStatic = pInfo?DOCINFO2ATTRS ( pInfo ):NULL; } } bool bResult = pFilter->Eval ( tMatch ); tMatch.m_pDynamic = NULL; if ( !bResult ) { while ( tQword.m_bHasHitlist && tQword.GetNextHit()!=EMPTY_HIT ); continue; } } return true; } else return 
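// The keywords dictionary read above stores each entry as "how many leading bytes
// match the previous word" plus the differing tail, packed into a single byte when
// both numbers are small -- plain front coding over the sorted word list. A
// standalone sketch of the technique follows, using an explicit two-field record
// instead of the packed byte; it is not the library's own format and the names
// are illustrative.
#include <string>
#include <vector>

struct FrontCoded_t
{
	int			m_iMatch;	// shared prefix length vs. previous word
	std::string	m_sTail;	// remaining suffix
};

// Encode a sorted word list with front coding.
static std::vector<FrontCoded_t> FrontEncode ( const std::vector<std::string> & dWords )
{
	std::vector<FrontCoded_t> dOut;
	std::string sPrev;
	for ( size_t i=0; i<dWords.size(); i++ )
	{
		const std::string & sCur = dWords[i];
		size_t iMatch = 0;
		while ( iMatch<sPrev.size() && iMatch<sCur.size() && sPrev[iMatch]==sCur[iMatch] )
			iMatch++;
		FrontCoded_t tRec;
		tRec.m_iMatch = (int)iMatch;
		tRec.m_sTail = sCur.substr ( iMatch );
		dOut.push_back ( tRec );
		sPrev = sCur;
	}
	return dOut;
}

// Decode reverses the process: keep the first m_iMatch bytes of the previous word
// and append the stored tail -- the same splice the reader above performs into
// its m_sWord buffer.
static std::vector<std::string> FrontDecode ( const std::vector<FrontCoded_t> & dRecs )
{
	std::vector<std::string> dOut;
	std::string sPrev;
	for ( size_t i=0; i<dRecs.size(); i++ )
	{
		std::string sCur = sPrev.substr ( 0, dRecs[i].m_iMatch ) + dRecs[i].m_sTail;
		dOut.push_back ( sCur );
		sPrev = sCur;
	}
	return dOut;
}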
false; } } template < typename QWORD > inline void TransferData ( QWORD & tQword, SphWordID_t iWordID, BYTE * sWord, CSphIndex_VLN * pSourceIndex, CSphRowitem * pInline, ISphFilter * pFilter ) { CSphAggregateHit tHit; tHit.m_iWordID = iWordID; tHit.m_sKeyword = sWord; tHit.m_dFieldMask.Unset(); while ( CSphMerger::NextDocument ( tQword, pSourceIndex, pInline, pFilter ) ) { if ( tQword.m_bHasHitlist ) TransferHits ( tQword, pInline, tHit ); else { // convert to aggregate if there is no hit-list tHit.m_iDocID = tQword.m_tDoc.m_iDocID - m_pOutputIndex->m_pMin->m_iDocID; tHit.m_dFieldMask = tQword.m_dQwordFields; tHit.SetAggrCount ( tQword.m_uMatchHits ); m_pOutputIndex->cidxHit ( &tHit, pInline ); } } } template < typename QWORD > inline void TransferHits ( QWORD & tQword, CSphRowitem * pInline, CSphAggregateHit & tHit ) { assert ( tQword.m_bHasHitlist ); tHit.m_iDocID = tQword.m_tDoc.m_iDocID - m_pOutputIndex->m_pMin->m_iDocID; for ( Hitpos_t uHit = tQword.GetNextHit(); uHit!=EMPTY_HIT; uHit = tQword.GetNextHit() ) { tHit.m_iWordPos = uHit; m_pOutputIndex->cidxHit ( &tHit, pInline ); } } template < typename QWORD > static inline void ConfigureQword ( QWORD & tQword, CSphAutofile & tHits, CSphAutofile & tDocs, CSphIndex_VLN * pIndex ) { bool bInline = pIndex->m_tSettings.m_eDocinfo==SPH_DOCINFO_INLINE; tQword.m_iInlineAttrs = bInline ? pIndex->m_tSchema.GetAttrsCount() : 0; tQword.m_pInlineFixup = bInline ? pIndex->m_pMin->m_pDynamic : NULL; tQword.m_rdHitlist.SetFile ( tHits ); tQword.m_rdHitlist.GetByte(); tQword.m_rdDoclist.SetFile ( tDocs ); tQword.m_rdDoclist.GetByte(); } }; template < typename QWORDDST, typename QWORDSRC > bool CSphIndex_VLN::MergeWords ( CSphIndex_VLN * pSrcIndex, ISphFilter * pFilter ) { assert ( m_pDict->GetSettings().m_bWordDict==pSrcIndex->m_pDict->GetSettings().m_bWordDict ); // setup writers m_wrDoclist.OpenFile ( GetIndexFileName("spd.tmp"), m_sLastError ); m_wrHitlist.OpenFile ( GetIndexFileName("spp.tmp"), m_sLastError ); BYTE bDummy = 1; m_wrDoclist.PutBytes ( &bDummy, 1 ); m_wrHitlist.PutBytes ( &bDummy, 1 ); m_pDict->HitblockBegin(); CSphDictReader tDstReader; CSphDictReader tSrcReader; const bool bWordDict = m_pDict->GetSettings().m_bWordDict; tDstReader.Setup ( GetIndexFileName("spi"), m_tWordlist.m_iCheckpointsPos, m_tSettings.m_eHitless, m_sLastError, ( bWordDict ? m_pDict : NULL ) ); tSrcReader.Setup ( pSrcIndex->GetIndexFileName("spi"), pSrcIndex->m_tWordlist.m_iCheckpointsPos, pSrcIndex->m_tSettings.m_eHitless, m_sLastError, ( bWordDict ? m_pDict : NULL ) ); if ( !m_sLastError.IsEmpty() ) return false; /// prepare for indexing m_tLastHit.m_iDocID = 0; m_tLastHit.m_iWordID = 0; m_tLastHit.m_sKeyword = m_sLastKeyword; m_tLastHit.m_iWordPos = EMPTY_HIT; const SphDocID_t iDstMinID = m_pMin->m_iDocID; const SphDocID_t iSrcMinID = pSrcIndex->m_pMin->m_iDocID; // correct infinum might be already set during spa merging. 
if ( !m_iMergeInfinum ) m_pMin->m_iDocID = Min ( iDstMinID, iSrcMinID ); else m_pMin->m_iDocID = m_iMergeInfinum; m_tWordlist.m_dCheckpoints.Reset ( 0 ); const int iDstDynamic = m_tSchema.GetDynamicSize(); const int iSrcDynamic = pSrcIndex->m_tSchema.GetDynamicSize(); /// setup qwords QWORDDST tDstQword ( false, false ); QWORDSRC tSrcQword ( false, false ); CSphAutofile fSrcDocs, fSrcHits; fSrcDocs.Open ( pSrcIndex->GetIndexFileName("spd"), SPH_O_READ, m_sLastError ); fSrcHits.Open ( pSrcIndex->GetIndexFileName("spp"), SPH_O_READ, m_sLastError ); CSphAutofile fDstDocs, fDstHits; fDstDocs.Open ( GetIndexFileName("spd"), SPH_O_READ, m_sLastError ); fDstHits.Open ( GetIndexFileName("spp"), SPH_O_READ, m_sLastError ); if ( !m_sLastError.IsEmpty() ) return false; CSphMerger::ConfigureQword ( tDstQword, fDstHits, fDstDocs, this ); CSphMerger::ConfigureQword ( tSrcQword, fSrcHits, fSrcDocs, pSrcIndex ); int iDstInlineSize = m_tSettings.m_eDocinfo==SPH_DOCINFO_INLINE ? m_tSchema.GetRowSize() : 0; int iSrcInlineSize = pSrcIndex->m_tSettings.m_eDocinfo==SPH_DOCINFO_INLINE ? pSrcIndex->m_tSchema.GetRowSize() : 0; CSphAutoArray dDstInline ( iDstInlineSize ); CSphAutoArray dSrcInline ( iSrcInlineSize ); /// merge CSphMerger tMerge(this); bool bDstWord = tDstReader.Read(); bool bSrcWord = tSrcReader.Read(); if ( m_pProgress ) { m_tProgress.m_ePhase = CSphIndexProgress::PHASE_MERGE; m_pProgress ( &m_tProgress, false ); } int iWords = 0; int iHitlistsDiscarded = 0; while ( bDstWord || bSrcWord ) { if ( iWords==1000 ) { m_tProgress.m_iWords += 1000; iWords = 0; m_pProgress ( &m_tProgress, false ); } const int iCmp = tDstReader.CmpWord ( tSrcReader ); if ( !bSrcWord || ( bDstWord && iCmp<0 ) ) { // transfer documents and hits from destination CSphMerger::PrepareQword ( tDstQword, tDstReader, iDstDynamic, iDstMinID, bWordDict ); tMerge.TransferData ( tDstQword, tDstReader.m_iWordID, tDstReader.GetWord(), this, dDstInline, pFilter ); iWords++; bDstWord = tDstReader.Read(); } else if ( !bDstWord || ( bSrcWord && iCmp>0 ) ) { // transfer documents and hits from source CSphMerger::PrepareQword ( tSrcQword, tSrcReader, iSrcDynamic, iSrcMinID, bWordDict ); tMerge.TransferData ( tSrcQword, tSrcReader.m_iWordID, tSrcReader.GetWord(), pSrcIndex, dSrcInline, NULL ); iWords++; bSrcWord = tSrcReader.Read(); } else // merge documents and hits inside the word { assert ( iCmp==0 ); bool bHitless = !tDstReader.m_bHasHitlist; if ( tDstReader.m_bHasHitlist!=tSrcReader.m_bHasHitlist ) { iHitlistsDiscarded++; bHitless = true; } CSphMerger::PrepareQword ( tDstQword, tDstReader, iDstDynamic, iDstMinID, bWordDict ); CSphMerger::PrepareQword ( tSrcQword, tSrcReader, iSrcDynamic, iSrcMinID, bWordDict ); CSphAggregateHit tHit; tHit.m_iWordID = tDstReader.m_iWordID; // !COMMIT m_sKeyword anyone? 
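				// Both readers are positioned on the same keyword in this branch: the
				// code below merges the two doclists document by document (again three
				// sub-cases: doc only in dst, only in src, or in both), degrading to
				// aggregate hit counts whenever either side has no hitlist for the word.
				// The simpler branches above copy a word that exists on one side only,
				// with the destination side run through the kill-list / merge filters
				// and the source side taken as is.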
tHit.m_sKeyword = tDstReader.GetWord(); tHit.m_dFieldMask.Unset(); bool bDstDocs = CSphMerger::NextDocument ( tDstQword, this, dDstInline, pFilter ); bool bSrcDocs = true; tSrcQword.GetNextDoc ( dSrcInline ); tSrcQword.SeekHitlist ( tSrcQword.m_iHitlistPos ); while ( bDstDocs || bSrcDocs ) { if ( !bSrcDocs || ( bDstDocs && tDstQword.m_tDoc.m_iDocID < tSrcQword.m_tDoc.m_iDocID ) ) { // transfer hits from destination if ( bHitless ) { while ( tDstQword.m_bHasHitlist && tDstQword.GetNextHit()!=EMPTY_HIT ); tHit.m_iDocID = tDstQword.m_tDoc.m_iDocID - m_pMin->m_iDocID; tHit.m_dFieldMask = tDstQword.m_dQwordFields; tHit.SetAggrCount ( tDstQword.m_uMatchHits ); cidxHit ( &tHit, dSrcInline ); } else tMerge.TransferHits ( tDstQword, dDstInline, tHit ); bDstDocs = CSphMerger::NextDocument ( tDstQword, this, dDstInline, pFilter ); } else if ( !bDstDocs || ( bSrcDocs && tDstQword.m_tDoc.m_iDocID > tSrcQword.m_tDoc.m_iDocID ) ) { // transfer hits from source if ( bHitless ) { while ( tSrcQword.m_bHasHitlist && tSrcQword.GetNextHit()!=EMPTY_HIT ); tHit.m_iDocID = tSrcQword.m_tDoc.m_iDocID - m_pMin->m_iDocID; tHit.m_dFieldMask = tSrcQword.m_dQwordFields; tHit.SetAggrCount ( tSrcQword.m_uMatchHits ); cidxHit ( &tHit, dSrcInline ); } else tMerge.TransferHits ( tSrcQword, dSrcInline, tHit ); bSrcDocs = CSphMerger::NextDocument ( tSrcQword, pSrcIndex, dSrcInline, NULL ); } else { // merge hits inside the document assert ( bDstDocs ); assert ( bSrcDocs ); assert ( tDstQword.m_tDoc.m_iDocID==tSrcQword.m_tDoc.m_iDocID ); tHit.m_iDocID = tDstQword.m_tDoc.m_iDocID - m_pMin->m_iDocID; if ( bHitless ) { while ( tDstQword.m_bHasHitlist && tDstQword.GetNextHit()!=EMPTY_HIT ); while ( tSrcQword.m_bHasHitlist && tSrcQword.GetNextHit()!=EMPTY_HIT ); tHit.m_dFieldMask = tDstQword.m_dQwordFields | tSrcQword.m_dQwordFields; tHit.SetAggrCount ( tDstQword.m_uMatchHits + tSrcQword.m_uMatchHits ); cidxHit ( &tHit, dSrcInline ); } else { Hitpos_t uDstHit = tDstQword.GetNextHit(); Hitpos_t uSrcHit = tSrcQword.GetNextHit(); while ( uDstHit!=EMPTY_HIT || uSrcHit!=EMPTY_HIT ) { if ( uSrcHit==EMPTY_HIT || ( uDstHit!=EMPTY_HIT && uDstHitm_tStats.m_iTotalDocuments; m_tStats.m_iTotalBytes += pSrcIndex->m_tStats.m_iTotalBytes; m_tProgress.m_iWords += iWords; m_pProgress ( &m_tProgress, false ); if ( iHitlistsDiscarded ) m_sLastWarning.SetSprintf ( "discarded hitlists for %u words", iHitlistsDiscarded ); return true; } bool CSphIndex_VLN::Merge ( CSphIndex * pSource, CSphVector & dFilters, bool bMergeKillLists ) { assert ( pSource ); CSphIndex_VLN * pSrcIndex = dynamic_cast< CSphIndex_VLN * >( pSource ); assert ( pSrcIndex ); CSphString sWarning; if ( !Prealloc ( false, false, sWarning ) || !Preread() ) return false; if ( !pSrcIndex->Prealloc ( false, false, sWarning ) || !pSrcIndex->Preread() ) { m_sLastError.SetSprintf ( "source index preload failed: %s", pSrcIndex->GetLastError().cstr() ); return false; } const CSphSchema & tDstSchema = m_tSchema; const CSphSchema & tSrcSchema = pSrcIndex->m_tSchema; if ( !tDstSchema.CompareTo ( tSrcSchema, m_sLastError ) ) return false; if ( m_tSettings.m_eHitless!=pSrcIndex->m_tSettings.m_eHitless ) { m_sLastError = "hitless settings must be the same on merged indices"; return false; } // FIXME! 
if ( m_tSettings.m_eDocinfo!=pSrcIndex->m_tSettings.m_eDocinfo && !( m_bIsEmpty || pSrcIndex->m_bIsEmpty ) ) { m_sLastError.SetSprintf ( "docinfo storage on non-empty indexes must be the same (dst docinfo %d, empty %d, src docinfo %d, empty %d", m_tSettings.m_eDocinfo, m_bIsEmpty, pSrcIndex->m_tSettings.m_eDocinfo, pSrcIndex->m_bIsEmpty ); return false; } if ( m_pDict->GetSettings().m_bWordDict!=pSrcIndex->m_pDict->GetSettings().m_bWordDict ) { m_sLastError.SetSprintf ( "dictionary types must be the same (dst dict=%s, src dict=%s )", m_pDict->GetSettings().m_bWordDict ? "keywords" : "crc", pSrcIndex->m_pDict->GetSettings().m_bWordDict ? "keywords" : "crc" ); return false; } m_bWordDict = m_pDict->GetSettings().m_bWordDict; m_bMerging = true; int iStride = DOCINFO_IDSIZE + m_tSchema.GetRowSize(); // create filters ISphFilter * pFilter = CreateMergeFilters ( dFilters, m_tSchema, GetMVAPool() ); DWORD nKillListSize = pSrcIndex->GetKillListSize (); if ( nKillListSize ) { CSphFilterSettings tKillListFilter; SphAttr_t * pKillList = pSrcIndex->GetKillList (); tKillListFilter.m_bExclude = true; tKillListFilter.m_eType = SPH_FILTER_VALUES; tKillListFilter.m_uMinValue = pKillList[0]; tKillListFilter.m_uMaxValue = pKillList[nKillListSize -1]; tKillListFilter.m_sAttrName = "@id"; tKillListFilter.SetExternalValues ( pKillList, nKillListSize ); ISphFilter * pKillListFilter = sphCreateFilter ( tKillListFilter, m_tSchema, GetMVAPool(), m_sLastError ); pFilter = sphJoinFilters ( pFilter, pKillListFilter ); } ///////////////////////////////////////// // merging attributes (.spa, .spm, .sps) ///////////////////////////////////////// CSphAutoreader tDstSPM, tSrcSPM, tDstSPS, tSrcSPS; if ( !tDstSPM.Open ( GetIndexFileName("spm"), m_sLastError ) || !tSrcSPM.Open ( pSrcIndex->GetIndexFileName("spm"), m_sLastError ) || !tDstSPS.Open ( GetIndexFileName("sps"), m_sLastError ) || !tSrcSPS.Open ( pSrcIndex->GetIndexFileName("sps"), m_sLastError ) ) { return false; } CSphWriter tSPMWriter, tSPSWriter; if ( !tSPMWriter.OpenFile ( GetIndexFileName("spm.tmp"), m_sLastError ) || !tSPSWriter.OpenFile ( GetIndexFileName("sps.tmp"), m_sLastError ) ) { return false; } tSPSWriter.PutByte ( 0 ); // dummy byte, to reserve magic zero offset /// merging CSphVector dMvaLocators; CSphVector dStringLocators; for ( int i=0; i dPhantomKiller; int iTotalDocuments = 0; bool bNeedInfinum = true; m_iMergeInfinum = 0; if ( m_tSettings.m_eDocinfo==SPH_DOCINFO_EXTERN && pSrcIndex->m_tSettings.m_eDocinfo==SPH_DOCINFO_EXTERN ) { CSphWriter wrRows; if ( !wrRows.OpenFile ( GetIndexFileName("spa.tmp"), m_sLastError ) ) return false; AttrIndexBuilder_c tMinMax ( m_tSchema ); CSphVector dMinMaxBuffer ( tMinMax.GetExpectedSize ( m_tStats.m_iTotalDocuments + pSrcIndex->GetStats().m_iTotalDocuments ) ); tMinMax.Prepare ( dMinMaxBuffer.Begin(), dMinMaxBuffer.Begin() + dMinMaxBuffer.GetLength() ); m_uMinMaxIndex = 0; DWORD * pSrcRow = pSrcIndex->m_pDocinfo.GetWritePtr(); // they *can* be null if the respective index is empty DWORD * pDstRow = m_pDocinfo.GetWritePtr(); DWORD iSrcCount = 0; DWORD iDstCount = 0; tDstMVA.Read ( tDstSPM ); tSrcMVA.Read ( tSrcSPM ); CSphMatch tMatch; while ( iSrcCount < pSrcIndex->m_uDocinfo || iDstCount < m_uDocinfo ) { SphDocID_t iDstDocID, iSrcDocID; if ( iDstCount < m_uDocinfo ) { iDstDocID = DOCINFO2ID ( pDstRow ); if ( pFilter ) { tMatch.m_iDocID = iDstDocID; tMatch.m_pStatic = reinterpret_cast ( DOCINFO2ATTRS ( pDstRow ) ); tMatch.m_pDynamic = NULL; if ( !pFilter->Eval ( tMatch ) ) { pDstRow += iStride; iDstCount++; 
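// The source index's kill-list is turned into one more filter on @id above
// (exclude + external values), so destination documents that the source marked as
// killed simply fail the merge filter. Below is a standalone sketch of that
// membership test against a sorted kill-list; it is only an illustration, the real
// ISphFilter machinery is more general and the names here are ours.
#include <algorithm>
#include <cstdint>
#include <vector>

// Exclude-by-values filter over a sorted id list, the way the kill-list is used
// during merge: a document passes iff its id is NOT present in the list.
class KillListFilter_c
{
public:
	explicit KillListFilter_c ( const std::vector<uint64_t> & dKilled )
		: m_dKilled ( dKilled )	// must already be sorted; kill-lists are stored sorted
	{}

	bool Eval ( uint64_t uDocID ) const
	{
		// quick reject by range first (mirrors the min/max values set on the
		// filter settings above), then binary search
		if ( m_dKilled.empty() || uDocID<m_dKilled.front() || uDocID>m_dKilled.back() )
			return true;
		return !std::binary_search ( m_dKilled.begin(), m_dKilled.end(), uDocID );
	}

private:
	const std::vector<uint64_t> & m_dKilled;
};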
continue; } } } else iDstDocID = 0; if ( iSrcCount < pSrcIndex->m_uDocinfo ) iSrcDocID = DOCINFO2ID ( pSrcRow ); else iSrcDocID = 0; if ( ( iDstDocID && iDstDocID < iSrcDocID ) || ( iDstDocID && !iSrcDocID ) ) { while ( tDstMVA.m_iDocID && tDstMVA.m_iDocIDm_bIsEmpty ) { // one of the indexes has no documents; copy the .spa file from the other one CSphString sSrc = !m_bIsEmpty ? GetIndexFileName("spa") : pSrcIndex->GetIndexFileName("spa"); CSphString sDst = GetIndexFileName("spa.tmp"); if ( !CopyFile ( sSrc.cstr(), sDst.cstr(), m_sLastError ) ) return false; } else { // storage is not extern; create dummy .spa file CSphAutofile fdSpa ( GetIndexFileName("spa.tmp"), SPH_O_NEW, m_sLastError ); fdSpa.Close(); } // create phantom killlist filter if ( dPhantomKiller.GetLength() ) { CSphFilterSettings tKLF; tKLF.m_bExclude = true; tKLF.m_eType = SPH_FILTER_VALUES; tKLF.m_uMinValue = dPhantomKiller[0]; tKLF.m_uMaxValue = dPhantomKiller.Last(); tKLF.m_sAttrName = "@id"; tKLF.SetExternalValues ( &dPhantomKiller[0], dPhantomKiller.GetLength() ); ISphFilter * pSpaFilter = sphCreateFilter ( tKLF, m_tSchema, GetMVAPool(), m_sLastError ); pFilter = sphJoinFilters ( pFilter, pSpaFilter ); } CSphScopedPtr pScopedFilter ( pFilter ); CSphAutofile fdTmpDict ( GetIndexFileName("spi.tmp8"), SPH_O_NEW, m_sLastError, true ); CSphAutofile fdDict ( GetIndexFileName("spi.tmp"), SPH_O_NEW, m_sLastError ); if ( !m_sLastError.IsEmpty() || fdTmpDict.GetFD()<0 || fdDict.GetFD()<0 ) return false; m_pDict->DictBegin ( fdTmpDict.GetFD(), fdDict.GetFD(), 8*1024*1024 ); // FIXME? is this magic dict block constant any good?.. // merge dictionaries, doclists and hitlists if ( m_pDict->GetSettings().m_bWordDict ) { WITH_QWORD ( this, false, QwordDst, WITH_QWORD ( pSrcIndex, false, QwordSrc, { if ( !MergeWords < QwordDst, QwordSrc > ( pSrcIndex, pFilter ) ) return false; } ) ); } else { WITH_QWORD ( this, true, QwordDst, WITH_QWORD ( pSrcIndex, true, QwordSrc, { if ( !MergeWords < QwordDst, QwordSrc > ( pSrcIndex, pFilter ) ) return false; } ) ); } if ( iTotalDocuments ) m_tStats.m_iTotalDocuments = iTotalDocuments; // merge kill-lists CSphAutofile fdKillList ( GetIndexFileName("spk.tmp"), SPH_O_NEW, m_sLastError ); if ( fdKillList.GetFD () < 0 ) return false; if ( bMergeKillLists ) { // merge spk CSphVector dKillList; dKillList.Reserve ( GetKillListSize() + pSrcIndex->GetKillListSize() ); for ( int i = 0; i < pSrcIndex->GetKillListSize (); i++ ) dKillList.Add ( pSrcIndex->GetKillList () [i] ); for ( int i = 0; i < GetKillListSize (); i++ ) dKillList.Add ( GetKillList () [i] ); dKillList.Uniq (); m_iKillListSize = dKillList.GetLength (); if ( dKillList.GetLength() ) { if ( !sphWriteThrottled ( fdKillList.GetFD(), &dKillList[0], dKillList.GetLength()*sizeof(SphAttr_t), "kill_list", m_sLastError ) ) return false; } } else { m_iKillListSize = 0; } fdKillList.Close (); // finalize CSphAggregateHit tFlush; tFlush.m_iDocID = 0; tFlush.m_iWordID = 0; tFlush.m_sKeyword = (BYTE*)""; // tricky: assertion in cidxHit calls strcmp on this in case of empty index! tFlush.m_iWordPos = EMPTY_HIT; tFlush.m_dFieldMask.Unset(); cidxHit ( &tFlush, NULL ); cidxDone ( "sph.tmp", 8*1024*1024 ); // FIXME? is this magic dict block constant any good?.. 
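// NOTE (editor): as far as this function goes, all merged output has been
// written to .tmp counterparts of the index files (spa.tmp, spm.tmp, sps.tmp,
// spk.tmp, spi.tmp, plus "sph.tmp" above), so a failed merge should not touch
// the live index; moving the .tmp files into place is handled elsewhere
// (compare the per-extension ::rename loop further down in this file).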
// we're done if ( m_pProgress ) m_pProgress ( &m_tProgress, true ); return true; } ///////////////////////////////////////////////////////////////////////////// // THE SEARCHER ///////////////////////////////////////////////////////////////////////////// SphWordID_t CSphDictStar::GetWordID ( BYTE * pWord ) { char sBuf [ 16+3*SPH_MAX_WORD_LEN ]; assert ( strlen ( (const char*)pWord ) < 16+3*SPH_MAX_WORD_LEN ); m_pDict->ApplyStemmers ( pWord ); int iLen = strlen ( (const char*)pWord ); assert ( iLen < 16+3*SPH_MAX_WORD_LEN - 1 ); memcpy ( sBuf, pWord, iLen+1 ); if ( iLen ) { if ( sBuf[iLen-1]=='*' ) { sBuf[iLen-1] = '\0'; } else { sBuf[iLen] = MAGIC_WORD_TAIL; sBuf[iLen+1] = '\0'; } } return m_pDict->GetWordID ( (BYTE*)sBuf ); } SphWordID_t CSphDictStar::GetWordIDNonStemmed ( BYTE * pWord ) { return m_pDict->GetWordIDNonStemmed ( pWord ); } ////////////////////////////////////////////////////////////////////////// CSphDictStarV8::CSphDictStarV8 ( CSphDict * pDict, bool bPrefixes, bool bInfixes ) : CSphDictStar ( pDict ) , m_bPrefixes ( bPrefixes ) , m_bInfixes ( bInfixes ) { } SphWordID_t CSphDictStarV8::GetWordID ( BYTE * pWord ) { char sBuf [ 16+3*SPH_MAX_WORD_LEN ]; int iLen = strlen ( (const char*)pWord ); iLen = Min ( iLen, 16+3*SPH_MAX_WORD_LEN - 1 ); if ( !iLen ) return 0; bool bHeadStar = ( pWord[0]=='*' ); bool bTailStar = ( pWord[iLen-1]=='*' ) && ( iLen>1 ); if ( !bHeadStar && !bTailStar ) { m_pDict->ApplyStemmers ( pWord ); if ( IsStopWord ( pWord ) ) return 0; } iLen = strlen ( (const char*)pWord ); assert ( iLen < 16+3*SPH_MAX_WORD_LEN - 2 ); if ( !iLen || ( bHeadStar && iLen==1 ) ) return 0; if ( m_bInfixes ) { //////////////////////////////////// // infix or mixed infix+prefix mode //////////////////////////////////// // handle head star if ( bHeadStar ) { memcpy ( sBuf, pWord+1, iLen-- ); // chops star, copies trailing zero, updates iLen } else { sBuf[0] = MAGIC_WORD_HEAD; memcpy ( sBuf+1, pWord, ++iLen ); // copies everything incl trailing zero, updates iLen } // handle tail star if ( bTailStar ) { sBuf[--iLen] = '\0'; // got star, just chop it away } else { sBuf[iLen] = MAGIC_WORD_TAIL; // no star, add tail marker sBuf[++iLen] = '\0'; } } else { //////////////////// // prefix-only mode //////////////////// assert ( m_bPrefixes ); // always ignore head star in prefix mode if ( bHeadStar ) { pWord++; iLen--; } // handle tail star if ( !bTailStar ) { // exact word search request, always (ie. 
both in infix/prefix mode) mangles to "\1word\1" in v.8+ sBuf[0] = MAGIC_WORD_HEAD; memcpy ( sBuf+1, pWord, iLen ); sBuf[iLen+1] = MAGIC_WORD_TAIL; sBuf[iLen+2] = '\0'; iLen += 2; } else { // prefix search request, mangles to word itself (just chop away the star) memcpy ( sBuf, pWord, iLen ); sBuf[--iLen] = '\0'; } } // calc id for mangled word return m_pDict->GetWordID ( (BYTE*)sBuf, iLen, !bHeadStar && !bTailStar ); } ////////////////////////////////////////////////////////////////////////// SphWordID_t CSphDictExact::GetWordID ( BYTE * pWord ) { int iLen = strlen ( (const char*)pWord ); iLen = Min ( iLen, 16+3*SPH_MAX_WORD_LEN - 1 ); if ( !iLen ) return 0; if ( pWord[0]=='=' ) pWord[0] = MAGIC_WORD_HEAD_NONSTEMMED; if ( pWord[0]<' ' ) return m_pDict->GetWordIDNonStemmed ( pWord ); return m_pDict->GetWordID ( pWord ); } ///////////////////////////////////////////////////////////////////////////// inline bool sphGroupMatch ( SphAttr_t iGroup, const SphAttr_t * pGroups, int iGroups ) { if ( !pGroups ) return true; const SphAttr_t * pA = pGroups; const SphAttr_t * pB = pGroups+iGroups-1; if ( iGroup==*pA || iGroup==*pB ) return true; if ( iGroup<(*pA) || iGroup>(*pB) ) return false; while ( pB-pA>1 ) { const SphAttr_t * pM = pA + ((pB-pA)/2); if ( iGroup==(*pM) ) return true; if ( iGroup<(*pM) ) pB = pM; else pA = pM; } return false; } bool CSphIndex_VLN::EarlyReject ( CSphQueryContext * pCtx, CSphMatch & tMatch ) const { // might be needed even when we do not have a filter if ( pCtx->m_bLookupFilter ) CopyDocinfo ( pCtx, tMatch, FindDocinfo ( tMatch.m_iDocID ) ); pCtx->CalcFilter ( tMatch ); return pCtx->m_pFilter ? !pCtx->m_pFilter->Eval ( tMatch ) : false; } SphAttr_t * CSphIndex_VLN::GetKillList () const { return m_pKillList.GetWritePtr (); } bool CSphIndex_VLN::HasDocid ( SphDocID_t uDocid ) const { return FindDocinfo ( uDocid )!=NULL; } const DWORD * CSphIndex_VLN::FindDocinfo ( SphDocID_t uDocID ) const { if ( m_uDocinfo<=0 ) return NULL; assert ( m_tSettings.m_eDocinfo==SPH_DOCINFO_EXTERN ); assert ( !m_pDocinfo.IsEmpty() ); assert ( m_tSchema.GetAttrsCount() ); int iStride = DOCINFO_IDSIZE + m_tSchema.GetRowSize(); int iStart = 0; int iEnd = m_uDocinfo-1; if ( m_pDocinfoHash.GetLength() ) { SphDocID_t uFirst = DOCINFO2ID ( &m_pDocinfo[0] ); SphDocID_t uLast = DOCINFO2ID ( &m_pDocinfo[( int64_t ( m_uDocinfo-1 ) )*iStride] ); if ( uDocIDuLast ) return NULL; DWORD uHash = (DWORD)( ( uDocID - uFirst ) >> m_pDocinfoHash[0] ); if ( uHash > ( 1 << DOCINFO_HASH_BITS ) ) // possible in case of broken data, for instance return NULL; iStart = m_pDocinfoHash [ uHash+1 ]; iEnd = m_pDocinfoHash [ uHash+2 ] - 1; } const DWORD * pFound = NULL; if ( uDocID==DOCINFO2ID ( &m_pDocinfo [ (int64_t(iStart))*iStride ] ) ) { pFound = &m_pDocinfo [ (int64_t(iStart))*iStride ]; } else if ( uDocID==DOCINFO2ID ( &m_pDocinfo [ (int64_t(iEnd))*iStride ] ) ) { pFound = &m_pDocinfo [ (int64_t(iEnd))*iStride ]; } else { while ( iEnd-iStart>1 ) { // check if nothing found if ( uDocID < DOCINFO2ID ( &m_pDocinfo [ (int64_t(iStart))*iStride ] ) || uDocID > DOCINFO2ID ( &m_pDocinfo [ (int64_t(iEnd))*iStride ] ) ) break; assert ( uDocID > DOCINFO2ID ( &m_pDocinfo [ (int64_t(iStart))*iStride ] ) ); assert ( uDocID < DOCINFO2ID ( &m_pDocinfo [ (int64_t(iEnd))*iStride ] ) ); int iMid = iStart + (iEnd-iStart)/2; if ( uDocID==DOCINFO2ID ( &m_pDocinfo [ (int64_t(iMid))*iStride ] ) ) { pFound = &m_pDocinfo [ (int64_t(iMid))*iStride ]; break; } if ( uDocIDm_pOverrides ) ARRAY_FOREACH ( i, (*pCtx->m_pOverrides) ) { const 
CSphAttrOverride & tOverride = (*pCtx->m_pOverrides)[i]; // shortcut const CSphAttrOverride::IdValuePair_t * pEntry = tOverride.m_dValues.BinarySearch ( bind ( &CSphAttrOverride::IdValuePair_t::m_uDocID ), tMatch.m_iDocID ); tMatch.SetAttr ( pCtx->m_dOverrideOut[i], pEntry ? pEntry->m_uValue : sphGetRowAttr ( tMatch.m_pStatic, pCtx->m_dOverrideIn[i] ) ); } } static inline void CalcContextItems ( CSphMatch & tMatch, const CSphVector & dItems ) { ARRAY_FOREACH ( i, dItems ) { const CSphQueryContext::CalcItem_t & tCalc = dItems[i]; if ( tCalc.m_eType==SPH_ATTR_INTEGER ) tMatch.SetAttr ( tCalc.m_tLoc, tCalc.m_pExpr->IntEval(tMatch) ); else if ( tCalc.m_eType==SPH_ATTR_BIGINT ) tMatch.SetAttr ( tCalc.m_tLoc, tCalc.m_pExpr->Int64Eval(tMatch) ); else tMatch.SetAttrFloat ( tCalc.m_tLoc, tCalc.m_pExpr->Eval(tMatch) ); } } void CSphQueryContext::CalcFilter ( CSphMatch & tMatch ) const { CalcContextItems ( tMatch, m_dCalcFilter ); } void CSphQueryContext::CalcSort ( CSphMatch & tMatch ) const { CalcContextItems ( tMatch, m_dCalcSort ); } void CSphQueryContext::CalcFinal ( CSphMatch & tMatch ) const { CalcContextItems ( tMatch, m_dCalcFinal ); } void CSphQueryContext::SetStringPool ( const BYTE * pStrings ) { ARRAY_FOREACH ( i, m_dCalcFilter ) m_dCalcFilter[i].m_pExpr->SetStringPool ( pStrings ); ARRAY_FOREACH ( i, m_dCalcSort ) m_dCalcSort[i].m_pExpr->SetStringPool ( pStrings ); ARRAY_FOREACH ( i, m_dCalcFinal ) m_dCalcFinal[i].m_pExpr->SetStringPool ( pStrings ); } void CSphQueryContext::SetMVAPool ( const DWORD * pMva ) { ARRAY_FOREACH ( i, m_dCalcFilter ) m_dCalcFilter[i].m_pExpr->SetMVAPool ( pMva ); ARRAY_FOREACH ( i, m_dCalcSort ) m_dCalcSort[i].m_pExpr->SetMVAPool ( pMva ); ARRAY_FOREACH ( i, m_dCalcFinal ) m_dCalcFinal[i].m_pExpr->SetMVAPool ( pMva ); if ( m_pFilter ) m_pFilter->SetMVAStorage ( pMva ); if ( m_pWeightFilter ) m_pWeightFilter->SetMVAStorage ( pMva ); } bool CSphIndex_VLN::MatchExtended ( CSphQueryContext * pCtx, const CSphQuery * pQuery, int iSorters, ISphMatchSorter ** ppSorters, ISphRanker * pRanker, int iTag ) const { int iCutoff = pQuery->m_iCutoff; if ( iCutoff<=0 ) iCutoff = -1; // do searching CSphMatch * pMatch = pRanker->GetMatchesBuffer(); for ( ;; ) { int iMatches = pRanker->GetMatches(); if ( iMatches<=0 ) break; for ( int i=0; im_bLookupSort ) CopyDocinfo ( pCtx, pMatch[i], FindDocinfo ( pMatch[i].m_iDocID ) ); pCtx->CalcSort ( pMatch[i] ); if ( pCtx->m_pWeightFilter && !pCtx->m_pWeightFilter->Eval ( pMatch[i] ) ) continue; pMatch[i].m_iTag = iTag; bool bRand = false; bool bNewMatch = false; for ( int iSorter=0; iSorterm_bRandomize ) { bRand = true; pMatch[i].m_iWeight = ( sphRand() & 0xffff ); if ( pCtx->m_pWeightFilter && !pCtx->m_pWeightFilter->Eval ( pMatch[i] ) ) break; } bNewMatch |= ppSorters[iSorter]->Push ( pMatch[i] ); } if ( bNewMatch ) if ( --iCutoff==0 ) break; } if ( iCutoff==0 ) break; } return true; } ////////////////////////////////////////////////////////////////////////// bool CSphIndex_VLN::MultiScan ( const CSphQuery * pQuery, CSphQueryResult * pResult, int iSorters, ISphMatchSorter ** ppSorters, const CSphVector * pExtraFilters, int iTag ) const { assert ( pQuery->m_sQuery.IsEmpty() ); assert ( iTag>=0 ); // check if index is ready if ( !m_pPreread || !*m_pPreread ) { pResult->m_sError = "index not preread"; return false; } // check if index supports scans if ( m_tSettings.m_eDocinfo!=SPH_DOCINFO_EXTERN || !m_tSchema.GetAttrsCount() ) { pResult->m_sError = "fullscan requires extern docinfo"; return false; } // check if index has data if ( 
m_bIsEmpty || m_uDocinfo<=0 || m_pDocinfo.IsEmpty() ) return true; // start counting int64_t tmQueryStart = sphMicroTimer(); // select the sorter with max schema int iMaxSchemaSize = -1; int iMaxSchemaIndex = -1; for ( int i=0; iGetSchema().GetRowSize() > iMaxSchemaSize ) { iMaxSchemaSize = ppSorters[i]->GetSchema().GetRowSize(); iMaxSchemaIndex = i; } // setup calculations and result schema CSphQueryContext tCtx; if ( !tCtx.SetupCalc ( pResult, ppSorters[iMaxSchemaIndex]->GetSchema(), m_tSchema, GetMVAPool() ) ) return false; // set string pool for string on_sort expression fix up tCtx.SetStringPool ( m_pStrings.GetWritePtr() ); // setup filters if ( !tCtx.CreateFilters ( true, &pQuery->m_dFilters, pResult->m_tSchema, GetMVAPool(), pResult->m_sError ) ) return false; if ( !tCtx.CreateFilters ( true, pExtraFilters, pResult->m_tSchema, GetMVAPool(), pResult->m_sError ) ) return false; // check if we can early reject the whole index if ( tCtx.m_pFilter && m_uDocinfoIndex ) { DWORD uStride = DOCINFO_IDSIZE + m_tSchema.GetRowSize(); DWORD * pMinEntry = const_cast ( &m_pDocinfoIndex [ 2*m_uDocinfoIndex*uStride ] ); DWORD * pMaxEntry = pMinEntry + uStride; if ( !tCtx.m_pFilter->EvalBlock ( pMinEntry, pMaxEntry ) ) { pResult->m_iQueryTime += (int)( ( sphMicroTimer()-tmQueryStart )/1000 ); return true; } } // setup lookup tCtx.m_bLookupFilter = false; tCtx.m_bLookupSort = true; // setup sorters vs. MVA for ( int i=0; iSetMVAPool ( m_pMva.GetWritePtr() ); (ppSorters[i])->SetStringPool ( m_pStrings.GetWritePtr() ); } // setup overrides if ( !tCtx.SetupOverrides ( pQuery, pResult, m_tSchema ) ) return false; // prepare to work them rows bool bRandomize = ppSorters[0]->m_bRandomize; CSphMatch tMatch; tMatch.Reset ( pResult->m_tSchema.GetDynamicSize() ); tMatch.m_iWeight = pQuery->GetIndexWeight ( m_sIndexName.cstr() ); tMatch.m_iTag = tCtx.m_dCalcFinal.GetLength() ? -1 : iTag; // optimize direct lookups by id // run full scan with block and row filtering for everything else if ( pQuery->m_dFilters.GetLength()==1 && pQuery->m_dFilters[0].m_eType==SPH_FILTER_VALUES && pQuery->m_dFilters[0].m_bExclude==false && pQuery->m_dFilters[0].m_sAttrName=="@id" && !pExtraFilters ) { // run id lookups for ( int i=0; im_dFilters[0].GetNumValues(); i++ ) { SphDocID_t uDocid = (SphDocID_t) pQuery->m_dFilters[0].GetValue(i); const DWORD * pRow = FindDocinfo ( uDocid ); if ( !pRow ) continue; assert ( uDocid==DOCINFO2ID(pRow) ); tMatch.m_iDocID = uDocid; CopyDocinfo ( &tCtx, tMatch, pRow ); // submit match to sorters tCtx.CalcSort ( tMatch ); if ( bRandomize ) tMatch.m_iWeight = ( sphRand() & 0xffff ); for ( int iSorter=0; iSorterPush ( tMatch ); } } else { // do scan DWORD uStride = DOCINFO_IDSIZE + m_tSchema.GetRowSize(); DWORD uStart = pQuery->m_bReverseScan ? ( m_uDocinfoIndex-1 ) : 0; int iStep = pQuery->m_bReverseScan ? 
-1 : 1; int iCutoff = pQuery->m_iCutoff; if ( iCutoff<=0 ) iCutoff = -1; for ( DWORD uIndexEntry=uStart; uIndexEntryEvalBlock ( pMin, pMax ) ) continue; // row-level filtering const DWORD * pBlockStart = &m_pDocinfo [ ( int64_t ( uIndexEntry ) )*uStride*DOCINFO_INDEX_FREQ ]; const DWORD * pBlockEnd = &m_pDocinfo [ ( int64_t ( Min ( ( uIndexEntry+1 )*DOCINFO_INDEX_FREQ, m_uDocinfo ) - 1 ) )*uStride ]; for ( const DWORD * pDocinfo=pBlockStart; pDocinfo<=pBlockEnd; pDocinfo+=uStride ) { tMatch.m_iDocID = DOCINFO2ID ( pDocinfo ); CopyDocinfo ( &tCtx, tMatch, pDocinfo ); // early filter only (no late filters in full-scan because of no @weight) tCtx.CalcFilter ( tMatch ); if ( tCtx.m_pFilter && !tCtx.m_pFilter->Eval ( tMatch ) ) continue; // submit match to sorters tCtx.CalcSort ( tMatch ); if ( bRandomize ) tMatch.m_iWeight = ( sphRand() & 0xffff ); bool bNewMatch = false; for ( int iSorter=0; iSorterPush ( tMatch ); // handle cutoff if ( bNewMatch && --iCutoff==0 ) { uIndexEntry = m_uDocinfoIndex; // outer break break; } } } } // do final expression calculations if ( tCtx.m_dCalcFinal.GetLength() ) for ( int iSorter=0; iSorterFinalize(); const int iCount = pTop->GetLength (); if ( !iCount ) continue; CSphMatch * const pTail = pHead + iCount; for ( CSphMatch * pCur=pHead; pCurm_iTag<0 ) { tCtx.CalcFinal ( *pCur ); pCur->m_iTag = iTag; } } } // done pResult->m_pMva = m_pMva.GetWritePtr(); pResult->m_pStrings = m_pStrings.GetWritePtr(); pResult->m_iQueryTime += (int)( ( sphMicroTimer()-tmQueryStart )/1000 ); return true; } ////////////////////////////////////////////////////////////////////////////// ISphQword * DiskIndexQwordSetup_c::QwordSpawn ( const XQKeyword_t & tWord ) const { WITH_QWORD ( m_pIndex, false, Qword, return new Qword ( tWord.m_bExpanded, tWord.m_bExcluded ) ); return NULL; } bool DiskIndexQwordSetup_c::QwordSetup ( ISphQword * pWord ) const { WITH_QWORD ( m_pIndex, false, Qword, return Setup ( pWord ) ); return false; } template < class Qword > bool DiskIndexQwordSetup_c::Setup ( ISphQword * pWord ) const { Qword * pMyWord = dynamic_cast ( pWord ); if ( !pMyWord ) return false; Qword & tWord = *pMyWord; // setup attrs tWord.m_tDoc.Reset ( m_iDynamicRowitems ); tWord.m_iMinID = m_tMin.m_iDocID; tWord.m_tDoc.m_iDocID = m_tMin.m_iDocID; if ( m_eDocinfo==SPH_DOCINFO_INLINE ) { tWord.m_iInlineAttrs = m_iInlineRowitems; tWord.m_pInlineFixup = m_tMin.m_pDynamic; } else { tWord.m_iInlineAttrs = 0; tWord.m_pInlineFixup = NULL; } // setup stats tWord.m_iDocs = 0; tWord.m_iHits = 0; CSphIndex_VLN * pIndex = (CSphIndex_VLN *)m_pIndex; // binary search through checkpoints for a one whose range matches word ID assert ( pIndex->m_pPreread && *pIndex->m_pPreread ); assert ( !pIndex->m_bPreloadWordlist || !pIndex->m_tWordlist.m_pBuf.IsEmpty() ); // empty index? if ( !pIndex->m_tWordlist.m_dCheckpoints.GetLength() ) return false; const char * sWord = tWord.m_sDictWord.cstr(); const bool bWordDict = pIndex->m_pDict->GetSettings().m_bWordDict; int iWordLen = sWord ? 
strlen ( sWord ) : 0; if ( pIndex->m_bEnableStar && bWordDict && tWord.m_sWord.Ends("*") ) iWordLen = Max ( iWordLen-1, 0 ); // leading special symbols trimming if ( tWord.m_sDictWord.Begins("*") ) { sWord++; iWordLen = Max ( iWordLen-1, 0 ); } const CSphWordlistCheckpoint * pCheckpoint = pIndex->m_tWordlist.FindCheckpoint ( sWord, iWordLen, tWord.m_iWordID, false ); if ( !pCheckpoint ) return false; // decode wordlist chunk const BYTE * pBuf = pIndex->m_tWordlist.AcquireDict ( pCheckpoint, m_tWordlist.GetFD(), m_pDictBuf ); assert ( pBuf ); WordDictInfo_t tResWord; WordReaderContext_t tReaderCtx; const bool bWordFound = bWordDict ? pIndex->m_tWordlist.GetWord ( pBuf, sWord, iWordLen, tResWord, false, tReaderCtx )!=NULL : pIndex->m_tWordlist.GetWord ( pBuf, tWord.m_iWordID, tResWord ); if ( bWordFound ) { const ESphHitless eMode = pIndex->m_tSettings.m_eHitless; tWord.m_iDocs = eMode==SPH_HITLESS_SOME ? ( tResWord.m_iDocs & 0x7FFFFFFF ) : tResWord.m_iDocs; tWord.m_iHits = tResWord.m_iHits; tWord.m_bHasHitlist = ( eMode==SPH_HITLESS_NONE ) || ( eMode==SPH_HITLESS_SOME && !( tResWord.m_iDocs & 0x80000000 ) ); if ( m_bSetupReaders ) { tWord.m_rdDoclist.SetBuffers ( g_iReadBuffer, g_iReadUnhinted ); tWord.m_rdDoclist.SetFile ( m_tDoclist ); tWord.m_rdDoclist.SeekTo ( tResWord.m_uOff, tResWord.m_iDoclistHint ); tWord.m_rdHitlist.SetBuffers ( g_iReadBuffer, g_iReadUnhinted ); tWord.m_rdHitlist.SetFile ( m_tHitlist ); } } return bWordFound; } ////////////////////////////////////////////////////////////////////////////// bool CSphIndex_VLN::Lock () { CSphString sName = GetIndexFileName("spl"); sphLogDebug ( "Locking the index via file %s", sName.cstr() ); if ( m_iLockFD<0 ) { m_iLockFD = ::open ( sName.cstr(), SPH_O_NEW, 0644 ); if ( m_iLockFD<0 ) { m_sLastError.SetSprintf ( "failed to open %s: %s", sName.cstr(), strerror(errno) ); sphLogDebug ( "failed to open %s: %s", sName.cstr(), strerror(errno) ); return false; } } if ( !sphLockEx ( m_iLockFD, false ) ) { m_sLastError.SetSprintf ( "failed to lock %s: %s", sName.cstr(), strerror(errno) ); ::close ( m_iLockFD ); m_iLockFD = -1; return false; } sphLogDebug ( "lock %s success", sName.cstr() ); return true; } void CSphIndex_VLN::Unlock() { CSphString sName = GetIndexFileName("spl"); sphLogDebug ( "Unlocking the index (lock %s)", sName.cstr() ); if ( m_iLockFD>=0 ) { sphLogDebug ( "File ID ok, closing lock FD %d, unlinking %s", m_iLockFD, sName.cstr() ); ::close ( m_iLockFD ); ::unlink ( sName.cstr() ); m_iLockFD = -1; } } bool CSphIndex_VLN::Mlock () { bool bRes = true; bRes &= m_pDocinfo.Mlock ( "docinfo", m_sLastError ); if ( m_bPreloadWordlist ) bRes &= m_tWordlist.m_pBuf.Mlock ( "wordlist", m_sLastError ); bRes &= m_pMva.Mlock ( "mva", m_sLastError ); bRes &= m_pStrings.Mlock ( "strings", m_sLastError ); return bRes; } void CSphIndex_VLN::Dealloc () { if ( !m_bPreallocated ) return; m_tDoclistFile.Close (); m_tHitlistFile.Close (); m_pDocinfo.Reset (); m_pDocinfoHash.Reset (); m_pMva.Reset (); m_pStrings.Reset (); m_pKillList.Reset (); m_tWordlist.Reset (); m_uDocinfo = 0; m_uMinMaxIndex = 0; m_tSettings.m_eDocinfo = SPH_DOCINFO_NONE; m_bPreallocated = false; SafeDelete ( m_pTokenizer ); SafeDelete ( m_pDict ); if ( m_iIndexTag>=0 && g_pMvaArena ) g_MvaArena.TaggedFreeTag ( m_iIndexTag ); m_iIndexTag = -1; m_pPreread = NULL; m_pAttrsStatus = NULL; #ifndef NDEBUG m_dShared.Reset (); #endif } void LoadIndexSettings ( CSphIndexSettings & tSettings, CSphReader & tReader, DWORD uVersion ) { if ( uVersion>=8 ) { tSettings.m_iMinPrefixLen = 
tReader.GetDword (); tSettings.m_iMinInfixLen = tReader.GetDword (); } else if ( uVersion>=6 ) { bool bPrefixesOnly = ( tReader.GetByte ()!=0 ); tSettings.m_iMinPrefixLen = tReader.GetDword (); tSettings.m_iMinInfixLen = 0; if ( !bPrefixesOnly ) Swap ( tSettings.m_iMinPrefixLen, tSettings.m_iMinInfixLen ); } if ( uVersion>=9 ) { tSettings.m_bHtmlStrip = !!tReader.GetByte (); tSettings.m_sHtmlIndexAttrs = tReader.GetString (); tSettings.m_sHtmlRemoveElements = tReader.GetString (); } if ( uVersion>=12 ) tSettings.m_bIndexExactWords = !!tReader.GetByte (); if ( uVersion>=18 ) tSettings.m_eHitless = (ESphHitless)tReader.GetDword(); if ( uVersion>=19 ) tSettings.m_eHitFormat = (ESphHitFormat)tReader.GetDword(); else // force plain format for old indices tSettings.m_eHitFormat = SPH_HIT_FORMAT_PLAIN; if ( uVersion>=21 ) tSettings.m_bIndexSP = !!tReader.GetByte(); if ( uVersion>=22 ) { tSettings.m_sZones = tReader.GetString(); if ( uVersion<25 && !tSettings.m_sZones.IsEmpty() ) tSettings.m_sZones.SetSprintf ( "%s*", tSettings.m_sZones.cstr() ); } if ( uVersion>=23 ) { tSettings.m_iBoundaryStep = (int)tReader.GetDword(); tSettings.m_iStopwordStep = (int)tReader.GetDword(); } } bool CSphIndex_VLN::LoadHeader ( const char * sHeaderName, bool bStripPath, CSphString & sWarning ) { const int MAX_HEADER_SIZE = 32768; CSphAutoArray dCacheInfo ( MAX_HEADER_SIZE ); CSphAutoreader rdInfo ( dCacheInfo, MAX_HEADER_SIZE ); // to avoid mallocs if ( !rdInfo.Open ( sHeaderName, m_sLastError ) ) return false; // version DWORD uHeader = rdInfo.GetDword (); if ( uHeader!=INDEX_MAGIC_HEADER ) { m_sLastError.SetSprintf ( "%s is invalid header file (too old index version?)", sHeaderName ); return false; } m_uVersion = rdInfo.GetDword(); if ( m_uVersion==0 || m_uVersion>INDEX_FORMAT_VERSION ) { m_sLastError.SetSprintf ( "%s is v.%d, binary is v.%d", sHeaderName, m_uVersion, INDEX_FORMAT_VERSION ); return false; } // bits m_bUse64 = false; if ( m_uVersion>=2 ) m_bUse64 = ( rdInfo.GetDword ()!=0 ); if ( m_bUse64!=USE_64BIT ) { #if USE_64BIT // TODO: may be do this param conditional and push it into the config? m_bId32to64 = true; #else m_sLastError.SetSprintf ( "'%s' is id%d, and this binary is id%d", GetIndexFileName("sph").cstr(), m_bUse64 ? 64 : 32, USE_64BIT ? 
64 : 32 ); return false; #endif } // docinfo m_tSettings.m_eDocinfo = (ESphDocinfo) rdInfo.GetDword(); // schema // 4th arg means that inline attributes need be dynamic in searching time too ReadSchema ( rdInfo, m_tSchema, m_uVersion, m_tSettings.m_eDocinfo==SPH_DOCINFO_INLINE ); // check schema for dupes for ( int iAttr=1; iAttrReset ( m_tSchema.GetRowSize() ); if ( m_uVersion>=2 ) m_pMin->m_iDocID = (SphDocID_t) rdInfo.GetOffset (); // v2+; losing high bits when !USE_64 is intentional, check is performed on bUse64 above else m_pMin->m_iDocID = rdInfo.GetDword(); // v1 if ( m_tSettings.m_eDocinfo==SPH_DOCINFO_INLINE ) rdInfo.GetBytes ( m_pMin->m_pDynamic, sizeof(CSphRowitem)*m_tSchema.GetRowSize() ); // wordlist checkpoints m_tWordlist.m_iCheckpointsPos = rdInfo.GetOffset(); m_tWordlist.m_dCheckpoints.Reset ( rdInfo.GetDword() ); // index stats m_tStats.m_iTotalDocuments = rdInfo.GetDword (); m_tStats.m_iTotalBytes = rdInfo.GetOffset (); LoadIndexSettings ( m_tSettings, rdInfo, m_uVersion ); if ( m_uVersion<9 ) m_bStripperInited = false; if ( m_uVersion>=9 ) { // tokenizer stuff CSphTokenizerSettings tSettings; LoadTokenizerSettings ( rdInfo, tSettings, m_uVersion, sWarning ); if ( bStripPath ) StripPath ( tSettings.m_sSynonymsFile ); ISphTokenizer * pTokenizer = ISphTokenizer::Create ( tSettings, m_sLastError ); if ( !pTokenizer ) return false; // dictionary stuff CSphDictSettings tDictSettings; LoadDictionarySettings ( rdInfo, tDictSettings, m_uVersion, sWarning ); if ( m_bId32to64 ) tDictSettings.m_bCrc32 = true; if ( bStripPath ) { StripPath ( tDictSettings.m_sStopwords ); StripPath ( tDictSettings.m_sWordforms ); } CSphDict * pDict = tDictSettings.m_bWordDict ? sphCreateDictionaryKeywords ( tDictSettings, pTokenizer, m_sLastError, m_sIndexName.cstr() ) : sphCreateDictionaryCRC ( tDictSettings, pTokenizer, m_sLastError, m_sIndexName.cstr() ); if ( !pDict ) return false; SetDictionary ( pDict ); ISphTokenizer * pTokenFilter = ISphTokenizer::CreateTokenFilter ( pTokenizer, pDict->GetMultiWordforms () ); SetTokenizer ( pTokenFilter ? pTokenFilter : pTokenizer ); } else { if ( m_bId32to64 ) { m_sLastError.SetSprintf ( "too old id32 index; can not be loaded by this id64 binary" ); return false; } } if ( m_uVersion>=10 ) m_iKillListSize = rdInfo.GetDword (); if ( m_uVersion>=20 ) m_uMinMaxIndex = rdInfo.GetDword (); if ( rdInfo.GetErrorFlag() ) m_sLastError.SetSprintf ( "%s: failed to parse header (unexpected eof)", sHeaderName ); return !rdInfo.GetErrorFlag(); } void CSphIndex_VLN::DebugDumpHeader ( FILE * fp, const char * sHeaderName, bool bConfig ) { CSphString sWarning; if ( !LoadHeader ( sHeaderName, false, sWarning ) ) { fprintf ( fp, "FATAL: failed to load header: %s.\n", m_sLastError.cstr() ); return; } if ( !sWarning.IsEmpty () ) fprintf ( fp, "WARNING: %s\n", sWarning.cstr () ); /////////////////////////////////////////////// // print header in index config section format /////////////////////////////////////////////// if ( bConfig ) { fprintf ( fp, "\nsource $dump\n{\n" ); fprintf ( fp, "\tsql_query = SELECT id \\\n" ); ARRAY_FOREACH ( i, m_tSchema.m_dFields ) fprintf ( fp, "\t, %s \\\n", m_tSchema.m_dFields[i].m_sName.cstr() ); for ( int i=0; iGetSettings (); fprintf ( fp, "\tcharset_type = %s\n", tSettings.m_iType==TOKENIZER_SBCS ? 
"sbcs" : "utf-8" ); fprintf ( fp, "\tcharset_table = %s\n", tSettings.m_sCaseFolding.cstr () ); if ( tSettings.m_iMinWordLen>1 ) fprintf ( fp, "\tmin_word_len = %d\n", tSettings.m_iMinWordLen ); if ( tSettings.m_iNgramLen && !tSettings.m_sNgramChars.IsEmpty() ) fprintf ( fp, "\tngram_len = %d\nngram_chars = %s\n", tSettings.m_iNgramLen, tSettings.m_sNgramChars.cstr () ); if ( !tSettings.m_sSynonymsFile.IsEmpty() ) fprintf ( fp, "\texceptions = %s\n", tSettings.m_sSynonymsFile.cstr () ); if ( !tSettings.m_sBoundary.IsEmpty() ) fprintf ( fp, "\tphrase_boundary = %s\n", tSettings.m_sBoundary.cstr () ); if ( !tSettings.m_sIgnoreChars.IsEmpty() ) fprintf ( fp, "\tignore_chars = %s\n", tSettings.m_sIgnoreChars.cstr () ); if ( !tSettings.m_sBlendChars.IsEmpty() ) fprintf ( fp, "\tblend_chars = %s\n", tSettings.m_sBlendChars.cstr () ); if ( !tSettings.m_sBlendMode.IsEmpty() ) fprintf ( fp, "\tblend_mode = %s\n", tSettings.m_sBlendMode.cstr () ); } if ( m_pDict ) { const CSphDictSettings & tSettings = m_pDict->GetSettings (); if ( tSettings.m_bWordDict ) fprintf ( fp, "\tdict = keywords\n" ); if ( !tSettings.m_sMorphology.IsEmpty() ) fprintf ( fp, "\tmorphology = %s\n", tSettings.m_sMorphology.cstr () ); if ( !tSettings.m_sStopwords.IsEmpty() ) fprintf ( fp, "\tstopwords = %s\n", tSettings.m_sStopwords.cstr () ); if ( !tSettings.m_sWordforms.IsEmpty() ) fprintf ( fp, "\twordforms: %s\n", tSettings.m_sWordforms.cstr () ); if ( tSettings.m_iMinStemmingLen>1 ) fprintf ( fp, "\tmin_stemming_len = %d\n", tSettings.m_iMinStemmingLen ); } fprintf ( fp, "}\n" ); return; } /////////////////////////////////////////////// // print header and stats in "readable" format /////////////////////////////////////////////// fprintf ( fp, "version: %d\n", m_uVersion ); fprintf ( fp, "idbits: %d\n", m_bUse64 ? 
64 : 32 ); fprintf ( fp, "docinfo: " ); switch ( m_tSettings.m_eDocinfo ) { case SPH_DOCINFO_NONE: fprintf ( fp, "none\n" ); break; case SPH_DOCINFO_INLINE: fprintf ( fp, "inline\n" ); break; case SPH_DOCINFO_EXTERN: fprintf ( fp, "extern\n" ); break; default: fprintf ( fp, "unknown (value=%d)\n", m_tSettings.m_eDocinfo ); break; } fprintf ( fp, "fields: %d\n", m_tSchema.m_dFields.GetLength() ); ARRAY_FOREACH ( i, m_tSchema.m_dFields ) fprintf ( fp, " field %d: %s\n", i, m_tSchema.m_dFields[i].m_sName.cstr() ); fprintf ( fp, "attrs: %d\n", m_tSchema.GetAttrsCount() ); for ( int i=0; iGetSettings (); fprintf ( fp, "tokenizer-type: %d\n", tSettings.m_iType ); fprintf ( fp, "tokenizer-case-folding: %s\n", tSettings.m_sCaseFolding.cstr () ); fprintf ( fp, "tokenizer-min-word-len: %d\n", tSettings.m_iMinWordLen ); fprintf ( fp, "tokenizer-ngram-chars: %s\n", tSettings.m_sNgramChars.cstr () ); fprintf ( fp, "tokenizer-ngram-len: %d\n", tSettings.m_iNgramLen ); fprintf ( fp, "tokenizer-exceptions: %s\n", tSettings.m_sSynonymsFile.cstr () ); fprintf ( fp, "tokenizer-phrase-boundary: %s\n", tSettings.m_sBoundary.cstr () ); fprintf ( fp, "tokenizer-ignore-chars: %s\n", tSettings.m_sIgnoreChars.cstr () ); fprintf ( fp, "tokenizer-blend-chars: %s\n", tSettings.m_sBlendChars.cstr () ); fprintf ( fp, "tokenizer-blend-mode: %s\n", tSettings.m_sBlendMode.cstr () ); } if ( m_pDict ) { const CSphDictSettings & tSettings = m_pDict->GetSettings (); fprintf ( fp, "dictionary-morphology: %s\n", tSettings.m_sMorphology.cstr () ); fprintf ( fp, "dictionary-stopwords: %s\n", tSettings.m_sStopwords.cstr () ); fprintf ( fp, "dictionary-wordforms: %s\n", tSettings.m_sWordforms.cstr () ); fprintf ( fp, "min-stemming-len: %d\n", tSettings.m_iMinStemmingLen ); } fprintf ( fp, "killlist-size: %d\n", m_iKillListSize ); } void CSphIndex_VLN::DebugDumpDocids ( FILE * fp ) { if ( m_tSettings.m_eDocinfo!=SPH_DOCINFO_EXTERN ) { fprintf ( fp, "FATAL: docids dump only supported for docinfo=extern\n" ); return; } const int iRowStride = DOCINFO_IDSIZE + m_tSchema.GetRowSize(); const DWORD uNumMinMaxRow = ( m_uVersion>=20 ) ? 
( 2*(1+m_uDocinfoIndex)*iRowStride ) : 0; const int64_t uNumRows = (m_pDocinfo.GetNumEntries()-uNumMinMaxRow) / iRowStride; // all 32bit, as we don't expect 2 billion documents per single physical index const uint64_t uDocinfoSize = iRowStride*size_t(m_uDocinfo)*sizeof(DWORD); const uint64_t uMinmaxSize = uNumMinMaxRow*sizeof(CSphRowitem); fprintf ( fp, "docinfo-bytes: docinfo="UINT64_FMT", min-max="UINT64_FMT", total="UINT64_FMT"\n" , uDocinfoSize, uMinmaxSize, (uint64_t)m_pDocinfo.GetLength() ); fprintf ( fp, "docinfo-stride: %d\n", (int)(iRowStride*sizeof(DWORD)) ); fprintf ( fp, "docinfo-rows: "INT64_FMT"\n", uNumRows ); if ( !m_pDocinfo.GetNumEntries() ) return; DWORD * pDocinfo = m_pDocinfo.GetWritePtr(); for ( DWORD uRow=0; uRow ( fp, sKeyword, bID ) ); } template < class Qword > void CSphIndex_VLN::DumpHitlist ( FILE * fp, const char * sKeyword, bool bID ) { // get keyword id SphWordID_t uWordID = 0; BYTE * sTok = NULL; if ( !bID ) { CSphString sBuf ( sKeyword ); m_pTokenizer->SetBuffer ( (BYTE*)sBuf.cstr(), strlen ( sBuf.cstr() ) ); sTok = m_pTokenizer->GetToken(); if ( !sTok ) sphDie ( "keyword=%s, no token (too short?)", sKeyword ); uWordID = m_pDict->GetWordID ( sTok ); if ( !uWordID ) sphDie ( "keyword=%s, tok=%s, no wordid (stopped?)", sKeyword, sTok ); fprintf ( fp, "keyword=%s, tok=%s, wordid="UINT64_FMT"\n", sKeyword, sTok, uint64_t(uWordID) ); } else { uWordID = (SphWordID_t) strtoull ( sKeyword, NULL, 10 ); if ( !uWordID ) sphDie ( "failed to convert keyword=%s to id (must be integer)", sKeyword ); fprintf ( fp, "wordid="UINT64_FMT"\n", uint64_t(uWordID) ); } // open files CSphAutofile tDoclist, tHitlist, tWordlist; if ( tDoclist.Open ( GetIndexFileName("spd"), SPH_O_READ, m_sLastError ) < 0 ) sphDie ( "failed to open doclist: %s", m_sLastError.cstr() ); if ( tHitlist.Open ( GetIndexFileName ( m_uVersion>=3 ? "spp" : "spd" ), SPH_O_READ, m_sLastError ) < 0 ) sphDie ( "failed to open hitlist: %s", m_sLastError.cstr() ); if ( tWordlist.Open ( GetIndexFileName ( "spi" ), SPH_O_READ, m_sLastError ) < 0 ) sphDie ( "failed to open wordlist: %s", m_sLastError.cstr() ); // aim DiskIndexQwordSetup_c tTermSetup ( tDoclist, tHitlist, tWordlist, m_bPreloadWordlist ? 0 : m_tWordlist.m_iMaxChunk ); tTermSetup.m_pDict = m_pDict; tTermSetup.m_pIndex = this; tTermSetup.m_eDocinfo = m_tSettings.m_eDocinfo; tTermSetup.m_tMin.Clone ( *m_pMin, m_tSchema.GetRowSize() ); tTermSetup.m_bSetupReaders = true; Qword tKeyword ( false, false ); tKeyword.m_tDoc.m_iDocID = m_pMin->m_iDocID; tKeyword.m_iWordID = uWordID; tKeyword.m_sWord = sKeyword; tKeyword.m_sDictWord = (const char *)sTok; if ( !tTermSetup.QwordSetup ( &tKeyword ) ) sphDie ( "failed to setup keyword" ); int iSize = m_tSchema.GetRowSize(); CSphVector dAttrs ( iSize ); // press play on tape for ( ;; ) { tKeyword.GetNextDoc ( iSize ? &dAttrs[0] : NULL ); if ( !tKeyword.m_tDoc.m_iDocID ) break; tKeyword.SeekHitlist ( tKeyword.m_iHitlistPos ); int iHits = 0; if ( tKeyword.m_bHasHitlist ) for ( Hitpos_t uHit = tKeyword.GetNextHit(); uHit!=EMPTY_HIT; uHit = tKeyword.GetNextHit() ) { fprintf ( fp, "doc="DOCID_FMT", hit=0x%08x\n", tKeyword.m_tDoc.m_iDocID, uHit ); // FIXME? 
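// (editor note: each in-document hit is dumped as the raw 32-bit hit position
//  word (Hitpos_t), hence the 0x%08x format; iHits is counted so that documents
//  with an empty or absent hitlist get the separate "NO HITS" line right below)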
iHits++; } if ( !iHits ) { uint64_t uOff = tKeyword.m_iHitlistPos; fprintf ( fp, "doc="DOCID_FMT", NO HITS, inline=%d, off="UINT64_FMT"\n", tKeyword.m_tDoc.m_iDocID, (int)(uOff>>63), (uOff<<1)>>1 ); } } } bool CSphIndex_VLN::Prealloc ( bool bMlock, bool bStripPath, CSphString & sWarning ) { MEMORY ( SPH_MEM_IDX_DISK ); // reset Dealloc (); // always keep shared variables flag if ( m_dShared.IsEmpty() ) { if ( !m_dShared.Alloc ( SPH_SHARED_VARS_COUNT, m_sLastError, sWarning ) ) return false; } memset ( m_dShared.GetWritePtr(), 0, m_dShared.GetLength() ); m_pPreread = m_dShared.GetWritePtr()+0; m_pAttrsStatus = m_dShared.GetWritePtr()+1; // set new locking flag m_pDocinfo.SetMlock ( bMlock ); m_tWordlist.m_pBuf.SetMlock ( bMlock ); m_pMva.SetMlock ( bMlock ); m_pStrings.SetMlock ( bMlock ); m_pKillList.SetMlock ( bMlock ); // preload schema if ( !LoadHeader ( GetIndexFileName("sph").cstr(), bStripPath, sWarning ) ) return false; // verify that data files are readable if ( !sphIsReadable ( GetIndexFileName("spd").cstr(), &m_sLastError ) ) return false; if ( m_uVersion>=3 && !sphIsReadable ( GetIndexFileName("spp").cstr(), &m_sLastError ) ) return false; ///////////////////// // prealloc wordlist ///////////////////// // try to open wordlist file in all cases CSphAutofile tWordlist ( GetIndexFileName("spi"), SPH_O_READ, m_sLastError ); if ( tWordlist.GetFD()<0 ) return false; m_tWordlist.m_iSize = tWordlist.GetSize ( 1, true, m_sLastError ); if ( m_tWordlist.m_iSize<0 ) return false; m_bIsEmpty = ( m_tWordlist.m_iSize<=1 ); if ( m_bIsEmpty!=( m_tWordlist.m_dCheckpoints.GetLength()==0 ) ) sphWarning ( "wordlist size mismatch (size="INT64_FMT", checkpoints=%d)", m_tWordlist.m_iSize, m_tWordlist.m_dCheckpoints.GetLength() ); // make sure checkpoints are loadable // pre-11 indices use different offset type (this is fixed up later during the loading) assert ( m_tWordlist.m_iCheckpointsPos>0 ); // prealloc wordlist only !!! no need to load checkpoints here to if ( m_bPreloadWordlist ) if ( !m_tWordlist.m_pBuf.Alloc ( m_tWordlist.m_iCheckpointsPos, m_sLastError, sWarning ) ) return false; // preopen if ( m_bKeepFilesOpen ) { if ( m_tDoclistFile.Open ( GetIndexFileName("spd"), SPH_O_READ, m_sLastError ) < 0 ) return false; if ( m_tHitlistFile.Open ( GetIndexFileName ( m_uVersion>=3 ? "spp" : "spd" ), SPH_O_READ, m_sLastError ) < 0 ) return false; if ( !m_bPreloadWordlist && m_tWordlist.m_tFile.Open ( GetIndexFileName("spi"), SPH_O_READ, m_sLastError ) < 0 ) return false; } ///////////////////// // prealloc docinfos ///////////////////// if ( m_tSettings.m_eDocinfo==SPH_DOCINFO_EXTERN && !m_bIsEmpty ) { ///////////// // attr data ///////////// int iStride = DOCINFO_IDSIZE + m_tSchema.GetRowSize(); int iStride2 = iStride-1; // id64 - 1 DWORD = id32 int iEntrySize = sizeof(DWORD)*iStride; CSphAutofile tDocinfo ( GetIndexFileName("spa"), SPH_O_READ, m_sLastError ); if ( tDocinfo.GetFD()<0 ) return false; DWORD iDocinfoSize = DWORD ( tDocinfo.GetSize ( iEntrySize, true, m_sLastError ) / sizeof(DWORD) ); if ( iDocinfoSize<0 ) return false; DWORD iRealDocinfoSize = m_uMinMaxIndex ? 
m_uMinMaxIndex : iDocinfoSize; // intentionally losing data; we don't support more than 4B documents per instance yet m_uDocinfo = (DWORD)( iRealDocinfoSize / iStride ); if ( iRealDocinfoSize!=m_uDocinfo*iStride && !m_bId32to64 ) { m_sLastError.SetSprintf ( "docinfo size check mismatch (4B document limit hit?)" ); return false; } if ( m_bId32to64 ) { // check also the case of id32 here, and correct m_uDocinfo for it m_uDocinfo = (DWORD)( iRealDocinfoSize / iStride2 ); if ( iRealDocinfoSize!=m_uDocinfo*iStride2 ) { m_sLastError.SetSprintf ( "docinfo size check mismatch (4B document limit hit?)" ); return false; } m_uMinMaxIndex = m_uMinMaxIndex / iStride2 * iStride; } if ( m_uVersion < 20 ) { if ( m_bId32to64 ) iDocinfoSize = iDocinfoSize / iStride2 * iStride; m_uDocinfoIndex = ( m_uDocinfo+DOCINFO_INDEX_FREQ-1 ) / DOCINFO_INDEX_FREQ; // prealloc docinfo if ( !m_pDocinfo.Alloc ( iDocinfoSize + 2*(1+m_uDocinfoIndex)*iStride + ( m_bId32to64 ? m_uDocinfo : 0 ), m_sLastError, sWarning ) ) return false; m_pDocinfoIndex = m_pDocinfo.GetWritePtr()+iDocinfoSize; } else { if ( iDocinfoSize < iRealDocinfoSize ) { m_sLastError.SetSprintf ( "precomputed chunk size check mismatch" ); return false; } m_uDocinfoIndex = ( ( iDocinfoSize - iRealDocinfoSize ) / (m_bId32to64?iStride2:iStride) / 2 ) - 1; // prealloc docinfo if ( !m_pDocinfo.Alloc ( iDocinfoSize + ( m_bId32to64 ? ( 2 + m_uDocinfo + 2*m_uDocinfoIndex ) : 0 ), m_sLastError, sWarning ) ) return false; #if PARANOID DWORD uDocinfoIndex = ( m_uDocinfo+DOCINFO_INDEX_FREQ-1 ) / DOCINFO_INDEX_FREQ; assert ( uDocinfoIndex==m_uDocinfoIndex ); #endif m_pDocinfoIndex = m_pDocinfo.GetWritePtr()+m_uMinMaxIndex; } // prealloc docinfo hash but only if docinfo is big enough (in other words if hash is 8x+ less in size) if ( m_pDocinfoHash.IsEmpty() && m_pDocinfo.GetLength() > ( 32 << DOCINFO_HASH_BITS ) ) if ( !m_pDocinfoHash.Alloc ( ( 1 << DOCINFO_HASH_BITS )+4, m_sLastError, sWarning ) ) return false; //////////// // MVA data //////////// if ( m_uVersion>=4 ) { // if index is v4, .spm must always exist, even though length could be 0 CSphAutofile fdMva ( GetIndexFileName("spm"), SPH_O_READ, m_sLastError ); if ( fdMva.GetFD()<0 ) return false; SphOffset_t iMvaSize = fdMva.GetSize ( 0, true, m_sLastError ); if ( iMvaSize<0 ) return false; // prealloc if ( iMvaSize>0 ) if ( !m_pMva.Alloc ( DWORD(iMvaSize/sizeof(DWORD)), m_sLastError, sWarning ) ) return false; } /////////////// // string data /////////////// if ( m_uVersion>=17 ) { CSphAutofile fdStrings ( GetIndexFileName("sps"), SPH_O_READ, m_sLastError ); if ( fdStrings.GetFD()<0 ) return false; SphOffset_t iStringsSize = fdStrings.GetSize ( 0, true, m_sLastError ); if ( iStringsSize<0 ) return false; // prealloc if ( iStringsSize>0 ) if ( !m_pStrings.Alloc ( DWORD(iStringsSize), m_sLastError, sWarning ) ) return false; } } else if ( m_tSettings.m_eDocinfo==SPH_DOCINFO_EXTERN && m_bIsEmpty ) { CSphAutofile tDocinfo ( GetIndexFileName("spa"), SPH_O_READ, m_sLastError ); if ( tDocinfo.GetFD()>0 ) { SphOffset_t iDocinfoSize = tDocinfo.GetSize ( 0, false, m_sLastError ); if ( iDocinfoSize ) sphWarning ( "IsEmpty != attribute size ("INT64_FMT")", iDocinfoSize ); } } // prealloc killlist if ( m_uVersion>=10 ) { CSphAutofile fdKillList ( GetIndexFileName("spk"), SPH_O_READ, m_sLastError ); if ( fdKillList.GetFD()<0 ) return false; SphOffset_t iSize = fdKillList.GetSize ( 0, true, m_sLastError ); if ( iSize<0 ) return false; if ( iSize!=(SphOffset_t)( m_iKillListSize*sizeof(SphAttr_t) ) ) { m_sLastError.SetSprintf ( 
"header k-list size does not match .spk size (klist=" INT64_FMT ", spk=" INT64_FMT ")", (int64_t)( m_iKillListSize*sizeof(SphAttr_t) ), (int64_t) iSize ); return false; } // prealloc if ( iSize>0 && !m_pKillList.Alloc ( m_iKillListSize, m_sLastError, sWarning ) ) return false; } bool bWordDict = false; if ( m_pDict ) bWordDict = m_pDict->GetSettings().m_bWordDict; // preload checkpoints (must be done here as they are not shared) if ( !m_tWordlist.ReadCP ( tWordlist, m_uVersion, bWordDict, m_sLastError ) ) { m_sLastError.SetSprintf ( "failed to read %s: %s", GetIndexFileName("spi").cstr(), m_sLastError.cstr () ); return false; } // all done m_bPreallocated = true; m_iIndexTag = ++m_iIndexTagSeq; return true; } template < typename T > bool CSphIndex_VLN::PrereadSharedBuffer ( CSphSharedBuffer & pBuffer, const char * sExt, size_t uExpected, DWORD uOffset ) { if ( !pBuffer.GetLength() ) return true; CSphAutofile fdBuf ( GetIndexFileName(sExt), SPH_O_READ, m_sLastError ); if ( fdBuf.GetFD()<0 ) return false; fdBuf.SetProgressCallback ( m_pProgress, &m_tProgress ); if ( uExpected==0 ) uExpected = size_t ( pBuffer.GetLength() ) - uOffset*sizeof(T); return fdBuf.Read ( pBuffer.GetWritePtr() + uOffset, uExpected, m_sLastError ); } bool CSphIndex_VLN::Preread () { MEMORY ( SPH_MEM_IDX_DISK ); sphLogDebug ( "CSphIndex_VLN::Preread invoked" ); if ( !m_bPreallocated ) { m_sLastError = "INTERNAL ERROR: not preallocated"; return false; } if ( !m_pPreread || *m_pPreread ) { m_sLastError = "INTERNAL ERROR: already preread"; return false; } /////////////////// // read everything /////////////////// m_tProgress.m_ePhase = CSphIndexProgress::PHASE_PREREAD; m_tProgress.m_iBytes = 0; m_tProgress.m_iBytesTotal = m_pDocinfo.GetLength() + m_pMva.GetLength() + m_pStrings.GetLength() + m_pKillList.GetLength(); if ( m_bPreloadWordlist ) m_tProgress.m_iBytesTotal += m_tWordlist.m_pBuf.GetLength(); sphLogDebug ( "Prereading .spa" ); if ( !PrereadSharedBuffer ( m_pDocinfo, "spa", ( m_uVersion<20 )? m_uDocinfo * ( ( m_bId32to64 ? 1 : DOCINFO_IDSIZE ) + m_tSchema.GetRowSize() ) * sizeof(DWORD) : 0 , m_bId32to64 ? ( 2 + m_uDocinfo + 2 * m_uDocinfoIndex ) : 0 ) ) return false; sphLogDebug ( "Prereading .spm" ); if ( !PrereadSharedBuffer ( m_pMva, "spm" ) ) return false; sphLogDebug ( "Prereading .sps" ); if ( !PrereadSharedBuffer ( m_pStrings, "sps" ) ) return false; sphLogDebug ( "Prereading .spk" ); if ( !PrereadSharedBuffer ( m_pKillList, "spk" ) ) return false; #if PARANOID for ( int i = 1; i < (int)m_iKillListSize; i++ ) assert ( m_pKillList[i-1] < m_pKillList[i] ); #endif // preload wordlist // FIXME! OPTIMIZE! can skip checkpoints if ( m_bPreloadWordlist ) { sphLogDebug ( "Prereading .spi" ); if ( !PrereadSharedBuffer ( m_tWordlist.m_pBuf, "spi" ) ) return false; } if ( m_pProgress ) m_pProgress ( &m_tProgress, true ); ////////////////////// // precalc everything ////////////////////// // convert id32 to id64 if ( m_pDocinfo.GetLength() && m_bId32to64 ) { DWORD *pTarget = m_pDocinfo.GetWritePtr(); DWORD *pSource = pTarget + 2 + m_uDocinfo + 2 * m_uDocinfoIndex; int iStride = m_tSchema.GetRowSize(); SphDocID_t uDoc; DWORD uLimit = m_uDocinfo + ( ( m_uVersion < 20 ) ? 
0 : 2 + 2 * m_uDocinfoIndex ); for ( DWORD u=0; u=( 1 << DOCINFO_HASH_BITS ) ) { iShift++; uRange >>= 1; } DWORD * pHash = m_pDocinfoHash.GetWritePtr(); *pHash++ = iShift; *pHash = 0; DWORD uLastHash = 0; for ( DWORD i=1; iuFirst && DOCINFO2ID ( &m_pDocinfo[( int64_t ( i-1 ) )*iStride] ) < DOCINFO2ID ( &m_pDocinfo[( int64_t ( i ) )*iStride] ) && "descending document ID found" ); DWORD uHash = (DWORD)( ( DOCINFO2ID ( &m_pDocinfo[( int64_t ( i ) )*iStride] ) - uFirst ) >> iShift ); if ( uHash==uLastHash ) continue; while ( uLastHash dMvaRowitem; for ( int i=0; i=0 ) { ::close ( m_iLockFD ); ::unlink ( GetIndexFileName("spl").cstr() ); sphLogDebug ( "lock %s unlinked, file with ID %d closed", GetIndexFileName("spl").cstr(), m_iLockFD ); m_iLockFD = -1; } continue; } #endif snprintf ( sFrom, sizeof(sFrom), "%s.%s", m_sFilename.cstr(), sExt ); snprintf ( sTo, sizeof(sTo), "%s.%s", sNewBase, sExt ); #if USE_WINDOWS ::unlink ( sTo ); sphLogDebug ( "%s unlinked", sTo ); #endif if ( ::rename ( sFrom, sTo ) ) { m_sLastError.SetSprintf ( "rename %s to %s failed: %s", sFrom, sTo, strerror(errno) ); // this is no reason to fail if spl is missing, since it is only lock and no data. if ( strcmp ( sExt, "spl" ) ) break; } uMask |= ( 1UL << iExt ); } // are we good? if ( iExt==EXT_COUNT ) { SetBase ( sNewBase ); sphLogDebug ( "Base set to %s", sNewBase ); return true; } // if there were errors, rollback for ( iExt=0; iExtm_dFieldWeights.GetLength() ) { ARRAY_FOREACH ( i, pQuery->m_dFieldWeights ) { int j = tSchema.GetFieldIndex ( pQuery->m_dFieldWeights[i].m_sName.cstr() ); if ( j>=0 && jm_dFieldWeights[i].m_iValue ) * iIndexWeight; } return; } // order-bound weights if ( pQuery->m_pWeights ) { for ( int i=0; im_iWeights ); i++ ) m_dWeights[i] = Max ( MIN_WEIGHT, (int)pQuery->m_pWeights[i] ) * iIndexWeight; } } bool CSphQueryContext::SetupCalc ( CSphQueryResult * pResult, const CSphSchema & tInSchema, const CSphSchema & tSchema, const DWORD * pMvaPool ) { m_dCalcFilter.Resize ( 0 ); m_dCalcSort.Resize ( 0 ); m_dCalcFinal.Resize ( 0 ); // quickly verify that all my real attributes can be stashed there if ( tInSchema.GetAttrsCount() < tSchema.GetAttrsCount() ) { pResult->m_sError.SetSprintf ( "INTERNAL ERROR: incoming-schema mismatch (incount=%d, mycount=%d)", tInSchema.GetAttrsCount(), tSchema.GetAttrsCount() ); return false; } // now match everyone for ( int iIn=0; iInm_sError.SetSprintf ( "INTERNAL ERROR: incoming-schema attr missing from index-schema (in=%s)", sphDumpAttr(tIn).cstr() ); return false; } if ( tIn.m_eStage==SPH_EVAL_OVERRIDE ) { // override; check for type/size match and dynamic part if ( tIn.m_eAttrType!=pMy->m_eAttrType || tIn.m_tLocator.m_iBitCount!=pMy->m_tLocator.m_iBitCount || !tIn.m_tLocator.m_bDynamic ) { pResult->m_sError.SetSprintf ( "INTERNAL ERROR: incoming-schema override mismatch (in=%s, my=%s)", sphDumpAttr(tIn).cstr(), sphDumpAttr(*pMy).cstr() ); return false; } } else { // static; check for full match if (!( tIn==*pMy )) { pResult->m_sError.SetSprintf ( "INTERNAL ERROR: incoming-schema mismatch (in=%s, my=%s)", sphDumpAttr(tIn).cstr(), sphDumpAttr(*pMy).cstr() ); return false; } } break; } case SPH_EVAL_PREFILTER: case SPH_EVAL_PRESORT: case SPH_EVAL_FINAL: { ISphExpr * pExpr = tIn.m_pExpr.Ptr(); if ( !pExpr ) pExpr = sphSortSetupExpr ( tIn.m_sName, tSchema ); if ( !pExpr ) { pResult->m_sError.SetSprintf ( "INTERNAL ERROR: incoming-schema expression missing evaluator (stage=%d, in=%s)", (int)tIn.m_eStage, sphDumpAttr(tIn).cstr() ); return false; } // an expression that 
index/searcher should compute CalcItem_t tCalc; tCalc.m_eType = tIn.m_eAttrType; tCalc.m_tLoc = tIn.m_tLocator; tCalc.m_pExpr = pExpr; tCalc.m_pExpr->SetMVAPool ( pMvaPool ); switch ( tIn.m_eStage ) { case SPH_EVAL_PREFILTER: m_dCalcFilter.Add ( tCalc ); break; case SPH_EVAL_PRESORT: m_dCalcSort.Add ( tCalc ); break; case SPH_EVAL_FINAL: m_dCalcFinal.Add ( tCalc ); break; default: break; } break; } case SPH_EVAL_SORTER: // sorter tells it will compute itself; so just skip it break; default: pResult->m_sError.SetSprintf ( "INTERNAL ERROR: unhandled eval stage=%d", (int)tIn.m_eStage ); return false; } } // ok, we can emit matches in this schema (incoming for sorter, outgoing for index/searcher) pResult->m_tSchema = tInSchema; return true; } CSphDict * CSphIndex_VLN::SetupStarDict ( CSphScopedPtr & tContainer, CSphDict * pPrevDict, ISphTokenizer & tTokenizer ) const { // setup proper dict bool bUseStarDict = false; if ( ( m_uVersion>=7 && ( m_tSettings.m_iMinPrefixLen>0 || m_tSettings.m_iMinInfixLen>0 ) && m_bEnableStar ) || // v.7 added mangling to infixes ( m_uVersion==6 && ( m_tSettings.m_iMinPrefixLen>0 ) && m_bEnableStar ) ) // v.6 added mangling to prefixes { bUseStarDict = true; } // no star? just return the original one if ( !bUseStarDict ) return pPrevDict; // spawn wrapper, and put it in the box // wrapper type depends on version; v.8 introduced new mangling rules if ( m_uVersion>=8 ) tContainer = new CSphDictStarV8 ( pPrevDict, m_tSettings.m_iMinPrefixLen>0, m_tSettings.m_iMinInfixLen>0 ); else tContainer = new CSphDictStar ( pPrevDict ); CSphRemapRange tStar ( '*', '*', '*' ); // FIXME? check and warn if star was already there tTokenizer.AddCaseFolding ( tStar ); return tContainer.Ptr(); } CSphDict * CSphIndex_VLN::SetupExactDict ( CSphScopedPtr & tContainer, CSphDict * pPrevDict, ISphTokenizer & tTokenizer ) const { if ( m_uVersion<12 || !m_tSettings.m_bIndexExactWords ) return pPrevDict; tContainer = new CSphDictExact ( pPrevDict ); CSphRemapRange tStar ( '=', '=', '=' ); // FIXME? check and warn if star was already there tTokenizer.AddCaseFolding ( tStar ); return tContainer.Ptr(); } bool CSphIndex_VLN::GetKeywords ( CSphVector & dKeywords, const char * szQuery, bool bGetStats, CSphString & sError ) const { WITH_QWORD ( this, false, Qword, return DoGetKeywords ( dKeywords, szQuery, bGetStats, sError ) ); return false; } template < class Qword > bool CSphIndex_VLN::DoGetKeywords ( CSphVector & dKeywords, const char * szQuery, bool bGetStats, CSphString & sError ) const { if ( !m_pPreread || !*m_pPreread ) { sError = "index not preread"; return false; } CSphScopedPtr pDoclist ( NULL ); CSphScopedPtr pHitlist ( NULL ); CSphScopedPtr pTokenizer ( m_pTokenizer->Clone ( false ) ); // avoid race pTokenizer->EnableTokenizedMultiformTracking (); CSphScopedPtr tDictCloned ( NULL ); CSphDict * pDictBase = m_pDict; if ( pDictBase->HasState() ) { tDictCloned = pDictBase = pDictBase->Clone(); } CSphScopedPtr tDict ( NULL ); CSphDict * pDict = SetupStarDict ( tDict, pDictBase, *pTokenizer.Ptr() ); CSphScopedPtr tDict2 ( NULL ); pDict = SetupExactDict ( tDict2, pDict, *pTokenizer.Ptr() ); // prepare for setup CSphAutofile tDummy1, tDummy2, tDummy3, tWordlist; if ( !m_bKeepFilesOpen ) if ( tWordlist.Open ( GetIndexFileName ( "spi" ), SPH_O_READ, sError ) < 0 ) return false; DiskIndexQwordSetup_c tTermSetup ( tDummy1, tDummy2 , m_bPreloadWordlist ? tDummy3 : ( m_bKeepFilesOpen ? m_tWordlist.m_tFile : tWordlist ) , m_bPreloadWordlist ? 
0 : m_tWordlist.m_iMaxChunk ); tTermSetup.m_pDict = pDict; tTermSetup.m_pIndex = this; tTermSetup.m_eDocinfo = m_tSettings.m_eDocinfo; dKeywords.Resize ( 0 ); Qword QueryWord ( false, false ); CSphString sTokenized; BYTE * sWord; int nWords = 0; CSphString sQbuf ( szQuery ); pTokenizer->SetBuffer ( (BYTE*)sQbuf.cstr(), strlen(szQuery) ); while ( ( sWord = pTokenizer->GetToken() )!=NULL ) { BYTE * sMultiform = pTokenizer->GetTokenizedMultiform(); if ( sMultiform ) sTokenized = (const char*)sMultiform; else sTokenized = (const char*)sWord; SphWordID_t iWord = pDict->GetWordID ( sWord ); if ( iWord ) { if ( bGetStats ) { QueryWord.Reset (); QueryWord.m_sWord = (const char*)sWord; QueryWord.m_sDictWord = (const char*)sWord; QueryWord.m_iWordID = iWord; tTermSetup.QwordSetup ( &QueryWord ); } CSphKeywordInfo & tInfo = dKeywords.Add(); Swap ( tInfo.m_sTokenized, sTokenized ); tInfo.m_sNormalized = (const char*)sWord; tInfo.m_iDocs = bGetStats ? QueryWord.m_iDocs : 0; tInfo.m_iHits = bGetStats ? QueryWord.m_iHits : 0; ++nWords; } } return true; } // fix MSVC 2005 fuckup, template DoGetKeywords() just above somehow resets forScope #if USE_WINDOWS #pragma conform(forScope,on) #endif bool CSphQueryContext::CreateFilters ( bool bFullscan, const CSphVector * pdFilters, const CSphSchema & tSchema, const DWORD * pMvaPool, CSphString & sError ) { if ( !pdFilters ) return true; ARRAY_FOREACH ( i, (*pdFilters) ) { const CSphFilterSettings & tFilter = (*pdFilters)[i]; if ( tFilter.m_sAttrName.IsEmpty() ) continue; if ( bFullscan && tFilter.m_sAttrName=="@weight" ) continue; // @weight is not avaiable in fullscan mode ISphFilter * pFilter = sphCreateFilter ( tFilter, tSchema, pMvaPool, sError ); if ( !pFilter ) return false; ISphFilter ** pGroup = tFilter.m_sAttrName=="@weight" ? 
&m_pWeightFilter : &m_pFilter; *pGroup = sphJoinFilters ( *pGroup, pFilter ); } return true; } bool CSphQueryContext::SetupOverrides ( const CSphQuery * pQuery, CSphQueryResult * pResult, const CSphSchema & tIndexSchema ) { m_pOverrides = NULL; m_dOverrideIn.Resize ( pQuery->m_dOverrides.GetLength() ); m_dOverrideOut.Resize ( pQuery->m_dOverrides.GetLength() ); ARRAY_FOREACH ( i, pQuery->m_dOverrides ) { const char * sAttr = pQuery->m_dOverrides[i].m_sAttr.cstr(); // shortcut const CSphColumnInfo * pCol = tIndexSchema.GetAttr ( sAttr ); if ( !pCol ) { pResult->m_sError.SetSprintf ( "attribute override: unknown attribute name '%s'", sAttr ); return false; } if ( pCol->m_eAttrType!=pQuery->m_dOverrides[i].m_eAttrType ) { pResult->m_sError.SetSprintf ( "attribute override: attribute '%s' type mismatch (index=%d, query=%d)", sAttr, pCol->m_eAttrType, pQuery->m_dOverrides[i].m_eAttrType ); return false; } const CSphColumnInfo * pOutCol = pResult->m_tSchema.GetAttr ( pQuery->m_dOverrides[i].m_sAttr.cstr() ); if ( !pOutCol ) { pResult->m_sError.SetSprintf ( "attribute override: unknown attribute name '%s' in outgoing schema", sAttr ); return false; } m_dOverrideIn[i] = pCol->m_tLocator; m_dOverrideOut[i] = pOutCol->m_tLocator; #ifndef NDEBUG // check that the values are actually sorted const CSphVector & dValues = pQuery->m_dOverrides[i].m_dValues; for ( int j=1; jm_dOverrides.GetLength() ) m_pOverrides = &pQuery->m_dOverrides; return true; } static int sphQueryHeightCalc ( const XQNode_t * pNode ) { if ( !pNode->m_dChildren.GetLength() ) return pNode->m_dWords.GetLength(); if ( pNode->GetOp()==SPH_QUERY_BEFORE ) return 1; int iMaxChild = 0; int iHeight = 0; ARRAY_FOREACH ( i, pNode->m_dChildren ) { int iBottom = sphQueryHeightCalc ( pNode->m_dChildren[i] ); int iTop = pNode->m_dChildren.GetLength()-i-1; if ( iBottom+iTop>=iMaxChild+iHeight ) { iMaxChild = iBottom; iHeight = iTop; } } return iMaxChild+iHeight; } #define SPH_EXTNODE_STACK_SIZE 120 bool sphCheckQueryHeight ( const XQNode_t * pRoot, CSphString & sError ) { int iHeight = 0; if ( pRoot ) iHeight = sphQueryHeightCalc ( pRoot ); int64_t iQueryStack = sphGetStackUsed() + iHeight*SPH_EXTNODE_STACK_SIZE; bool bValid = ( sphMyStackSize()>=iQueryStack ); if ( !bValid ) sError.SetSprintf ( "query too complex, not enough stack (thread_stack_size=%dK or higher required)", (int)( ( iQueryStack + 1024 - ( iQueryStack%1024 ) ) / 1024 ) ); return bValid; } static XQNode_t * CloneKeyword ( const XQNode_t * pNode ) { assert ( pNode ); XQNode_t * pRes = new XQNode_t ( pNode->m_dSpec ); pRes->m_dWords = pNode->m_dWords; return pRes; } static XQNode_t * ExpandKeyword ( XQNode_t * pNode, const CSphIndexSettings & tSettings ) { assert ( pNode ); XQNode_t * pExpand = new XQNode_t ( pNode->m_dSpec ); pExpand->SetOp ( SPH_QUERY_OR, pNode ); if ( tSettings.m_iMinInfixLen>0 ) { assert ( pNode->m_dChildren.GetLength()==0 ); assert ( pNode->m_dWords.GetLength()==1 ); XQNode_t * pInfix = CloneKeyword ( pNode ); pInfix->m_dWords[0].m_sWord.SetSprintf ( "*%s*", pNode->m_dWords[0].m_sWord.cstr() ); pInfix->m_dWords[0].m_uStarPosition = STAR_BOTH; pExpand->m_dChildren.Add ( pInfix ); } if ( tSettings.m_bIndexExactWords ) { assert ( pNode->m_dChildren.GetLength()==0 ); assert ( pNode->m_dWords.GetLength()==1 ); XQNode_t * pExact = CloneKeyword ( pNode ); pExact->m_dWords[0].m_sWord.SetSprintf ( "=%s", pNode->m_dWords[0].m_sWord.cstr() ); pExpand->m_dChildren.Add ( pExact ); } return pExpand; } static XQNode_t * ExpandKeywords ( XQNode_t * pNode, const 
CSphIndexSettings & tSettings ) { // only if expansion makes sense at all if ( tSettings.m_iMinInfixLen<=0 && !tSettings.m_bIndexExactWords ) return pNode; // process children for composite nodes if ( pNode->m_dChildren.GetLength() ) { ARRAY_FOREACH ( i, pNode->m_dChildren ) pNode->m_dChildren[i] = ExpandKeywords ( pNode->m_dChildren[i], tSettings ); return pNode; } // if that's a phrase/proximity node, create a very special, magic phrase/proximity node if ( pNode->GetOp()==SPH_QUERY_PHRASE || pNode->GetOp()==SPH_QUERY_PROXIMITY || pNode->GetOp()==SPH_QUERY_QUORUM ) { assert ( pNode->m_dWords.GetLength()>1 ); ARRAY_FOREACH ( i, pNode->m_dWords ) { XQNode_t * pWord = new XQNode_t ( pNode->m_dSpec ); pWord->m_dWords.Add ( pNode->m_dWords[i] ); pNode->m_dChildren.Add ( ExpandKeyword ( pWord, tSettings ) ); pNode->m_dChildren.Last()->m_iAtomPos = pNode->m_dWords[i].m_iAtomPos; } pNode->m_dWords.Reset(); pNode->m_bVirtuallyPlain = true; return pNode; } // skip empty plain nodes if ( pNode->m_dWords.GetLength()<=0 ) return pNode; // process keywords for plain nodes assert ( pNode->m_dWords.GetLength()==1 ); XQKeyword_t & tKeyword = pNode->m_dWords[0]; if ( tKeyword.m_uStarPosition!=STAR_NONE || tKeyword.m_sWord.Begins("=") || tKeyword.m_sWord.Begins("*") || tKeyword.m_sWord.Ends("*") ) { return pNode; } // do the expansion return ExpandKeyword ( pNode, tSettings ); } // transform the "one two three"/1 quorum into one|two|three (~40% faster) static void TransformQuorum ( XQNode_t ** ppNode ) { XQNode_t *& pNode = *ppNode; if ( pNode->GetOp()!=SPH_QUERY_QUORUM || pNode->m_iOpArg!=1 ) return; assert ( pNode->m_dChildren.GetLength()==0 ); CSphVector dArgs; ARRAY_FOREACH ( i, pNode->m_dWords ) { XQNode_t * pAnd = new XQNode_t ( pNode->m_dSpec ); pAnd->m_dWords.Add ( pNode->m_dWords[i] ); dArgs.Add ( pAnd ); } pNode->m_dWords.Reset(); pNode->SetOp ( SPH_QUERY_OR, dArgs ); } struct BinaryNode_t { int m_iLo; int m_iHi; }; static void BuildExpandedTree ( const XQKeyword_t & tRootWord, CSphVector & dWordSrc, XQNode_t * pRoot ) { assert ( dWordSrc.GetLength() ); pRoot->m_dWords.Reset(); CSphVector dNodes; dNodes.Reserve ( dWordSrc.GetLength() ); XQNode_t * pCur = pRoot; dNodes.Add(); dNodes.Last().m_iLo = 0; dNodes.Last().m_iHi = ( dWordSrc.GetLength()-1 ); while ( dNodes.GetLength() ) { BinaryNode_t tNode = dNodes.Pop(); if ( tNode.m_iHim_pParent; continue; } int iMid = ( tNode.m_iLo+tNode.m_iHi ) / 2; dNodes.Add (); dNodes.Last().m_iLo = tNode.m_iLo; dNodes.Last().m_iHi = iMid-1; dNodes.Add (); dNodes.Last().m_iLo = iMid+1; dNodes.Last().m_iHi = tNode.m_iHi; if ( pCur->m_dWords.GetLength() ) { assert ( pCur->m_dWords.GetLength()==1 ); XQNode_t * pTerm = CloneKeyword ( pRoot ); Swap ( pTerm->m_dWords, pCur->m_dWords ); pCur->m_dChildren.Add ( pTerm ); } XQNode_t * pChild = CloneKeyword ( pRoot ); pChild->m_dWords.Add ( tRootWord ); pChild->m_dWords.Last().m_sWord.Swap ( dWordSrc[iMid].m_sName ); pChild->m_dWords.Last().m_bExpanded = true; pChild->m_bNotWeighted = ( dWordSrc[iMid].m_iValue==0 ); pChild->m_pParent = pCur; pCur->m_dChildren.Add ( pChild ); pCur->SetOp ( SPH_QUERY_OR ); pCur = pChild; } } void Swap ( CSphNamedInt & a, CSphNamedInt & b ) { a.m_sName.Swap ( b.m_sName ); Swap ( a.m_iValue, b.m_iValue ); } struct WordDocsGreaterOp_t { inline bool IsLess ( const CSphNamedInt & a, const CSphNamedInt & b ) { return a.m_iValue > b.m_iValue; } }; XQNode_t * sphExpandXQNode ( XQNode_t * pNode, ExpansionContext_t & tCtx ) { assert ( pNode ); assert ( tCtx.m_pResult ); // process children for composite 
nodes if ( pNode->m_dChildren.GetLength() ) { ARRAY_FOREACH ( i, pNode->m_dChildren ) { pNode->m_dChildren[i] = sphExpandXQNode ( pNode->m_dChildren[i], tCtx ); } return pNode; } // if that's a phrase/proximity node, create a very special, magic phrase/proximity node if ( pNode->GetOp()==SPH_QUERY_PHRASE || pNode->GetOp()==SPH_QUERY_PROXIMITY || pNode->GetOp()==SPH_QUERY_QUORUM ) { assert ( pNode->m_dWords.GetLength()>1 ); ARRAY_FOREACH ( i, pNode->m_dWords ) { XQNode_t * pWord = new XQNode_t ( pNode->m_dSpec ); pWord->m_dWords.Add ( pNode->m_dWords[i] ); pNode->m_dChildren.Add ( sphExpandXQNode ( pWord, tCtx ) ); pNode->m_dChildren.Last()->m_iAtomPos = pNode->m_dWords[i].m_iAtomPos; // tricky part // current node may have field/zone limits attached // normally those get pushed down during query parsing // but here we create nodes manually and have to push down limits too pWord->CopySpecs ( pNode ); } pNode->m_dWords.Reset(); pNode->m_bVirtuallyPlain = true; return pNode; } // skip empty plain nodes if ( pNode->m_dWords.GetLength()<=0 ) return pNode; // process keywords for plain nodes assert ( pNode->m_dChildren.GetLength()==0 ); assert ( pNode->m_dWords.GetLength()==1 ); if ( ( !tCtx.m_bStarEnabled || !pNode->m_dWords[0].m_sWord.Ends("*") ) ) return pNode; const CSphString & sFullWord = pNode->m_dWords[0].m_sWord; const char * sAdjustedWord = sFullWord.cstr(); int iWordLen = sFullWord.Length(); if ( tCtx.m_bStarEnabled ) iWordLen = Max ( iWordLen-1, 0 ); // leading special symbols trimming if ( sFullWord.Begins("=") || sFullWord.Begins("*") ) { sAdjustedWord++; iWordLen = Max ( iWordLen-1, 0 ); } // we refuse to search query less then min-prefix-len if ( iWordLenm_dWords[0].m_sWord.SubString ( sAdjustedWord-sFullWord.cstr(), iWordLen ); sFixed.SetSprintf ( "%c%s", MAGIC_WORD_HEAD_NONSTEMMED, sFixed.cstr() ); sAdjustedWord = sFixed.cstr(); iWordLen++; } CSphVector dPrefixedWords; tCtx.m_pWordlist->GetPrefixedWords ( sAdjustedWord, iWordLen, dPrefixedWords, tCtx.m_pBuf, tCtx.m_iFD ); if ( !dPrefixedWords.GetLength() ) { // mark source word as expanded to prevent warning on terms mismatch in statistics pNode->m_dWords.Begin()->m_bExpanded = true; return pNode; } // sort word's to leftmost max documents, rightmost least documents dPrefixedWords.Sort ( WordDocsGreaterOp_t() ); // clip words with the lowest doc frequency as rare words are misspelling if ( tCtx.m_iExpansionLimit && tCtx.m_iExpansionLimitAddStat ( dPrefixedWords[i].m_sName, 0, 0, true ); } // replace MAGIC_WORD_HEAD_NONSTEMMED symbol to '=' if ( tCtx.m_bHasMorphology ) { ARRAY_FOREACH ( i, dPrefixedWords ) { ( (char *)dPrefixedWords[i].m_sName.cstr() )[0] = '='; } } const XQKeyword_t tPrefixingWord = pNode->m_dWords[0]; BuildExpandedTree ( tPrefixingWord, dPrefixedWords, pNode ); return pNode; } XQNode_t * CSphIndex_VLN::ExpandPrefix ( XQNode_t * pNode, CSphString & sError, CSphQueryResultMeta * pResult ) const { if ( !pNode || !( m_pDict->GetSettings().m_bWordDict && m_tSettings.m_iMinPrefixLen>0 ) ) return pNode; // thread safe outer storage for dictionaries chunks and file BYTE * pBuf = NULL; int iFD = -1; CSphAutofile rdWordlist; if ( !m_bPreloadWordlist ) { if ( m_bKeepFilesOpen ) iFD = m_tWordlist.m_tFile.GetFD(); else { iFD = rdWordlist.Open ( GetIndexFileName ( "spi" ), SPH_O_READ, sError ); if ( iFD<0 ) return NULL; } if ( m_tWordlist.m_iMaxChunk>0 ) pBuf = new BYTE [ m_tWordlist.m_iMaxChunk ]; } assert ( m_pPreread && *m_pPreread ); assert ( !m_bPreloadWordlist || !m_tWordlist.m_pBuf.IsEmpty() ); ExpansionContext_t 
tCtx; tCtx.m_pWordlist = &m_tWordlist; tCtx.m_pBuf = pBuf; tCtx.m_pResult = pResult; tCtx.m_iFD = iFD; tCtx.m_iMinPrefixLen = m_tSettings.m_iMinPrefixLen; tCtx.m_iExpansionLimit = m_iExpansionLimit; tCtx.m_bStarEnabled = m_bEnableStar; tCtx.m_bHasMorphology = m_pDict->HasMorphology(); pNode = sphExpandXQNode ( pNode, tCtx ); SafeDeleteArray ( pBuf ); return pNode; } // transform the (A B) NEAR C into A NEAR B NEAR C static void TransformNear ( XQNode_t ** ppNode ) { XQNode_t *& pNode = *ppNode; if ( pNode->GetOp()==SPH_QUERY_NEAR ) { assert ( pNode->m_dWords.GetLength()==0 ); CSphVector dArgs; int iStartFrom; // transform all (A B C) NEAR D into A NEAR B NEAR C NEAR D do { dArgs.Reset(); iStartFrom = 0; ARRAY_FOREACH ( i, pNode->m_dChildren ) { XQNode_t * pChild = pNode->m_dChildren[i]; ///< shortcut if ( pChild->GetOp()==SPH_QUERY_AND && pChild->m_dChildren.GetLength()>0 ) { ARRAY_FOREACH ( j, pChild->m_dChildren ) if ( j==0 && iStartFrom==0 ) { // we will remove the node anyway, so just replace it with 1-st child instead pNode->m_dChildren[i] = pChild->m_dChildren[j]; iStartFrom = i+1; } else dArgs.Add ( pChild->m_dChildren[j] ); pChild->m_dChildren.Reset(); SafeDelete ( pChild ); } else if ( iStartFrom!=0 ) dArgs.Add ( pChild ); } if ( iStartFrom!=0 ) { pNode->m_dChildren.Resize ( iStartFrom + dArgs.GetLength() ); ARRAY_FOREACH ( i, dArgs ) pNode->m_dChildren [ i + iStartFrom ] = dArgs[i]; } } while ( iStartFrom!=0 ); } ARRAY_FOREACH ( i, pNode->m_dChildren ) TransformNear ( &pNode->m_dChildren[i] ); } /// tag excluded keywords (rvals to operator NOT) static void TagExcluded ( XQNode_t * pNode, bool bNot ) { if ( pNode->GetOp()==SPH_QUERY_ANDNOT ) { assert ( pNode->m_dChildren.GetLength()==2 ); assert ( pNode->m_dWords.GetLength()==0 ); TagExcluded ( pNode->m_dChildren[0], bNot ); TagExcluded ( pNode->m_dChildren[1], !bNot ); } else if ( pNode->m_dChildren.GetLength() ) { // FIXME? check if this works okay with "virtually plain" stuff? ARRAY_FOREACH ( i, pNode->m_dChildren ) TagExcluded ( pNode->m_dChildren[i], bNot ); } else { // tricky bit // no assert on length here and that is intended // we have fully empty nodes (0 children, 0 words) sometimes! 
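// ---------------------------------------------------------------------------
// EDITOR'S NOTE: the block below is an illustrative sketch added while editing
// this dump; it is NOT part of the original Sphinx 2.0.4 sources. TagExcluded()
// above flips the exclusion flag every time the walk descends into the
// right-hand operand of an ANDNOT node. The #if 0 snippet models just that
// rule on a hypothetical ToyNode_t tree (all names here are made up for the
// example), so the recursion can be read in isolation from the real XQNode_t
// machinery.
#if 0
#include <cassert>
#include <string>
#include <vector>

struct ToyNode_t
{
	bool						m_bAndNot = false;	// true = "left ANDNOT right"
	std::vector<ToyNode_t*>		m_dChildren;		// exactly 2 children when m_bAndNot is set
	std::vector<std::string>	m_dWords;			// leaf keywords
	std::vector<bool>			m_dExcluded;		// parallel to m_dWords
};

static void ToyTagExcluded ( ToyNode_t * pNode, bool bNot )
{
	if ( pNode->m_bAndNot )
	{
		assert ( pNode->m_dChildren.size()==2 );
		ToyTagExcluded ( pNode->m_dChildren[0], bNot );		// left side keeps the current polarity
		ToyTagExcluded ( pNode->m_dChildren[1], !bNot );	// right side gets negated
	} else if ( !pNode->m_dChildren.empty() )
	{
		for ( ToyNode_t * pChild : pNode->m_dChildren )
			ToyTagExcluded ( pChild, bNot );
	} else
	{
		// leaf (possibly fully empty, as the comment above notes); tag its keywords
		pNode->m_dExcluded.assign ( pNode->m_dWords.size(), bNot );
	}
}
#endif
// ---------------------------------------------------------------------------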
ARRAY_FOREACH ( i, pNode->m_dWords ) pNode->m_dWords[i].m_bExcluded = bNot; } } void sphTransformExtendedQuery ( XQNode_t ** ppNode ) { TransformQuorum ( ppNode ); TransformNear ( ppNode ); TagExcluded ( *ppNode, false ); } struct CmpPSortersByRandom_fn { inline bool IsLess ( const ISphMatchSorter * a, const ISphMatchSorter * b ) const { assert ( a ); assert ( b ); return a->m_bRandomize < b->m_bRandomize; } }; /// one regular query vs many sorters bool CSphIndex_VLN::MultiQuery ( const CSphQuery * pQuery, CSphQueryResult * pResult, int iSorters, ISphMatchSorter ** ppSorters, const CSphVector * pExtraFilters, int iTag ) const { assert ( pQuery ); MEMORY ( SPH_MEM_IDX_DISK_MULTY_QUERY ); // to avoid the checking of a ppSorters's element for NULL on every next step, just filter out all nulls right here CSphVector dSorters; dSorters.Reserve ( iSorters ); for ( int i=0; im_sQuery.IsEmpty() ) return MultiScan ( pQuery, pResult, iSorters, &dSorters[0], pExtraFilters, iTag ); CSphScopedPtr pTokenizer ( m_pTokenizer->Clone ( false ) ); CSphScopedPtr tDictCloned ( NULL ); CSphDict * pDictBase = m_pDict; if ( pDictBase->HasState() ) { tDictCloned = pDictBase = pDictBase->Clone(); } CSphScopedPtr tDict ( NULL ); CSphDict * pDict = SetupStarDict ( tDict, pDictBase, *pTokenizer.Ptr() ); CSphScopedPtr tDict2 ( NULL ); pDict = SetupExactDict ( tDict2, pDict, *pTokenizer.Ptr() ); // parse query XQQuery_t tParsed; if ( !sphParseExtendedQuery ( tParsed, pQuery->m_sQuery.cstr(), pTokenizer.Ptr(), &m_tSchema, pDict, m_tSettings.m_iStopwordStep ) ) { pResult->m_sError = tParsed.m_sParseError; return false; } // transform query if needed (quorum transform, keyword expansion, etc.) sphTransformExtendedQuery ( &tParsed.m_pRoot ); // expanding prefix in word dictionary case XQNode_t * pPrefixed = ExpandPrefix ( tParsed.m_pRoot, pResult->m_sError, pResult ); if ( !pPrefixed ) return false; tParsed.m_pRoot = pPrefixed; if ( m_bExpandKeywords ) tParsed.m_pRoot = ExpandKeywords ( tParsed.m_pRoot, m_tSettings ); if ( !sphCheckQueryHeight ( tParsed.m_pRoot, pResult->m_sError ) ) return false; // flag common subtrees int iCommonSubtrees = 0; if ( m_iMaxCachedDocs && m_iMaxCachedHits ) iCommonSubtrees = sphMarkCommonSubtrees ( 1, &tParsed ); CSphQueryNodeCache tNodeCache ( iCommonSubtrees, m_iMaxCachedDocs, m_iMaxCachedHits ); bool bResult = ParsedMultiQuery ( pQuery, pResult, iSorters, &dSorters[0], tParsed, pDict, pExtraFilters, &tNodeCache, iTag ); return bResult; } /// many regular queries with one sorter attached to each query. /// returns true if at least one query succeeded. 
The failed queries indicated with pResult->m_iMultiplier==-1 bool CSphIndex_VLN::MultiQueryEx ( int iQueries, const CSphQuery * pQueries, CSphQueryResult ** ppResults, ISphMatchSorter ** ppSorters, const CSphVector * pExtraFilters, int iTag ) const { // ensure we have multiple queries if ( iQueries==1 ) return MultiQuery ( pQueries, ppResults[0], 1, ppSorters, pExtraFilters, iTag ); MEMORY ( SPH_MEM_IDX_DISK_MULTY_QUERY_EX ); assert ( pQueries ); assert ( ppResults ); assert ( ppSorters ); ISphTokenizer * pTokenizer = m_pTokenizer->Clone ( false ); CSphScopedPtr tDictCloned ( NULL ); CSphDict * pDictBase = m_pDict; if ( pDictBase->HasState() ) { tDictCloned = pDictBase = pDictBase->Clone(); } CSphScopedPtr tDict ( NULL ); CSphDict * pDict = SetupStarDict ( tDict, pDictBase, *pTokenizer ); CSphScopedPtr tDict2 ( NULL ); pDict = SetupExactDict ( tDict2, pDict, *pTokenizer ); CSphFixedVector dXQ ( iQueries ); bool bResult = false; bool bResultScan = false; for ( int i=0; im_iMultiplier = -1; ///< show that this particular query failed continue; } // fast path for scans if ( pQueries[i].m_sQuery.IsEmpty() ) { if ( MultiScan ( pQueries + i, ppResults[i], 1, &ppSorters[i], pExtraFilters, iTag ) ) bResultScan = true; else ppResults[i]->m_iMultiplier = -1; ///< show that this particular query failed continue; } // parse query if ( sphParseExtendedQuery ( dXQ[i], pQueries[i].m_sQuery.cstr(), pTokenizer, &m_tSchema, pDict, m_tSettings.m_iStopwordStep ) ) { // transform query if needed (quorum transform, keyword expansion, etc.) sphTransformExtendedQuery ( &dXQ[i].m_pRoot ); // expanding prefix in word dictionary case XQNode_t * pPrefixed = ExpandPrefix ( dXQ[i].m_pRoot, ppResults[i]->m_sError, ppResults[i] ); if ( pPrefixed ) { dXQ[i].m_pRoot = pPrefixed; if ( m_bExpandKeywords ) dXQ[i].m_pRoot = ExpandKeywords ( dXQ[i].m_pRoot, m_tSettings ); if ( sphCheckQueryHeight ( dXQ[i].m_pRoot, ppResults[i]->m_sError ) ) { bResult = true; } else { ppResults[i]->m_iMultiplier = -1; SafeDelete ( dXQ[i].m_pRoot ); } } else { ppResults[i]->m_iMultiplier = -1; SafeDelete ( dXQ[i].m_pRoot ); } } else { ppResults[i]->m_sError = dXQ[i].m_sParseError; ppResults[i]->m_iMultiplier = -1; } } // continue only if we have at least one non-failed if ( bResult ) { int iCommonSubtrees = 0; if ( m_iMaxCachedDocs && m_iMaxCachedHits ) iCommonSubtrees = sphMarkCommonSubtrees ( iQueries, &dXQ[0] ); CSphQueryNodeCache tNodeCache ( iCommonSubtrees, m_iMaxCachedDocs, m_iMaxCachedHits ); bResult = false; for ( int j=0; jm_iMultiplier = iCommonSubtrees ? iQueries : 1; } else ppResults[j]->m_iMultiplier = -1; } SafeDelete ( pTokenizer ); return bResult | bResultScan; } bool CSphIndex_VLN::ParsedMultiQuery ( const CSphQuery * pQuery, CSphQueryResult * pResult, int iSorters, ISphMatchSorter ** ppSorters, const XQQuery_t & tXQ, CSphDict * pDict, const CSphVector * pExtraFilters, CSphQueryNodeCache * pNodeCache, int iTag ) const { assert ( pQuery ); assert ( pResult ); assert ( ppSorters ); assert ( !pQuery->m_sQuery.IsEmpty() && pQuery->m_eMode!=SPH_MATCH_FULLSCAN ); // scans must go through MultiScan() assert ( iTag>=0 ); // start counting int64_t tmQueryStart = sphMicroTimer(); /////////////////// // setup searching /////////////////// PROFILER_INIT (); PROFILE_BEGIN ( query_init ); // non-ready index, empty response! 
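// ---------------------------------------------------------------------------
// EDITOR'S NOTE: illustrative sketch added during editing, NOT part of the
// original Sphinx 2.0.4 sources. ParsedMultiQuery() times itself with
// sphMicroTimer(): the start is captured once, a max_query_time deadline is
// later derived from it (start + msec*1000), and the final m_iQueryTime is the
// elapsed microseconds divided by 1000. The #if 0 snippet shows the same
// pattern with std::chrono and hypothetical Toy* names, as a reading aid only.
#if 0
#include <chrono>
#include <cstdint>

static int64_t ToyMicroTimer ()
{
	using namespace std::chrono;
	return duration_cast<microseconds> ( steady_clock::now().time_since_epoch() ).count();
}

struct ToyQueryTimer_t
{
	int64_t m_tmStart = ToyMicroTimer();	// captured once, at query start
	int64_t m_tmDeadline = 0;				// 0 means "no limit"

	void	SetMaxQueryMsec ( int iMsec )	{ m_tmDeadline = ( iMsec>0 ) ? m_tmStart + int64_t(iMsec)*1000 : 0; }
	bool	Expired () const				{ return m_tmDeadline && ToyMicroTimer()>m_tmDeadline; }
	int		ElapsedMsec () const			{ return (int)( ( ToyMicroTimer()-m_tmStart )/1000 ); }
};
#endif
// ---------------------------------------------------------------------------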
if ( !m_pPreread || !*m_pPreread ) { pResult->m_sError = "index not preread"; return false; } // select the sorter with max schema int iMaxSchemaSize = -1; int iMaxSchemaIndex = -1; for ( int i=0; iGetSchema().GetRowSize() > iMaxSchemaSize ) { iMaxSchemaSize = ppSorters[i]->GetSchema().GetRowSize(); iMaxSchemaIndex = i; } // setup calculations and result schema CSphQueryContext tCtx; if ( !tCtx.SetupCalc ( pResult, ppSorters[iMaxSchemaIndex]->GetSchema(), m_tSchema, GetMVAPool() ) ) return false; // set string pool for string on_sort expression fix up tCtx.SetStringPool ( m_pStrings.GetWritePtr() ); // open files CSphAutofile tDoclist, tHitlist, tWordlist, tDummy; if ( !m_bKeepFilesOpen ) { if ( tDoclist.Open ( GetIndexFileName("spd"), SPH_O_READ, pResult->m_sError ) < 0 ) return false; if ( tHitlist.Open ( GetIndexFileName ( m_uVersion>=3 ? "spp" : "spd" ), SPH_O_READ, pResult->m_sError ) < 0 ) return false; if ( tWordlist.Open ( GetIndexFileName ( "spi" ), SPH_O_READ, pResult->m_sError ) < 0 ) return false; } // setup search terms DiskIndexQwordSetup_c tTermSetup ( m_bKeepFilesOpen ? m_tDoclistFile : tDoclist, m_bKeepFilesOpen ? m_tHitlistFile : tHitlist, m_bPreloadWordlist ? tDummy : ( m_bKeepFilesOpen ? m_tWordlist.m_tFile : tWordlist ), m_bPreloadWordlist ? 0 : m_tWordlist.m_iMaxChunk ); tTermSetup.m_pDict = pDict; tTermSetup.m_pIndex = this; tTermSetup.m_eDocinfo = m_tSettings.m_eDocinfo; tTermSetup.m_tMin.m_iDocID = m_pMin->m_iDocID; if ( m_tSettings.m_eDocinfo==SPH_DOCINFO_INLINE ) { tTermSetup.m_tMin.Clone ( *m_pMin, m_tSchema.GetRowSize() ); tTermSetup.m_iInlineRowitems = m_tSchema.GetRowSize(); } tTermSetup.m_iDynamicRowitems = pResult->m_tSchema.GetDynamicSize(); if ( pQuery->m_uMaxQueryMsec>0 ) tTermSetup.m_iMaxTimer = sphMicroTimer() + pQuery->m_uMaxQueryMsec*1000; // max_query_time tTermSetup.m_pWarning = &pResult->m_sWarning; tTermSetup.m_bSetupReaders = true; tTermSetup.m_pCtx = &tCtx; tTermSetup.m_pNodeCache = pNodeCache; int iIndexWeight = pQuery->GetIndexWeight ( m_sIndexName.cstr() ); // bind weights tCtx.BindWeights ( pQuery, m_tSchema, iIndexWeight ); // setup query // must happen before index-level reject, in order to build proper keyword stats CSphScopedPtr pRanker ( sphCreateRanker ( tXQ, pQuery, pResult, tTermSetup, tCtx ) ); if ( !pRanker.Ptr() ) return false; // empty index, empty response! 
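// ---------------------------------------------------------------------------
// EDITOR'S NOTE: illustrative sketch added during editing, NOT part of the
// original Sphinx 2.0.4 sources. The setup code above picks the attached
// sorter with the widest result schema so that per-match calculations are set
// up once, against the widest row. The #if 0 snippet isolates that argmax
// step with a hypothetical ToySorter_t type; it is a model of the idea, not
// the real ISphMatchSorter interface.
#if 0
#include <vector>

struct ToySorter_t
{
	int m_iRowSize;		// per-match row size this sorter needs
};

// returns the index of the widest sorter, or -1 for an empty list
static int ToyPickWidestSorter ( const std::vector<ToySorter_t> & dSorters )
{
	int iMaxSize = -1;
	int iMaxIndex = -1;
	for ( int i=0; i<(int)dSorters.size(); i++ )
		if ( dSorters[i].m_iRowSize>iMaxSize )
		{
			iMaxSize = dSorters[i].m_iRowSize;
			iMaxIndex = i;
		}
	return iMaxIndex;
}
#endif
// ---------------------------------------------------------------------------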
if ( m_bIsEmpty ) return true; assert ( m_tSettings.m_eDocinfo!=SPH_DOCINFO_EXTERN || !m_pDocinfo.IsEmpty() ); // check that docinfo is preloaded // setup filters if ( !tCtx.CreateFilters ( pQuery->m_sQuery.IsEmpty(), &pQuery->m_dFilters, pResult->m_tSchema, GetMVAPool(), pResult->m_sError ) ) return false; if ( !tCtx.CreateFilters ( pQuery->m_sQuery.IsEmpty(), pExtraFilters, pResult->m_tSchema, GetMVAPool(), pResult->m_sError ) ) return false; // check if we can early reject the whole index if ( tCtx.m_pFilter && m_uDocinfoIndex ) { DWORD uStride = DOCINFO_IDSIZE + m_tSchema.GetRowSize(); DWORD * pMinEntry = const_cast ( &m_pDocinfoIndex [ 2*m_uDocinfoIndex*uStride ] ); DWORD * pMaxEntry = pMinEntry + uStride; if ( !tCtx.m_pFilter->EvalBlock ( pMinEntry, pMaxEntry ) ) return true; } // setup lookup tCtx.m_bLookupFilter = ( m_tSettings.m_eDocinfo==SPH_DOCINFO_EXTERN ) && pQuery->m_dFilters.GetLength(); if ( tCtx.m_dCalcFilter.GetLength() || pQuery->m_eRanker==SPH_RANK_EXPR ) tCtx.m_bLookupFilter = true; // suboptimal in case of attr-independent expressions, but we don't care tCtx.m_bLookupSort = false; if ( m_tSettings.m_eDocinfo==SPH_DOCINFO_EXTERN && !tCtx.m_bLookupFilter ) for ( int iSorter=0; iSorterUsesAttrs() ) tCtx.m_bLookupSort = true; if ( tCtx.m_dCalcSort.GetLength() ) tCtx.m_bLookupSort = true; // suboptimal in case of attr-independent expressions, but we don't care // setup sorters vs. MVA for ( int i=0; iSetMVAPool ( m_pMva.GetWritePtr() ); (ppSorters[i])->SetStringPool ( m_pStrings.GetWritePtr() ); } // setup overrides if ( !tCtx.SetupOverrides ( pQuery, pResult, m_tSchema ) ) return false; PROFILE_END ( query_init ); ////////////////////////////////////// // find and weight matching documents ////////////////////////////////////// bool bFinalLookup = !tCtx.m_bLookupFilter && !tCtx.m_bLookupSort; bool bFinalPass = bFinalLookup || tCtx.m_dCalcFinal.GetLength(); int iMyTag = bFinalPass ? 
-1 : iTag; PROFILE_BEGIN ( query_match ); switch ( pQuery->m_eMode ) { case SPH_MATCH_ALL: case SPH_MATCH_PHRASE: case SPH_MATCH_ANY: case SPH_MATCH_EXTENDED: case SPH_MATCH_EXTENDED2: case SPH_MATCH_BOOLEAN: if ( !MatchExtended ( &tCtx, pQuery, iSorters, ppSorters, pRanker.Ptr(), iMyTag ) ) return false; break; default: sphDie ( "INTERNAL ERROR: unknown matching mode (mode=%d)", pQuery->m_eMode ); } PROFILE_END ( query_match ); //////////////////// // cook result sets //////////////////// // adjust result sets for ( int iSorter=0; iSorterGetLength() && bFinalPass ) { CSphMatch * const pHead = pTop->Finalize(); const int iCount = pTop->GetLength (); CSphMatch * const pTail = pHead + iCount; for ( CSphMatch * pCur=pHead; pCurm_iTag<0 ) { if ( bFinalLookup ) CopyDocinfo ( &tCtx, *pCur, FindDocinfo ( pCur->m_iDocID ) ); tCtx.CalcFinal ( *pCur ); pCur->m_iTag = iTag; } } // mva and string pools ptrs pResult->m_pMva = m_pMva.GetWritePtr(); pResult->m_pStrings = m_pStrings.GetWritePtr(); } PROFILER_DONE (); PROFILE_SHOW (); // query timer pResult->m_iQueryTime += (int)( ( sphMicroTimer()-tmQueryStart )/1000 ); return true; } ////////////////////////////////////////////////////////////////////////// // INDEX CHECKING ////////////////////////////////////////////////////////////////////////// #define LOC_FAIL(_args) \ if ( ++iFails<=FAILS_THRESH ) \ { \ fprintf ( fp, "FAILED, " ); \ fprintf _args; \ fprintf ( fp, "\n" ); \ iFailsPrinted++; \ \ if ( iFails==FAILS_THRESH ) \ fprintf ( fp, "(threshold reached; suppressing further output)\n" ); \ } int CSphIndex_VLN::DebugCheck ( FILE * fp ) { int64_t tmCheck = sphMicroTimer(); int iFails = 0; int iFailsPrinted = 0; const int FAILS_THRESH = 100; // check if index is ready if ( m_dShared.GetNumEntries()!=SPH_SHARED_VARS_COUNT || !m_pPreread || !*m_pPreread ) LOC_FAIL(( fp, "index not preread" )); bool bProgress = isatty ( fileno ( fp ) )!=0; ////////////// // open files ////////////// CSphString sError; CSphAutoreader rdDict, rdDocs, rdHits; if ( !rdDict.Open ( GetIndexFileName("spi"), sError ) ) LOC_FAIL(( fp, "unable to open dictionary: %s", sError.cstr() )); if ( !rdDocs.Open ( GetIndexFileName("spd"), sError ) ) LOC_FAIL(( fp, "unable to open doclist: %s", sError.cstr() )); if ( !rdHits.Open ( GetIndexFileName("spp"), sError ) ) LOC_FAIL(( fp, "unable to open hitlist: %s", sError.cstr() )); //////////////////// // check dictionary //////////////////// fprintf ( fp, "checking dictionary...\n" ); SphWordID_t uWordid = 0; int64_t iDoclistOffset = 0; int iWordsTotal = 0; char sWord[MAX_KEYWORD_BYTES], sLastWord[MAX_KEYWORD_BYTES]; memset ( sLastWord, 0, sizeof(sLastWord) ); const int iWordPerCP = m_uVersion>=21 ? SPH_WORDLIST_CHECKPOINT : 1024; const bool bWordDict = m_pDict->GetSettings().m_bWordDict; CSphVector dCheckpoints; if ( bWordDict && m_uVersion<21 ) LOC_FAIL(( fp, "dictionary needed index version not less then 21 (readed=%d)" , m_uVersion )); rdDict.SeekTo ( 1, READ_NO_SIZE_HINT ); for ( ; rdDict.GetPos()!=m_tWordlist.m_iCheckpointsPos && !m_bIsEmpty; ) { // sanity checks if ( rdDict.GetPos()>=m_tWordlist.m_iCheckpointsPos ) { LOC_FAIL(( fp, "reading past checkpoints" )); break; } // store current entry pos (for checkpointing later), read next delta const int64_t iDictPos = rdDict.GetPos(); const SphWordID_t iDeltaWord = bWordDict ? 
rdDict.GetByte() : rdDict.UnzipWordid(); // checkpoint encountered, handle it if ( !iDeltaWord ) { rdDict.UnzipOffset(); if ( ( iWordsTotal%iWordPerCP )!=0 && rdDict.GetPos()!=m_tWordlist.m_iCheckpointsPos ) LOC_FAIL(( fp, "unexpected checkpoint (pos="INT64_FMT", word=%d, words=%d, expected=%d)", iDictPos, iWordsTotal, ( iWordsTotal%iWordPerCP ), iWordPerCP )); uWordid = 0; iDoclistOffset = 0; continue; } SphWordID_t uNewWordid = 0; SphOffset_t iNewDoclistOffset = 0; int iDocs = 0; int iHits = 0; if ( bWordDict ) { // unpack next word // must be in sync with DictEnd()! BYTE uPack = (BYTE)iDeltaWord; int iMatch, iDelta; if ( uPack & 0x80 ) { iDelta = ( ( uPack>>4 ) & 7 ) + 1; iMatch = uPack & 15; } else { iDelta = uPack & 127; iMatch = rdDict.GetByte(); } const int iLastWordLen = strlen(sLastWord); if ( iMatch+iDelta>=(int)sizeof(sLastWord)-1 || iMatch>iLastWordLen ) { LOC_FAIL(( fp, "wrong word-delta (pos="INT64_FMT", word=%s, len=%d, begin=%d, delta=%d)", iDictPos, sLastWord, iLastWordLen, iMatch, iDelta )); rdDict.SkipBytes ( iDelta ); } else { rdDict.GetBytes ( sWord + iMatch, iDelta ); sWord [ iMatch+iDelta ] = '\0'; } iNewDoclistOffset = rdDict.UnzipOffset(); iDocs = rdDict.UnzipInt(); iHits = rdDict.UnzipInt(); int iHint = ( iDocs>=DOCLIST_HINT_THRESH ) ? rdDict.GetByte() : 0; iHint = DoclistHintUnpack ( iDocs, (BYTE)iHint ); const int iNewWordLen = strlen(sWord); if ( iNewWordLen==0 ) LOC_FAIL(( fp, "empty word in dictionary (pos="INT64_FMT")", iDictPos )); if ( iLastWordLen && iNewWordLen ) if ( sphDictCmpStrictly ( sWord, iNewWordLen, sLastWord, iLastWordLen )<=0 ) LOC_FAIL(( fp, "word order decreased (pos="INT64_FMT", word=%s, prev=%s)", iDictPos, sLastWord, sWord )); if ( iHint<0 ) LOC_FAIL(( fp, "invalid word hint (pos="INT64_FMT", word=%s, hint=%d)", iDictPos, sWord, iHint )); if ( iDocs<=0 || iHits<=0 || iHits>4 ) & 7 ) + 1; iMatch = uPack & 15; } else { iDelta = uPack & 127; iMatch = rdDict.GetByte(); } const int iLastWordLen = strlen(sWord); if ( iMatch+iDelta>=(int)sizeof(sWord)-1 || iMatch>iLastWordLen ) rdDict.SkipBytes ( iDelta ); else { rdDict.GetBytes ( sWord + iMatch, iDelta ); sWord [ iMatch+iDelta ] = '\0'; } iDoclistOffset = rdDict.UnzipOffset(); iDictDocs = rdDict.UnzipInt(); iDictHits = rdDict.UnzipInt(); int iHint = ( iDictDocs>=DOCLIST_HINT_THRESH ) ? 
rdDict.GetByte() : 0; iHint = DoclistHintUnpack ( iDictDocs, (BYTE)iHint ); } else { // finish reading the entire entry uWordid = uWordid + iDeltaWord; iDoclistOffset = iDoclistOffset + rdDict.UnzipOffset(); iDictDocs = rdDict.UnzipInt(); iDictHits = rdDict.UnzipInt(); } // check whether the offset is as expected if ( iDoclistOffset!=rdDocs.GetPos() ) { if ( !bWordDict ) LOC_FAIL(( fp, "unexpected doclist offset (wordid="UINT64_FMT"(%s)(%d), dictpos="INT64_FMT", doclistpos="INT64_FMT")", (uint64_t)uWordid, sWord, iWordsChecked, iDoclistOffset, (int64_t)rdDocs.GetPos() )); if ( iDoclistOffset>=iDocsSize || iDoclistOffset<0 ) { LOC_FAIL(( fp, "unexpected doclist offset, off the file (wordid="UINT64_FMT"(%s)(%d), dictpos="INT64_FMT", doclistsize="INT64_FMT")", (uint64_t)uWordid, sWord, iWordsChecked, iDoclistOffset, iDocsSize )); iWordsChecked++; continue; } else rdDocs.SeekTo ( iDoclistOffset, READ_NO_SIZE_HINT ); } // create and manually setup doclist reader DiskIndexQwordTraits_c * pQword = NULL; WITH_QWORD ( this, false, T, pQword = new T ( false, false ) ); pQword->m_tDoc.Reset ( m_tSchema.GetDynamicSize() ); pQword->m_iMinID = m_pMin->m_iDocID; pQword->m_tDoc.m_iDocID = m_pMin->m_iDocID; if ( m_tSettings.m_eDocinfo==SPH_DOCINFO_INLINE ) { pQword->m_iInlineAttrs = m_tSchema.GetDynamicSize(); pQword->m_pInlineFixup = m_pMin->m_pDynamic; } else { pQword->m_iInlineAttrs = 0; pQword->m_pInlineFixup = NULL; } pQword->m_iDocs = 0; pQword->m_iHits = 0; pQword->m_rdDoclist.SetFile ( rdDocs.GetFD(), rdDocs.GetFilename().cstr() ); pQword->m_rdDoclist.SeekTo ( rdDocs.GetPos(), READ_NO_SIZE_HINT ); pQword->m_rdHitlist.SetFile ( rdHits.GetFD(), rdHits.GetFilename().cstr() ); pQword->m_rdHitlist.SeekTo ( rdHits.GetPos(), READ_NO_SIZE_HINT ); CSphRowitem * pInlineStorage = NULL; if ( pQword->m_iInlineAttrs ) pInlineStorage = new CSphRowitem [ pQword->m_iInlineAttrs ]; // loop the doclist SphDocID_t uLastDocid = 0; int iDoclistDocs = 0; int iDoclistHits = 0; int iHitlistHits = 0; for ( ;; ) { const CSphMatch & tDoc = pQword->GetNextDoc ( pInlineStorage ); if ( !tDoc.m_iDocID ) break; // checks! 
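// ---------------------------------------------------------------------------
// EDITOR'S NOTE: illustrative sketch added during editing, NOT part of the
// original Sphinx 2.0.4 sources. One invariant DebugCheck() verifies below is
// that document ids inside a single keyword's doclist are strictly increasing
// (the "docid decreased" failure). The #if 0 snippet states that check on a
// plain vector with hypothetical Toy* names, purely as a reading aid.
#if 0
#include <cstdint>
#include <vector>

typedef uint64_t ToyDocID_t;

// returns the index of the first out-of-order entry, or -1 if the doclist is
// strictly ascending (ids are assumed non-zero, as in the loop below)
static int ToyFindOrderViolation ( const std::vector<ToyDocID_t> & dDoclist )
{
	ToyDocID_t uLast = 0;
	for ( int i=0; i<(int)dDoclist.size(); i++ )
	{
		if ( dDoclist[i]<=uLast )
			return i;
		uLast = dDoclist[i];
	}
	return -1;
}
#endif
// ---------------------------------------------------------------------------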
if ( m_tSettings.m_eDocinfo==SPH_DOCINFO_EXTERN ) { const CSphRowitem * pFound = FindDocinfo ( tDoc.m_iDocID ); if ( !pFound ) LOC_FAIL(( fp, "row not found (wordid="UINT64_FMT"(%s), docid="DOCID_FMT")", uint64_t(uWordid), sWord, tDoc.m_iDocID )); if ( pFound ) if ( tDoc.m_iDocID!=DOCINFO2ID(pFound) ) LOC_FAIL(( fp, "row found but id mismatches (wordid="UINT64_FMT"(%s), docid="DOCID_FMT", found="DOCID_FMT")", uint64_t(uWordid), sWord, tDoc.m_iDocID, DOCINFO2ID(pFound) )); } if ( tDoc.m_iDocID<=uLastDocid ) LOC_FAIL(( fp, "docid decreased (wordid="UINT64_FMT"(%s), docid="DOCID_FMT", lastid="DOCID_FMT")", uint64_t(uWordid), sWord, tDoc.m_iDocID, uLastDocid )); uLastDocid = tDoc.m_iDocID; iDoclistDocs++; iDoclistHits += pQword->m_uMatchHits; // check position in case of regular (not-inline) hit if (!( pQword->m_iHitlistPos>>63 )) { if ( !bWordDict && pQword->m_iHitlistPos!=pQword->m_rdHitlist.GetPos() ) LOC_FAIL(( fp, "unexpected hitlist offset (wordid="UINT64_FMT"(%s), docid="DOCID_FMT", expected="INT64_FMT", actual="INT64_FMT")", (uint64_t)uWordid, sWord, pQword->m_tDoc.m_iDocID, (int64_t)pQword->m_iHitlistPos, (int64_t)pQword->m_rdHitlist.GetPos() )); } // aim pQword->SeekHitlist ( pQword->m_iHitlistPos ); // loop the hitlist int iDocHits = 0; CSphSmallBitvec dFieldMask; dFieldMask.Unset(); Hitpos_t uLastHit = EMPTY_HIT; for ( ;; ) { Hitpos_t uHit = pQword->GetNextHit(); if ( uHit==EMPTY_HIT ) break; if (!( uLastHitm_tDoc.m_iDocID, uHit, uLastHit )); uLastHit = uHit; int iField = HITMAN::GetField ( uHit ); if ( iField<0 || iField>=SPH_MAX_FIELDS ) { LOC_FAIL(( fp, "hit field out of bounds (wordid="UINT64_FMT"(%s), docid="DOCID_FMT", field=%d)", (uint64_t)uWordid, sWord, pQword->m_tDoc.m_iDocID, iField )); } else if ( iField>=m_tSchema.m_dFields.GetLength() ) { LOC_FAIL(( fp, "hit field out of schema (wordid="UINT64_FMT"(%s), docid="DOCID_FMT", field=%d)", (uint64_t)uWordid, sWord, pQword->m_tDoc.m_iDocID, iField )); } dFieldMask.Set(iField); iDocHits++; // to check doclist entry iHitlistHits++; // to check dictionary entry } // check hit count if ( iDocHits!=(int)pQword->m_uMatchHits ) LOC_FAIL(( fp, "doc hit count mismatch (wordid="UINT64_FMT"(%s), docid="DOCID_FMT", doclist=%d, hitlist=%d)", (uint64_t)uWordid, sWord, pQword->m_tDoc.m_iDocID, pQword->m_uMatchHits, iDocHits )); // check the mask if ( dFieldMask!=pQword->m_dQwordFields ) LOC_FAIL(( fp, "field mask mismatch (wordid="UINT64_FMT"(%s), docid="DOCID_FMT")", (uint64_t)uWordid, sWord, pQword->m_tDoc.m_iDocID )); // update my hitlist reader rdHits.SeekTo ( pQword->m_rdHitlist.GetPos(), READ_NO_SIZE_HINT ); } // do checks if ( iDictDocs!=iDoclistDocs ) LOC_FAIL(( fp, "doc count mismatch (wordid="UINT64_FMT"(%s), dict=%d, doclist=%d)", uint64_t(uWordid), sWord, iDictDocs, iDoclistDocs )); if ( iDictHits!=iDoclistHits || iDictHits!=iHitlistHits ) LOC_FAIL(( fp, "hit count mismatch (wordid="UINT64_FMT"(%s), dict=%d, doclist=%d, hitlist=%d)", uint64_t(uWordid), sWord, iDictHits, iDoclistHits, iHitlistHits )); // move my reader instance forward too rdDocs.SeekTo ( pQword->m_rdDoclist.GetPos(), READ_NO_SIZE_HINT ); // cleanup SafeDelete ( pInlineStorage ); SafeDelete ( pQword ); // progress bar if ( (++iWordsChecked)%1000==0 && bProgress ) { fprintf ( fp, "%d/%d\r", iWordsChecked, iWordsTotal ); fflush ( fp ); } } /////////////////////////// // check rows (attributes) /////////////////////////// if ( m_tSettings.m_eDocinfo==SPH_DOCINFO_EXTERN && !m_pDocinfo.IsEmpty() ) { fprintf ( fp, "checking rows...\n" ); // sizes and counts DWORD 
uRowsTotal = m_uDocinfo; DWORD uStride = DOCINFO_IDSIZE + m_tSchema.GetRowSize(); DWORD uAllRowsTotal = uRowsTotal; uAllRowsTotal += 2*(1+m_uDocinfoIndex); // should had been fixed up to v.20 by the loader if ( uAllRowsTotal*uStride!=m_pDocinfo.GetNumEntries() ) LOC_FAIL(( fp, "rowitems count mismatch (expected=%u, loaded="INT64_FMT")", uAllRowsTotal*uStride, (int64_t)m_pDocinfo.GetNumEntries() )); // extract rowitem indexes for MVAs etc // (ie. attr types that we can and will run additional checks on) CSphVector dMvaItems; CSphVector dFloatItems; CSphVector dStrItems; for ( int i=0; i dStringOffsets; if ( m_pStrings.GetNumEntries()>1 ) { const BYTE * pBase = m_pStrings.GetWritePtr(); const BYTE * pCur = pBase + 1; const BYTE * pMax = pBase + m_pStrings.GetNumEntries(); while ( pCurpMax || pStrpCur+4 ) { LOC_FAIL(( fp, "string length out of bounds (offset=%u, len=%d)", (DWORD)(pCur-pBase), iLen )); break; } dStringOffsets.Add ( (DWORD)(pCur-pBase) ); pCur = pStr + iLen; } } // loop the rows const CSphRowitem * pRow = m_pDocinfo.GetWritePtr(); const DWORD * pMvaBase = m_pMva.GetWritePtr(); const DWORD * pMvaMax = pMvaBase + m_pMva.GetNumEntries(); const DWORD * pMva = pMvaBase; int iOrphan = 0; SphDocID_t uLastID = 0; for ( DWORD uRow=0; uRow=pMvaMax ) { bIsSpaValid = false; LOC_FAIL(( fp, "MVA index out of bounds (row=%u, mvaattr=%d, docid="DOCID_FMT", index=%u)", uRow, iItem, uLastID, uOffset )); } if ( uOffset && pMvaBase+uOffset=pMvaMax ) { LOC_FAIL(( fp, "MVA index out of bounds (row=%u, mvaattr=%d, docid expected="DOCID_FMT", got="DOCID_FMT", index=%u)", uRow, iItem, uLastID, uMvaID, (DWORD)(pMva-pMvaBase) )); bIsMvaCorrect = false; continue; } // check values DWORD uValues = *pMva++; if ( pMva+uValues-1>=pMvaMax ) { LOC_FAIL(( fp, "MVA count out of bounds (row=%u, mvaattr=%d, docid expected="DOCID_FMT", got="DOCID_FMT", count=%u)", uRow, iItem, uLastID, uMvaID, uValues )); pMva += uValues; bIsMvaCorrect = false; continue; } // check that values are ascending for ( DWORD uVal=(iItem>=iMva64 ? 2 : 1); uVal=iMva64 ) { uPrev = MVA_UPSIZE ( pMva+uVal-2 ); uCur = MVA_UPSIZE ( pMva+uVal ); uVal += 2; } else { uPrev = pMva[uVal-1]; uCur = pMva[uVal]; uVal++; } if ( uCur<=uPrev ) { LOC_FAIL(( fp, "unsorted MVA values (row=%u, mvaattr=%d, docid expected="DOCID_FMT", got="DOCID_FMT", val[%u]=%u, val[%u]=%u)", uRow, iItem, uLastID, uMvaID, ( iItem>=iMva64 ? uVal-2 : uVal-1 ), (unsigned int)uPrev, uVal, (unsigned int)uCur )); bIsMvaCorrect = false; } uVal += ( iItem>=iMva64 ? 
2 : 1 ); } pMva += uValues; } if ( !bIsMvaCorrect ) break; // orphan only ON no errors && ( not matched ids || ids matched multiply times ) if ( bIsMvaCorrect && ( uMvaID!=uLastID || ( uMvaID==uLastID && bLastIDChecked ) ) ) iOrphan++; bLastIDChecked |= uLastID==uMvaID; } if ( !bLastIDChecked && bHasValues ) LOC_FAIL(( fp, "missed or damaged MVA (row=%u, docid expected="DOCID_FMT")", uRow, uLastID )); } /////////////////////////// // check floats /////////////////////////// ARRAY_FOREACH ( iItem, dFloatItems ) { const CSphRowitem * pAttrs = DOCINFO2ATTRS(pRow); const DWORD uValue = (DWORD)sphGetRowAttr ( pAttrs, dFloatItems[ iItem ] ); const DWORD uExp = ( uValue >> 23 ) & 0xff; const DWORD uMantissa = uValue & 0x003fffff; // check normalized if ( uExp==0 && uMantissa!=0 ) LOC_FAIL(( fp, "float attribute value is unnormalized (row=%u, attr=%d, id="DOCID_FMT", raw=0x%x, value=%f)", uRow, iItem, uLastID, uValue, sphDW2F ( uValue ) )); // check +-inf if ( uExp==0xff && uMantissa==0 ) LOC_FAIL(( fp, "float attribute is infinity (row=%u, attr=%d, id="DOCID_FMT", raw=0x%x, value=%f)", uRow, iItem, uLastID, uValue, sphDW2F ( uValue ) )); } ///////////////// // check strings ///////////////// ARRAY_FOREACH ( iItem, dStrItems ) { const CSphRowitem * pAttrs = DOCINFO2ATTRS(pRow); const DWORD uOffset = (DWORD)sphGetRowAttr ( pAttrs, dStrItems[ iItem ] ); if ( uOffset>=m_pStrings.GetNumEntries() ) { LOC_FAIL(( fp, "string offset out of bounds (row=%u, stringattr=%d, docid="DOCID_FMT", index=%u)", uRow, iItem, uLastID, uOffset )); continue; } if ( !uOffset ) continue; const BYTE * pStr = NULL; const int iLen = sphUnpackStr ( m_pStrings.GetWritePtr() + uOffset, &pStr ); // check that length is sane if ( pStr+iLen-1>=m_pStrings.GetWritePtr()+m_pStrings.GetLength() ) { LOC_FAIL(( fp, "string length out of bounds (row=%u, stringattr=%d, docid="DOCID_FMT", index=%u)", uRow, iItem, uLastID, (unsigned int)( pStr-m_pStrings.GetWritePtr()+iLen-1 ) )); continue; } // check that offset is one of the good ones // (that is, that we don't point in the middle of some other data) if ( !dStringOffsets.BinarySearch ( uOffset ) ) { LOC_FAIL(( fp, "string offset is not a string start (row=%u, stringattr=%d, docid="DOCID_FMT", offset=%u)", uRow, iItem, uLastID, uOffset )); } } // progress bar if ( uRow%1000==0 && bProgress ) { fprintf ( fp, "%d/%d\r", uRow, uRowsTotal ); fflush ( fp ); } } if ( iOrphan ) fprintf ( fp, "WARNING: %d orphaned MVA entries were found\n", iOrphan ); /////////////////////////// // check blocks index /////////////////////////// fprintf ( fp, "checking attribute blocks index...\n" ); // check size const DWORD uTempDocinfoIndex = ( m_uDocinfo+DOCINFO_INDEX_FREQ-1 ) / DOCINFO_INDEX_FREQ; if ( uTempDocinfoIndex!=m_uDocinfoIndex ) LOC_FAIL(( fp, "block count differs (expected=%d, got=%d)", uTempDocinfoIndex, m_uDocinfoIndex )); const DWORD uMinMaxStride = DOCINFO_IDSIZE + m_tSchema.GetRowSize(); const DWORD * pDocinfoIndexMax = m_pDocinfoIndex + 2*( 1+m_uDocinfoIndex )*uMinMaxStride; for ( DWORD uIndexEntry=0; uIndexEntry pDocinfoIndexMax ) LOC_FAIL(( fp, "unexpected block index end (row=%u, docid="DOCID_FMT", block=%d, max=%u, cur=%u)", uIndexEntry, uDocID, uBlock, (DWORD)(pDocinfoIndexMax-m_pDocinfoIndex), (DWORD)(pMaxEntry+uMinMaxStride-m_pDocinfoIndex) )); // check attribute location vs global range if ( pMaxAttrs+uMinMaxStride > pDocinfoIndexMax ) LOC_FAIL(( fp, "attribute position out of blocks index (row=%u, docid="DOCID_FMT", block=%u, expected<%u, got=%u)", uIndexEntry, uDocID, uBlock, 
(DWORD)(pDocinfoIndexMax-m_pDocinfoIndex), (DWORD)(pMaxAttrs+uMinMaxStride-m_pDocinfoIndex) )); const SphDocID_t uMinDocID = *(SphDocID_t*)pMinEntry; const SphDocID_t uMaxDocID = *(SphDocID_t*)pMaxEntry; // checks is docid min max range valid if ( uMinDocID > uMaxDocID && bIsBordersCheckTime ) LOC_FAIL(( fp, "invalid docid range (row=%u, block=%d, min="DOCID_FMT", max="DOCID_FMT")", uIndexEntry, uBlock, uMinDocID, uMaxDocID )); // checks docid vs blocks range if ( uDocID < uMinDocID || uDocID > uMaxDocID ) LOC_FAIL(( fp, "unexpected docid range (row=%u, docid="DOCID_FMT", block=%d, min="DOCID_FMT", max="DOCID_FMT")", uIndexEntry, uDocID, uBlock, uMinDocID, uMaxDocID )); bool bIsFirstMva = true; // check values vs blocks range const DWORD * pSpaRow = DOCINFO2ATTRS(pAttr); for ( int iItem=0; iItem uMax && bIsBordersCheckTime ) LOC_FAIL(( fp, "invalid attribute range (row=%u, block=%d, min="INT64_FMT", max="INT64_FMT")", uIndexEntry, uBlock, uMin, uMax )); if ( uVal < uMin || uVal > uMax ) LOC_FAIL(( fp, "unexpected attribute value (row=%u, attr=%u, docid="DOCID_FMT", block=%d, value=0x%x, min=0x%x, max=0x%x)", uIndexEntry, iItem, uDocID, uBlock, (DWORD)uVal, (DWORD)uMin, (DWORD)uMax )); } break; case SPH_ATTR_FLOAT: { const float fVal = sphDW2F ( (DWORD)sphGetRowAttr ( pSpaRow, tCol.m_tLocator ) ); const float fMin = sphDW2F ( (DWORD)sphGetRowAttr ( pMinAttrs, tCol.m_tLocator ) ); const float fMax = sphDW2F ( (DWORD)sphGetRowAttr ( pMaxAttrs, tCol.m_tLocator ) ); // checks is attribute min max range valid if ( fMin > fMax && bIsBordersCheckTime ) LOC_FAIL(( fp, "invalid attribute range (row=%u, block=%d, min=%f, max=%f)", uIndexEntry, uBlock, fMin, fMax )); if ( fVal < fMin || fVal > fMax ) LOC_FAIL(( fp, "unexpected attribute value (row=%u, attr=%u, docid="DOCID_FMT", block=%d, value=%f, min=%f, max=%f)", uIndexEntry, iItem, uDocID, uBlock, fVal, fMin, fMax )); } break; case SPH_ATTR_UINT32SET: { const DWORD uMin = (DWORD)sphGetRowAttr ( pMinAttrs, tCol.m_tLocator ); const DWORD uMax = (DWORD)sphGetRowAttr ( pMaxAttrs, tCol.m_tLocator ); // checks is MVA attribute min max range valid if ( uMin > uMax && bIsBordersCheckTime && uMin!=0xffffffff && uMax!=0 ) LOC_FAIL(( fp, "invalid MVA range (row=%u, block=%d, min=0x%x, max=0x%x)", uIndexEntry, uBlock, uMin, uMax )); SphAttr_t uOff = sphGetRowAttr ( pSpaRow, tCol.m_tLocator ); if ( !uOff ) break; const DWORD * pMva = m_pMva.GetWritePtr() + uOff; const DWORD * pMvaDocID = bIsFirstMva ? 
( pMva - sizeof(SphDocID_t) / sizeof(DWORD) ) : NULL; bIsFirstMva = false; if ( uOff>=(SphAttr_t)m_pMva.GetNumEntries() ) break; if ( pMvaDocID && DOCINFO2ID ( pMvaDocID )!=uDocID ) { LOC_FAIL(( fp, "unexpected MVA docid (row=%u, mvaattr=%d, expected="DOCID_FMT", got="DOCID_FMT", block=%d, index=%u)", uIndexEntry, iItem, uDocID, DOCINFO2ID ( pMvaDocID ), uBlock, (DWORD)uOff )); break; } // check values const DWORD uValues = *pMva++; if ( uOff+uValues>(SphAttr_t)m_pMva.GetNumEntries() ) break; for ( DWORD iVal=0; iVal uMax ) LOC_FAIL(( fp, "unexpected MVA value (row=%u, attr=%u, docid="DOCID_FMT", block=%d, index=%u, value=0x%x, min=0x%x, max=0x%x)", uIndexEntry, iItem, uDocID, uBlock, iVal, (DWORD)uVal, (DWORD)uMin, (DWORD)uMax )); } } break; default: break; } } // progress bar if ( uIndexEntry%1000==0 && bProgress ) { fprintf ( fp, "%d/%d\r", uIndexEntry, m_uDocinfo ); fflush ( fp ); } } } /////////////////////////// // check kill-list /////////////////////////// fprintf ( fp, "checking kill-list...\n" ); // check size if ( m_pKillList.GetNumEntries()!=m_iKillListSize ) LOC_FAIL(( fp, "kill-list size differs (expected=%d, got="INT64_FMT")", m_iKillListSize, (int64_t)m_pKillList.GetNumEntries() )); // check that ids are ascending for ( DWORD uID=1; uID m_dNormalForms; CSphMultiformContainer * m_pMultiWordforms; CSphOrderedHash < int, CSphString, CSphStrHashFunc, 1048576 > m_dHash; WordformContainer_t (); ~WordformContainer_t (); bool IsEqual ( const char * szFile, DWORD uCRC32 ); }; /// common CRC32/64 dictionary stuff struct CSphDictCRCTraits : CSphDict { CSphDictCRCTraits (); virtual ~CSphDictCRCTraits (); virtual void LoadStopwords ( const char * sFiles, ISphTokenizer * pTokenizer ); virtual bool LoadWordforms ( const char * szFile, ISphTokenizer * pTokenizer, const char * sIndex ); virtual bool SetMorphology ( const char * szMorph, bool bUseUTF8, CSphString & sError ); virtual bool HasMorphology() const; virtual void ApplyStemmers ( BYTE * pWord ); virtual void Setup ( const CSphDictSettings & tSettings ) { m_tSettings = tSettings; } virtual const CSphDictSettings & GetSettings () const { return m_tSettings; } virtual const CSphVector & GetStopwordsFileInfos () { return m_dSWFileInfos; } virtual const CSphSavedFile & GetWordformsFileInfo () { return m_tWFFileInfo; } virtual const CSphMultiformContainer * GetMultiWordforms () const { return m_pWordforms ? 
m_pWordforms->m_pMultiWordforms : NULL; } static void SweepWordformContainers ( const char * szFile, DWORD uCRC32 ); virtual void DictBegin ( int iTmpDictFD, int iDictFD, int iDictLimit ); virtual void DictEntry ( SphWordID_t uWordID, BYTE * sKeyword, int iDocs, int iHits, SphOffset_t iDoclistOffset, SphOffset_t iDoclistLength ); virtual void DictEndEntries ( SphOffset_t iDoclistOffset ); virtual bool DictEnd ( SphOffset_t * pCheckpointsPos, int * pCheckpointsCount, int iMemLimit, CSphString & sError ); virtual bool DictIsError () const { return m_wrDict.IsError(); } protected: CSphVector < int > m_dMorph; #if USE_LIBSTEMMER CSphVector < sb_stemmer * > m_dStemmers; struct DescStemmer_t { CSphString m_sAlgo; CSphString m_sEnc; }; CSphVector m_dDescStemmers; #endif int m_iStopwords; ///< stopwords count SphWordID_t * m_pStopwords; ///< stopwords ID list CSphFixedVector m_dStopwordContainer; protected: bool ToNormalForm ( BYTE * pWord ); bool ParseMorphology ( const char * szMorph, bool bUseUTF8, CSphString & sError ); SphWordID_t FilterStopword ( SphWordID_t uID ) const; ///< filter ID against stopwords list CSphDict * CloneBase ( CSphDictCRCTraits * pDict ) const; virtual bool HasState () const; CSphWriter m_wrDict; ///< final dict file writer CSphTightVector m_dCheckpoints; ///< checkpoint offsets int m_iEntries; ///< dictionary entries stored SphOffset_t m_iLastDoclistPos; SphWordID_t m_iLastWordID; private: WordformContainer_t * m_pWordforms; CSphVector m_dSWFileInfos; CSphSavedFile m_tWFFileInfo; CSphDictSettings m_tSettings; static CSphVector m_dWordformContainers; static WordformContainer_t * GetWordformContainer ( const char * szFile, DWORD uCRC32, const ISphTokenizer * pTokenizer, const char * sIndex ); static WordformContainer_t * LoadWordformContainer ( const char * szFile, DWORD uCRC32, const ISphTokenizer * pTokenizer, const char * sIndex ); bool InitMorph ( const char * szMorph, int iLength, bool bUseUTF8, CSphString & sError ); bool AddMorph ( int iMorph ); bool StemById ( BYTE * pWord, int iStemmer ); }; CSphVector < WordformContainer_t * > CSphDictCRCTraits::m_dWordformContainers; /// specialized CRC32/64 implementations template < bool CRC32DICT > struct CSphDictCRC : public CSphDictCRCTraits { inline SphWordID_t DoCrc ( const BYTE * pWord ) const; inline SphWordID_t DoCrc ( const BYTE * pWord, int iLen ) const; virtual SphWordID_t GetWordID ( BYTE * pWord ); virtual SphWordID_t GetWordID ( const BYTE * pWord, int iLen, bool bFilterStops ); virtual SphWordID_t GetWordIDWithMarkers ( BYTE * pWord ); virtual SphWordID_t GetWordIDNonStemmed ( BYTE * pWord ); virtual bool IsStopWord ( const BYTE * pWord ) const; virtual CSphDict * Clone () const { return CloneBase ( new CSphDictCRC() ); } }; ///////////////////////////////////////////////////////////////////////////// DWORD g_dSphinxCRC32 [ 256 ] = { 0x00000000, 0x77073096, 0xee0e612c, 0x990951ba, 0x076dc419, 0x706af48f, 0xe963a535, 0x9e6495a3, 0x0edb8832, 0x79dcb8a4, 0xe0d5e91e, 0x97d2d988, 0x09b64c2b, 0x7eb17cbd, 0xe7b82d07, 0x90bf1d91, 0x1db71064, 0x6ab020f2, 0xf3b97148, 0x84be41de, 0x1adad47d, 0x6ddde4eb, 0xf4d4b551, 0x83d385c7, 0x136c9856, 0x646ba8c0, 0xfd62f97a, 0x8a65c9ec, 0x14015c4f, 0x63066cd9, 0xfa0f3d63, 0x8d080df5, 0x3b6e20c8, 0x4c69105e, 0xd56041e4, 0xa2677172, 0x3c03e4d1, 0x4b04d447, 0xd20d85fd, 0xa50ab56b, 0x35b5a8fa, 0x42b2986c, 0xdbbbc9d6, 0xacbcf940, 0x32d86ce3, 0x45df5c75, 0xdcd60dcf, 0xabd13d59, 0x26d930ac, 0x51de003a, 0xc8d75180, 0xbfd06116, 0x21b4f4b5, 0x56b3c423, 0xcfba9599, 0xb8bda50f, 0x2802b89e, 
0x5f058808, 0xc60cd9b2, 0xb10be924, 0x2f6f7c87, 0x58684c11, 0xc1611dab, 0xb6662d3d, 0x76dc4190, 0x01db7106, 0x98d220bc, 0xefd5102a, 0x71b18589, 0x06b6b51f, 0x9fbfe4a5, 0xe8b8d433, 0x7807c9a2, 0x0f00f934, 0x9609a88e, 0xe10e9818, 0x7f6a0dbb, 0x086d3d2d, 0x91646c97, 0xe6635c01, 0x6b6b51f4, 0x1c6c6162, 0x856530d8, 0xf262004e, 0x6c0695ed, 0x1b01a57b, 0x8208f4c1, 0xf50fc457, 0x65b0d9c6, 0x12b7e950, 0x8bbeb8ea, 0xfcb9887c, 0x62dd1ddf, 0x15da2d49, 0x8cd37cf3, 0xfbd44c65, 0x4db26158, 0x3ab551ce, 0xa3bc0074, 0xd4bb30e2, 0x4adfa541, 0x3dd895d7, 0xa4d1c46d, 0xd3d6f4fb, 0x4369e96a, 0x346ed9fc, 0xad678846, 0xda60b8d0, 0x44042d73, 0x33031de5, 0xaa0a4c5f, 0xdd0d7cc9, 0x5005713c, 0x270241aa, 0xbe0b1010, 0xc90c2086, 0x5768b525, 0x206f85b3, 0xb966d409, 0xce61e49f, 0x5edef90e, 0x29d9c998, 0xb0d09822, 0xc7d7a8b4, 0x59b33d17, 0x2eb40d81, 0xb7bd5c3b, 0xc0ba6cad, 0xedb88320, 0x9abfb3b6, 0x03b6e20c, 0x74b1d29a, 0xead54739, 0x9dd277af, 0x04db2615, 0x73dc1683, 0xe3630b12, 0x94643b84, 0x0d6d6a3e, 0x7a6a5aa8, 0xe40ecf0b, 0x9309ff9d, 0x0a00ae27, 0x7d079eb1, 0xf00f9344, 0x8708a3d2, 0x1e01f268, 0x6906c2fe, 0xf762575d, 0x806567cb, 0x196c3671, 0x6e6b06e7, 0xfed41b76, 0x89d32be0, 0x10da7a5a, 0x67dd4acc, 0xf9b9df6f, 0x8ebeeff9, 0x17b7be43, 0x60b08ed5, 0xd6d6a3e8, 0xa1d1937e, 0x38d8c2c4, 0x4fdff252, 0xd1bb67f1, 0xa6bc5767, 0x3fb506dd, 0x48b2364b, 0xd80d2bda, 0xaf0a1b4c, 0x36034af6, 0x41047a60, 0xdf60efc3, 0xa867df55, 0x316e8eef, 0x4669be79, 0xcb61b38c, 0xbc66831a, 0x256fd2a0, 0x5268e236, 0xcc0c7795, 0xbb0b4703, 0x220216b9, 0x5505262f, 0xc5ba3bbe, 0xb2bd0b28, 0x2bb45a92, 0x5cb36a04, 0xc2d7ffa7, 0xb5d0cf31, 0x2cd99e8b, 0x5bdeae1d, 0x9b64c2b0, 0xec63f226, 0x756aa39c, 0x026d930a, 0x9c0906a9, 0xeb0e363f, 0x72076785, 0x05005713, 0x95bf4a82, 0xe2b87a14, 0x7bb12bae, 0x0cb61b38, 0x92d28e9b, 0xe5d5be0d, 0x7cdcefb7, 0x0bdbdf21, 0x86d3d2d4, 0xf1d4e242, 0x68ddb3f8, 0x1fda836e, 0x81be16cd, 0xf6b9265b, 0x6fb077e1, 0x18b74777, 0x88085ae6, 0xff0f6a70, 0x66063bca, 0x11010b5c, 0x8f659eff, 0xf862ae69, 0x616bffd3, 0x166ccf45, 0xa00ae278, 0xd70dd2ee, 0x4e048354, 0x3903b3c2, 0xa7672661, 0xd06016f7, 0x4969474d, 0x3e6e77db, 0xaed16a4a, 0xd9d65adc, 0x40df0b66, 0x37d83bf0, 0xa9bcae53, 0xdebb9ec5, 0x47b2cf7f, 0x30b5ffe9, 0xbdbdf21c, 0xcabac28a, 0x53b39330, 0x24b4a3a6, 0xbad03605, 0xcdd70693, 0x54de5729, 0x23d967bf, 0xb3667a2e, 0xc4614ab8, 0x5d681b02, 0x2a6f2b94, 0xb40bbe37, 0xc30c8ea1, 0x5a05df1b, 0x2d02ef8d, }; DWORD sphCRC32 ( const BYTE * pString ) { // calc CRC DWORD crc = ~((DWORD)0); for ( const BYTE * p=pString; *p; p++ ) crc = (crc >> 8) ^ g_dSphinxCRC32 [ (crc ^ (*p)) & 0xff ]; return ~crc; } DWORD sphCRC32 ( const BYTE * pString, int iLen ) { // calc CRC DWORD crc = ~((DWORD)0); for ( int i=0; i> 8) ^ g_dSphinxCRC32 [ (crc ^ pString[i]) & 0xff ]; return ~crc; } DWORD sphCRC32 ( const BYTE * pString, int iLen, DWORD uPrevCRC ) { // calc CRC DWORD crc = ~((DWORD)uPrevCRC); for ( int i=0; i> 8) ^ g_dSphinxCRC32 [ (crc ^ pString[i]) & 0xff ]; return ~crc; } ///////////////////////////////////////////////////////////////////////////// uint64_t sphFNV64 ( const BYTE * s ) { uint64_t hval = 0xcbf29ce484222325ULL; while ( *s ) { // xor the bottom with the current octet hval ^= (uint64_t)*s++; // multiply by the 64 bit FNV magic prime mod 2^64 hval += (hval << 1) + (hval << 4) + (hval << 5) + (hval << 7) + (hval << 8) + (hval << 40); // gcc optimization } return hval; } uint64_t sphFNV64 ( const BYTE * s, int iLen, uint64_t uPrev ) { uint64_t hval = uPrev; for ( ; iLen>0; iLen-- ) { // xor the bottom with the current octet hval ^= (uint64_t)*s++; // 
multiply by the 64 bit FNV magic prime mod 2^64 hval += (hval << 1) + (hval << 4) + (hval << 5) + (hval << 7) + (hval << 8) + (hval << 40); // gcc optimization } return hval; } ///////////////////////////////////////////////////////////////////////////// bool sphCalcFileCRC32 ( const char * szFilename, DWORD & uCRC32 ) { uCRC32 = 0; if ( !szFilename ) return false; FILE * pFile = fopen ( szFilename, "rb" ); if ( !pFile ) return false; DWORD crc = ~((DWORD)0); const int BUFFER_SIZE = 131072; static BYTE * pBuffer = NULL; if ( !pBuffer ) pBuffer = new BYTE [ BUFFER_SIZE ]; int iBytesRead; while ( ( iBytesRead = fread ( pBuffer, 1, BUFFER_SIZE, pFile ) )!=0 ) { for ( int i=0; i> 8) ^ g_dSphinxCRC32 [ (crc ^ pBuffer[i]) & 0xff ]; } fclose ( pFile ); uCRC32 = ~crc; return true; } static void GetFileStats ( const char * szFilename, CSphSavedFile & tInfo ) { if ( !szFilename ) { memset ( &tInfo, 0, sizeof ( tInfo ) ); return; } tInfo.m_sFilename = szFilename; struct_stat tStat; memset ( &tStat, 0, sizeof ( tStat ) ); if ( stat ( szFilename, &tStat ) < 0 ) memset ( &tStat, 0, sizeof ( tStat ) ); tInfo.m_uSize = tStat.st_size; tInfo.m_uCTime = tStat.st_ctime; tInfo.m_uMTime = tStat.st_mtime; DWORD uCRC32 = 0; sphCalcFileCRC32 ( szFilename, uCRC32 ); tInfo.m_uCRC32 = uCRC32; } ///////////////////////////////////////////////////////////////////////////// WordformContainer_t::WordformContainer_t () : m_iRefCount ( 0 ) , m_uTokenizerFNV ( 0 ) , m_pMultiWordforms ( NULL ) { } WordformContainer_t::~WordformContainer_t () { if ( m_pMultiWordforms ) { m_pMultiWordforms->m_Hash.IterateStart (); while ( m_pMultiWordforms->m_Hash.IterateNext () ) { CSphMultiforms * pWordforms = m_pMultiWordforms->m_Hash.IterateGet (); ARRAY_FOREACH ( i, pWordforms->m_dWordforms ) SafeDelete ( pWordforms->m_dWordforms[i] ); SafeDelete ( pWordforms ); } SafeDelete ( m_pMultiWordforms ); } } bool WordformContainer_t::IsEqual ( const char * szFile, DWORD uCRC32 ) { if ( !szFile ) return false; struct_stat FileStat; if ( stat ( szFile, &FileStat ) < 0 ) return false; return m_sFilename==szFile && m_tStat.st_ctime==FileStat.st_ctime && m_tStat.st_mtime==FileStat.st_mtime && m_tStat.st_size==FileStat.st_size && m_uCRC32==uCRC32; } ///////////////////////////////////////////////////////////////////////////// CSphDictCRCTraits::CSphDictCRCTraits () : m_iStopwords ( 0 ) , m_pStopwords ( NULL ) , m_dStopwordContainer ( 0 ) , m_iEntries ( 0 ) , m_iLastDoclistPos ( 0 ) , m_iLastWordID ( 0 ) , m_pWordforms ( NULL ) { } CSphDictCRCTraits::~CSphDictCRCTraits () { #if USE_LIBSTEMMER ARRAY_FOREACH ( i, m_dStemmers ) sb_stemmer_delete ( m_dStemmers[i] ); #endif if ( m_pWordforms ) --m_pWordforms->m_iRefCount; } SphWordID_t CSphDictCRCTraits::FilterStopword ( SphWordID_t uID ) const { if ( !m_iStopwords ) return uID; // OPTIMIZE: binary search is not too good, could do some hashing instead SphWordID_t * pStart = m_pStopwords; SphWordID_t * pEnd = m_pStopwords + m_iStopwords - 1; do { if ( uID==*pStart || uID==*pEnd ) return 0; if ( uID<*pStart || uID>*pEnd ) return uID; SphWordID_t * pMid = pStart + (pEnd-pStart)/2; if ( uID==*pMid ) return 0; if ( uID<*pMid ) pEnd = pMid; else pStart = pMid; } while ( pEnd-pStart>1 ); return uID; } bool CSphDictCRCTraits::ToNormalForm ( BYTE * pWord ) { if ( !m_pWordforms ) return false; int * pIndex = m_pWordforms->m_dHash ( (char *)pWord ); if ( !pIndex ) return false; if ( *pIndex<0 || *pIndex>=m_pWordforms->m_dNormalForms.GetLength () ) return false; if ( m_pWordforms->m_dNormalForms [*pIndex].IsEmpty () ) 
return false; strcpy ( (char *)pWord, m_pWordforms->m_dNormalForms[*pIndex].cstr() ); // NOLINT return true; } bool CSphDictCRCTraits::ParseMorphology ( const char * szMorph, bool bUseUTF8, CSphString & sError ) { const char * szStart = szMorph; while ( *szStart ) { while ( *szStart && ( sphIsSpace ( *szStart ) || *szStart==',' ) ) ++szStart; if ( !*szStart ) break; const char * szWordStart = szStart; while ( *szStart && !sphIsSpace ( *szStart ) && *szStart!=',' ) ++szStart; if ( szStart - szWordStart > 0 ) { if ( !InitMorph ( szWordStart, szStart - szWordStart, bUseUTF8, sError ) ) return false; } } return true; } bool CSphDictCRCTraits::InitMorph ( const char * szMorph, int iLength, bool bUseUTF8, CSphString & sError ) { if ( iLength==0 ) return true; if ( iLength==4 && !strncmp ( szMorph, "none", iLength ) ) return true; if ( iLength==7 && !strncmp ( szMorph, "stem_en", iLength ) ) { stem_en_init (); return AddMorph ( SPH_MORPH_STEM_EN ); } if ( iLength==7 && !strncmp ( szMorph, "stem_ru", iLength ) ) { stem_ru_init (); return AddMorph ( bUseUTF8 ? SPH_MORPH_STEM_RU_UTF8 : SPH_MORPH_STEM_RU_CP1251 ); } if ( iLength==7 && !strncmp ( szMorph, "stem_cz", iLength ) ) { stem_cz_init (); return AddMorph ( SPH_MORPH_STEM_CZ ); } if ( iLength==9 && !strncmp ( szMorph, "stem_enru", iLength ) ) { stem_en_init (); stem_ru_init (); if ( !AddMorph ( SPH_MORPH_STEM_EN ) ) return false; return AddMorph ( bUseUTF8 ? SPH_MORPH_STEM_RU_UTF8 : SPH_MORPH_STEM_RU_CP1251 ); } if ( iLength==7 && !strncmp ( szMorph, "soundex", iLength ) ) return AddMorph ( SPH_MORPH_SOUNDEX ); if ( iLength==9 && !strncmp ( szMorph, "metaphone", iLength ) ) return AddMorph ( bUseUTF8 ? SPH_MORPH_METAPHONE_UTF8 : SPH_MORPH_METAPHONE_SBCS ); sError = ""; #if USE_LIBSTEMMER const int LIBSTEMMER_LEN = 11; const int MAX_ALGO_LENGTH = 64; if ( iLength > LIBSTEMMER_LEN && iLength - LIBSTEMMER_LEN < MAX_ALGO_LENGTH && !strncmp ( szMorph, "libstemmer_", LIBSTEMMER_LEN ) ) { CSphString sAlgo; CSphString sEnc; sAlgo.SetBinary ( szMorph+LIBSTEMMER_LEN, iLength - LIBSTEMMER_LEN ); sb_stemmer * pStemmer = NULL; if ( bUseUTF8 ) { sEnc = "UTF_8"; pStemmer = sb_stemmer_new ( sAlgo.cstr(), sEnc.cstr() ); } else { sEnc = "ISO_8859_1"; pStemmer = sb_stemmer_new ( sAlgo.cstr(), sEnc.cstr() ); if ( !pStemmer ) { sEnc = "ISO_8859_2"; pStemmer = sb_stemmer_new ( sAlgo.cstr(), sEnc.cstr() ); } if ( !pStemmer ) { sEnc = "KOI8_R"; pStemmer = sb_stemmer_new ( sAlgo.cstr(), sEnc.cstr() ); } } if ( !pStemmer ) { sError.SetSprintf ( "libstemmer morphology algorithm '%s' not available for %s encoding - IGNORED", sAlgo.cstr(), bUseUTF8 ? 
"UTF-8" : "SBCS" ); return false; } AddMorph ( SPH_MORPH_LIBSTEMMER_FIRST + m_dStemmers.GetLength () ); ARRAY_FOREACH ( i, m_dStemmers ) { if ( m_dStemmers[i]==pStemmer ) { sb_stemmer_delete ( pStemmer ); return false; } } m_dStemmers.Add ( pStemmer ); DescStemmer_t & tDesc = m_dDescStemmers.Add(); tDesc.m_sAlgo.Swap ( sAlgo ); tDesc.m_sEnc.Swap ( sEnc ); return true; } #endif return false; } bool CSphDictCRCTraits::AddMorph ( int iMorph ) { ARRAY_FOREACH ( i, m_dMorph ) if ( m_dMorph[i]==iMorph ) return false; m_dMorph.Add ( iMorph ); return true; } void CSphDictCRCTraits::ApplyStemmers ( BYTE * pWord ) { // try wordforms if ( ToNormalForm ( pWord ) ) return; // check length if ( m_tSettings.m_iMinStemmingLen>1 ) if ( sphUTF8Len ( (const char*)pWord )m_iStopwords = m_iStopwords; pDict->m_pStopwords = m_pStopwords; pDict->m_pWordforms = m_pWordforms; if ( m_pWordforms ) m_pWordforms->m_iRefCount++; pDict->m_dMorph = m_dMorph; #if USE_LIBSTEMMER assert ( m_dDescStemmers.GetLength()==m_dStemmers.GetLength() ); pDict->m_dDescStemmers = m_dDescStemmers; ARRAY_FOREACH ( i, m_dDescStemmers ) { pDict->m_dStemmers.Add ( sb_stemmer_new ( m_dDescStemmers[i].m_sAlgo.cstr(), m_dDescStemmers[i].m_sEnc.cstr() ) ); assert ( pDict->m_dStemmers.Last() ); } #endif return pDict; } bool CSphDictCRCTraits::HasState() const { #if !USE_LIBSTEMMER return false; #else return ( m_dDescStemmers.GetLength()>0 ); #endif } ///////////////////////////////////////////////////////////////////////////// template<> SphWordID_t CSphDictCRC::DoCrc ( const BYTE * pWord ) const { return sphCRC32 ( pWord ); } template<> SphWordID_t CSphDictCRC::DoCrc ( const BYTE * pWord ) const { return (SphWordID_t) sphFNV64 ( pWord ); } template<> SphWordID_t CSphDictCRC::DoCrc ( const BYTE * pWord, int iLen ) const { return sphCRC32 ( pWord, iLen ); } template<> SphWordID_t CSphDictCRC::DoCrc ( const BYTE * pWord, int iLen ) const { return (SphWordID_t) sphFNV64 ( pWord, iLen ); } template < bool CRC32DICT > SphWordID_t CSphDictCRC::GetWordID ( BYTE * pWord ) { // skip stemmers for magic words if ( pWord[0]>=0x20 ) ApplyStemmers ( pWord ); return FilterStopword ( DoCrc ( pWord ) ); } template < bool CRC32DICT > SphWordID_t CSphDictCRC::GetWordID ( const BYTE * pWord, int iLen, bool bFilterStops ) { SphWordID_t uId = DoCrc ( pWord, iLen ); return bFilterStops ? FilterStopword ( uId ) : uId; } template < bool CRC32DICT > SphWordID_t CSphDictCRC::GetWordIDWithMarkers ( BYTE * pWord ) { ApplyStemmers ( pWord + 1 ); SphWordID_t uWordId = DoCrc ( pWord + 1 ); int iLength = strlen ( (const char *)(pWord + 1) ); pWord [iLength + 1] = MAGIC_WORD_TAIL; pWord [iLength + 2] = '\0'; return FilterStopword ( uWordId ) ? 
DoCrc ( pWord ) : 0; } template < bool CRC32DICT > SphWordID_t CSphDictCRC::GetWordIDNonStemmed ( BYTE * pWord ) { SphWordID_t uWordId = DoCrc ( pWord + 1 ); if ( !FilterStopword ( uWordId ) ) return 0; return DoCrc ( pWord ); } template < bool CRC32DICT > bool CSphDictCRC::IsStopWord ( const BYTE * pWord ) const { return FilterStopword ( DoCrc ( pWord ) )==0; } ////////////////////////////////////////////////////////////////////////// void CSphDictCRCTraits::LoadStopwords ( const char * sFiles, ISphTokenizer * pTokenizer ) { assert ( !m_pStopwords ); assert ( !m_iStopwords ); // tokenize file list if ( !sFiles || !*sFiles ) return; m_dSWFileInfos.Resize ( 0 ); char * sList = new char [ 1+strlen(sFiles) ]; strcpy ( sList, sFiles ); // NOLINT char * pCur = sList; char * sName = NULL; CSphVector dStop; for ( ;; ) { // find next name start while ( *pCur && isspace(*pCur) ) pCur++; if ( !*pCur ) break; sName = pCur; // find next name end while ( *pCur && !isspace(*pCur) ) pCur++; if ( *pCur ) *pCur++ = '\0'; BYTE * pBuffer = NULL; CSphSavedFile tInfo; tInfo.m_sFilename = sName; GetFileStats ( sName, tInfo ); m_dSWFileInfos.Add ( tInfo ); // open file struct_stat st; if ( stat ( sName, &st )==0 ) pBuffer = new BYTE [(size_t)st.st_size]; else { sphWarn ( "stopwords: failed to get file size for '%s'", sName ); continue; } FILE * fp = fopen ( sName, "rb" ); if ( !fp ) { sphWarn ( "failed to load stopwords from '%s'", sName ); SafeDeleteArray ( pBuffer ); continue; } // tokenize file int iLength = (int)fread ( pBuffer, 1, (size_t)st.st_size, fp ); BYTE * pToken; pTokenizer->SetBuffer ( pBuffer, iLength ); while ( ( pToken = pTokenizer->GetToken() )!=NULL ) dStop.Add ( GetWordID ( pToken ) ); // close file fclose ( fp ); SafeDeleteArray ( pBuffer ); } SafeDeleteArray ( sList ); // sort stopwords dStop.Uniq(); // store IDs if ( dStop.GetLength() ) { m_dStopwordContainer.Reset ( dStop.GetLength() ); ARRAY_FOREACH ( i, dStop ) m_dStopwordContainer[i] = dStop[i]; m_iStopwords = m_dStopwordContainer.GetLength (); m_pStopwords = m_dStopwordContainer.Begin(); } } void CSphDictCRCTraits::SweepWordformContainers ( const char * szFile, DWORD uCRC32 ) { for ( int i = 0; i < m_dWordformContainers.GetLength (); ) { WordformContainer_t * WC = m_dWordformContainers[i]; if ( WC->m_iRefCount==0 && !WC->IsEqual ( szFile, uCRC32 ) ) { delete WC; m_dWordformContainers.Remove ( i ); } else ++i; } } WordformContainer_t * CSphDictCRCTraits::GetWordformContainer ( const char * szFile, DWORD uCRC32, const ISphTokenizer * pTokenizer, const char * sIndex ) { ARRAY_FOREACH ( i, m_dWordformContainers ) if ( m_dWordformContainers[i]->IsEqual ( szFile, uCRC32 ) ) { WordformContainer_t * pContainer = m_dWordformContainers[i]; if ( pTokenizer->GetSettingsFNV()==pContainer->m_uTokenizerFNV ) return pContainer; sphWarning ( "index %s: wordforms file %s is shared with index %s, but tokenizer settings are different; IGNORING wordforms", sIndex, szFile, pContainer->m_sIndexName.cstr() ); return NULL; } WordformContainer_t * pContainer = LoadWordformContainer ( szFile, uCRC32, pTokenizer, sIndex ); if ( pContainer ) m_dWordformContainers.Add ( pContainer ); return pContainer; } WordformContainer_t * CSphDictCRCTraits::LoadWordformContainer ( const char * szFile, DWORD uCRC32, const ISphTokenizer * pTokenizer, const char * sIndex ) { // stat it; we'll store stats for later checks struct_stat FileStat; if ( !szFile || !*szFile || stat ( szFile, &FileStat )<0 ) return NULL; // allocate it WordformContainer_t * pContainer = new 
WordformContainer_t; if ( !pContainer ) return NULL; pContainer->m_sFilename = szFile; pContainer->m_tStat = FileStat; pContainer->m_uCRC32 = uCRC32; pContainer->m_uTokenizerFNV = pTokenizer->GetSettingsFNV(); pContainer->m_sIndexName = sIndex; // open it CSphString sError; CSphAutoreader rdWordforms; if ( !rdWordforms.Open ( szFile, sError ) ) return NULL; // my tokenizer CSphScopedPtr pMyTokenizer ( pTokenizer->Clone ( false ) ); pMyTokenizer->AddSpecials ( ">" ); // scan it line by line char sBuffer [ 6*SPH_MAX_WORD_LEN + 512 ]; // enough to hold 2 UTF-8 words, plus some whitespace overhead int iLen; bool bSeparatorFound = false; CSphString sFrom; while ( ( iLen = rdWordforms.GetLine ( sBuffer, sizeof(sBuffer) ) )>=0 ) { // parse the line pMyTokenizer->SetBuffer ( (BYTE*)sBuffer, iLen ); CSphScopedPtr tMultiWordform ( NULL ); CSphString sKey; BYTE * pFrom = NULL; while ( ( pFrom = pMyTokenizer->GetToken () )!=NULL ) { const BYTE * pCur = (const BYTE *) pMyTokenizer->GetBufferPtr (); while ( isspace(*pCur) ) pCur++; if ( *pCur=='>' ) { sFrom = (const char*)pFrom; bSeparatorFound = true; pMyTokenizer->SetBufferPtr ( (const char*) pCur+1 ); break; } else { if ( !tMultiWordform.Ptr() ) { tMultiWordform = new CSphMultiform; sKey = (const char*)pFrom; } else tMultiWordform->m_dTokens.Add ( (const char*)pFrom ); } } if ( !pFrom ) continue; // FIXME! report parsing error if ( !bSeparatorFound ) continue; // FIXME! report parsing error BYTE * pTo = pMyTokenizer->GetToken (); if ( !pTo ) continue; // FIXME! report parsing error CSphString sTo ( (const char *)pTo ); const CSphString & sSourceWordform = tMultiWordform.Ptr() ? sTo : sFrom; // check wordform that source token is a new token or has same destination token int * pRefTo = pContainer->m_dHash ( sSourceWordform ); assert ( !pRefTo || ( *pRefTo>=0 && *pRefTom_dNormalForms.GetLength() ) ); if ( !tMultiWordform.Ptr() && pRefTo && pContainer->m_dNormalForms[*pRefTo]!=sTo ) { const CSphString & sRefTo = pContainer->m_dNormalForms[*pRefTo]; sphWarning ( "duplicate wordform found - skipped ( current='%s > %s', stored='%s > %s' ). 
Fix your wordforms file '%s'.", sSourceWordform.cstr(), sTo.cstr(), sSourceWordform.cstr(), sRefTo.cstr(), szFile ); } if ( pRefTo && !tMultiWordform.Ptr() ) continue; if ( !pRefTo ) { pContainer->m_dNormalForms.AddUnique ( sTo ); pContainer->m_dHash.Add ( pContainer->m_dNormalForms.GetLength()-1, sSourceWordform ); } if ( tMultiWordform.Ptr() ) { CSphMultiform * pMultiWordform = tMultiWordform.LeakPtr(); pMultiWordform->m_sNormalForm = sTo; pMultiWordform->m_iNormalTokenLen = pMyTokenizer->GetLastTokenLen (); pMultiWordform->m_dTokens.Add ( sFrom ); if ( !pContainer->m_pMultiWordforms ) pContainer->m_pMultiWordforms = new CSphMultiformContainer; CSphMultiforms ** pWordforms = pContainer->m_pMultiWordforms->m_Hash ( sKey ); if ( pWordforms ) { (*pWordforms)->m_dWordforms.Add ( pMultiWordform ); (*pWordforms)->m_iMinTokens = Min ( (*pWordforms)->m_iMinTokens, pMultiWordform->m_dTokens.GetLength () ); (*pWordforms)->m_iMaxTokens = Max ( (*pWordforms)->m_iMaxTokens, pMultiWordform->m_dTokens.GetLength () ); pContainer->m_pMultiWordforms->m_iMaxTokens = Max ( pContainer->m_pMultiWordforms->m_iMaxTokens, (*pWordforms)->m_iMaxTokens ); } else { CSphMultiforms * pNewWordforms = new CSphMultiforms; pNewWordforms->m_dWordforms.Add ( pMultiWordform ); pNewWordforms->m_iMinTokens = pMultiWordform->m_dTokens.GetLength (); pNewWordforms->m_iMaxTokens = pMultiWordform->m_dTokens.GetLength (); pContainer->m_pMultiWordforms->m_iMaxTokens = Max ( pContainer->m_pMultiWordforms->m_iMaxTokens, pNewWordforms->m_iMaxTokens ); pContainer->m_pMultiWordforms->m_Hash.Add ( pNewWordforms, sKey ); } } } return pContainer; } bool CSphDictCRCTraits::LoadWordforms ( const char * szFile, ISphTokenizer * pTokenizer, const char * sIndex ) { GetFileStats ( szFile, m_tWFFileInfo ); DWORD uCRC32 = m_tWFFileInfo.m_uCRC32; SweepWordformContainers ( szFile, uCRC32 ); m_pWordforms = GetWordformContainer ( szFile, uCRC32, pTokenizer, sIndex ); if ( m_pWordforms ) m_pWordforms->m_iRefCount++; return !!m_pWordforms; } bool CSphDictCRCTraits::SetMorphology ( const char * szMorph, bool bUseUTF8, CSphString & sError ) { m_dMorph.Reset (); #if USE_LIBSTEMMER ARRAY_FOREACH ( i, m_dStemmers ) sb_stemmer_delete ( m_dStemmers[i] ); m_dStemmers.Reset (); #endif if ( !szMorph ) return true; CSphString sOption = szMorph; sOption.ToLower (); sError = ""; if ( !ParseMorphology ( sOption.cstr (), bUseUTF8, sError ) ) { m_dMorph.Reset (); if ( sError.IsEmpty () ) sError.SetSprintf ( "invalid morphology option '%s' - IGNORED", sOption.cstr() ); return false; } return true; } bool CSphDictCRCTraits::HasMorphology() const { return ( m_dMorph.GetLength()>0 ); } /// common id-based stemmer bool CSphDictCRCTraits::StemById ( BYTE * pWord, int iStemmer ) { char szBuf [ MAX_KEYWORD_BYTES ]; // safe quick strncpy without (!) 
padding and with a side of strlen char * p = szBuf; char * pMax = szBuf + sizeof(szBuf) - 1; BYTE * pLastSBS = NULL; while ( *pWord && p=iLen ) break; // stem only UTF8 tail if ( !pLastSBS ) { stem_ru_utf8 ( (WORD*)pWord ); } else { stem_ru_utf8 ( (WORD *)( pLastSBS+1 ) ); } break; case SPH_MORPH_STEM_CZ: stem_cz ( pWord ); break; case SPH_MORPH_SOUNDEX: stem_soundex ( pWord ); break; case SPH_MORPH_METAPHONE_SBCS: stem_dmetaphone ( pWord, false ); break; case SPH_MORPH_METAPHONE_UTF8: stem_dmetaphone ( pWord, true ); break; default: #if USE_LIBSTEMMER if ( iStemmer>=SPH_MORPH_LIBSTEMMER_FIRST && iStemmer m_iLastDoclistPos ); m_wrDict.ZipInt ( 0 ); // indicate checkpoint m_wrDict.ZipOffset ( iDoclistOffset - m_iLastDoclistPos ); // store last length } // restart delta coding, once per SPH_WORDLIST_CHECKPOINT entries m_iLastWordID = 0; m_iLastDoclistPos = 0; // begin new wordlist entry assert ( m_wrDict.GetPos()<=UINT_MAX ); CSphWordlistCheckpoint & tCheckpoint = m_dCheckpoints.Add(); tCheckpoint.m_iWordID = uWordID; tCheckpoint.m_iWordlistOffset = m_wrDict.GetPos(); } assert ( iDoclistOffset > m_iLastDoclistPos ); m_wrDict.ZipOffset ( uWordID - m_iLastWordID ); // FIXME! slow with 32bit wordids m_wrDict.ZipOffset ( iDoclistOffset - m_iLastDoclistPos ); m_iLastWordID = uWordID; m_iLastDoclistPos = iDoclistOffset; assert ( iDocs ); assert ( iHits ); m_wrDict.ZipInt ( iDocs ); m_wrDict.ZipInt ( iHits ); m_iEntries++; } void CSphDictCRCTraits::DictEndEntries ( SphOffset_t iDoclistOffset ) { assert ( iDoclistOffset>=m_iLastDoclistPos ); m_wrDict.ZipInt ( 0 ); // indicate checkpoint m_wrDict.ZipOffset ( iDoclistOffset - m_iLastDoclistPos ); // store last doclist length } ////////////////////////////////////////////////////////////////////////// // KEYWORDS STORING DICTIONARY ////////////////////////////////////////////////////////////////////////// class CSphDictKeywords : public CSphDictCRC { private: static const int SLOTS = 65536; static const int ENTRY_CHUNK = 65536; static const int KEYWORD_CHUNK = 1048576; static const int DICT_CHUNK = 65536; public: // OPTIMIZE? change pointers to 8:24 locators to save RAM on x64 gear? struct HitblockKeyword_t { SphWordID_t m_uWordid; // locally unique word id (crc value, adjusted in case of collsion) HitblockKeyword_t * m_pNextHash; // next hashed entry char * m_pKeyword; // keyword }; struct HitblockException_t { HitblockKeyword_t * m_pEntry; // hash entry SphWordID_t m_uCRC; // original unadjusted crc bool operator < ( const HitblockException_t & rhs ) const { return m_pEntry->m_uWordid < rhs.m_pEntry->m_uWordid; } }; struct DictKeyword_t { char * m_sKeyword; SphOffset_t m_uOff; int m_iDocs; int m_iHits; BYTE m_uHint; }; struct DictBlock_t { SphOffset_t m_iPos; int m_iLen; }; private: HitblockKeyword_t * m_dHash [ SLOTS ]; ///< hash by wordid (!) 
CSphVector m_dExceptions; bool m_bHitblock; ///< should we store words on GetWordID or not int m_iMemUse; ///< current memory use by all the chunks int m_iDictLimit; ///< allowed memory limit for dict block collection CSphVector m_dEntryChunks; ///< hash chunks, only used when indexing hitblocks HitblockKeyword_t * m_pEntryChunk; int m_iEntryChunkFree; CSphVector m_dKeywordChunks; ///< keyword storage BYTE * m_pKeywordChunk; int m_iKeywordChunkFree; CSphVector m_dDictChunks; ///< dict entry chunks, only used when sorting final dict DictKeyword_t * m_pDictChunk; int m_iDictChunkFree; int m_iTmpFD; ///< temp dict file descriptor CSphWriter m_wrTmpDict; ///< temp dict writer CSphVector m_dDictBlocks; ///< on-disk locations of dict entry blocks char m_sClippedWord[MAX_KEYWORD_BYTES]; ///< keyword storage for cliiped word private: SphWordID_t HitblockGetID ( const char * pWord, int iLen, SphWordID_t uCRC ); HitblockKeyword_t * HitblockAddKeyword ( DWORD uHash, const char * pWord, int iLen, SphWordID_t uID ); public: explicit CSphDictKeywords (); virtual ~CSphDictKeywords (); virtual void HitblockBegin () { m_bHitblock = true; } virtual void HitblockPatch ( CSphWordHit * pHits, int iHits ); virtual const char * HitblockGetKeyword ( SphWordID_t uWordID ); virtual int HitblockGetMemUse () { return m_iMemUse; } virtual void HitblockReset (); virtual void DictBegin ( int iTmpDictFD, int iDictFD, int iDictLimit ); virtual void DictEntry ( SphWordID_t uWordID, BYTE * sKeyword, int iDocs, int iHits, SphOffset_t iDoclistOffset, SphOffset_t iDoclistLength ); virtual void DictEndEntries ( SphOffset_t ) {} virtual bool DictEnd ( SphOffset_t * pCheckpointsPos, int * pCheckpointsCount, int iMemLimit, CSphString & sError ); virtual SphWordID_t GetWordID ( BYTE * pWord ); virtual SphWordID_t GetWordIDWithMarkers ( BYTE * pWord ); virtual SphWordID_t GetWordIDNonStemmed ( BYTE * pWord ); virtual SphWordID_t GetWordID ( const BYTE * pWord, int iLen, bool bFilterStops ); private: void DictFlush (); }; ////////////////////////////////////////////////////////////////////////// CSphDictKeywords::CSphDictKeywords () : m_bHitblock ( false ) , m_iMemUse ( 0 ) , m_iDictLimit ( 0 ) , m_pEntryChunk ( NULL ) , m_iEntryChunkFree ( 0 ) , m_pKeywordChunk ( NULL ) , m_iKeywordChunkFree ( 0 ) , m_pDictChunk ( NULL ) , m_iDictChunkFree ( 0 ) { memset ( m_dHash, 0, sizeof(m_dHash) ); } CSphDictKeywords::~CSphDictKeywords () { HitblockReset(); } void CSphDictKeywords::HitblockReset() { m_dExceptions.Resize ( 0 ); ARRAY_FOREACH ( i, m_dEntryChunks ) SafeDeleteArray ( m_dEntryChunks[i] ); m_dEntryChunks.Resize ( 0 ); m_pEntryChunk = NULL; m_iEntryChunkFree = 0; ARRAY_FOREACH ( i, m_dKeywordChunks ) SafeDeleteArray ( m_dKeywordChunks[i] ); m_dKeywordChunks.Resize ( 0 ); m_pKeywordChunk = NULL; m_iKeywordChunkFree = 0; m_iMemUse = 0; memset ( m_dHash, 0, sizeof(m_dHash) ); } CSphDictKeywords::HitblockKeyword_t * CSphDictKeywords::HitblockAddKeyword ( DWORD uHash, const char * sWord, int iLen, SphWordID_t uID ) { assert ( iLenm_pKeyword = (char*)m_pKeywordChunk; pEntry->m_uWordid = uID; m_pKeywordChunk += iLen; m_iKeywordChunkFree -= iLen; // mtf it pEntry->m_pNextHash = m_dHash [ uHash ]; m_dHash [ uHash ] = pEntry; return pEntry; } SphWordID_t CSphDictKeywords::HitblockGetID ( const char * sWord, int iLen, SphWordID_t uCRC ) { if ( iLen>=MAX_KEYWORD_BYTES-4 ) // fix of very long word (zones) { memcpy ( m_sClippedWord, sWord, MAX_KEYWORD_BYTES-4 ); memset ( m_sClippedWord+MAX_KEYWORD_BYTES-4, 0, 4 ); CSphString sOrig; sOrig.SetBinary 
( sWord, iLen ); sphWarn ( "word overrun buffer, clipped!!!\nclipped (len=%d, word='%s')\noriginal (len=%d, word='%s')", MAX_KEYWORD_BYTES-4, m_sClippedWord, iLen, sOrig.cstr() ); sWord = m_sClippedWord; iLen = MAX_KEYWORD_BYTES-4; uCRC = sphCRC32 ( (const BYTE *)m_sClippedWord, MAX_KEYWORD_BYTES-4 ); } // is this a known one? find it // OPTIMIZE? in theory we could use something faster than crc32; but quick lookup3 test did not show any improvements const DWORD uHash = (DWORD)( uCRC % SLOTS ); HitblockKeyword_t * pEntry = m_dHash [ uHash ]; HitblockKeyword_t ** ppEntry = &m_dHash [ uHash ]; while ( pEntry ) { // check crc if ( pEntry->m_uWordid!=uCRC ) { // crc mismatch, try next entry ppEntry = &pEntry->m_pNextHash; pEntry = pEntry->m_pNextHash; continue; } // crc matches, check keyword register int iWordLen = iLen; register const char * a = pEntry->m_pKeyword; register const char * b = sWord; while ( *a==*b && iWordLen-- ) { if ( !*a || !iWordLen ) { // known word, mtf it, and return id (*ppEntry) = pEntry->m_pNextHash; pEntry->m_pNextHash = m_dHash [ uHash ]; m_dHash [ uHash ] = pEntry; return pEntry->m_uWordid; } a++; b++; } // collision detected! // our crc is taken as a wordid, but keyword does not match // welcome to the land of very tricky magic // // pEntry might either be a known exception, or a regular keyword // sWord might either be a known exception, or a new one // if they are not known, they needed to be added as exceptions now // // in case sWord is new, we need to assign a new unique wordid // for that, we keep incrementing the crc until it is unique // a starting point for wordid search loop would be handy // // let's scan the exceptions vector and work on all this // // NOTE, beware of the order, it is wordid asc, which does NOT guarantee crc asc // example, assume crc(w1)==X, crc(w2)==X+1, crc(w3)==X (collides with w1) // wordids will be X, X+1, X+2 but crcs will be X, X+1, X // // OPTIMIZE, might make sense to use binary search // OPTIMIZE, add early out somehow SphWordID_t uWordid = uCRC + 1; const int iExcLen = m_dExceptions.GetLength(); int iExc = m_dExceptions.GetLength(); ARRAY_FOREACH ( i, m_dExceptions ) { const HitblockKeyword_t * pExcWord = m_dExceptions[i].m_pEntry; // incoming word is a known exception? just return the pre-assigned wordid if ( m_dExceptions[i].m_uCRC==uCRC && strncmp ( pExcWord->m_pKeyword, sWord, iLen )==0 ) return pExcWord->m_uWordid; // incoming word collided into a known exception? clear the matched entry; no need to re-add it (see below) if ( pExcWord==pEntry ) pEntry = NULL; // find first exception with wordid greater or equal to our candidate if ( pExcWord->m_uWordid>=uWordid && iExc==iExcLen ) iExc = i; } // okay, this is a new collision // if entry was a regular word, we have to add it if ( pEntry ) { m_dExceptions.Add(); m_dExceptions.Last().m_pEntry = pEntry; m_dExceptions.Last().m_uCRC = uCRC; } // need to assign a new unique wordid now // keep scanning both exceptions and keywords for collisions for ( ;; ) { // iExc must be either the first exception greater or equal to current candidate, or out of bounds assert ( iExc==iExcLen || m_dExceptions[iExc].m_pEntry->m_uWordid>=uWordid ); assert ( iExc==0 || m_dExceptions[iExc-1].m_pEntry->m_uWordidm_uWordid==uWordid ) { uWordid++; while ( iExcm_uWordidm_uWordid==uWordid ) break; pCheck = pCheck->m_pNextHash; } // no collisions; we've found our unique wordid! 
if ( !pCheck ) break; // got a collision; add it HitblockException_t & tColl = m_dExceptions.Add(); tColl.m_pEntry = pCheck; tColl.m_uCRC = pCheck->m_uWordid; // not a known exception; hence, wordid must equal crc // and keep looking uWordid++; continue; } // and finally, we have that precious new wordid // so hash our new unique under its new unique adjusted wordid pEntry = HitblockAddKeyword ( (DWORD)( uWordid % SLOTS ), sWord, iLen, uWordid ); // add it as a collision too m_dExceptions.Add(); m_dExceptions.Last().m_pEntry = pEntry; m_dExceptions.Last().m_uCRC = uCRC; // keep exceptions list sorted by wordid m_dExceptions.Sort(); return pEntry->m_uWordid; } // new keyword with unique crc pEntry = HitblockAddKeyword ( uHash, sWord, iLen, uCRC ); return pEntry->m_uWordid; } struct DictKeywordTagged_t : public CSphDictKeywords::DictKeyword_t { int m_iBlock; }; struct DictKeywordTaggedCmp_fn { static inline bool IsLess ( const DictKeywordTagged_t & a, const DictKeywordTagged_t & b ) { return strcmp ( a.m_sKeyword, b.m_sKeyword ) < 0; } }; static void DictReadEntry ( CSphBin * pBin, DictKeywordTagged_t & tEntry, BYTE * pKeyword ) { int iKeywordLen = pBin->ReadByte (); if ( iKeywordLen<0 ) { // early eof or read error; flag must be raised assert ( pBin->IsError() ); return; } assert ( iKeywordLen>0 && iKeywordLenReadBytes ( pKeyword, iKeywordLen )<0 ) { assert ( pBin->IsError() ); return; } pKeyword[iKeywordLen] = '\0'; tEntry.m_sKeyword = (char*)pKeyword; tEntry.m_uOff = pBin->UnzipOffset(); tEntry.m_iDocs = pBin->UnzipInt(); tEntry.m_iHits = pBin->UnzipInt(); tEntry.m_uHint = (BYTE) pBin->ReadByte(); } void CSphDictKeywords::DictBegin ( int iTmpDictFD, int iDictFD, int iDictLimit ) { m_iTmpFD = iTmpDictFD; m_wrTmpDict.CloseFile (); m_wrTmpDict.SetFile ( iTmpDictFD, NULL ); m_wrDict.CloseFile (); m_wrDict.SetFile ( iDictFD, NULL ); m_wrDict.PutByte ( 1 ); m_iDictLimit = Max ( iDictLimit, KEYWORD_CHUNK + DICT_CHUNK*(int)sizeof(DictKeyword_t) ); // can't use less than 1 chunk } bool CSphDictKeywords::DictEnd ( SphOffset_t * pCheckpointsPos, int * pCheckpointsCount, int iMemLimit, CSphString & sError ) { DictFlush (); m_wrTmpDict.CloseFile (); // tricky: file is not owned, so it won't get closed, and iTmpFD won't get invalidated if ( !m_dDictBlocks.GetLength() ) m_wrDict.CloseFile(); if ( m_wrTmpDict.IsError() || m_wrDict.IsError() ) { sError.SetSprintf ( "dictionary write error (out of space?)" ); return false; } if ( !m_dDictBlocks.GetLength() ) { *pCheckpointsPos = m_wrDict.GetPos (); *pCheckpointsCount = 0; return true; } // initialize readers CSphVector dBins ( m_dDictBlocks.GetLength() ); int iMaxBlock = 0; ARRAY_FOREACH ( i, m_dDictBlocks ) iMaxBlock = Max ( iMaxBlock, m_dDictBlocks[i].m_iLen ); iMemLimit = Max ( iMemLimit, iMaxBlock*m_dDictBlocks.GetLength() ); int iBinSize = CSphBin::CalcBinSize ( iMemLimit, m_dDictBlocks.GetLength(), "sort_dict" ); SphOffset_t iSharedOffset = -1; ARRAY_FOREACH ( i, m_dDictBlocks ) { dBins[i] = new CSphBin(); dBins[i]->m_iFileLeft = m_dDictBlocks[i].m_iLen; dBins[i]->m_iFilePos = m_dDictBlocks[i].m_iPos; dBins[i]->Init ( m_iTmpFD, &iSharedOffset, iBinSize ); } // keywords storage BYTE * pKeywords = new BYTE [ MAX_KEYWORD_BYTES*dBins.GetLength() ]; #define LOC_CLEANUP() \ { \ ARRAY_FOREACH ( i, dBins ) \ SafeDelete ( dBins[i] ); \ SafeDeleteArray ( pKeywords ); \ } // do the sort CSphQueue < DictKeywordTagged_t, DictKeywordTaggedCmp_fn > qWords ( dBins.GetLength() ); DictKeywordTagged_t tEntry; ARRAY_FOREACH ( i, dBins ) { DictReadEntry ( dBins[i], 
tEntry, pKeywords + i*MAX_KEYWORD_BYTES ); if ( dBins[i]->IsError() ) { sError.SetSprintf ( "entry read error in dictionary sort (bin %d of %d)", i, dBins.GetLength() ); LOC_CLEANUP(); return false; } tEntry.m_iBlock = i; qWords.Push ( tEntry ); } CSphKeywordDeltaWriter tLastKeyword; int iWords = 0; while ( qWords.GetLength() ) { const DictKeywordTagged_t & tWord = qWords.Root(); const int iLen = strlen ( tWord.m_sKeyword ); // OPTIMIZE? // store checkpoints as needed if ( ( iWords % SPH_WORDLIST_CHECKPOINT )==0 ) { // emit a checkpoint, unless we're at the very dict beginning if ( iWords ) { m_wrDict.ZipInt ( 0 ); m_wrDict.ZipInt ( 0 ); } BYTE * sClone = new BYTE [ iLen+1 ]; // OPTIMIZE? pool these? memcpy ( sClone, tWord.m_sKeyword, iLen+1 ); sClone[iLen] = '\0'; CSphWordlistCheckpoint & tCheckpoint = m_dCheckpoints.Add (); tCheckpoint.m_sWord = (char*) sClone; tCheckpoint.m_iWordlistOffset = m_wrDict.GetPos(); tLastKeyword.Reset(); } iWords++; // write final dict entry assert ( iLen ); assert ( tWord.m_uOff ); assert ( tWord.m_iDocs ); assert ( tWord.m_iHits ); tLastKeyword.PutDelta ( m_wrDict, (const BYTE *)tWord.m_sKeyword, iLen ); m_wrDict.ZipOffset ( tWord.m_uOff ); m_wrDict.ZipInt ( tWord.m_iDocs ); m_wrDict.ZipInt ( tWord.m_iHits ); if ( tWord.m_uHint ) m_wrDict.PutByte ( tWord.m_uHint ); // next int iBin = tWord.m_iBlock; qWords.Pop (); if ( !dBins[iBin]->IsDone() ) { DictReadEntry ( dBins[iBin], tEntry, pKeywords + iBin*MAX_KEYWORD_BYTES ); if ( dBins[iBin]->IsError() ) { sError.SetSprintf ( "entry read error in dictionary sort (bin %d of %d)", iBin, dBins.GetLength() ); LOC_CLEANUP(); return false; } tEntry.m_iBlock = iBin; qWords.Push ( tEntry ); } } // end of dictionary block m_wrDict.ZipInt ( 0 ); m_wrDict.ZipInt ( 0 ); LOC_CLEANUP(); #undef LOC_CLEANUP // flush wordlist checkpoints *pCheckpointsPos = m_wrDict.GetPos(); *pCheckpointsCount = m_dCheckpoints.GetLength(); ARRAY_FOREACH ( i, m_dCheckpoints ) { const int iLen = strlen ( m_dCheckpoints[i].m_sWord ); assert ( m_dCheckpoints[i].m_iWordlistOffset>0 ); assert ( iLen>0 && iLenm_sKeyword, b->m_sKeyword ) < 0; } }; void CSphDictKeywords::DictFlush () { if ( !m_dDictChunks.GetLength() ) return; assert ( m_dDictChunks.GetLength() && m_dKeywordChunks.GetLength() ); // sort em int iTotalWords = m_dDictChunks.GetLength()*DICT_CHUNK - m_iDictChunkFree; CSphVector dWords ( iTotalWords ); int iIdx = 0; ARRAY_FOREACH ( i, m_dDictChunks ) { int iWords = DICT_CHUNK; if ( i==m_dDictChunks.GetLength()-1 ) iWords -= m_iDictChunkFree; DictKeyword_t * pWord = m_dDictChunks[i]; for ( int j=0; jm_sKeyword ); m_wrTmpDict.PutByte ( iLen ); m_wrTmpDict.PutBytes ( pWord->m_sKeyword, iLen ); m_wrTmpDict.ZipOffset ( pWord->m_uOff ); m_wrTmpDict.ZipInt ( pWord->m_iDocs ); m_wrTmpDict.ZipInt ( pWord->m_iHits ); m_wrTmpDict.PutByte ( pWord->m_uHint ); } tBlock.m_iLen = (int)( m_wrTmpDict.GetPos() - tBlock.m_iPos ); // clean up buffers ARRAY_FOREACH ( i, m_dDictChunks ) SafeDeleteArray ( m_dDictChunks[i] ); m_dDictChunks.Resize ( 0 ); m_pDictChunk = NULL; m_iDictChunkFree = 0; ARRAY_FOREACH ( i, m_dKeywordChunks ) SafeDeleteArray ( m_dKeywordChunks[i] ); m_dKeywordChunks.Resize ( 0 ); m_pKeywordChunk = NULL; m_iKeywordChunkFree = 0; m_iMemUse = 0; } void CSphDictKeywords::DictEntry ( SphWordID_t, BYTE * sKeyword, int iDocs, int iHits, SphOffset_t iDoclistOffset, SphOffset_t iDoclistLength ) { // they say, this might just happen during merge // FIXME! can we make merge avoid sending such keywords to dict and assert here? 
if ( !iDocs ) return; assert ( iHits ); assert ( iDoclistLength>0 ); DictKeyword_t * pWord = NULL; int iLen = strlen ( (char*)sKeyword ) + 1; for ( ;; ) { // alloc dict entry if ( !m_iDictChunkFree ) { if ( m_iDictLimit && ( m_iMemUse + (int)sizeof(DictKeyword_t)*DICT_CHUNK )>m_iDictLimit ) DictFlush (); m_pDictChunk = new DictKeyword_t [ DICT_CHUNK ]; m_iDictChunkFree = DICT_CHUNK; m_dDictChunks.Add ( m_pDictChunk ); m_iMemUse += sizeof(DictKeyword_t)*DICT_CHUNK; } // alloc keyword if ( m_iKeywordChunkFree < iLen ) { if ( m_iDictLimit && ( m_iMemUse + KEYWORD_CHUNK )>m_iDictLimit ) { DictFlush (); continue; // because we just flushed pWord } m_pKeywordChunk = new BYTE [ KEYWORD_CHUNK ]; m_iKeywordChunkFree = KEYWORD_CHUNK; m_dKeywordChunks.Add ( m_pKeywordChunk ); m_iMemUse += KEYWORD_CHUNK; } // aw kay break; } pWord = m_pDictChunk++; m_iDictChunkFree--; pWord->m_sKeyword = (char*)m_pKeywordChunk; memcpy ( m_pKeywordChunk, sKeyword, iLen ); m_pKeywordChunk[iLen-1] = '\0'; m_pKeywordChunk += iLen; m_iKeywordChunkFree -= iLen; pWord->m_uOff = iDoclistOffset; pWord->m_iDocs = iDocs; pWord->m_iHits = iHits; pWord->m_uHint = sphDoclistHintPack ( iDocs, iDoclistLength ); } SphWordID_t CSphDictKeywords::GetWordID ( BYTE * pWord ) { SphWordID_t uCRC = CSphDictCRC::GetWordID ( pWord ); if ( !uCRC || !m_bHitblock ) return uCRC; int iLen = strlen ( (const char *)pWord ); return HitblockGetID ( (const char *)pWord, iLen, uCRC ); } SphWordID_t CSphDictKeywords::GetWordIDWithMarkers ( BYTE * pWord ) { SphWordID_t uCRC = CSphDictCRC::GetWordIDWithMarkers ( pWord ); if ( !uCRC || !m_bHitblock ) return uCRC; int iLen = strlen ( (const char *)pWord ); return HitblockGetID ( (const char *)pWord, iLen, uCRC ); } SphWordID_t CSphDictKeywords::GetWordIDNonStemmed ( BYTE * pWord ) { SphWordID_t uCRC = CSphDictCRC::GetWordIDNonStemmed ( pWord ); if ( !uCRC || !m_bHitblock ) return uCRC; int iLen = strlen ( (const char *)pWord ); return HitblockGetID ( (const char *)pWord, iLen, uCRC ); } SphWordID_t CSphDictKeywords::GetWordID ( const BYTE * pWord, int iLen, bool bFilterStops ) { SphWordID_t uCRC = CSphDictCRC::GetWordID ( pWord, iLen, bFilterStops ); if ( !uCRC || !m_bHitblock ) return uCRC; return HitblockGetID ( (const char *)pWord, iLen, uCRC ); // !COMMIT would break, we kind of strcmp inside; but must never get called? 
} /// binary search for the first hit with wordid greater than or equal to reference static CSphWordHit * FindFirstGte ( CSphWordHit * pHits, int iHits, SphWordID_t uID ) { if ( pHits->m_iWordID==uID ) return pHits; CSphWordHit * pL = pHits; CSphWordHit * pR = pHits + iHits - 1; if ( pL->m_iWordID > uID || pR->m_iWordID < uID ) return NULL; while ( pR-pL!=1 ) { CSphWordHit * pM = pL + ( pR-pL )/2; if ( pM->m_iWordID < uID ) pL = pM; else pR = pM; } assert ( pR-pL==1 ); assert ( pL->m_iWordIDm_iWordID>=uID ); return pR; } /// full crc and keyword check static inline bool FullIsLess ( const CSphDictKeywords::HitblockException_t & a, const CSphDictKeywords::HitblockException_t & b ) { if ( a.m_uCRC!=b.m_uCRC ) return a.m_uCRC < b.m_uCRC; return strcmp ( a.m_pEntry->m_pKeyword, b.m_pEntry->m_pKeyword ) < 0; } /// sort functor to compute collided hits reordering struct HitblockPatchSort_fn { const CSphDictKeywords::HitblockException_t * m_pExc; explicit HitblockPatchSort_fn ( const CSphDictKeywords::HitblockException_t * pExc ) : m_pExc ( pExc ) {} bool IsLess ( int a, int b ) const { return FullIsLess ( m_pExc[a], m_pExc[b] ); } }; /// do hit block patching magic void CSphDictKeywords::HitblockPatch ( CSphWordHit * pHits, int iHits ) { if ( !pHits || iHits<=0 ) return; const CSphVector & dExc = m_dExceptions; // shortcut CSphVector dChunk; // reorder hit chunks for exceptions (aka crc collisions) for ( int iFirst = 0; iFirst < dExc.GetLength()-1; ) { // find next span of collisions, iFirst inclusive, iMax exclusive ie. [iFirst,iMax) // (note that exceptions array is always sorted) SphWordID_t uFirstWordid = dExc[iFirst].m_pEntry->m_uWordid; assert ( dExc[iFirst].m_uCRC==uFirstWordid ); int iMax = iFirst+1; SphWordID_t uSpan = uFirstWordid+1; while ( iMax < dExc.GetLength() && dExc[iMax].m_pEntry->m_uWordid==uSpan ) { iMax++; uSpan++; } // check whether they are in proper order already bool bSorted = true; for ( int i=iFirst; im_iWordID==uFirstWordid ); // find the chunk starts for ( int i=1; im_iWordID==uFirstWordid+i ); } CSphWordHit * pTemp; if ( iMax-iFirst==2 ) { // most frequent case, just two collisions // OPTIMIZE? allocate buffer for the smaller chunk, not just first chunk pTemp = new CSphWordHit [ dChunk[1]-dChunk[0] ]; memcpy ( pTemp, dChunk[0], ( dChunk[1]-dChunk[0] )*sizeof(CSphWordHit) ); memmove ( dChunk[0], dChunk[1], ( dChunk[2]-dChunk[1] )*sizeof(CSphWordHit) ); memcpy ( dChunk[0] + ( dChunk[2]-dChunk[1] ), pTemp, ( dChunk[1]-dChunk[0] )*sizeof(CSphWordHit) ); } else { // generic case, more than two CSphVector dReorder ( iMax-iFirst ); ARRAY_FOREACH ( i, dReorder ) dReorder[i] = i; HitblockPatchSort_fn fnSort ( &dExc[iFirst] ); dReorder.Sort ( fnSort ); // OPTIMIZE? 
could skip heading and trailing blocks that are already in position pTemp = new CSphWordHit [ dChunk.Last()-dChunk[0] ]; CSphWordHit * pOut = pTemp; ARRAY_FOREACH ( i, dReorder ) { int iChunk = dReorder[i]; int iHits = dChunk[iChunk+1] - dChunk[iChunk]; memcpy ( pOut, dChunk[iChunk], iHits*sizeof(CSphWordHit) ); pOut += iHits; } assert ( ( pOut-pTemp )==( dChunk.Last()-dChunk[0] ) ); memcpy ( dChunk[0], pTemp, ( dChunk.Last()-dChunk[0] )*sizeof(CSphWordHit) ); } // patching done SafeDeleteArray ( pTemp ); iFirst = iMax; } } const char * CSphDictKeywords::HitblockGetKeyword ( SphWordID_t uWordID ) { const DWORD uHash = (DWORD)( uWordID % SLOTS ); HitblockKeyword_t * pEntry = m_dHash [ uHash ]; while ( pEntry ) { // check crc if ( pEntry->m_uWordid!=uWordID ) { // crc mismatch, try next entry pEntry = pEntry->m_pNextHash; continue; } return pEntry->m_pKeyword; } ARRAY_FOREACH ( i, m_dExceptions ) if ( m_dExceptions[i].m_pEntry->m_uWordid==uWordID ) return m_dExceptions[i].m_pEntry->m_pKeyword; assert ( "hash missing value in operator []" ); return "\31oops"; } ////////////////////////////////////////////////////////////////////////// // KEYWORDS STORING DICTIONARY ////////////////////////////////////////////////////////////////////////// class CRtDictKeywords : public ISphRtDictWraper { private: CSphDict * m_pBase; SmallStringHash_T m_hKeywords; CSphVector m_dPackedKeywords; public: explicit CRtDictKeywords ( CSphDict * pBase ) : m_pBase ( pBase ) { m_dPackedKeywords.Add ( 0 ); // avoid zero offset at all costs } virtual ~CRtDictKeywords() {} virtual SphWordID_t GetWordID ( BYTE * pWord ) { SphWordID_t uCRC = m_pBase->GetWordID ( pWord ); if ( uCRC ) return AddKeyword ( pWord ); else return 0; } virtual SphWordID_t GetWordIDWithMarkers ( BYTE * pWord ) { SphWordID_t uCRC = m_pBase->GetWordIDWithMarkers ( pWord ); if ( uCRC ) return AddKeyword ( pWord ); else return 0; } virtual SphWordID_t GetWordIDNonStemmed ( BYTE * pWord ) { SphWordID_t uCRC = m_pBase->GetWordIDNonStemmed ( pWord ); if ( uCRC ) return AddKeyword ( pWord ); else return 0; } virtual SphWordID_t GetWordID ( const BYTE * pWord, int iLen, bool bFilterStops ) { SphWordID_t uCRC = m_pBase->GetWordID ( pWord, iLen, bFilterStops ); if ( uCRC ) return AddKeyword ( pWord ); else return 0; } virtual const BYTE * GetPackedKeywords () { return m_dPackedKeywords.Begin(); } virtual int GetPackedLen () { return m_dPackedKeywords.GetLength(); } virtual void ResetKeywords() { m_dPackedKeywords.Resize ( 0 ); m_dPackedKeywords.Add ( 0 ); // avoid zero offset at all costs m_hKeywords.Reset(); } SphWordID_t AddKeyword ( const BYTE * pWord ) { int iLen = strlen ( (const char *)pWord ); CSphString sWord; sWord.SetBinary ( (const char *)pWord, iLen ); int * pOff = m_hKeywords ( sWord ); if ( pOff ) { return *pOff; } assert ( iLen<255 ); int iOff = m_dPackedKeywords.GetLength(); m_dPackedKeywords.Resize ( iOff+iLen+1 ); m_dPackedKeywords[iOff] = (BYTE)( iLen & 0xFF ); memcpy ( m_dPackedKeywords.Begin()+iOff+1, pWord, iLen ); m_hKeywords.Add ( iOff, sWord ); return iOff; } virtual void LoadStopwords ( const char * sFiles, ISphTokenizer * pTokenizer ) { m_pBase->LoadStopwords ( sFiles, pTokenizer ); } virtual bool LoadWordforms ( const char * sFile, ISphTokenizer * pTokenizer, const char * sIndex ) { return m_pBase->LoadWordforms ( sFile, pTokenizer, sIndex ); } virtual bool SetMorphology ( const char * szMorph, bool bUseUTF8, CSphString & sError ) { return m_pBase->SetMorphology ( szMorph, bUseUTF8, sError ); } virtual void Setup ( const 
CSphDictSettings & tSettings ) { m_pBase->Setup ( tSettings ); } virtual const CSphDictSettings & GetSettings () const { return m_pBase->GetSettings(); } virtual const CSphVector & GetStopwordsFileInfos () { return m_pBase->GetStopwordsFileInfos(); } virtual const CSphSavedFile & GetWordformsFileInfo () { return m_pBase->GetWordformsFileInfo(); } virtual const CSphMultiformContainer * GetMultiWordforms () const { return m_pBase->GetMultiWordforms(); } virtual bool IsStopWord ( const BYTE * pWord ) const { return m_pBase->IsStopWord ( pWord ); } }; ISphRtDictWraper * sphCreateRtKeywordsDictionaryWrapper ( CSphDict * pBase ) { return new CRtDictKeywords ( pBase ); } ////////////////////////////////////////////////////////////////////////// // DICTIONARY FACTORIES ////////////////////////////////////////////////////////////////////////// static CSphDict * SetupDictionary ( CSphDict * pDict, const CSphDictSettings & tSettings, ISphTokenizer * pTokenizer, CSphString & sError, const char * sIndex ) { assert ( pTokenizer ); assert ( pDict ); pDict->Setup ( tSettings ); if ( pDict->SetMorphology ( tSettings.m_sMorphology.cstr (), pTokenizer->IsUtf8(), sError ) ) sError = ""; pDict->LoadStopwords ( tSettings.m_sStopwords.cstr (), pTokenizer ); pDict->LoadWordforms ( tSettings.m_sWordforms.cstr (), pTokenizer, sIndex ); return pDict; } CSphDict * sphCreateDictionaryCRC ( const CSphDictSettings & tSettings, ISphTokenizer * pTokenizer, CSphString & sError, const char * sIndex ) { CSphDict * pDict = NULL; if ( tSettings.m_bCrc32 ) pDict = new CSphDictCRC (); else pDict = new CSphDictCRC (); if ( !pDict ) return NULL; return SetupDictionary ( pDict, tSettings, pTokenizer, sError, sIndex ); } CSphDict * sphCreateDictionaryKeywords ( const CSphDictSettings & tSettings, ISphTokenizer * pTokenizer, CSphString & sError, const char * sIndex ) { CSphDict * pDict = new CSphDictKeywords(); return SetupDictionary ( pDict, tSettings, pTokenizer, sError, sIndex ); } void sphShutdownWordforms () { CSphDictCRCTraits::SweepWordformContainers ( NULL, 0 ); } ///////////////////////////////////////////////////////////////////////////// // HTML STRIPPER ///////////////////////////////////////////////////////////////////////////// static inline int sphIsTag ( int c ) { return sphIsAlpha(c) || c=='.' || c==':'; } static inline int sphIsTagStart ( int c ) { return ( c>='a' && c<='z' ) || ( c>='A' && c<='Z' ) || c=='_' || c=='.' 
|| c==':'; } CSphHTMLStripper::CSphHTMLStripper ( bool bDefaultTags ) { if ( bDefaultTags ) { // known inline tags const char * dKnown[] = { "a", "b", "i", "s", "u", "basefont", "big", "em", "font", "img", "label", "small", "span", "strike", "strong", "sub\0", "sup\0", // fix gcc 3.4.3 on solaris10 compiler bug "tt" }; m_dTags.Resize ( sizeof(dKnown)/sizeof(dKnown[0]) ); ARRAY_FOREACH ( i, m_dTags ) { m_dTags[i].m_sTag = dKnown[i]; m_dTags[i].m_iTagLen = strlen ( dKnown[i] ); m_dTags[i].m_bInline = true; } } UpdateTags (); } int CSphHTMLStripper::GetCharIndex ( int iCh ) const { if ( iCh>='a' && iCh<='z' ) return iCh-'a'; if ( iCh>='A' && iCh<='Z' ) return iCh-'A'; if ( iCh=='_' ) return 26; if ( iCh==':' ) return 27; return -1; } void CSphHTMLStripper::UpdateTags () { m_dTags.Sort (); for ( int i=0; i=(int)sizeof(sTag) ) LOC_ERROR ( "tag name too long", s ); strncpy ( sTag, s, p-s ); sTag[p-s] = '\0'; // skip spaces while ( *p && isspace(*p) ) p++; if ( *p++!='=' ) LOC_ERROR ( "'=' expected", p-1 ); // add indexed tag entry, if not there yet strlwr ( sTag ); int iIndexTag = -1; ARRAY_FOREACH ( i, m_dTags ) if ( m_dTags[i].m_sTag==sTag ) { iIndexTag = i; break; } if ( iIndexTag<0 ) { m_dTags.Add(); m_dTags.Last().m_sTag = sTag; m_dTags.Last().m_iTagLen = strlen ( sTag ); iIndexTag = m_dTags.GetLength()-1; } m_dTags[iIndexTag].m_bIndexAttrs = true; CSphVector & dAttrs = m_dTags[iIndexTag].m_dAttrs; // scan attributes while ( *p ) { // skip spaces while ( *p && isspace(*p) ) p++; if ( !*p ) break; // check attr name s = p; while ( sphIsTag(*p) ) p++; if ( s==p ) LOC_ERROR ( "invalid character in attribute name", s ); // get attr name if ( p-s>=(int)sizeof(sAttr) ) LOC_ERROR ( "attribute name too long", s ); strncpy ( sAttr, s, p-s ); sAttr[p-s] = '\0'; // add attr, if not there yet int iAttr; for ( iAttr=0; iAttr' || *p=='\r' ) pProbEnd = p; p++; } if ( *p==cEnd ) return p+1; if ( pProbEnd ) return pProbEnd; return p; } struct HtmlEntity_t { const char * m_sName; int m_iCode; }; static inline DWORD HtmlEntityHash ( const BYTE * str, int len ) { static const unsigned short asso_values[] = { 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 4, 6, 22, 1, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 170, 48, 0, 5, 44, 0, 10, 10, 86, 421, 7, 0, 1, 42, 93, 41, 421, 0, 5, 8, 14, 421, 421, 5, 11, 8, 421, 421, 421, 421, 421, 421, 1, 25, 27, 9, 2, 113, 82, 14, 3, 179, 1, 81, 91, 12, 0, 1, 180, 56, 17, 5, 31, 60, 7, 3, 161, 2, 3, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421, 421 }; register int hval = len; switch ( hval ) { default: hval += asso_values [ str[4] ]; case 4: case 3: hval += asso_values [ str[2] ]; case 2: hval += asso_values [ str[1]+1 ]; case 1: hval += asso_values [ str[0] ]; 
break; } return hval + asso_values [ str[len-1] ]; } static inline int HtmlEntityLookup ( const BYTE * str, int len ) { static const unsigned char lengthtable[] = { 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 3, 3, 4, 3, 3, 5, 3, 6, 5, 5, 3, 4, 4, 5, 3, 4, 4, 0, 5, 4, 5, 6, 5, 6, 4, 5, 3, 3, 5, 0, 0, 0, 0, 6, 0, 5, 5, 0, 5, 6, 6, 3, 0, 3, 5, 3, 0, 6, 0, 4, 3, 6, 3, 6, 6, 6, 6, 5, 5, 5, 5, 5, 5, 2, 6, 4, 0, 6, 3, 3, 3, 0, 4, 5, 4, 4, 4, 3, 7, 4, 3, 6, 2, 3, 6, 4, 3, 6, 5, 6, 5, 5, 4, 2, 0, 0, 4, 6, 8, 0, 0, 0, 5, 5, 0, 6, 6, 2, 2, 4, 4, 6, 6, 4, 4, 5, 6, 2, 3, 4, 6, 5, 0, 2, 0, 0, 6, 6, 6, 6, 6, 4, 6, 5, 0, 6, 4, 5, 4, 6, 6, 0, 0, 4, 6, 5, 6, 0, 6, 4, 5, 6, 5, 6, 4, 0, 3, 6, 0, 4, 4, 4, 5, 4, 6, 0, 4, 4, 6, 5, 6, 7, 2, 2, 6, 2, 5, 2, 5, 0, 0, 0, 4, 4, 2, 4, 2, 2, 4, 0, 4, 4, 4, 5, 5, 0, 3, 7, 5, 0, 5, 6, 5, 0, 6, 0, 6, 0, 4, 6, 4, 6, 6, 2, 6, 0, 5, 5, 4, 6, 6, 0, 5, 6, 4, 4, 4, 4, 0, 5, 0, 5, 0, 4, 5, 4, 0, 4, 4, 4, 0, 0, 0, 4, 0, 0, 0, 5, 6, 5, 3, 0, 0, 6, 5, 4, 5, 5, 5, 5, 0, 5, 5, 0, 5, 0, 0, 0, 4, 6, 0, 3, 0, 5, 5, 0, 0, 3, 6, 5, 0, 4, 0, 0, 0, 0, 5, 7, 5, 3, 5, 3, 0, 0, 6, 0, 6, 0, 0, 7, 0, 0, 5, 0, 5, 0, 0, 0, 0, 5, 4, 0, 0, 0, 0, 0, 7, 4, 0, 0, 3, 0, 0, 0, 3, 0, 6, 0, 0, 7, 5, 5, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 0, 0, 0, 5, 5, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 6, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5 }; static const struct HtmlEntity_t wordlist[] = { {""}, {""}, {""}, {""}, {""}, {""}, {"Rho", 929}, {""}, {""}, {""}, {""}, {""}, {"Chi", 935}, {"phi", 966}, {"iota", 953}, {"psi", 968}, {"int", 8747}, {"theta", 952}, {"amp", 38}, {"there4", 8756}, {"Theta", 920}, {"omega", 969}, {"and", 8743}, {"prop", 8733}, {"ensp", 8194}, {"image", 8465}, {"not", 172}, {"isin", 8712}, {"sdot", 8901}, {""}, {"prime", 8242}, {"prod", 8719}, {"trade", 8482}, {"Scaron", 352}, {"kappa", 954}, {"thinsp", 8201}, {"emsp", 8195}, {"thorn", 254}, {"eta", 951}, {"chi", 967}, {"Kappa", 922}, {""}, {""}, {""}, {""}, {"scaron", 353}, {""}, {"notin", 8713}, {"ndash", 8211}, {""}, {"acute", 180}, {"otilde", 245}, {"atilde", 227}, {"Phi", 934}, {""}, {"Psi", 936}, {"pound", 163}, {"cap", 8745}, {""}, {"otimes", 8855}, {""}, {"nbsp", 32}, {"rho", 961}, {"ntilde", 241}, {"eth", 240}, {"oacute", 243}, {"aacute", 225}, {"eacute", 233}, {"iacute", 237}, {"nabla", 8711}, {"Prime", 8243}, {"ocirc", 244}, {"acirc", 226}, {"ecirc", 234}, {"icirc", 238}, {"or", 8744}, {"Yacute", 221}, {"nsub", 8836}, {""}, {"Uacute", 218}, {"Eta", 919}, {"ETH", 208}, {"sup", 8835}, {""}, {"supe", 8839}, {"Ucirc", 219}, {"sup1", 185}, {"para", 182}, {"sup2", 178}, {"loz", 9674}, {"omicron", 959}, {"part", 8706}, {"cup", 8746}, {"Ntilde", 209}, {"Mu", 924}, {"tau", 964}, {"uacute", 250}, {"Iota", 921}, {"Tau", 932}, {"rsaquo", 8250}, {"alpha", 945}, {"Ccedil", 199}, {"ucirc", 251}, {"oline", 8254}, {"sup3", 179}, {"nu", 957}, {""}, {""}, {"sube", 8838}, {"Eacute", 201}, {"thetasym", 977}, {""}, {""}, {""}, {"Omega", 937}, {"Ecirc", 202}, {""}, {"lowast", 8727}, {"iquest", 191}, {"lt", 60}, {"gt", 62}, {"ordm", 186}, {"euro", 8364}, {"oslash", 248}, {"lsaquo", 8249}, {"zeta", 950}, {"cong", 8773}, {"mdash", 8212}, {"ccedil", 231}, {"ne", 8800}, {"sub", 8834}, {"Zeta", 918}, {"Lambda", 923}, {"Gamma", 915}, {""}, {"Nu", 925}, {""}, {""}, {"ograve", 242}, {"agrave", 224}, {"egrave", 232}, {"igrave", 236}, {"frac14", 188}, {"ordf", 170}, {"Otilde", 213}, {"infin", 8734}, {""}, {"frac12", 189}, {"beta", 946}, {"radic", 8730}, {"darr", 8595}, {"Iacute", 
205}, {"Ugrave", 217}, {""}, {""}, {"harr", 8596}, {"hearts", 9829}, {"Icirc", 206}, {"Oacute", 211}, {""}, {"frac34", 190}, {"cent", 162}, {"crarr", 8629}, {"curren", 164}, {"Ocirc", 212}, {"brvbar", 166}, {"sect", 167}, {""}, {"ang", 8736}, {"ugrave", 249}, {""}, {"Beta", 914}, {"uarr", 8593}, {"dArr", 8659}, {"asymp", 8776}, {"perp", 8869}, {"Dagger", 8225}, {""}, {"hArr", 8660}, {"rang", 9002}, {"dagger", 8224}, {"exist", 8707}, {"Egrave", 200}, {"Omicron", 927}, {"mu", 956}, {"pi", 960}, {"weierp", 8472}, {"xi", 958}, {"clubs", 9827}, {"Xi", 926}, {"aring", 229}, {""}, {""}, {""}, {"copy", 169}, {"uArr", 8657}, {"ni", 8715}, {"rarr", 8594}, {"le", 8804}, {"ge", 8805}, {"zwnj", 8204}, {""}, {"apos", 39}, {"macr", 175}, {"lang", 9001}, {"gamma", 947}, {"Delta", 916}, {""}, {"uml", 168}, {"alefsym", 8501}, {"delta", 948}, {""}, {"bdquo", 8222}, {"lambda", 955}, {"equiv", 8801}, {""}, {"Oslash", 216}, {""}, {"hellip", 8230}, {""}, {"rArr", 8658}, {"Atilde", 195}, {"larr", 8592}, {"spades", 9824}, {"Igrave", 204}, {"Pi", 928}, {"yacute", 253}, {""}, {"diams", 9830}, {"sbquo", 8218}, {"fnof", 402}, {"Ograve", 210}, {"plusmn", 177}, {""}, {"rceil", 8969}, {"Aacute", 193}, {"ouml", 246}, {"auml", 228}, {"euml", 235}, {"iuml", 239}, {""}, {"Acirc", 194}, {""}, {"rdquo", 8221}, {""}, {"lArr", 8656}, {"rsquo", 8217}, {"Yuml", 376}, {""}, {"quot", 34}, {"Uuml", 220}, {"bull", 8226}, {""}, {""}, {""}, {"real", 8476}, {""}, {""}, {""}, {"lceil", 8968}, {"permil", 8240}, {"upsih", 978}, {"sum", 8721}, {""}, {""}, {"divide", 247}, {"raquo", 187}, {"uuml", 252}, {"ldquo", 8220}, {"Alpha", 913}, {"szlig", 223}, {"lsquo", 8216}, {""}, {"Sigma", 931}, {"tilde", 732}, {""}, {"THORN", 222}, {""}, {""}, {""}, {"Euml", 203}, {"rfloor", 8971}, {""}, {"lrm", 8206}, {""}, {"sigma", 963}, {"iexcl", 161}, {""}, {""}, {"deg", 176}, {"middot", 183}, {"laquo", 171}, {""}, {"circ", 710}, {""}, {""}, {""}, {""}, {"frasl", 8260}, {"epsilon", 949}, {"oplus", 8853}, {"yen", 165}, {"micro", 181}, {"piv", 982}, {""}, {""}, {"lfloor", 8970}, {""}, {"Agrave", 192}, {""}, {""}, {"Upsilon", 933}, {""}, {""}, {"times", 215}, {""}, {"cedil", 184}, {""}, {""}, {""}, {""}, {"minus", 8722}, {"Iuml", 207}, {""}, {""}, {""}, {""}, {""}, {"upsilon", 965}, {"Ouml", 214}, {""}, {""}, {"rlm", 8207}, {""}, {""}, {""}, {"reg", 174}, {""}, {"forall", 8704}, {""}, {""}, {"Epsilon", 917}, {"empty", 8709}, {"OElig", 338}, {""}, {"shy", 173}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {"Aring", 197}, {""}, {""}, {""}, {"oelig", 339}, {"aelig", 230}, {""}, {"zwj", 8205}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {"sim", 8764}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {"yuml", 255}, {"sigmaf", 962}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {"Auml", 196}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {""}, {"AElig", 198} }; const int MIN_WORD_LENGTH = 2; const int MAX_WORD_LENGTH = 8; const int MAX_HASH_VALUE = 420; if ( len<=MAX_WORD_LENGTH && len>=MIN_WORD_LENGTH ) { register int key = HtmlEntityHash ( str, len ); if ( key<=MAX_HASH_VALUE && key>=0 ) if ( len==lengthtable[key] ) { register const char * s = wordlist[key].m_sName; if ( *str==*s && !memcmp ( str+1, s+1, len-1 ) ) return wordlist[key].m_iCode; } } return 0; } void CSphHTMLStripper::Strip ( BYTE * sData ) const { const BYTE * s = sData; BYTE * d = sData; for ( ;; ) { ///////////////////////////////////// // scan until eof, or tag, or 
entity ///////////////////////////////////// while ( *s && *s!='<' && *s!='&' ) { if ( *s>=0x20 ) *d++ = *s; else *d++ = ' '; s++; } if ( !*s ) break; ///////////////// // handle entity ///////////////// if ( *s=='&' ) { if ( s[1]=='#' ) { // handle "&#number;" form int iCode = 0; s += 2; while ( isdigit(*s) ) iCode = iCode*10 + (*s++) - '0'; if ( ( iCode>=0 && iCode<=0x1f ) || *s!=';' ) // 0-31 are reserved codes continue; d += sphUTF8Encode ( d, iCode ); s++; } else { // skip until ';' or max length if ( ( s[1]>='a' && s[1]<='z' ) || ( s[1]>='A' && s[1]<='Z' ) ) { const int MAX_ENTITY_LEN = 8; const BYTE * sStart = s+1; while ( *s && *s!=';' && s-sStart<=MAX_ENTITY_LEN ) s++; if ( *s==';' ) { int iCode = HtmlEntityLookup ( sStart, (int)(s-sStart) ); if ( iCode>0 ) { // this is a known entity; encode it d += sphUTF8Encode ( d, iCode ); s++; continue; } } // rollback s = sStart-1; } // if we're here, it's not an entity; pass the leading ampersand and rescan *d++ = *s++; } continue; } ////////////// // handle tag ////////////// assert ( *s=='<' ); if ( GetCharIndex(s[1])<0 ) { if ( s[1]=='/' ) { // check if it's valid closing tag if ( GetCharIndex(s[2])<0 ) { *d++ = *s++; continue; } } else if ( s[1]=='!' ) { if ( s[2]=='-' && s[3]=='-' ) { // it's valid comment; scan until comment end s += 4; // skip opening '' continue; } else if ( isalpha(s[2]) ) { // it's style PI; scan until PI end s += 2; while ( *s && *s!='>' ) { if ( *s=='\'' || *s=='"' ) { s = SkipQuoted ( s ); while ( isspace(*s) ) s++; } else { s++; } } if ( *s=='>' ) s++; continue; } else { // it's something malformed; just ignore *d++ = *s++; continue; } } else if ( s[1]=='?' ) { // scan until PI end s += 2; // skip opening '' ) break; s++; } if ( !*s ) break; s += 2; // skip closing '?>' continue; } else { // simply malformed *d++ = *s++; continue; } } s++; // skip '<' ////////////////////////////////////// // lookup this tag in known tags list ////////////////////////////////////// const StripperTag_t * pTag = NULL; int iZoneNameLen = 0; const BYTE * sZoneName = NULL; s = FindTag ( s, &pTag, &sZoneName, &iZoneNameLen ); ///////////////////////////////////// // process tag contents // index attributes if needed // gracefully handle malformed stuff ///////////////////////////////////// #define LOC_SKIP_SPACES() { while ( sphIsSpace(*s) ) s++; if ( !*s || *s=='>' ) break; } bool bIndexAttrs = ( pTag && pTag->m_bIndexAttrs ); while ( *s && *s!='>' ) { LOC_SKIP_SPACES(); if ( sphIsTagStart(*s) ) { // skip attribute name while it's valid const BYTE * sAttr = s; while ( sphIsTag(*s) ) s++; // blanks or a value after a valid attribute name? if ( sphIsSpace(*s) || *s=='=' ) { const int iAttrLen = (int)( s - sAttr ); LOC_SKIP_SPACES(); // a valid name but w/o a value; keep scanning if ( *s!='=' ) continue; // got value! s++; LOC_SKIP_SPACES(); // check attribute name // OPTIMIZE! 
remove linear search int iAttr = -1; if ( bIndexAttrs ) { for ( iAttr=0; iAttrm_dAttrs.GetLength(); iAttr++ ) { int iLen = strlen ( pTag->m_dAttrs[iAttr].cstr() ); if ( iLen==iAttrLen && !strncasecmp ( pTag->m_dAttrs[iAttr].cstr(), (const char*)sAttr, iLen ) ) break; } if ( iAttr==pTag->m_dAttrs.GetLength() ) iAttr = -1; } // process the value const BYTE * sVal = s; if ( *s=='\'' || *s=='"' ) { // skip quoted value until a matching quote s = SkipQuoted ( s ); } else { // skip unquoted value until tag end or whitespace while ( *s && *s!='>' && !sphIsSpace(*s) ) s++; } // if this one is to be indexed, copy it if ( iAttr>=0 ) { const BYTE * sMax = s; if ( *sVal=='\'' || *sVal=='"' ) { if ( sMax[-1]==sVal[0] ) sMax--; sVal++; } while ( sValm_bZone ) { // should be at tag's end assert ( s[0]=='\0' || s[-1]=='>' ); // emit secret codes *d++ = MAGIC_CODE_ZONE; for ( int i=0; im_bPara ) { *d++ = MAGIC_CODE_PARAGRAPH; continue; } // in all cases, the tag must be fully processed at this point // not a remove-tag? we're done if ( !pTag->m_bRemove ) { if ( !pTag->m_bInline ) *d++ = ' '; continue; } // sudden eof? bail out if ( !*s ) break; // must be a proper remove-tag end, then assert ( pTag->m_bRemove && s[-1]=='>' ); // short-form? we're done if ( s[-2]=='/' ) continue; // skip everything until the closing tag // FIXME! should we handle insane cases with quoted closing tag within tag? for ( ;; ) { while ( *s && ( s[0]!='<' || s[1]!='/' ) ) s++; if ( !*s ) break; s += 2; // skip m_sTag.cstr(), (const char*)s, pTag->m_iTagLen )!=0 ) continue; if ( !sphIsTag ( s[pTag->m_iTagLen] ) ) { s += pTag->m_iTagLen; // skip tag if ( *s=='>' ) s++; break; } } if ( !pTag->m_bInline ) *d++ = ' '; } *d++ = '\0'; // space, paragraph sequences elimination pass s = sData; d = sData; bool bSpaceOut = false; bool bParaOut = false; bool bZoneOut = false; while ( const char c = *s++ ) { assert ( d<=s-1 ); // handle different character classes if ( sphIsSpace(c) ) { // handle whitespace, skip dupes if ( !bSpaceOut ) *d++ = ' '; bSpaceOut = true; continue; } else if ( c==MAGIC_CODE_PARAGRAPH ) { // handle paragraph marker, skip dupes if ( !bParaOut && !bZoneOut ) { *d++ = c; bParaOut = true; } bSpaceOut = true; continue; } else if ( c==MAGIC_CODE_ZONE ) { // zone marker // rewind preceding paragraph, if any, it is redundant if ( bParaOut ) { assert ( d>sData && d[-1]==MAGIC_CODE_PARAGRAPH ); d--; } // copy \4zoneid\4 *d++ = c; while ( *s && *s!=MAGIC_CODE_ZONE ) *d++ = *s++; if ( *s ) *d++ = *s++; // update state // no spaces paragraphs allowed bSpaceOut = bZoneOut = true; bParaOut = false; continue; } else { *d++ = c; bSpaceOut = bParaOut = bZoneOut = false; } } *d++ = '\0'; } const BYTE * CSphHTMLStripper::FindTag ( const BYTE * sSrc, const StripperTag_t ** ppTag, const BYTE ** ppZoneName, int * pZoneNameLen ) const { assert ( sSrc && ppTag && ppZoneName && pZoneNameLen ); assert ( sSrc[0]!='/' || sSrc[1]!='\0' ); const BYTE * sTagName = ( sSrc[0]=='/' ) ? sSrc+1 : sSrc; *ppZoneName = sSrc; *pZoneNameLen = 0; int iIdx = GetCharIndex ( sTagName[0] ); assert ( iIdx>=0 && iIdx=0 ) { int iStart = m_dStart[iIdx]; int iEnd = m_dEnd[iIdx]; for ( int i=iStart; i<=iEnd; i++ ) { int iLen = m_dTags[i].m_iTagLen; int iCmp = strncasecmp ( m_dTags[i].m_sTag.cstr(), (const char*)sTagName, iLen ); // the tags are sorted; so if current candidate is already greater, rest can be skipped if ( iCmp>0 ) break; // do we have a match? if ( iCmp==0 ) { // got exact match? 
if ( !sphIsTag ( sTagName[iLen] ) ) { *ppTag = m_dTags.Begin() + i; sSrc = sTagName + iLen; // skip tag name if ( m_dTags[i].m_bZone ) *pZoneNameLen = sSrc - *ppZoneName; break; } // got wildcard match? if ( m_dTags[i].m_bZonePrefix ) { *ppTag = m_dTags.Begin() + i; sSrc = sTagName + iLen; while ( sphIsTag(*sSrc) ) sSrc++; *pZoneNameLen = sSrc - *ppZoneName; break; } } } } return sSrc; } bool CSphHTMLStripper::IsValidTagStart ( int iCh ) const { int i = GetCharIndex ( iCh ); return ( i>=0 && i0 ) && ( m_dPrefixFields.GetLength()==0 || m_dPrefixFields.Contains ( sField ) ); bool bInfix = ( m_iMinInfixLen>0 ) && ( m_dInfixFields.GetLength()==0 || m_dInfixFields.Contains ( sField ) ); assert ( !( bPrefix && bInfix ) ); // no field must be marked both prefix and infix if ( bPrefix ) return SPH_WORDPART_PREFIX; if ( bInfix ) return SPH_WORDPART_INFIX; return SPH_WORDPART_WHOLE; } ////////////////////////////////////////////////////////////////////////// CSphSource::CSphSource ( const char * sName ) : m_pTokenizer ( NULL ) , m_pDict ( NULL ) , m_tSchema ( sName ) , m_bStripHTML ( false ) , m_iNullIds ( 0 ) , m_iMaxIds ( 0 ) { m_pStripper = new CSphHTMLStripper ( true ); } CSphSource::~CSphSource() { delete m_pStripper; } void CSphSource::SetDict ( CSphDict * pDict ) { assert ( pDict ); m_pDict = pDict; } const CSphSourceStats & CSphSource::GetStats () { return m_tStats; } bool CSphSource::SetStripHTML ( const char * sExtractAttrs, const char * sRemoveElements, bool bDetectParagraphs, const char * sZones, CSphString & sError ) { if ( !m_pStripper->SetIndexedAttrs ( sExtractAttrs, sError ) ) return false; if ( !m_pStripper->SetRemovedElements ( sRemoveElements, sError ) ) return false; if ( bDetectParagraphs ) m_pStripper->EnableParagraphs (); if ( !m_pStripper->SetZones ( sZones, sError ) ) return false; m_bStripHTML = true; return true; } void CSphSource::SetTokenizer ( ISphTokenizer * pTokenizer ) { assert ( pTokenizer ); m_pTokenizer = pTokenizer; } bool CSphSource::UpdateSchema ( CSphSchema * pInfo, CSphString & sError ) { assert ( pInfo ); // fill it if ( pInfo->m_dFields.GetLength()==0 && pInfo->GetAttrsCount()==0 ) { *pInfo = m_tSchema; return true; } // check it return m_tSchema.CompareTo ( *pInfo, sError ); } void CSphSource::Setup ( const CSphSourceSettings & tSettings ) { m_iMinPrefixLen = Max ( tSettings.m_iMinPrefixLen, 0 ); m_iMinInfixLen = Max ( tSettings.m_iMinInfixLen, 0 ); m_iBoundaryStep = Max ( tSettings.m_iBoundaryStep, -1 ); m_bIndexExactWords = tSettings.m_bIndexExactWords; m_iOvershortStep = Min ( Max ( tSettings.m_iOvershortStep, 0 ), 1 ); m_iStopwordStep = Min ( Max ( tSettings.m_iStopwordStep, 0 ), 1 ); m_bIndexSP = tSettings.m_bIndexSP; m_dPrefixFields = tSettings.m_dPrefixFields; m_dInfixFields = tSettings.m_dInfixFields; } SphDocID_t CSphSource::VerifyID ( SphDocID_t uID ) { if ( uID==0 ) { m_iNullIds++; return 0; } if ( uID==DOCID_MAX ) { m_iMaxIds++; return 0; } return uID; } ISphHits * CSphSource::IterateJoinedHits ( CSphString & ) { static ISphHits dDummy; m_tDocInfo.m_iDocID = 0; // pretend that's an eof return &dDummy; } ///////////////////////////////////////////////////////////////////////////// // DOCUMENT SOURCE ///////////////////////////////////////////////////////////////////////////// static void FormatEscaped ( FILE * fp, const char * sLine ) { // handle empty lines if ( !sLine || !*sLine ) { fprintf ( fp, "''" ); return; } // pass one, count the needed buffer size int iLen = strlen(sLine); int iOut = 0; for ( int i=0; i(int)sizeof(sMinibuffer) ) { 
sMaxibuffer = new char [ iOut+4 ]; // 4 is just my safety gap sBuffer = sMaxibuffer; } // pass two, escape it char * sOut = sBuffer; *sOut++ = '\''; for ( int i=0; i m_iMaxFileBufferSize ) { sphWarning ( "docid=" DOCID_FMT ": file '%s' too big for a field (size="INT64_FMT", max_file_field_buffer=%d)", m_tDocInfo.m_iDocID, (const char *)sField, iFileSize, m_iMaxFileBufferSize ); return false; } return true; } /// returns file size on success, and replaces *ppField with a pointer to data /// returns -1 on failure (and emits a warning) int CSphSource_Document::LoadFileField ( BYTE ** ppField, CSphString & sError ) { CSphAutofile tFileSource; BYTE * sField = *ppField; if ( tFileSource.Open ( (const char *)sField, SPH_O_READ, sError )==-1 ) { sphWarning ( "docid=" DOCID_FMT ": %s", m_tDocInfo.m_iDocID, sError.cstr() ); return -1; } int64_t iFileSize = tFileSource.GetSize(); if ( iFileSize+16 > m_iMaxFileBufferSize ) { sphWarning ( "docid=" DOCID_FMT ": file '%s' too big for a field (size="INT64_FMT", max_file_field_buffer=%d)", m_tDocInfo.m_iDocID, (const char *)sField, iFileSize, m_iMaxFileBufferSize ); return -1; } int iFieldBytes = (int)iFileSize; if ( !iFieldBytes ) return 0; int iBufSize = Max ( m_iReadFileBufferSize, 1 << sphLog2 ( iFieldBytes+15 ) ); if ( m_iReadFileBufferSize < iBufSize ) SafeDeleteArray ( m_pReadFileBuffer ); if ( !m_pReadFileBuffer ) { m_pReadFileBuffer = new char [ iBufSize ]; m_iReadFileBufferSize = iBufSize; } if ( !tFileSource.Read ( m_pReadFileBuffer, iFieldBytes, sError ) ) { sphWarning ( "docid=" DOCID_FMT ": read failed: %s", m_tDocInfo.m_iDocID, sError.cstr() ); return -1; } m_pReadFileBuffer[iFieldBytes] = '\0'; *ppField = (BYTE*)m_pReadFileBuffer; return iFieldBytes; } ////////////////////////////////////////////////////////////////////////// // HIT GENERATORS ////////////////////////////////////////////////////////////////////////// bool CSphSource_Document::BuildZoneHits ( SphDocID_t uDocid, BYTE * sWord ) { if ( *sWord==MAGIC_CODE_SENTENCE || *sWord==MAGIC_CODE_PARAGRAPH || *sWord==MAGIC_CODE_ZONE ) { m_tHits.AddHit ( uDocid, m_pDict->GetWordID ( (BYTE*)MAGIC_WORD_SENTENCE ), m_tState.m_iHitPos ); if ( *sWord==MAGIC_CODE_PARAGRAPH || *sWord==MAGIC_CODE_ZONE ) m_tHits.AddHit ( uDocid, m_pDict->GetWordID ( (BYTE*)MAGIC_WORD_PARAGRAPH ), m_tState.m_iHitPos ); if ( *sWord==MAGIC_CODE_ZONE ) { BYTE * pZone = (BYTE*) m_pTokenizer->GetBufferPtr(); BYTE * pEnd = pZone; while ( *pEnd && *pEnd!=MAGIC_CODE_ZONE ) { pEnd++; } if ( *pEnd && *pEnd==MAGIC_CODE_ZONE ) { *pEnd = '\0'; m_tHits.AddHit ( uDocid, m_pDict->GetWordID ( pZone-1 ), m_tState.m_iHitPos ); m_pTokenizer->SetBufferPtr ( (const char*) pEnd+1 ); } } m_tState.m_iBuildLastStep = 1; return true; } return false; } // track blended start and reset on not blended token static int TrackBlendedStart ( const ISphTokenizer * pTokenizer, int iBlendedHitsStart, int iHitsCount ) { iBlendedHitsStart = ( ( pTokenizer->TokenIsBlended() || pTokenizer->TokenIsBlendedPart() ) ? iBlendedHitsStart : -1 ); if ( pTokenizer->TokenIsBlended() ) iBlendedHitsStart = iHitsCount; return iBlendedHitsStart; } #define BUILD_SUBSTRING_HITS_COUNT 4 void CSphSource_Document::BuildSubstringHits ( SphDocID_t uDocid, bool bPayload, ESphWordpart eWordpart, bool bSkipEndMarker ) { bool bPrefixField = ( eWordpart==SPH_WORDPART_PREFIX ); bool bInfixMode = m_iMinInfixLen > 0; int iMinInfixLen = bPrefixField ? 
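// --- illustrative aside (not part of the original sphinxsource.cpp) ----------
// LoadFileField() above reuses one read buffer across sql_file_field columns,
// growing it to a power-of-two size at least as large as the file being read.
// A sketch of that rounding step, under the assumption that sphLog2() returns
// the bit width of its argument; NextPow2() is a hypothetical helper name.
static int NextPow2 ( int iValue )
{
	int iSize = 1;
	while ( iSize<iValue )
		iSize <<= 1;                          // double until the request fits
	return iSize;
}
// usage sketch: iBufSize = Max ( iCurrentBufSize, NextPow2 ( iFieldBytes+16 ) );
// -----------------------------------------------------------------------------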
m_iMinPrefixLen : m_iMinInfixLen; if ( !m_tState.m_bProcessingHits ) m_tState.m_iBuildLastStep = 1; BYTE * sWord = NULL; BYTE sBuf [ 16+3*SPH_MAX_WORD_LEN ]; int iIterHitCount = BUILD_SUBSTRING_HITS_COUNT; if ( bPrefixField ) iIterHitCount += SPH_MAX_WORD_LEN - m_iMinPrefixLen; else iIterHitCount += ( ( m_iMinInfixLen+SPH_MAX_WORD_LEN ) * ( SPH_MAX_WORD_LEN-m_iMinInfixLen ) / 2 ); // FIELDEND_MASK at blended token stream should be set for HEAD token too int iBlendedHitsStart = -1; // index all infixes while ( ( m_iMaxHits==0 || m_tHits.m_dData.GetLength()+iIterHitCountGetToken() )!=NULL ) { iBlendedHitsStart = TrackBlendedStart ( m_pTokenizer, iBlendedHitsStart, m_tHits.Length() ); if ( !bPayload ) { HITMAN::AddPos ( &m_tState.m_iHitPos, m_tState.m_iBuildLastStep + m_pTokenizer->GetOvershortCount()*m_iOvershortStep ); if ( m_pTokenizer->GetBoundary() ) HITMAN::AddPos ( &m_tState.m_iHitPos, m_iBoundaryStep ); m_tState.m_iBuildLastStep = 1; } if ( BuildZoneHits ( uDocid, sWord ) ) continue; int iLen = m_pTokenizer->GetLastTokenLen (); // always index full word (with magic head/tail marker(s)) int iBytes = strlen ( (const char*)sWord ); if ( m_bIndexExactWords ) { int iBytes = strlen ( (const char*)sWord ); memcpy ( sBuf + 1, sWord, iBytes ); sBuf[0] = MAGIC_WORD_HEAD_NONSTEMMED; sBuf[iBytes+1] = '\0'; m_tHits.AddHit ( uDocid, m_pDict->GetWordIDNonStemmed ( sBuf ), m_tState.m_iHitPos ); } memcpy ( sBuf + 1, sWord, iBytes ); sBuf[0] = MAGIC_WORD_HEAD; sBuf[iBytes+1] = '\0'; // stemmed word w/markers SphWordID_t iWord = m_pDict->GetWordIDWithMarkers ( sBuf ); if ( !iWord ) { m_tState.m_iBuildLastStep = m_iStopwordStep; continue; } m_tHits.AddHit ( uDocid, iWord, m_tState.m_iHitPos ); m_tState.m_iBuildLastStep = m_pTokenizer->TokenIsBlended() ? 0 : 1; // restore stemmed word int iStemmedLen = strlen ( ( const char *)sBuf ); sBuf [iStemmedLen - 1] = '\0'; // stemmed word w/o markers if ( strcmp ( (const char *)sBuf + 1, (const char *)sWord ) ) m_tHits.AddHit ( uDocid, m_pDict->GetWordID ( sBuf + 1, iStemmedLen - 2, true ), m_tState.m_iHitPos ); // restore word memcpy ( sBuf + 1, sWord, iBytes ); sBuf[iBytes+1] = MAGIC_WORD_TAIL; sBuf[iBytes+2] = '\0'; // if there are no infixes, that's it if ( iMinInfixLen > iLen ) { // index full word m_tHits.AddHit ( uDocid, m_pDict->GetWordID ( sWord ), m_tState.m_iHitPos ); continue; } // process all infixes int iMaxStart = bPrefixField ? 
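// --- illustrative aside (not part of the original sphinxsource.cpp) ----------
// The loop above indexes several variants of each token by wrapping it in
// one-byte "magic" head/tail markers, so exact-form and prefix/infix-form
// keywords get distinct dictionary entries. A sketch of just the wrapping
// step; the marker values themselves are defined elsewhere in the sources.
#include <cstring>

static int WrapWithMarkers ( const char * sWord, char cHead, char cTail,
	char * sOut, int iOutSize )
{
	int iLen = (int) strlen ( sWord );
	if ( iLen+3>iOutSize )
		return -1;                            // caller's buffer too small
	sOut[0] = cHead;                          // e.g. MAGIC_WORD_HEAD
	memcpy ( sOut+1, sWord, iLen );
	sOut[iLen+1] = cTail;                     // e.g. MAGIC_WORD_TAIL
	sOut[iLen+2] = '\0';
	return iLen+2;
}
// -----------------------------------------------------------------------------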
0 : ( iLen - iMinInfixLen ); BYTE * sInfix = sBuf + 1; for ( int iStart=0; iStart<=iMaxStart; iStart++ ) { BYTE * sInfixEnd = sInfix; for ( int i = 0; i < iMinInfixLen; i++ ) sInfixEnd += m_pTokenizer->GetCodepointLength ( *sInfixEnd ); for ( int i=iMinInfixLen; i<=iLen-iStart; i++ ) { m_tHits.AddHit ( uDocid, m_pDict->GetWordID ( sInfix, sInfixEnd-sInfix, false ), m_tState.m_iHitPos ); // word start: add magic head if ( bInfixMode && iStart==0 ) m_tHits.AddHit ( uDocid, m_pDict->GetWordID ( sInfix - 1, sInfixEnd-sInfix + 1, false ), m_tState.m_iHitPos ); // word end: add magic tail if ( bInfixMode && i==iLen-iStart ) m_tHits.AddHit ( uDocid, m_pDict->GetWordID ( sInfix, sInfixEnd-sInfix+1, false ), m_tState.m_iHitPos ); sInfixEnd += m_pTokenizer->GetCodepointLength ( *sInfixEnd ); } sInfix += m_pTokenizer->GetCodepointLength ( *sInfix ); } } m_tState.m_bProcessingHits = ( sWord!=NULL ); // mark trailing hits if ( !bSkipEndMarker && !m_tState.m_bProcessingHits && m_tHits.Length() ) { CSphWordHit * pHit = const_cast < CSphWordHit * > ( m_tHits.Last() ); Hitpos_t uRefPos = pHit->m_iWordPos; for ( ; pHit>=m_tHits.First() && pHit->m_iWordPos==uRefPos; pHit-- ) HITMAN::SetEndMarker ( &pHit->m_iWordPos ); // mark blended HEAD as trailing too if ( iBlendedHitsStart>=0 ) { assert ( iBlendedHitsStart>=0 && iBlendedHitsStart ( m_tHits.First()+iBlendedHitsStart ); uRefPos = pHit->m_iWordPos; const CSphWordHit * pEnd = m_tHits.First()+m_tHits.Length(); for ( ; pHitm_iWordPos==uRefPos; pHit++ ) HITMAN::SetEndMarker ( &pHit->m_iWordPos ); } } } #define BUILD_REGULAR_HITS_COUNT 6 void CSphSource_Document::BuildRegularHits ( SphDocID_t uDocid, bool bPayload, bool bSkipEndMarker ) { bool bWordDict = m_pDict->GetSettings().m_bWordDict; bool bGlobalPartialMatch = !bWordDict && ( m_iMinPrefixLen > 0 || m_iMinInfixLen > 0 ); if ( !m_tState.m_bProcessingHits ) m_tState.m_iBuildLastStep = 1; BYTE * sWord = NULL; BYTE sBuf [ 16+3*SPH_MAX_WORD_LEN ]; // FIELDEND_MASK at blended token stream should be set for HEAD token too int iBlendedHitsStart = -1; // index words only while ( ( m_iMaxHits==0 || m_tHits.m_dData.GetLength()+BUILD_REGULAR_HITS_COUNTGetToken() )!=NULL ) { iBlendedHitsStart = TrackBlendedStart ( m_pTokenizer, iBlendedHitsStart, m_tHits.Length() ); if ( !bPayload ) { HITMAN::AddPos ( &m_tState.m_iHitPos, m_tState.m_iBuildLastStep + m_pTokenizer->GetOvershortCount()*m_iOvershortStep ); if ( m_pTokenizer->GetBoundary() ) HITMAN::AddPos ( &m_tState.m_iHitPos, m_iBoundaryStep ); } if ( BuildZoneHits ( uDocid, sWord ) ) continue; if ( bGlobalPartialMatch ) { int iBytes = strlen ( (const char*)sWord ); memcpy ( sBuf + 1, sWord, iBytes ); sBuf[0] = MAGIC_WORD_HEAD; sBuf[iBytes+1] = '\0'; m_tHits.AddHit ( uDocid, m_pDict->GetWordIDWithMarkers ( sBuf ), m_tState.m_iHitPos ); } if ( m_bIndexExactWords ) { int iBytes = strlen ( (const char*)sWord ); memcpy ( sBuf + 1, sWord, iBytes ); sBuf[0] = MAGIC_WORD_HEAD_NONSTEMMED; sBuf[iBytes+1] = '\0'; m_tHits.AddHit ( uDocid, m_pDict->GetWordIDNonStemmed ( sBuf ), m_tState.m_iHitPos ); } SphWordID_t iWord = m_pDict->GetWordID ( sWord ); if ( iWord ) { m_tHits.AddHit ( uDocid, iWord, m_tState.m_iHitPos ); m_tState.m_iBuildLastStep = m_pTokenizer->TokenIsBlended() ? 
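// --- illustrative aside (not part of the original sphinxsource.cpp) ----------
// The nested loops above enumerate every infix of the token that is at least
// iMinInfixLen codepoints long (or only the prefixes, for prefix fields) and
// add a hit for each. A byte-oriented sketch of the same enumeration; the real
// code walks UTF-8 codepoints via GetCodepointLength() instead of raw bytes.
static int CountInfixes ( const char * sWord, int iMinLen )
{
	int iLen = (int) strlen ( sWord );
	int iCount = 0;
	for ( int iStart=0; iStart+iMinLen<=iLen; iStart++ )
		for ( int iEnd=iStart+iMinLen; iEnd<=iLen; iEnd++ )
			iCount++;                         // sWord[iStart..iEnd) would be indexed
	return iCount;                            // e.g. "cats" with min=2 -> 6 substrings
}
// -----------------------------------------------------------------------------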
0 : 1; } else m_tState.m_iBuildLastStep = m_iStopwordStep; } m_tState.m_bProcessingHits = ( sWord!=NULL ); // mark trailing hit if ( !bSkipEndMarker && !m_tState.m_bProcessingHits && m_tHits.Length() ) { CSphWordHit * pHit = const_cast < CSphWordHit * > ( m_tHits.Last() ); HITMAN::SetEndMarker ( &pHit->m_iWordPos ); // mark blended HEAD as trailing too if ( iBlendedHitsStart>=0 ) { assert ( iBlendedHitsStart>=0 && iBlendedHitsStart ( m_tHits.First() + iBlendedHitsStart ); HITMAN::SetEndMarker ( &pBlendedHit->m_iWordPos ); } } } void CSphSource_Document::BuildHits ( CSphString & sError, bool bSkipEndMarker ) { SphDocID_t uDocid = m_tDocInfo.m_iDocID; for ( ; m_tState.m_iFieldStrip ( sField ); iFieldBytes = (int) strlen ( (char*)sField ); } // tokenize and build hits m_tStats.m_iTotalBytes += iFieldBytes; m_pTokenizer->SetBuffer ( sField, iFieldBytes ); m_tState.m_iHitPos = HITMAN::Create ( m_tState.m_iField, m_tState.m_iStartPos ); } const CSphColumnInfo & tField = m_tSchema.m_dFields[m_tState.m_iField]; if ( tField.m_eWordpart!=SPH_WORDPART_WHOLE ) BuildSubstringHits ( uDocid, tField.m_bPayload, tField.m_eWordpart, bSkipEndMarker ); else BuildRegularHits ( uDocid, tField.m_bPayload, bSkipEndMarker ); if ( m_tState.m_bProcessingHits ) break; } m_tState.m_bDocumentDone = !m_tState.m_bProcessingHits; } ////////////////////////////////////////////////////////////////////////// SphRange_t CSphSource_Document::IterateFieldMVAStart ( int iAttr ) { SphRange_t tRange; tRange.m_iStart = tRange.m_iLength = 0; if ( iAttr<0 || iAttr>=m_tSchema.GetAttrsCount() ) return tRange; const CSphColumnInfo & tMva = m_tSchema.GetAttr ( iAttr ); int uOff = MVA_DOWNSIZE ( m_tDocInfo.GetAttr ( tMva.m_tLocator ) ); if ( !uOff ) return tRange; int iCount = m_dMva[uOff]; assert ( iCount ); tRange.m_iStart = uOff+1; tRange.m_iLength = iCount; return tRange; } static int sphAddMva64 ( CSphVector & dStorage, uint64_t uVal ) { int uOff = dStorage.GetLength(); dStorage.Resize ( uOff+2 ); dStorage[uOff] = MVA_DOWNSIZE ( uVal ); dStorage[uOff+1] = MVA_DOWNSIZE ( ( uVal>>32 ) & 0xffffffff ); return uOff; } int CSphSource_Document::ParseFieldMVA ( CSphVector < DWORD > & dMva, const char * szValue, bool bMva64 ) { if ( !szValue ) return 0; const char * pPtr = szValue; const char * pDigit = NULL; const int MAX_NUMBER_LEN = 64; char szBuf [MAX_NUMBER_LEN]; assert ( dMva.GetLength() ); // must not have zero offset int uOff = dMva.GetLength(); dMva.Add ( 0 ); // reserve value for count while ( *pPtr ) { if ( *pPtr>='0' && *pPtr<='9' ) { if ( !pDigit ) pDigit = pPtr; } else { if ( pDigit ) { if ( pPtr - pDigit < MAX_NUMBER_LEN ) { strncpy ( szBuf, pDigit, pPtr - pDigit ); szBuf [pPtr - pDigit] = '\0'; if ( !bMva64 ) dMva.Add ( sphToDword ( szBuf ) ); else sphAddMva64 ( dMva, sphToUint64 ( szBuf ) ); } pDigit = NULL; } } pPtr++; } if ( pDigit ) { if ( !bMva64 ) dMva.Add ( sphToDword ( pDigit ) ); else sphAddMva64 ( dMva, sphToUint64 ( pDigit ) ); } int iCount = dMva.GetLength()-uOff-1; if ( !iCount ) { dMva.Pop(); // remove reserved value for count in case of 0 MVAs return 0; } else { dMva[uOff] = iCount; return uOff; // return offset to ( count, [value] ) } } ///////////////////////////////////////////////////////////////////////////// // GENERIC SQL SOURCE ///////////////////////////////////////////////////////////////////////////// CSphSourceParams_SQL::CSphSourceParams_SQL () : m_iRangeStep ( 1024 ) , m_iRefRangeStep ( 1024 ) , m_bPrintQueries ( false ) , m_iRangedThrottle ( 0 ) , m_iMaxFileBufferSize ( 0 ) , m_eOnFileFieldError ( 
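// --- illustrative aside (not part of the original sphinxsource.cpp) ----------
// ParseFieldMVA() above packs a multi-valued attribute into one flat DWORD
// array: a count slot at some offset, followed by the values, with 64-bit
// values split into a low DWORD and a high DWORD. A sketch of reading that
// layout back; DumpMva() is a hypothetical helper, not part of the sources.
#include <vector>
#include <cstdint>
#include <cstdio>

static void DumpMva ( const std::vector<uint32_t> & dMva, int iOff, bool bMva64 )
{
	if ( !iOff )
		return;                               // offset 0 means "no values"
	int iCount = (int) dMva[iOff];            // count of DWORD slots that follow
	for ( int i=0; i<iCount; i += ( bMva64 ? 2 : 1 ) )
	{
		uint64_t uVal = dMva[iOff+1+i];       // low DWORD comes first
		if ( bMva64 )
			uVal |= uint64_t ( dMva[iOff+2+i] ) << 32;
		printf ( "%llu\n", (unsigned long long)uVal );
	}
}
// -----------------------------------------------------------------------------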
FFE_IGNORE_FIELD ) , m_iPort ( 0 ) { } const char * const CSphSource_SQL::MACRO_VALUES [ CSphSource_SQL::MACRO_COUNT ] = { "$start", "$end" }; CSphSource_SQL::CSphSource_SQL ( const char * sName ) : CSphSource_Document ( sName ) , m_bSqlConnected ( false ) , m_uMinID ( 0 ) , m_uMaxID ( 0 ) , m_uCurrentID ( 0 ) , m_uMaxFetchedID ( 0 ) , m_iMultiAttr ( -1 ) , m_iSqlFields ( 0 ) , m_bCanUnpack ( false ) , m_bUnpackFailed ( false ) , m_bUnpackOverflow ( false ) , m_iJoinedHitField ( -1 ) , m_iJoinedHitID ( 0 ) , m_iJoinedHitPos ( 0 ) { } bool CSphSource_SQL::Setup ( const CSphSourceParams_SQL & tParams ) { // checks assert ( !tParams.m_sQuery.IsEmpty() ); m_tParams = tParams; // defaults #define LOC_FIX_NULL(_arg) if ( !m_tParams._arg.cstr() ) m_tParams._arg = ""; LOC_FIX_NULL ( m_sHost ); LOC_FIX_NULL ( m_sUser ); LOC_FIX_NULL ( m_sPass ); LOC_FIX_NULL ( m_sDB ); #undef LOC_FIX_NULL #define LOC_FIX_QARRAY(_arg) \ ARRAY_FOREACH ( i, m_tParams._arg ) \ if ( m_tParams._arg[i].IsEmpty() ) \ m_tParams._arg.Remove ( i-- ); LOC_FIX_QARRAY ( m_dQueryPre ); LOC_FIX_QARRAY ( m_dQueryPost ); LOC_FIX_QARRAY ( m_dQueryPostIndex ); #undef LOC_FIX_QARRAY // build and store default DSN for error reporting char sBuf [ 1024 ]; snprintf ( sBuf, sizeof(sBuf), "sql://%s:***@%s:%d/%s", m_tParams.m_sUser.cstr(), m_tParams.m_sHost.cstr(), m_tParams.m_iPort, m_tParams.m_sDB.cstr() ); m_sSqlDSN = sBuf; if ( m_tParams.m_iMaxFileBufferSize > 0 ) m_iMaxFileBufferSize = m_tParams.m_iMaxFileBufferSize; m_eOnFileFieldError = m_tParams.m_eOnFileFieldError; return true; } bool CSphSource_SQL::RunQueryStep ( const char * sQuery, CSphString & sError ) { sError = ""; if ( m_tParams.m_iRangeStep<=0 ) return false; if ( m_uCurrentID>m_uMaxID ) return false; static const int iBufSize = 32; char * sRes = NULL; sphSleepMsec ( m_tParams.m_iRangedThrottle ); ////////////////////////////////////////////// // range query with $start/$end interpolation ////////////////////////////////////////////// assert ( m_uMinID>0 ); assert ( m_uMaxID>0 ); assert ( m_uMinID<=m_uMaxID ); assert ( sQuery ); char sValues [ MACRO_COUNT ] [ iBufSize ]; SphDocID_t uNextID = Min ( m_uCurrentID + m_tParams.m_iRangeStep - 1, m_uMaxID ); snprintf ( sValues[0], iBufSize, DOCID_FMT, m_uCurrentID ); snprintf ( sValues[1], iBufSize, DOCID_FMT, uNextID ); g_iIndexerCurrentRangeMin = m_uCurrentID; g_iIndexerCurrentRangeMax = uNextID; m_uCurrentID = 1 + uNextID; // OPTIMIZE? 
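// --- illustrative aside (not part of the original sphinxsource.cpp) ----------
// RunQueryStep() walks [min_id..max_id] in fixed steps and rewrites the
// $start/$end macros in sql_query on every step, e.g. with step=1000 and
// min_id=1 a query such as
//   SELECT id, title, body FROM documents WHERE id BETWEEN $start AND $end
// is issued as "... BETWEEN 1 AND 1000", then "... BETWEEN 1001 AND 2000", and
// so on (the SELECT itself is only a sample, not taken from this file).
// A sketch of one interpolation step using std::string:
#include <string>

static std::string InterpolateRange ( std::string sQuery,
	unsigned long long uStart, unsigned long long uEnd )
{
	const std::string dMacros[2] = { "$start", "$end" };
	const unsigned long long dVals[2] = { uStart, uEnd };
	for ( int i=0; i<2; i++ )
	{
		std::string::size_type iPos;
		while ( ( iPos = sQuery.find ( dMacros[i] ) )!=std::string::npos )
			sQuery.replace ( iPos, dMacros[i].size(), std::to_string ( dVals[i] ) );
	}
	return sQuery;
}
// -----------------------------------------------------------------------------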
things can be precalculated const char * sCur = sQuery; int iLen = 0; while ( *sCur ) { if ( *sCur=='$' ) { int i; for ( i=0; im_uMaxID ) LOC_ERROR2 ( "sql_query_range: min_id='%s', max_id='%s': min_id must be less than max_id", sCol0, sCol1 ); } SqlDismissResult (); return true; } /// issue main rows fetch query bool CSphSource_SQL::IterateStart ( CSphString & sError ) { assert ( m_bSqlConnected ); m_iNullIds = false; m_iMaxIds = false; // run pre-queries ARRAY_FOREACH ( i, m_tParams.m_dQueryPre ) { if ( !SqlQuery ( m_tParams.m_dQueryPre[i].cstr() ) ) { sError.SetSprintf ( "sql_query_pre[%d]: %s (DSN=%s)", i, SqlError(), m_sSqlDSN.cstr() ); SqlDisconnect (); return false; } SqlDismissResult (); } for ( ;; ) { m_tParams.m_iRangeStep = 0; // issue first fetch query if ( !m_tParams.m_sQueryRange.IsEmpty() ) { m_tParams.m_iRangeStep = m_tParams.m_iRefRangeStep; // run range-query; setup ranges if ( !SetupRanges ( m_tParams.m_sQueryRange.cstr(), m_tParams.m_sQuery.cstr(), "sql_query_range: ", sError ) ) return false; // issue query m_uCurrentID = m_uMinID; if ( !RunQueryStep ( m_tParams.m_sQuery.cstr(), sError ) ) return false; } else { // normal query; just issue if ( !SqlQuery ( m_tParams.m_sQuery.cstr() ) ) { sError.SetSprintf ( "sql_query: %s (DSN=%s)", SqlError(), m_sSqlDSN.cstr() ); return false; } } break; } // some post-query setup m_tSchema.Reset(); for ( int i=0; i dFound; dFound.Resize ( m_tParams.m_dAttrs.GetLength() ); ARRAY_FOREACH ( i, dFound ) dFound[i] = false; const bool bWordDict = m_pDict->GetSettings().m_bWordDict; // map plain attrs from SQL for ( int i=0; iSPH_MAX_FIELDS ) LOC_ERROR2 ( "too many fields (fields=%d, max=%d)", m_tSchema.m_dFields.GetLength(), SPH_MAX_FIELDS ); // log it if ( m_fpDumpRows ) { const char * sTable = m_tSchema.m_sName.cstr(); time_t iNow = time ( NULL ); fprintf ( m_fpDumpRows, "#\n# === source %s ts %d\n# %s#\n", sTable, (int)iNow, ctime ( &iNow ) ); ARRAY_FOREACH ( i, m_tSchema.m_dFields ) fprintf ( m_fpDumpRows, "# field %d: %s\n", i, m_tSchema.m_dFields[i].m_sName.cstr() ); for ( int i=0; i=m_tSchema.GetAttrsCount() ) return false; m_iMultiAttr = iAttr; const CSphColumnInfo & tAttr = m_tSchema.GetAttr(iAttr); if ( !(tAttr.m_eAttrType==SPH_ATTR_UINT32SET || tAttr.m_eAttrType==SPH_ATTR_UINT64SET ) ) return false; CSphString sPrefix; switch ( tAttr.m_eSrc ) { case SPH_ATTRSRC_FIELD: return false; case SPH_ATTRSRC_QUERY: // run simple query if ( !SqlQuery ( tAttr.m_sQuery.cstr() ) ) { sError.SetSprintf ( "multi-valued attr '%s' query failed: %s", tAttr.m_sName.cstr(), SqlError() ); return false; } break; case SPH_ATTRSRC_RANGEDQUERY: m_tParams.m_iRangeStep = m_tParams.m_iRefRangeStep; // setup ranges sPrefix.SetSprintf ( "multi-valued attr '%s' ranged query: ", tAttr.m_sName.cstr() ); if ( !SetupRanges ( tAttr.m_sQueryRange.cstr(), tAttr.m_sQuery.cstr(), sPrefix.cstr(), sError ) ) return false; // run first step (in order to report errors) m_uCurrentID = m_uMinID; if ( !RunQueryStep ( tAttr.m_sQuery.cstr(), sError ) ) return false; break; default: sError.SetSprintf ( "INTERNAL ERROR: unknown multi-valued attr source type %d", tAttr.m_eSrc ); return false; } // check fields count if ( SqlNumFields()!=2 ) { sError.SetSprintf ( "multi-valued attr '%s' query returned %d fields (expected 2)", tAttr.m_sName.cstr(), SqlNumFields() ); SqlDismissResult (); return false; } return true; } bool CSphSource_SQL::IterateMultivaluedNext () { const CSphColumnInfo & tAttr = m_tSchema.GetAttr ( m_iMultiAttr ); assert ( m_bSqlConnected ); assert ( 
tAttr.m_eAttrType==SPH_ATTR_UINT32SET || tAttr.m_eAttrType==SPH_ATTR_UINT64SET ); // fetch next row bool bGotRow = SqlFetchRow (); while ( !bGotRow ) { if ( SqlIsError() ) sphDie ( "sql_fetch_row: %s", SqlError() ); // FIXME! this should be reported if ( tAttr.m_eSrc!=SPH_ATTRSRC_RANGEDQUERY ) return false; CSphString sTmp; if ( !RunQueryStep ( tAttr.m_sQuery.cstr(), sTmp ) ) // FIXME! this should be reported return false; bGotRow = SqlFetchRow (); continue; } // return that tuple or offset to storage for MVA64 value m_tDocInfo.m_iDocID = sphToDocid ( SqlColumn(0) ); m_dMva.Resize ( 0 ); if ( tAttr.m_eAttrType==SPH_ATTR_UINT32SET ) m_dMva.Add ( sphToDword ( SqlColumn(1) ) ); else sphAddMva64 ( m_dMva, sphToUint64 ( SqlColumn(1) ) ); return true; } bool CSphSource_SQL::IterateKillListStart ( CSphString & sError ) { if ( m_tParams.m_sQueryKilllist.IsEmpty () ) return false; if ( !SqlQuery ( m_tParams.m_sQueryKilllist.cstr () ) ) { sError.SetSprintf ( "killlist query failed: %s", SqlError() ); return false; } return true; } bool CSphSource_SQL::IterateKillListNext ( SphDocID_t & tDocId ) { if ( SqlFetchRow () ) tDocId = sphToDocid ( SqlColumn(0) ); else { if ( SqlIsError() ) sphDie ( "sql_query_killlist: %s", SqlError() ); // FIXME! this should be reported else { SqlDismissResult (); return false; } } return true; } void CSphSource_SQL::ReportUnpackError ( int iIndex, int iError ) { if ( !m_bUnpackFailed ) { m_bUnpackFailed = true; sphWarn ( "failed to unpack column '%s', error=%d, docid=" DOCID_FMT, SqlFieldName(iIndex), iError, m_tDocInfo.m_iDocID ); } } #if !USE_ZLIB const char * CSphSource_SQL::SqlUnpackColumn ( int iFieldIndex, ESphUnpackFormat ) { return SqlColumn ( m_tSchema.m_dFields[iFieldIndex].m_iIndex ); } #else const char * CSphSource_SQL::SqlUnpackColumn ( int iFieldIndex, ESphUnpackFormat eFormat ) { int iIndex = m_tSchema.m_dFields[iFieldIndex].m_iIndex; const char * pData = SqlColumn(iIndex); if ( pData==NULL ) return NULL; int iPackedLen = SqlColumnLength(iIndex); if ( iPackedLen<=0 ) return NULL; CSphVector & tBuffer = m_dUnpackBuffers[iFieldIndex]; switch ( eFormat ) { case SPH_UNPACK_MYSQL_COMPRESS: { if ( iPackedLen<=4 ) { if ( !m_bUnpackFailed ) { m_bUnpackFailed = true; sphWarn ( "failed to unpack '%s', invalid column size (size=%d), docid="DOCID_FMT, SqlFieldName(iIndex), iPackedLen, m_tDocInfo.m_iDocID ); } return NULL; } unsigned long uSize = 0; for ( int i=0; i<4; i++ ) uSize += ((unsigned long)((BYTE)pData[i])) << ( 8*i ); uSize &= 0x3FFFFFFF; if ( uSize > m_tParams.m_uUnpackMemoryLimit ) { if ( !m_bUnpackOverflow ) { m_bUnpackOverflow = true; sphWarn ( "failed to unpack '%s', column size limit exceeded (size=%d), docid="DOCID_FMT, SqlFieldName(iIndex), (int)uSize, m_tDocInfo.m_iDocID ); } return NULL; } int iResult; tBuffer.Resize ( uSize + 1 ); unsigned long uLen = iPackedLen-4; iResult = uncompress ( (Bytef *)tBuffer.Begin(), &uSize, (Bytef *)pData + 4, uLen ); if ( iResult==Z_OK ) { tBuffer[uSize] = 0; return &tBuffer[0]; } else ReportUnpackError ( iIndex, iResult ); return NULL; } case SPH_UNPACK_ZLIB: { char * sResult = 0; int iBufferOffset = 0; int iResult; z_stream tStream; tStream.zalloc = Z_NULL; tStream.zfree = Z_NULL; tStream.opaque = Z_NULL; tStream.avail_in = iPackedLen; tStream.next_in = (Bytef *)SqlColumn(iIndex); iResult = inflateInit ( &tStream ); if ( iResult!=Z_OK ) return NULL; for ( ;; ) { tStream.next_out = (Bytef *)&tBuffer[iBufferOffset]; tStream.avail_out = tBuffer.GetLength() - iBufferOffset - 1; iResult = inflate ( &tStream, Z_NO_FLUSH 
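// --- illustrative aside (not part of the original sphinxsource.cpp) ----------
// The SPH_UNPACK_MYSQL_COMPRESS branch above decodes MySQL's COMPRESS()
// format: a 4-byte little-endian uncompressed-length header (with the top bits
// masked off, as in the code above) followed by a plain zlib stream, inflated
// in one shot with uncompress(). A standalone sketch of that decode; error
// handling is reduced to a bool.
#include <zlib.h>
#include <vector>

static bool UnpackMysqlCompress ( const unsigned char * pData, int iPackedLen,
	std::vector<unsigned char> & dOut )
{
	if ( iPackedLen<=4 )
		return false;                         // the header alone is 4 bytes
	uLong uSize = 0;
	for ( int i=0; i<4; i++ )
		uSize += ( (uLong) pData[i] ) << ( 8*i );   // little-endian length
	uSize &= 0x3FFFFFFF;
	dOut.resize ( uSize+1 );
	uLongf uLen = uSize;
	if ( uncompress ( &dOut[0], &uLen, pData+4, iPackedLen-4 )!=Z_OK )
		return false;
	dOut[uLen] = 0;                           // NUL-terminate for text callers
	return true;
}
// -----------------------------------------------------------------------------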
); if ( iResult==Z_STREAM_END ) { tBuffer [ tStream.total_out ] = 0; sResult = &tBuffer[0]; break; } else if ( iResult==Z_OK ) { assert ( tStream.avail_out==0 ); tBuffer.Resize ( tBuffer.GetLength()*2 ); iBufferOffset = tStream.total_out; } else { ReportUnpackError ( iIndex, iResult ); break; } } inflateEnd ( &tStream ); return sResult; } case SPH_UNPACK_NONE: return pData; } return NULL; } #endif // USE_ZLIB ISphHits * CSphSource_SQL::IterateJoinedHits ( CSphString & sError ) { m_tHits.m_dData.Resize ( 0 ); // eof check if ( m_iJoinedHitField>=m_tSchema.m_dFields.GetLength() ) { m_tDocInfo.m_iDocID = 0; return &m_tHits; } bool bProcessingRanged = true; // my fetch loop while ( m_iJoinedHitFieldm_tDocInfo.m_iDocID ) { sError.SetSprintf ( "joined field '%s': query MUST return document IDs in ASC order", m_tSchema.m_dFields[m_iJoinedHitField].m_sName.cstr() ); return NULL; } // next document? update tracker, reset position if ( m_iJoinedHitIDm_iWordPos ); if ( m_tState.m_bProcessingHits ) break; } else if ( SqlIsError() ) { // error while fetching row sError = SqlError(); return NULL; } else { int iLastField = m_iJoinedHitField; bool bRanged = ( m_iJoinedHitField>=m_tSchema.m_iBaseFields && m_iJoinedHitField=m_tSchema.m_dFields.GetLength() ) { m_tDocInfo.m_iDocID = ( m_tHits.Length() ? 1 : 0 ); // to eof or not to eof return &m_tHits; } SqlDismissResult (); bProcessingRanged = false; bool bCheckNumFields = true; CSphColumnInfo & tJoined = m_tSchema.m_dFields[m_iJoinedHitField]; // start fetching next field if ( tJoined.m_eSrc!=SPH_ATTRSRC_RANGEDQUERY ) { if ( !SqlQuery ( tJoined.m_sQuery.cstr() ) ) { sError = SqlError(); return NULL; } } else { m_tParams.m_iRangeStep = m_tParams.m_iRefRangeStep; // setup ranges for next field if ( iLastField!=m_iJoinedHitField ) { CSphString sPrefix; sPrefix.SetSprintf ( "joined field '%s' ranged query: ", tJoined.m_sName.cstr() ); if ( !SetupRanges ( tJoined.m_sQueryRange.cstr(), tJoined.m_sQuery.cstr(), sPrefix.cstr(), sError ) ) return NULL; m_uCurrentID = m_uMinID; } // run first step (in order to report errors) bool bRes = RunQueryStep ( tJoined.m_sQuery.cstr(), sError ); bProcessingRanged = bRes; // select next documents in range or loop once to process next field bCheckNumFields = bRes; if ( !sError.IsEmpty() ) return NULL; } const int iExpected = m_tSchema.m_dFields[m_iJoinedHitField].m_bPayload ? 3 : 2; if ( bCheckNumFields && SqlNumFields()!=iExpected ) { const char * sName = m_tSchema.m_dFields[m_iJoinedHitField].m_sName.cstr(); sError.SetSprintf ( "joined field '%s': query MUST return exactly %d columns, got %d", sName, iExpected, SqlNumFields() ); return NULL; } m_iJoinedHitID = 0; m_iJoinedHitPos = 0; } } return &m_tHits; } ///////////////////////////////////////////////////////////////////////////// // MYSQL SOURCE ///////////////////////////////////////////////////////////////////////////// #if USE_MYSQL CSphSourceParams_MySQL::CSphSourceParams_MySQL () : m_iFlags ( 0 ) { m_iPort = 3306; } CSphSource_MySQL::CSphSource_MySQL ( const char * sName ) : CSphSource_SQL ( sName ) , m_pMysqlResult ( NULL ) , m_pMysqlFields ( NULL ) , m_tMysqlRow ( NULL ) , m_pMysqlLengths ( NULL ) { m_bCanUnpack = true; } void CSphSource_MySQL::SqlDismissResult () { if ( !m_pMysqlResult ) return; while ( m_pMysqlResult ) { mysql_free_result ( m_pMysqlResult ); m_pMysqlResult = NULL; // stored procedures might return multiple result sets // FIXME? 
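// --- illustrative aside (not part of the original sphinxsource.cpp) ----------
// The SPH_UNPACK_ZLIB branch above inflates a raw zlib stream incrementally,
// doubling the output buffer whenever inflate() runs out of room. A compact
// standalone sketch of the same grow-and-retry loop:
#include <zlib.h>
#include <vector>
#include <cstring>

static bool InflateZlib ( const unsigned char * pPacked, int iPackedLen,
	std::vector<unsigned char> & dOut )
{
	z_stream tStream;
	memset ( &tStream, 0, sizeof(tStream) );  // zalloc/zfree/opaque all NULL
	tStream.next_in = (Bytef *) pPacked;
	tStream.avail_in = iPackedLen;
	if ( inflateInit ( &tStream )!=Z_OK )
		return false;

	dOut.resize ( 4096 );
	bool bOk = false;
	for ( ;; )
	{
		tStream.next_out = &dOut[tStream.total_out];
		tStream.avail_out = (uInt)( dOut.size() - tStream.total_out );
		int iRes = inflate ( &tStream, Z_NO_FLUSH );
		if ( iRes==Z_STREAM_END )
		{
			dOut.resize ( tStream.total_out ); // trim to the actual size
			bOk = true;
			break;
		}
		if ( iRes!=Z_OK )
			break;                             // corrupt or truncated stream
		if ( tStream.avail_out==0 )
			dOut.resize ( dOut.size()*2 );     // ran out of room; grow and retry
	}
	inflateEnd ( &tStream );
	return bOk;
}
// -----------------------------------------------------------------------------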
we might want to index all of them // but for now, let's simply dismiss additional result sets if ( mysql_next_result ( &m_tMysqlDriver )==0 ) { m_pMysqlResult = mysql_use_result ( &m_tMysqlDriver ); static bool bOnce = false; if ( !bOnce && m_pMysqlResult && mysql_num_rows ( m_pMysqlResult ) ) { sphWarn ( "indexing of multiple result sets is not supported yet; some results sets were dismissed!" ); bOnce = true; } } } m_pMysqlFields = NULL; m_pMysqlLengths = NULL; } bool CSphSource_MySQL::SqlQuery ( const char * sQuery ) { if ( mysql_query ( &m_tMysqlDriver, sQuery ) ) { if ( m_tParams.m_bPrintQueries ) fprintf ( stdout, "SQL-QUERY: %s: FAIL\n", sQuery ); return false; } if ( m_tParams.m_bPrintQueries ) fprintf ( stdout, "SQL-QUERY: %s: ok\n", sQuery ); m_pMysqlResult = mysql_use_result ( &m_tMysqlDriver ); m_pMysqlFields = NULL; return true; } bool CSphSource_MySQL::SqlIsError () { return mysql_errno ( &m_tMysqlDriver )!=0; } const char * CSphSource_MySQL::SqlError () { return mysql_error ( &m_tMysqlDriver ); } bool CSphSource_MySQL::SqlConnect () { mysql_init ( &m_tMysqlDriver ); if ( !m_sSslKey.IsEmpty() || !m_sSslCert.IsEmpty() || !m_sSslCA.IsEmpty() ) mysql_ssl_set ( &m_tMysqlDriver, m_sSslKey.cstr(), m_sSslCert.cstr(), m_sSslCA.cstr(), NULL, NULL ); m_iMysqlConnectFlags |= CLIENT_MULTI_RESULTS; // we now know how to handle this bool bRes = ( NULL!=mysql_real_connect ( &m_tMysqlDriver, m_tParams.m_sHost.cstr(), m_tParams.m_sUser.cstr(), m_tParams.m_sPass.cstr(), m_tParams.m_sDB.cstr(), m_tParams.m_iPort, m_sMysqlUsock.cstr(), m_iMysqlConnectFlags ) ); if ( m_tParams.m_bPrintQueries ) fprintf ( stdout, bRes ? "SQL-CONNECT: ok\n" : "SQL-CONNECT: FAIL\n" ); return bRes; } void CSphSource_MySQL::SqlDisconnect () { if ( m_tParams.m_bPrintQueries ) fprintf ( stdout, "SQL-DISCONNECT\n" ); mysql_close ( &m_tMysqlDriver ); } int CSphSource_MySQL::SqlNumFields () { if ( !m_pMysqlResult ) return -1; return mysql_num_fields ( m_pMysqlResult ); } bool CSphSource_MySQL::SqlFetchRow () { if ( !m_pMysqlResult ) return false; m_tMysqlRow = mysql_fetch_row ( m_pMysqlResult ); return m_tMysqlRow!=NULL; } const char * CSphSource_MySQL::SqlColumn ( int iIndex ) { if ( !m_pMysqlResult ) return NULL; return m_tMysqlRow[iIndex]; } const char * CSphSource_MySQL::SqlFieldName ( int iIndex ) { if ( !m_pMysqlResult ) return NULL; if ( !m_pMysqlFields ) m_pMysqlFields = mysql_fetch_fields ( m_pMysqlResult ); return m_pMysqlFields[iIndex].name; } DWORD CSphSource_MySQL::SqlColumnLength ( int iIndex ) { if ( !m_pMysqlResult ) return 0; if ( !m_pMysqlLengths ) m_pMysqlLengths = mysql_fetch_lengths ( m_pMysqlResult ); return m_pMysqlLengths[iIndex]; } bool CSphSource_MySQL::Setup ( const CSphSourceParams_MySQL & tParams ) { if ( !CSphSource_SQL::Setup ( tParams ) ) return false; m_sMysqlUsock = tParams.m_sUsock; m_iMysqlConnectFlags = tParams.m_iFlags; m_sSslKey = tParams.m_sSslKey; m_sSslCert = tParams.m_sSslCert; m_sSslCA = tParams.m_sSslCA; // build and store DSN for error reporting char sBuf [ 1024 ]; snprintf ( sBuf, sizeof(sBuf), "mysql%s", m_sSqlDSN.cstr()+3 ); m_sSqlDSN = sBuf; return true; } #endif // USE_MYSQL ///////////////////////////////////////////////////////////////////////////// // PGSQL SOURCE ///////////////////////////////////////////////////////////////////////////// #if USE_PGSQL CSphSourceParams_PgSQL::CSphSourceParams_PgSQL () { m_iRangeStep = 1024; m_iPort = 5432; } CSphSource_PgSQL::CSphSource_PgSQL ( const char * sName ) : CSphSource_SQL ( sName ) , m_pPgResult ( NULL ) , m_iPgRows ( 0 ) , 
m_iPgRow ( 0 ) { } bool CSphSource_PgSQL::SqlIsError () { return ( m_iPgRow iBufLen ); m_pTag = NULL; m_iTagLength = 0; m_pPipe = NULL; m_pBuffer = NULL; m_pBufferEnd = NULL; m_sBuffer = new BYTE [m_iBufferSize]; if ( iBufLen ) memcpy ( m_sBuffer, dInitialBuf, iBufLen ); } CSphSource_XMLPipe::~CSphSource_XMLPipe () { Disconnect (); SafeDeleteArray ( m_sBuffer ); } void CSphSource_XMLPipe::Disconnect () { m_iInitialBufLen = 0; m_tHits.m_dData.Reset(); m_tSchema.Reset (); if ( m_pPipe ) { pclose ( m_pPipe ); m_pPipe = NULL; } } bool CSphSource_XMLPipe::Setup ( FILE * pPipe, const char * sCommand ) { assert ( sCommand ); m_pPipe = pPipe; m_sCommand = sCommand; return true; } bool CSphSource_XMLPipe::Connect ( CSphString & ) { m_bEOF = false; m_bWarned = false; m_tSchema.m_dFields.Reset (); m_tSchema.m_dFields.Add ( CSphColumnInfo ( "title" ) ); m_tSchema.m_dFields.Add ( CSphColumnInfo ( "body" ) ); CSphColumnInfo tGid ( "gid", SPH_ATTR_INTEGER ); CSphColumnInfo tTs ( "ts", SPH_ATTR_TIMESTAMP ); m_tSchema.AddAttr ( tGid, true ); // all attributes are dynamic at indexing time m_tSchema.AddAttr ( tTs, true ); // all attributes are dynamic at indexing time m_tDocInfo.Reset ( m_tSchema.GetRowSize() ); m_pBuffer = m_iInitialBufLen > 0 ? m_sBuffer : NULL; m_pBufferEnd = m_pBuffer ? m_pBuffer + m_iInitialBufLen : NULL; char sBuf [ 1024 ]; snprintf ( sBuf, sizeof(sBuf), "xmlpipe(%s)", m_sCommand.cstr() ); m_tSchema.m_sName = sBuf; m_tHits.m_dData.Reserve ( MAX_SOURCE_HITS ); return true; } bool CSphSource_XMLPipe::IterateDocument ( CSphString & sError ) { PROFILE ( src_xmlpipe ); char sTitle [ 1024 ]; // FIXME? assert ( m_pPipe ); assert ( m_pTokenizer ); m_tHits.m_dData.Resize ( 0 ); m_bHitsReady = false; ///////////////////////// // parse document header ///////////////////////// // check for eof if ( !SkipWhitespace() ) { m_tDocInfo.m_iDocID = 0; return true; } // look for opening '' tag SetTag ( "document" ); if ( !SkipTag ( true, sError ) ) return false; if ( !ScanInt ( "id", &m_tDocInfo.m_iDocID, sError ) ) return false; m_tStats.m_iTotalDocuments++; SphAttr_t uVal; if ( !ScanInt ( "group", &uVal, sError ) ) uVal = 1; m_tDocInfo.SetAttr ( m_tSchema.GetAttr(0).m_tLocator, uVal ); if ( !ScanInt ( "timestamp", &uVal, sError ) ) uVal = 1; m_tDocInfo.SetAttr ( m_tSchema.GetAttr(1).m_tLocator, uVal ); if ( !ScanStr ( "title", sTitle, sizeof(sTitle), sError ) ) return false; // index title { int iLen = (int)strlen ( sTitle ); Hitpos_t iPos = HITMAN::Create ( 0, 1 ); BYTE * sWord; m_pTokenizer->SetBuffer ( (BYTE*)sTitle, iLen ); while ( ( sWord = m_pTokenizer->GetToken() )!=NULL && m_tHits.Length()GetWordID ( sWord ), iPos ); HITMAN::AddPos ( &iPos, 1 ); } } CheckHitsCount ( "title" ); SetTag ( "body" ); if ( !SkipTag ( true, sError ) ) return false; m_iWordPos = 0; ///////////////////////////// // parse body chunk by chunk ///////////////////////////// // check for body tag end in this buffer const char * szBodyEnd = ""; bool bFirstPass = true; bool bBodyEnd = false; BYTE * p = m_pBuffer; while ( !bBodyEnd ) { p = m_pBuffer; while ( pSetBuffer ( m_pBuffer, p-m_pBuffer ); // tokenize BYTE * sWord; while ( ( sWord = m_pTokenizer->GetToken () )!=NULL && m_tHits.Length()GetWordID ( sWord ), HITMAN::Create ( 1, ++m_iWordPos ) ); CheckHitsCount ( "body" ); m_pBuffer = p; SetTag ( "body" ); // some tag was found if ( bBodyEnd ) { // let's check if it's '' which is the only allowed tag at this point if ( !SkipTag ( false, sError ) ) return false; } else { // search for '' tag bool bFound = false; while ( 
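// --- illustrative aside (not part of the original sphinxsource.cpp) ----------
// The legacy xmlpipe (v1) parser above expects one fixed document layout: it
// scans for a <document> element, then <id>, <group>, <timestamp> and <title>,
// and then tokenizes everything up to the closing body tag. A minimal stream
// consistent with the tags this parser looks for (values are samples only):
static const char * g_sXmlpipeV1Sample =
	"<document>\n"
	"<id>123</id>\n"
	"<group>45</group>\n"
	"<timestamp>1132223498</timestamp>\n"
	"<title>test title</title>\n"
	"<body>\n"
	"this is my document body\n"
	"</body>\n"
	"</document>\n";
// -----------------------------------------------------------------------------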
!bFound ) { while ( m_pBuffer < m_pBufferEnd && *m_pBuffer!='<' ) ++m_pBuffer; BYTE * pBufferTmp = m_pBuffer; if ( m_pBuffer < m_pBufferEnd ) { if ( !SkipTag ( false, sError ) ) { if ( m_bEOF ) return false; else { if ( m_pBuffer==pBufferTmp ) m_pBuffer = pBufferTmp + 1; } } else bFound = true; } else if ( !UpdateBuffer () ) return false; } } // let's check if it's '' which is the only allowed tag at this point SetTag ( "document" ); if ( !SkipTag ( false, sError ) ) return false; // if it was all correct, we have to flush our hits m_bHitsReady = m_tHits.Length()>0; return true; } ISphHits * CSphSource_XMLPipe::IterateHits ( CSphString & ) { if ( !m_bHitsReady ) return NULL; m_bHitsReady = false; return &m_tHits; } SphRange_t CSphSource_XMLPipe::IterateFieldMVAStart ( int ) { SphRange_t tRange; tRange.m_iStart = tRange.m_iLength = 0; return tRange; } void CSphSource_XMLPipe::SetTag ( const char * sTag ) { m_pTag = sTag; m_iTagLength = (int)strlen ( sTag ); } bool CSphSource_XMLPipe::UpdateBuffer () { assert ( m_pBuffer!=m_sBuffer ); int iLeft = Max ( m_pBufferEnd-m_pBuffer, 0 ); if ( iLeft>0 ) memmove ( m_sBuffer, m_pBuffer, iLeft ); size_t iLen = fread ( &m_sBuffer [ iLeft ], 1, m_iBufferSize-iLeft, m_pPipe ); m_tStats.m_iTotalBytes += iLen; m_pBuffer = m_sBuffer; m_pBufferEnd = m_pBuffer+iLeft+iLen; return ( iLen!=0 ); } bool CSphSource_XMLPipe::SkipWhitespace () { for ( ;; ) { // suck in some data if needed if ( m_pBuffer>=m_pBufferEnd ) if ( !UpdateBuffer() ) return false; // skip whitespace while ( (m_pBuffer', got EOF", bOpen ? "" : "/", m_pTag ); return false; } // check tag bool bOk = bOpen ? ( ( m_pBuffer[0]=='<' ) && ( m_pBuffer[m_iTagLength+1]=='>' ) && strncmp ( (char*)(m_pBuffer+1), m_pTag, m_iTagLength )==0 ) : ( ( m_pBuffer[0]=='<' ) && ( m_pBuffer[1]=='/' ) && ( m_pBuffer[m_iTagLength+2]=='>' ) && strncmp ( (char*)(m_pBuffer+2), m_pTag, m_iTagLength )==0 ); if ( !bOk ) { char sGot[64]; int iCopy = Min ( m_pBufferEnd-m_pBuffer, (int)sizeof(sGot)-1 ); strncpy ( sGot, (char*)m_pBuffer, iCopy ); sGot [ iCopy ] = '\0'; sError.SetSprintf ( "xmlpipe: expected '<%s%s>', got '%s'", bOpen ? "" : "/", m_pTag, sGot ); return false; } // got tag m_pBuffer += iAdd+m_iTagLength; assert ( m_pBuffer<=m_pBufferEnd ); return true; } bool CSphSource_XMLPipe::SkipTag ( bool bOpen, CSphString & sError ) { if ( !SkipWhitespace() ) { m_bEOF = true; sError.SetSprintf ( "xmlpipe: expected '<%s%s>', got EOF", bOpen ? 
"" : "/", m_pTag ); return false; } return CheckTag ( bOpen, sError ); } bool CSphSource_XMLPipe::ScanInt ( const char * sTag, DWORD * pRes, CSphString & sError ) { uint64_t uRes; if ( !ScanInt ( sTag, &uRes, sError ) ) return false; (*pRes) = (DWORD)uRes; return true; } bool CSphSource_XMLPipe::ScanInt ( const char * sTag, uint64_t * pRes, CSphString & sError ) { assert ( sTag ); assert ( pRes ); // scan for SetTag ( sTag ); if ( !SkipTag ( true, sError ) ) return false; if ( !SkipWhitespace() ) { sError.SetSprintf ( "xmlpipe: expected <%s> data, got EOF", m_pTag ); return false; } *pRes = 0; while ( m_pBuffer if ( !SkipTag ( false, sError ) ) return false; return true; } bool CSphSource_XMLPipe::ScanStr ( const char * sTag, char * pRes, int iMaxLength, CSphString & sError ) { assert ( sTag ); assert ( pRes ); char * pEnd = pRes+iMaxLength-1; // scan for SetTag ( sTag ); if ( !SkipTag ( true, sError ) ) return false; if ( !SkipWhitespace() ) { sError.SetSprintf ( "xmlpipe: expected <%s> data, got EOF", m_pTag ); return false; } while ( m_pBuffer if ( !SkipTag ( false, sError ) ) return false; return true; } void CSphSource_XMLPipe::CheckHitsCount ( const char * sField ) { if ( m_tHits.Length()>=MAX_SOURCE_HITS && m_pTokenizer->GetTokenEnd()!=m_pTokenizer->GetBufferEnd() ) sphWarn ( "xmlpipe: collected hits larger than %d(MAX_SOURCE_HITS) while scanning docid=" DOCID_FMT " %s - clipped!!!", MAX_SOURCE_HITS, m_tDocInfo.m_iDocID, sField ); } ///////////////////////////////////////////////////////////////////////////// // XMLPIPE (v2) ///////////////////////////////////////////////////////////////////////////// #if USE_LIBEXPAT || USE_LIBXML /// XML pipe source implementation (v2) class CSphSource_XMLPipe2 : public CSphSource_Document { public: CSphSource_XMLPipe2 ( BYTE * dInitialBuf, int iBufLen, const char * sName, int iFieldBufferMax, bool bFixupUTF8 ); ~CSphSource_XMLPipe2 (); bool Setup ( FILE * pPipe, const CSphConfigSection & hSource ); ///< memorize the command virtual bool Connect ( CSphString & sError ); ///< run the command and open the pipe virtual void Disconnect (); ///< close the pipe virtual bool IterateStart ( CSphString & ) { return true; } ///< Connect() starts getting documents automatically, so this one is empty virtual BYTE ** NextDocument ( CSphString & sError ); ///< parse incoming chunk and emit some hits virtual bool HasAttrsConfigured () { return true; } ///< xmlpipe always has some attrs for now virtual bool IterateMultivaluedStart ( int, CSphString & ) { return false; } virtual bool IterateMultivaluedNext () { return false; } virtual bool IterateKillListStart ( CSphString & ); virtual bool IterateKillListNext ( SphDocID_t & tDocId ); void StartElement ( const char * szName, const char ** pAttrs ); void EndElement ( const char * pName ); void Characters ( const char * pCharacters, int iLen ); #if USE_LIBXML int ReadBuffer ( BYTE * pBuffer, int iLen ); void ProcessNode ( xmlTextReaderPtr pReader ); #endif void Error ( const char * sTemplate, ... 
) __attribute__ ( ( format ( printf, 2, 3 ) ) ); private: struct Document_t { SphDocID_t m_iDocID; CSphVector m_dFields; CSphVector m_dAttrs; }; Document_t * m_pCurDocument; CSphVector m_dParsedDocuments; FILE * m_pPipe; ///< incoming stream CSphString m_sCommand; ///< my command CSphString m_sError; CSphVector m_dDefaultAttrs; CSphVector m_dInvalid; CSphVector m_dWarned; int m_iElementDepth; BYTE * m_pBuffer; int m_iBufferSize; CSphVectorm_dFieldPtrs; bool m_bRemoveParsed; bool m_bInDocset; bool m_bInSchema; bool m_bInDocument; bool m_bInKillList; bool m_bInId; bool m_bInIgnoredTag; bool m_bFirstTagAfterDocset; int m_iKillListIterator; CSphVector < SphDocID_t > m_dKillList; int m_iMVA; int m_iMVAIterator; CSphVector < CSphVector > m_dFieldMVAs; CSphVector < int > m_dAttrToMVA; int m_iCurField; int m_iCurAttr; #if USE_LIBEXPAT XML_Parser m_pParser; #endif #if USE_LIBXML xmlTextReaderPtr m_pParser; BYTE * m_pBufferPtr; BYTE * m_pBufferEnd; bool m_bPassedBufferEnd; CSphVector m_dAttrs; #endif int m_iInitialBufSize; int m_iFieldBufferMax; BYTE * m_pFieldBuffer; int m_iFieldBufferLen; bool m_bFixupUTF8; ///< whether to replace invalid utf-8 codepoints with spaces int m_iReparseStart; ///< utf-8 fixerupper might need to postpone a few bytes, starting at this offset int m_iReparseLen; ///< and this much bytes (under 4) const char * DecorateMessage ( const char * sTemplate, ... ) __attribute__ ( ( format ( printf, 2, 3 ) ) ); const char * DecorateMessageVA ( const char * sTemplate, va_list ap ); void ConfigureAttrs ( const CSphVariant * pHead, ESphAttr eAttrType ); void ConfigureFields ( const CSphVariant * pHead ); void AddFieldToSchema ( const char * szName ); void UnexpectedCharaters ( const char * pCharacters, int iLen, const char * szComment ); #if USE_LIBEXPAT bool ParseNextChunk ( int iBufferLen, CSphString & sError ); #endif #if USE_LIBXML int ParseNextChunk ( CSphString & sError ); #endif void DocumentError ( const char * sWhere ) { Error ( "malformed source, found inside %s", sWhere ); // Ideally I'd like to display a notice on the next line that // would say where exactly it's allowed. 
E.g.: // // must be contained in } }; #if USE_LIBEXPAT // callbacks static void XMLCALL xmlStartElement ( void * user_data, const XML_Char * name, const XML_Char ** attrs ) { CSphSource_XMLPipe2 * pSource = (CSphSource_XMLPipe2 *) user_data; pSource->StartElement ( name, attrs ); } static void XMLCALL xmlEndElement ( void * user_data, const XML_Char * name ) { CSphSource_XMLPipe2 * pSource = (CSphSource_XMLPipe2 *) user_data; pSource->EndElement ( name ); } static void XMLCALL xmlCharacters ( void * user_data, const XML_Char * ch, int len ) { CSphSource_XMLPipe2 * pSource = (CSphSource_XMLPipe2 *) user_data; pSource->Characters ( ch, len ); } #if USE_LIBICONV static int XMLCALL xmlUnknownEncoding ( void *, const XML_Char * name, XML_Encoding * info ) { iconv_t pDesc = iconv_open ( "UTF-16", name ); if ( !pDesc ) return XML_STATUS_ERROR; for ( size_t i = 0; i < 256; i++ ) { char cIn = (char) i; char dOut[4]; memset ( dOut, 0, sizeof ( dOut ) ); #if ICONV_INBUF_CONST const char * pInbuf = &cIn; #else char * pInbuf = &cIn; #endif char * pOutbuf = dOut; size_t iInBytesLeft = 1; size_t iOutBytesLeft = 4; if ( iconv ( pDesc, &pInbuf, &iInBytesLeft, &pOutbuf, &iOutBytesLeft )!=size_t(-1) ) info->map[i] = int ( BYTE ( dOut[0] ) ) << 8 | int ( BYTE ( dOut[1] ) ); else info->map[i] = 0; } iconv_close ( pDesc ); return XML_STATUS_OK; } #endif #endif #if USE_LIBXML int xmlReadBuffers ( void * context, char * buffer, int len ) { CSphSource_XMLPipe2 * pSource = (CSphSource_XMLPipe2 *) context; return pSource->ReadBuffer ( (BYTE*)buffer, len ); } void xmlErrorHandler ( void * arg, const char * msg, xmlParserSeverities severity, xmlTextReaderLocatorPtr locator ) { if ( severity==XML_PARSER_SEVERITY_ERROR ) { int iLine = xmlTextReaderLocatorLineNumber ( locator ); CSphSource_XMLPipe2 * pSource = (CSphSource_XMLPipe2 *) arg; pSource->Error ( "%s (line=%d)", msg, iLine ); } } #endif CSphSource_XMLPipe2::CSphSource_XMLPipe2 ( BYTE * dInitialBuf, int iBufLen, const char * sName, int iFieldBufferMax, bool bFixupUTF8 ) : CSphSource_Document ( sName ) , m_pCurDocument ( NULL ) , m_pPipe ( NULL ) , m_iElementDepth ( 0 ) , m_iBufferSize ( 1048576 ) , m_bRemoveParsed ( false ) , m_bInDocset ( false ) , m_bInSchema ( false ) , m_bInDocument ( false ) , m_bInKillList ( false ) , m_bInId ( false ) , m_bInIgnoredTag ( false ) , m_bFirstTagAfterDocset ( false ) , m_iKillListIterator ( 0 ) , m_iMVA ( 0 ) , m_iMVAIterator ( 0 ) , m_iCurField ( -1 ) , m_iCurAttr ( -1 ) , m_pParser ( NULL ) #if USE_LIBXML , m_pBufferPtr ( NULL ) , m_pBufferEnd ( NULL ) , m_bPassedBufferEnd ( false ) #endif , m_iInitialBufSize ( iBufLen ) , m_iFieldBufferLen ( 0 ) , m_bFixupUTF8 ( bFixupUTF8 ) , m_iReparseStart ( 0 ) , m_iReparseLen ( 0 ) { assert ( m_iBufferSize > iBufLen ); m_pBuffer = new BYTE [m_iBufferSize]; m_iFieldBufferMax = Max ( iFieldBufferMax, 65536 ); m_pFieldBuffer = new BYTE [ m_iFieldBufferMax ]; if ( iBufLen ) memcpy ( m_pBuffer, dInitialBuf, iBufLen ); m_iInitialBufSize = iBufLen; } CSphSource_XMLPipe2::~CSphSource_XMLPipe2 () { Disconnect (); SafeDeleteArray ( m_pBuffer ); SafeDeleteArray ( m_pFieldBuffer ); ARRAY_FOREACH ( i, m_dParsedDocuments ) SafeDelete ( m_dParsedDocuments[i] ); } void CSphSource_XMLPipe2::Disconnect () { if ( m_pPipe ) { pclose ( m_pPipe ); m_pPipe = NULL; } #if USE_LIBEXPAT if ( m_pParser ) { XML_ParserFree ( m_pParser ); m_pParser = NULL; } #endif #if USE_LIBXML if ( m_pParser ) { xmlFreeTextReader ( m_pParser ); m_pParser = NULL; } #endif m_tHits.m_dData.Reset(); m_iInitialBufSize = 0; } void 
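// --- illustrative aside (not part of the original sphinxsource.cpp) ----------
// The XMLCALL trampolines above forward expat's plain-C callbacks back into
// the C++ source object through the user-data pointer. A minimal standalone
// expat loop using the same pattern; Handler is a hypothetical stand-in for
// CSphSource_XMLPipe2.
#include <expat.h>
#include <cstdio>

struct Handler { int m_iElements = 0; };

static void XMLCALL OnStart ( void * pUser, const XML_Char *, const XML_Char ** )
{
	( (Handler*)pUser )->m_iElements++;       // recover the C++ object
}

static bool ParseStream ( FILE * fp, Handler & tHandler )
{
	XML_Parser pParser = XML_ParserCreate ( NULL );
	XML_SetUserData ( pParser, &tHandler );
	XML_SetStartElementHandler ( pParser, OnStart );

	char sBuf[8192];
	bool bOk = true;
	for ( size_t iLen; ( iLen = fread ( sBuf, 1, sizeof(sBuf), fp ) )>0; )
		if ( XML_Parse ( pParser, sBuf, (int)iLen, 0 )==XML_STATUS_ERROR )
		{
			bOk = false;                       // XML_ErrorString() has the details
			break;
		}
	if ( bOk )
		bOk = ( XML_Parse ( pParser, "", 0, 1 )!=XML_STATUS_ERROR ); // final chunk
	XML_ParserFree ( pParser );
	return bOk;
}
// -----------------------------------------------------------------------------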
CSphSource_XMLPipe2::Error ( const char * sTemplate, ... ) { if ( !m_sError.IsEmpty() ) return; va_list ap; va_start ( ap, sTemplate ); m_sError = DecorateMessageVA ( sTemplate, ap ); va_end ( ap ); } const char * CSphSource_XMLPipe2::DecorateMessage ( const char * sTemplate, ... ) { va_list ap; va_start ( ap, sTemplate ); const char * sRes = DecorateMessageVA ( sTemplate, ap ); va_end ( ap ); return sRes; } const char * CSphSource_XMLPipe2::DecorateMessageVA ( const char * sTemplate, va_list ap ) { static char sBuf[1024]; snprintf ( sBuf, sizeof(sBuf), "source '%s': ", m_tSchema.m_sName.cstr() ); int iBufLen = strlen ( sBuf ); int iLeft = sizeof(sBuf) - iBufLen; char * szBufStart = sBuf + iBufLen; vsnprintf ( szBufStart, iLeft, sTemplate, ap ); iBufLen = strlen ( sBuf ); iLeft = sizeof(sBuf) - iBufLen; szBufStart = sBuf + iBufLen; #if USE_LIBEXPAT if ( m_pParser ) { SphDocID_t uFailedID = 0; if ( m_dParsedDocuments.GetLength() ) uFailedID = m_dParsedDocuments.Last()->m_iDocID; snprintf ( szBufStart, iLeft, " (line=%d, pos=%d, docid=" DOCID_FMT ")", (int)XML_GetCurrentLineNumber ( m_pParser ), (int)XML_GetCurrentColumnNumber ( m_pParser ), uFailedID ); } #endif #if USE_LIBXML if ( m_pParser ) { SphDocID_t uFailedID = 0; if ( m_dParsedDocuments.GetLength() ) uFailedID = m_dParsedDocuments.Last()->m_iDocID; snprintf ( szBufStart, iLeft, " (docid=" DOCID_FMT ")", uFailedID ); } #endif return sBuf; } void CSphSource_XMLPipe2::AddFieldToSchema ( const char * szName ) { CSphColumnInfo tCol ( szName ); tCol.m_eWordpart = GetWordpart ( tCol.m_sName.cstr(), m_pDict && m_pDict->GetSettings().m_bWordDict ); m_tSchema.m_dFields.Add ( tCol ); } void CSphSource_XMLPipe2::ConfigureAttrs ( const CSphVariant * pHead, ESphAttr eAttrType ) { for ( const CSphVariant * pCur = pHead; pCur; pCur= pCur->m_pNext ) { CSphColumnInfo tCol ( pCur->cstr(), eAttrType ); char * pColon = strchr ( const_cast ( tCol.m_sName.cstr() ), ':' ); if ( pColon ) { *pColon = '\0'; if ( eAttrType==SPH_ATTR_INTEGER ) { int iBits = strtol ( pColon+1, NULL, 10 ); if ( iBits<=0 || iBits>ROWITEM_BITS ) { sphWarn ( "%s", DecorateMessage ( "attribute '%s': invalid bitcount=%d (bitcount ignored)", tCol.m_sName.cstr(), iBits ) ); iBits = -1; } tCol.m_tLocator.m_iBitCount = iBits; } else sphWarn ( "%s", DecorateMessage ( "attribute '%s': bitcount is only supported for integer types", tCol.m_sName.cstr() ) ); } tCol.m_iIndex = m_tSchema.GetAttrsCount (); if ( eAttrType==SPH_ATTR_UINT32SET || eAttrType==SPH_ATTR_UINT64SET ) { tCol.m_eAttrType = eAttrType; tCol.m_eSrc = SPH_ATTRSRC_FIELD; } m_tSchema.AddAttr ( tCol, true ); // all attributes are dynamic at indexing time } } void CSphSource_XMLPipe2::ConfigureFields ( const CSphVariant * pHead ) { for ( const CSphVariant * pCur = pHead; pCur; pCur= pCur->m_pNext ) { CSphString sFieldName = pCur->cstr (); bool bFound = false; for ( int i = 0; i < m_tSchema.m_dFields.GetLength () && !bFound; i++ ) bFound = m_tSchema.m_dFields[i].m_sName==sFieldName; if ( bFound ) sphWarn ( "%s", DecorateMessage ( "duplicate field '%s'", sFieldName.cstr () ) ); else AddFieldToSchema ( sFieldName.cstr () ); } } bool CSphSource_XMLPipe2::Setup ( FILE * pPipe, const CSphConfigSection & hSource ) { m_pPipe = pPipe; m_tSchema.Reset (); m_sCommand = hSource["xmlpipe_command"].cstr (); ConfigureAttrs ( hSource("xmlpipe_attr_uint"), SPH_ATTR_INTEGER ); ConfigureAttrs ( hSource("xmlpipe_attr_timestamp"), SPH_ATTR_TIMESTAMP ); ConfigureAttrs ( hSource("xmlpipe_attr_str2ordinal"), SPH_ATTR_ORDINAL ); ConfigureAttrs ( 
hSource("xmlpipe_attr_bool"), SPH_ATTR_BOOL ); ConfigureAttrs ( hSource("xmlpipe_attr_float"), SPH_ATTR_FLOAT ); ConfigureAttrs ( hSource("xmlpipe_attr_bigint"), SPH_ATTR_BIGINT ); ConfigureAttrs ( hSource("xmlpipe_attr_multi"), SPH_ATTR_UINT32SET ); ConfigureAttrs ( hSource("xmlpipe_attr_multi_64"), SPH_ATTR_UINT64SET ); ConfigureAttrs ( hSource("xmlpipe_attr_string"), SPH_ATTR_STRING ); ConfigureAttrs ( hSource("xmlpipe_attr_wordcount"), SPH_ATTR_WORDCOUNT ); ConfigureAttrs ( hSource("xmlpipe_field_string"), SPH_ATTR_STRING ); ConfigureAttrs ( hSource("xmlpipe_field_wordcount"), SPH_ATTR_WORDCOUNT ); m_tDocInfo.Reset ( m_tSchema.GetRowSize () ); ConfigureFields ( hSource("xmlpipe_field") ); ConfigureFields ( hSource("xmlpipe_field_string") ); ConfigureFields ( hSource("xmlpipe_field_wordcount") ); m_dStrAttrs.Resize ( m_tSchema.GetAttrsCount() ); return true; } bool CSphSource_XMLPipe2::Connect ( CSphString & sError ) { ARRAY_FOREACH ( i, m_tSchema.m_dFields ) { CSphColumnInfo & tCol = m_tSchema.m_dFields[i]; tCol.m_eWordpart = GetWordpart ( tCol.m_sName.cstr(), m_pDict && m_pDict->GetSettings().m_bWordDict ); } #if USE_LIBEXPAT m_pParser = XML_ParserCreate(NULL); if ( !m_pParser ) { sError.SetSprintf ( "xmlpipe: failed to create XML parser" ); return false; } XML_SetUserData ( m_pParser, this ); XML_SetElementHandler ( m_pParser, xmlStartElement, xmlEndElement ); XML_SetCharacterDataHandler ( m_pParser, xmlCharacters ); #if USE_LIBICONV XML_SetUnknownEncodingHandler ( m_pParser, xmlUnknownEncoding, NULL ); #endif #endif #if USE_LIBXML m_pBufferPtr = m_pBuffer; m_pBufferEnd = m_pBuffer + m_iInitialBufSize; m_bPassedBufferEnd = false; m_dAttrs.Reserve ( 16 ); m_dAttrs.Resize ( 0 ); m_pParser = xmlReaderForIO ( (xmlInputReadCallback)xmlReadBuffers, NULL, this, NULL, NULL, 0 ); if ( !m_pParser ) { sError.SetSprintf ( "xmlpipe: failed to create XML parser" ); return false; } xmlTextReaderSetErrorHandler ( m_pParser, xmlErrorHandler, this ); #endif m_dKillList.Reserve ( 1024 ); m_dKillList.Resize ( 0 ); m_bRemoveParsed = false; m_bInDocset = false; m_bInSchema = false; m_bInDocument = false; m_bInKillList = false; m_bInId = false; m_bFirstTagAfterDocset = false; m_iCurField = -1; m_iCurAttr = -1; m_iElementDepth = 0; m_dParsedDocuments.Reset (); m_dDefaultAttrs.Reset (); m_dInvalid.Reset (); m_dWarned.Reset (); m_dParsedDocuments.Reserve ( 1024 ); m_dParsedDocuments.Resize ( 0 ); m_iKillListIterator = 0; m_iMVA = 0; m_iMVAIterator = 0; m_sError = ""; #if USE_LIBEXPAT int iBytesRead = m_iInitialBufSize; iBytesRead += fread ( m_pBuffer + m_iInitialBufSize, 1, m_iBufferSize - m_iInitialBufSize, m_pPipe ); if ( !ParseNextChunk ( iBytesRead, sError ) ) return false; #endif #if USE_LIBXML if ( ParseNextChunk ( sError )==-1 ) return false; #endif m_dAttrToMVA.Resize ( 0 ); int iFieldMVA = 0; for ( int i = 0; i < m_tSchema.GetAttrsCount (); i++ ) { const CSphColumnInfo & tCol = m_tSchema.GetAttr ( i ); if ( ( tCol.m_eAttrType==SPH_ATTR_UINT32SET || tCol.m_eAttrType==SPH_ATTR_UINT64SET ) && tCol.m_eSrc==SPH_ATTRSRC_FIELD ) m_dAttrToMVA.Add ( iFieldMVA++ ); else m_dAttrToMVA.Add ( -1 ); } m_dFieldMVAs.Resize ( iFieldMVA ); ARRAY_FOREACH ( i, m_dFieldMVAs ) m_dFieldMVAs[i].Reserve ( 16 ); m_tHits.m_dData.Reserve ( m_iMaxHits ); return true; } #if USE_LIBXML int CSphSource_XMLPipe2::ParseNextChunk ( CSphString & sError ) { int iRet = xmlTextReaderRead ( m_pParser ); while ( iRet==1 ) { ProcessNode ( m_pParser ); if ( !m_sError.IsEmpty () ) { sError = m_sError; m_tDocInfo.m_iDocID = 1; return false; } if 
( m_bPassedBufferEnd ) break; iRet = xmlTextReaderRead ( m_pParser ); } m_bPassedBufferEnd = false; if ( !m_sError.IsEmpty () || iRet==-1 ) { sError = m_sError; m_tDocInfo.m_iDocID = 1; return -1; } return iRet; } #endif #if USE_LIBEXPAT bool CSphSource_XMLPipe2::ParseNextChunk ( int iBufferLen, CSphString & sError ) { if ( !iBufferLen ) return true; bool bLast = ( iBufferLen!=m_iBufferSize ); m_iReparseLen = 0; if ( m_bFixupUTF8 ) { BYTE * p = m_pBuffer; BYTE * pMax = m_pBuffer + iBufferLen; while ( p3 ) { *p++ = ' '; continue; } // if we're on a boundary, save these few bytes for the future if ( p+iBytes>pMax ) { m_iReparseStart = (int)(p-m_pBuffer); m_iReparseLen = (int)(pMax-p); iBufferLen -= m_iReparseLen; break; } // otherwise (not a boundary), check them all int i = 1; int iVal = ( v >> iBytes ); for ( ; i=0xd800 && iVal<=0xdfff ) // and utf-16 surrogate pairs || ( iBytes==3 && iVal<0x800 ) // and overlong 3-byte codes || ( iVal>=0xfff0 && iVal<=0xffff ) ) // and kinda-valid specials expat chokes on anyway { for ( i=0; im_iDocID; sError.SetSprintf ( "source '%s': XML parse error: %s (line=%d, pos=%d, docid=" DOCID_FMT ")", m_tSchema.m_sName.cstr(), XML_ErrorString ( XML_GetErrorCode ( m_pParser ) ), (int)XML_GetCurrentLineNumber ( m_pParser ), (int)XML_GetCurrentColumnNumber ( m_pParser ), uFailedID ); m_tDocInfo.m_iDocID = 1; return false; } if ( !m_sError.IsEmpty () ) { sError = m_sError; m_tDocInfo.m_iDocID = 1; return false; } return true; } #endif BYTE ** CSphSource_XMLPipe2::NextDocument ( CSphString & sError ) { if ( m_bRemoveParsed ) { SafeDelete ( m_dParsedDocuments[0] ); m_dParsedDocuments.RemoveFast ( 0 ); m_bRemoveParsed = false; } int iReadResult = 0; #if USE_LIBEXPAT while ( m_dParsedDocuments.GetLength()==0 ) { // saved bytes to the front! if ( m_iReparseLen ) memmove ( m_pBuffer, m_pBuffer+m_iReparseStart, m_iReparseLen ); // read more data iReadResult = fread ( m_pBuffer+m_iReparseLen, 1, m_iBufferSize-m_iReparseLen, m_pPipe ); if ( iReadResult==0 ) break; // and parse it if ( !ParseNextChunk ( iReadResult+m_iReparseLen, sError ) ) return NULL; } #endif #if USE_LIBXML while ( m_dParsedDocuments.GetLength()==0 && ( iReadResult = ParseNextChunk ( sError ) )==1 ); #endif while ( m_dParsedDocuments.GetLength()!=0 ) { Document_t * pDocument = m_dParsedDocuments[0]; int nAttrs = m_tSchema.GetAttrsCount (); // docid m_tDocInfo.m_iDocID = VerifyID ( pDocument->m_iDocID ); if ( m_tDocInfo.m_iDocID==0 ) { SafeDelete ( m_dParsedDocuments[0] ); m_dParsedDocuments.RemoveFast ( 0 ); continue; } // attributes for ( int i = 0; i < nAttrs; i++ ) { const CSphString & sAttrValue = pDocument->m_dAttrs[i].IsEmpty () && m_dDefaultAttrs.GetLength () ? 
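// --- illustrative aside (not part of the original sphinxsource.cpp) ----------
// The UTF-8 fixup pass above (m_bFixupUTF8) decides how many continuation
// bytes each lead byte promises by counting its leading 1-bits, then blanks
// out sequences expat would reject. A tiny sketch of just the length decision:
static int Utf8SeqLen ( unsigned char uLead )
{
	if ( uLead<0x80 )
		return 1;                             // plain ASCII
	int iOnes = 0;
	for ( unsigned char v=uLead; v & 0x80; v <<= 1 )
		iOnes++;                              // count leading 1-bits
	// 2..4 leading ones => a 2..4 byte sequence; anything else is invalid
	return ( iOnes>=2 && iOnes<=4 ) ? iOnes : -1;
}
// e.g. Utf8SeqLen(0xD0)==2, Utf8SeqLen(0xE2)==3, Utf8SeqLen(0xFF)==-1
// -----------------------------------------------------------------------------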
m_dDefaultAttrs[i] : pDocument->m_dAttrs[i]; const CSphColumnInfo & tAttr = m_tSchema.GetAttr ( i ); if ( tAttr.m_eAttrType==SPH_ATTR_UINT32SET || tAttr.m_eAttrType==SPH_ATTR_UINT64SET ) { m_tDocInfo.SetAttr ( tAttr.m_tLocator, ParseFieldMVA ( m_dMva, sAttrValue.cstr (), tAttr.m_eAttrType==SPH_ATTR_UINT64SET ) ); continue; } switch ( tAttr.m_eAttrType ) { case SPH_ATTR_ORDINAL: case SPH_ATTR_STRING: case SPH_ATTR_WORDCOUNT: m_dStrAttrs[i] = sAttrValue.cstr (); if ( !m_dStrAttrs[i].cstr() ) m_dStrAttrs[i] = ""; m_tDocInfo.SetAttr ( tAttr.m_tLocator, 0 ); break; case SPH_ATTR_FLOAT: m_tDocInfo.SetAttrFloat ( tAttr.m_tLocator, sphToFloat ( sAttrValue.cstr () ) ); break; case SPH_ATTR_BIGINT: m_tDocInfo.SetAttr ( tAttr.m_tLocator, sphToInt64 ( sAttrValue.cstr () ) ); break; default: m_tDocInfo.SetAttr ( tAttr.m_tLocator, sphToDword ( sAttrValue.cstr () ) ); break; } } m_bRemoveParsed = true; int nFields = m_tSchema.m_dFields.GetLength (); if ( !nFields ) { m_tDocInfo.m_iDocID = 0; return NULL; } m_dFieldPtrs.Resize ( nFields ); for ( int i = 0; i < nFields; ++i ) m_dFieldPtrs[i] = (BYTE*)( pDocument->m_dFields [i].cstr() ); return (BYTE **)&( m_dFieldPtrs[0] ); } if ( !iReadResult ) m_tDocInfo.m_iDocID = 0; return NULL; } bool CSphSource_XMLPipe2::IterateKillListStart ( CSphString & ) { m_iKillListIterator = 0; return true; } bool CSphSource_XMLPipe2::IterateKillListNext ( SphDocID_t & tDocId ) { if ( m_iKillListIterator>=m_dKillList.GetLength () ) return false; tDocId = m_dKillList [ m_iKillListIterator++ ]; return true; } void CSphSource_XMLPipe2::StartElement ( const char * szName, const char ** pAttrs ) { if ( !strcmp ( szName, "sphinx:docset" ) ) { m_bInDocset = true; m_bFirstTagAfterDocset = true; return; } if ( !strcmp ( szName, "sphinx:schema" ) ) { if ( !m_bInDocset || !m_bFirstTagAfterDocset ) { Error ( " is allowed immediately after only" ); return; } if ( m_tSchema.m_dFields.GetLength () > 0 || m_tSchema.GetAttrsCount () > 0 ) { sphWarn ( "%s", DecorateMessage ( "both embedded and configured schemas found; using embedded" ) ); m_tSchema.Reset (); } m_bFirstTagAfterDocset = false; m_bInSchema = true; return; } if ( !strcmp ( szName, "sphinx:field" ) ) { if ( !m_bInDocset || !m_bInSchema ) { Error ( " is allowed inside only" ); return; } const char ** dAttrs = pAttrs; CSphColumnInfo Info; CSphString sDefault; bool bIsAttr = false; while ( dAttrs[0] && dAttrs[1] && dAttrs[0][0] && dAttrs[1][0] ) { if ( !strcmp ( *dAttrs, "name" ) ) { AddFieldToSchema ( dAttrs[1] ); Info.m_sName = dAttrs[1]; } else if ( !strcmp ( *dAttrs, "attr" ) ) { bIsAttr = true; if ( !strcmp ( dAttrs[1], "string" ) ) Info.m_eAttrType = SPH_ATTR_STRING; else if ( !strcmp ( dAttrs[1], "wordcount" ) ) Info.m_eAttrType = SPH_ATTR_WORDCOUNT; } else if ( !strcmp ( *dAttrs, "default" ) ) sDefault = dAttrs[1]; dAttrs += 2; } if ( bIsAttr ) { Info.m_iIndex = m_tSchema.GetAttrsCount (); m_tSchema.AddAttr ( Info, true ); // all attributes are dynamic at indexing time m_dDefaultAttrs.Add ( sDefault ); } return; } if ( !strcmp ( szName, "sphinx:attr" ) ) { if ( !m_bInDocset || !m_bInSchema ) { Error ( " is allowed inside only" ); return; } bool bError = false; CSphString sDefault; CSphColumnInfo Info; Info.m_eAttrType = SPH_ATTR_INTEGER; const char ** dAttrs = pAttrs; while ( dAttrs[0] && dAttrs[1] && dAttrs[0][0] && dAttrs[1][0] && !bError ) { if ( !strcmp ( *dAttrs, "name" ) ) Info.m_sName = dAttrs[1]; else if ( !strcmp ( *dAttrs, "bits" ) ) Info.m_tLocator.m_iBitCount = strtol ( dAttrs[1], NULL, 10 ); else if ( !strcmp ( 
*dAttrs, "default" ) ) sDefault = dAttrs[1]; else if ( !strcmp ( *dAttrs, "type" ) ) { const char * szType = dAttrs[1]; if ( !strcmp ( szType, "int" ) ) Info.m_eAttrType = SPH_ATTR_INTEGER; else if ( !strcmp ( szType, "timestamp" ) ) Info.m_eAttrType = SPH_ATTR_TIMESTAMP; else if ( !strcmp ( szType, "str2ordinal" ) ) Info.m_eAttrType = SPH_ATTR_ORDINAL; else if ( !strcmp ( szType, "bool" ) ) Info.m_eAttrType = SPH_ATTR_BOOL; else if ( !strcmp ( szType, "float" ) ) Info.m_eAttrType = SPH_ATTR_FLOAT; else if ( !strcmp ( szType, "bigint" ) ) Info.m_eAttrType = SPH_ATTR_BIGINT; else if ( !strcmp ( szType, "string" ) ) Info.m_eAttrType = SPH_ATTR_STRING; else if ( !strcmp ( szType, "wordcount" ) ) Info.m_eAttrType = SPH_ATTR_WORDCOUNT; else if ( !strcmp ( szType, "multi" ) ) { Info.m_eAttrType = SPH_ATTR_UINT32SET; Info.m_eSrc = SPH_ATTRSRC_FIELD; } else if ( !strcmp ( szType, "multi_64" ) ) { Info.m_eAttrType = SPH_ATTR_UINT64SET; Info.m_eSrc = SPH_ATTRSRC_FIELD; } else { Error ( "unknown column type '%s'", szType ); bError = true; } } dAttrs += 2; } if ( !bError ) { Info.m_iIndex = m_tSchema.GetAttrsCount (); m_tSchema.AddAttr ( Info, true ); // all attributes are dynamic at indexing time m_dDefaultAttrs.Add ( sDefault ); } return; } if ( !strcmp ( szName, "sphinx:document" ) ) { if ( !m_bInDocset || m_bInSchema ) return DocumentError ( "" ); if ( m_bInKillList ) return DocumentError ( "" ); if ( m_bInDocument ) return DocumentError ( "" ); if ( m_tSchema.m_dFields.GetLength()==0 && m_tSchema.GetAttrsCount()==0 ) { Error ( "no schema configured, and no embedded schema found" ); return; } m_bInDocument = true; assert ( !m_pCurDocument ); m_pCurDocument = new Document_t; m_pCurDocument->m_iDocID = 0; m_pCurDocument->m_dFields.Resize ( m_tSchema.m_dFields.GetLength () ); m_pCurDocument->m_dAttrs.Resize ( m_tSchema.GetAttrsCount () ); if ( pAttrs[0] && pAttrs[1] && pAttrs[0][0] && pAttrs[1][0] ) if ( !strcmp ( pAttrs[0], "id" ) ) m_pCurDocument->m_iDocID = sphToDocid ( pAttrs[1] ); if ( m_pCurDocument->m_iDocID==0 ) Error ( "attribute 'id' required in " ); return; } if ( !strcmp ( szName, "sphinx:killlist" ) ) { if ( !m_bInDocset || m_bInDocument || m_bInSchema ) { Error ( " is not allowed inside or " ); return; } m_bInKillList = true; return; } if ( m_bInKillList ) { if ( !m_bInId ) { if ( strcmp ( szName, "id" ) ) { Error ( "only 'id' is allowed inside " ); return; } m_bInId = true; } else ++m_iElementDepth; } if ( m_bInDocument ) { if ( m_iCurField==-1 && m_iCurAttr==-1 ) { for ( int i = 0; i < m_tSchema.m_dFields.GetLength () && m_iCurField==-1; i++ ) if ( m_tSchema.m_dFields[i].m_sName==szName ) m_iCurField = i; for ( int i = 0; i < m_tSchema.GetAttrsCount () && m_iCurAttr==-1; i++ ) if ( m_tSchema.GetAttr(i).m_sName==szName ) m_iCurAttr = i; if ( m_iCurAttr==-1 && m_iCurField==-1 ) { m_bInIgnoredTag = true; bool bInvalidFound = false; for ( int i = 0; i < m_dInvalid.GetLength () && !bInvalidFound; i++ ) bInvalidFound = m_dInvalid[i]==szName; if ( !bInvalidFound ) { sphWarn ( "%s", DecorateMessage ( "unknown field/attribute '%s'; ignored", szName ) ); m_dInvalid.Add ( szName ); } } } else m_iElementDepth++; } } void CSphSource_XMLPipe2::EndElement ( const char * szName ) { m_bInIgnoredTag = false; if ( !strcmp ( szName, "sphinx:docset" ) ) m_bInDocset = false; else if ( !strcmp ( szName, "sphinx:schema" ) ) { m_bInSchema = false; m_tDocInfo.Reset ( m_tSchema.GetRowSize () ); m_dStrAttrs.Resize ( m_tSchema.GetAttrsCount() ); } else if ( !strcmp ( szName, "sphinx:document" ) ) { m_bInDocument 
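// NOTE (sketch): the type="..." values handled above map to attribute types as
// int->SPH_ATTR_INTEGER, timestamp->TIMESTAMP, str2ordinal->ORDINAL, bool->BOOL,
// float->FLOAT, bigint->BIGINT, string->STRING, wordcount->WORDCOUNT,
// multi->UINT32SET and multi_64->UINT64SET (the MVA flavors take their values
// from the element body, SPH_ATTRSRC_FIELD). A minimal document, with purely
// illustrative field names:
//
//   <sphinx:document id="42">
//     <title>hello world</title>
//     <price>9.5</price>
//   </sphinx:document>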
= false; if ( m_pCurDocument ) m_dParsedDocuments.Add ( m_pCurDocument ); m_pCurDocument = NULL; } else if ( !strcmp ( szName, "sphinx:killlist" ) ) { m_bInKillList = false; } else if ( m_bInKillList ) { if ( m_iElementDepth==0 ) { if ( m_bInId ) { m_pFieldBuffer [ Min ( m_iFieldBufferLen, m_iFieldBufferMax-1 ) ] = '\0'; m_dKillList.Add ( sphToDocid ( (const char *)m_pFieldBuffer ) ); m_iFieldBufferLen = 0; m_bInId = false; } } else m_iElementDepth--; } else if ( m_bInDocument && ( m_iCurAttr!=-1 || m_iCurField!=-1 ) ) { if ( m_iElementDepth==0 ) { if ( m_iCurField!=-1 ) { assert ( m_pCurDocument ); m_pCurDocument->m_dFields [m_iCurField].SetBinary ( (char*)m_pFieldBuffer, m_iFieldBufferLen ); } if ( m_iCurAttr!=-1 ) { assert ( m_pCurDocument ); m_pCurDocument->m_dAttrs [m_iCurAttr].SetBinary ( (char*)m_pFieldBuffer, m_iFieldBufferLen ); } m_iFieldBufferLen = 0; m_iCurAttr = -1; m_iCurField = -1; } else m_iElementDepth--; } } void CSphSource_XMLPipe2::UnexpectedCharaters ( const char * pCharacters, int iLen, const char * szComment ) { const int MAX_WARNING_LENGTH = 64; bool bSpaces = true; for ( int i = 0; i < iLen && bSpaces; i++ ) if ( !sphIsSpace ( pCharacters[i] ) ) bSpaces = false; if ( !bSpaces ) { CSphString sWarning; #if USE_LIBEXPAT sWarning.SetBinary ( pCharacters, Min ( iLen, MAX_WARNING_LENGTH ) ); sphWarn ( "source '%s': unexpected string '%s' (line=%d, pos=%d) %s", m_tSchema.m_sName.cstr(), sWarning.cstr (), (int)XML_GetCurrentLineNumber ( m_pParser ), (int)XML_GetCurrentColumnNumber ( m_pParser ), szComment ); #endif #if USE_LIBXML int i = 0; for ( i=0; i=0 && sphIsSpace ( sWarning.cstr()[i] ); i-- ); if ( i>=0 ) ( (char *)sWarning.cstr() )[i+1] = '\0'; sphWarn ( "source '%s': unexpected string '%s' %s", m_tSchema.m_sName.cstr(), sWarning.cstr(), szComment ); #endif } } void CSphSource_XMLPipe2::Characters ( const char * pCharacters, int iLen ) { if ( m_bInIgnoredTag ) return; if ( !m_bInDocset ) { UnexpectedCharaters ( pCharacters, iLen, "outside of " ); return; } if ( !m_bInSchema && !m_bInDocument && !m_bInKillList ) { UnexpectedCharaters ( pCharacters, iLen, "outside of and " ); return; } if ( m_iCurAttr==-1 && m_iCurField==-1 && !m_bInKillList ) { UnexpectedCharaters ( pCharacters, iLen, m_bInDocument ? "inside " : ( m_bInSchema ? "inside " : "" ) ); return; } if ( iLen + m_iFieldBufferLen < m_iFieldBufferMax ) { memcpy ( m_pFieldBuffer + m_iFieldBufferLen, pCharacters, iLen ); m_iFieldBufferLen += iLen; } else { const CSphString & sName = ( m_iCurField!=-1 ) ? 
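// NOTE: Characters() above appends into m_pFieldBuffer only while the value
// still fits under m_iFieldBufferMax; anything longer is truncated, and the
// "length exceeds max length" warning just below is emitted only once per
// field/attribute name (tracked in m_dWarned).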
m_tSchema.m_dFields[m_iCurField].m_sName : m_tSchema.GetAttr ( m_iCurAttr ).m_sName; bool bWarned = false; for ( int i = 0; i < m_dWarned.GetLength () && !bWarned; i++ ) bWarned = m_dWarned[i]==sName; if ( !bWarned ) { #if USE_LIBEXPAT sphWarn ( "source '%s': field/attribute '%s' length exceeds max length (line=%d, pos=%d, docid=" DOCID_FMT ")", m_tSchema.m_sName.cstr(), sName.cstr(), (int)XML_GetCurrentLineNumber ( m_pParser ), (int)XML_GetCurrentColumnNumber ( m_pParser ), m_pCurDocument->m_iDocID ); #endif #if USE_LIBXML sphWarn ( "source '%s': field/attribute '%s' length exceeds max length (docid=" DOCID_FMT ")", m_tSchema.m_sName.cstr(), sName.cstr(), m_pCurDocument->m_iDocID ); #endif m_dWarned.Add ( sName ); } } } #if USE_LIBXML int CSphSource_XMLPipe2::ReadBuffer ( BYTE * pBuffer, int iLen ) { int iLeft = Max ( m_pBufferEnd - m_pBufferPtr, 0 ); if ( iLeft < iLen ) { memmove ( m_pBuffer, m_pBufferPtr, iLeft ); size_t iRead = fread ( m_pBuffer + iLeft, 1, m_iBufferSize - iLeft, m_pPipe ); m_bPassedBufferEnd = ( ( m_iBufferSize - iLeft )==int(iRead) ); m_pBufferPtr = m_pBuffer; m_pBufferEnd = m_pBuffer + iLeft + iRead; iLeft = Max ( m_pBufferEnd - m_pBuffer, 0 ); } int iToCopy = Min ( iLen, iLeft ); memcpy ( pBuffer, m_pBufferPtr, iToCopy ); m_pBufferPtr += iToCopy; return iToCopy; } void CSphSource_XMLPipe2::ProcessNode ( xmlTextReaderPtr pReader ) { int iType = xmlTextReaderNodeType ( pReader ); switch ( iType ) { case XML_READER_TYPE_ELEMENT: { const char * szName = (char*)xmlTextReaderName ( pReader ); m_dAttrs.Resize ( 0 ); if ( xmlTextReaderHasAttributes ( pReader ) ) { if ( xmlTextReaderMoveToFirstAttribute ( pReader )!=1 ) return; do { int iLen = m_dAttrs.GetLength (); m_dAttrs.Resize ( iLen + 2 ); m_dAttrs[iLen] = (char*)xmlTextReaderName ( pReader ); m_dAttrs[iLen+1] = (char*)xmlTextReaderValue ( pReader ); } while ( xmlTextReaderMoveToNextAttribute ( pReader )==1 ); } int iLen = m_dAttrs.GetLength (); m_dAttrs.Resize ( iLen + 2 ); m_dAttrs[iLen] = NULL; m_dAttrs[iLen+1] = NULL; StartElement ( szName, &m_dAttrs[0] ); } break; case XML_READER_TYPE_END_ELEMENT: EndElement ( (char*)xmlTextReaderName ( pReader ) ); break; case XML_TEXT_NODE: { const char * szText = (char*)xmlTextReaderValue ( pReader ); Characters ( szText, strlen ( szText ) ); } break; } } #endif CSphSource * sphCreateSourceXmlpipe2 ( const CSphConfigSection * pSource, FILE * pPipe, BYTE * dInitialBuf, int iBufLen, const char * szSourceName, int iMaxFieldLen ) { CSphSource_XMLPipe2 * pXMLPipe = new CSphSource_XMLPipe2 ( dInitialBuf, iBufLen, szSourceName, iMaxFieldLen, pSource->GetInt ( "xmlpipe_fixup_utf8", 0 )!=0 ); if ( !pXMLPipe->Setup ( pPipe, *pSource ) ) SafeDelete ( pXMLPipe ); return pXMLPipe; } #endif FILE * sphDetectXMLPipe ( const char * szCommand, BYTE * dBuf, int & iBufSize, int iMaxBufSize, bool & bUsePipe2 ) { bUsePipe2 = true; // default is xmlpipe2 FILE * pPipe = popen ( szCommand, "r" ); if ( !pPipe ) return NULL; BYTE * pStart = dBuf; iBufSize = (int)fread ( dBuf, 1, iMaxBufSize, pPipe ); BYTE * pEnd = pStart + iBufSize; // BOM if ( iBufSize>=3 ) if ( !strncmp ( (char*)pStart, "\xEF\xBB\xBF", 3 ) ) pStart += 3; while ( isspace ( *pStart ) && pStart < pEnd ) pStart++; if ( ( pEnd - pStart)>=5 ) bUsePipe2 = !strncasecmp ( (char *)pStart, "WARN_ROW_SIZE ) sphWarn ( "row buffer is over %d bytes; consider revising sql_column_buffers", iTotalBuffer ); return true; } bool CSphSource_ODBC::SqlIsError () { return !m_sError.IsEmpty (); } const char * CSphSource_ODBC::SqlError () { return 
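// NOTE (sketch): sphDetectXMLPipe() above decides between xmlpipe2 and legacy
// xmlpipe by peeking at the stream: skip a UTF-8 BOM (EF BB BF), skip leading
// whitespace, then compare the next 5 bytes case-insensitively. The compared
// literal was stripped from this dump; by its length and context it is most
// likely the XML declaration prefix, i.e. something like
//
//   bUsePipe2 = !strncasecmp ( (char*)pStart, "<?xml", 5 );   // reconstruction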
m_sError.cstr(); } bool CSphSource_ODBC::SqlConnect () { if ( SQLAllocHandle ( SQL_HANDLE_ENV, NULL, &m_hEnv )==SQL_ERROR ) { if ( m_tParams.m_bPrintQueries ) fprintf ( stdout, "SQL-CONNECT: FAIL\n" ); return false; } SQLSetEnvAttr ( m_hEnv, SQL_ATTR_ODBC_VERSION, (void*) SQL_OV_ODBC3, SQL_IS_INTEGER ); if ( SQLAllocHandle ( SQL_HANDLE_DBC, m_hEnv, &m_hDBC )==SQL_ERROR ) { if ( m_tParams.m_bPrintQueries ) fprintf ( stdout, "SQL-CONNECT: FAIL\n" ); return false; } OdbcPostConnect (); char szOutConn [2048]; SQLSMALLINT iOutConn = 0; if ( SQLDriverConnect ( m_hDBC, NULL, (SQLTCHAR*) m_sOdbcDSN.cstr(), SQL_NTS, (SQLCHAR*)szOutConn, sizeof(szOutConn), &iOutConn, SQL_DRIVER_NOPROMPT )==SQL_ERROR ) { GetSqlError ( SQL_HANDLE_DBC, m_hDBC ); if ( m_tParams.m_bPrintQueries ) fprintf ( stdout, "SQL-CONNECT: FAIL\n" ); return false; } if ( m_tParams.m_bPrintQueries ) fprintf ( stdout, "SQL-CONNECT: ok\n" ); return true; } void CSphSource_ODBC::SqlDisconnect () { if ( m_tParams.m_bPrintQueries ) fprintf ( stdout, "SQL-DISCONNECT\n" ); if ( m_hStmt!=NULL ) SQLFreeHandle ( SQL_HANDLE_STMT, m_hStmt ); if ( m_hDBC ) { SQLDisconnect ( m_hDBC ); SQLFreeHandle ( SQL_HANDLE_DBC, m_hDBC ); } if ( m_hEnv ) SQLFreeHandle ( SQL_HANDLE_ENV, m_hEnv ); } int CSphSource_ODBC::SqlNumFields () { if ( !m_hStmt ) return -1; return m_nResultCols; } bool CSphSource_ODBC::SqlFetchRow () { if ( !m_hStmt ) return false; SQLRETURN iRet = SQLFetch ( m_hStmt ); if ( iRet==SQL_ERROR || iRet==SQL_INVALID_HANDLE ) { GetSqlError ( SQL_HANDLE_STMT, m_hStmt ); return false; } ARRAY_FOREACH ( i, m_dColumns ) { QueryColumn_t & tCol = m_dColumns[i]; switch ( tCol.m_iInd ) { case SQL_NO_DATA: case SQL_NULL_DATA: tCol.m_dContents[0] = '\0'; tCol.m_dContents[0] = '\0'; break; default: #if USE_WINDOWS // FIXME! 
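// NOTE: SqlConnect() above follows the usual ODBC bring-up order:
//   1. SQLAllocHandle ( SQL_HANDLE_ENV )  and  SQLSetEnvAttr ( SQL_OV_ODBC3 )
//   2. SQLAllocHandle ( SQL_HANDLE_DBC )
//   3. OdbcPostConnect()  -- lets CSphSource_MSSQL assemble its DSN string
//   4. SQLDriverConnect ( ..., SQL_DRIVER_NOPROMPT )  with m_sOdbcDSN
// printing "SQL-CONNECT: ok/FAIL" traces when m_tParams.m_bPrintQueries is set.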
support UCS-2 columns on Unix too if ( tCol.m_bUnicode ) { // WideCharToMultiByte should get NULL terminated string memset ( tCol.m_dRaw.Begin()+tCol.m_iBufferSize, 0, MS_SQL_BUFFER_GAP ); int iConv = WideCharToMultiByte ( CP_UTF8, 0, LPCWSTR ( tCol.m_dRaw.Begin() ), tCol.m_iInd/sizeof(WCHAR), LPSTR ( tCol.m_dContents.Begin() ), tCol.m_iBufferSize-1, NULL, NULL ); if ( iConv==0 ) if ( GetLastError()==ERROR_INSUFFICIENT_BUFFER ) iConv = tCol.m_iBufferSize-1; tCol.m_dContents[iConv] = '\0'; } else #endif { if ( tCol.m_iInd>=0 && tCol.m_iInd=tCol.m_iBufferSize && !tCol.m_bTruncated ) { // out of buffer; warn about that (once) tCol.m_bTruncated = true; sphWarn ( "'%s' column truncated (buffer=%d, got=%d); consider revising sql_column_buffers", tCol.m_sName.cstr(), tCol.m_iBufferSize-1, (int) tCol.m_iInd ); } } break; } } return iRet!=SQL_NO_DATA; } const char * CSphSource_ODBC::SqlColumn ( int iIndex ) { if ( !m_hStmt ) return NULL; return &(m_dColumns [iIndex].m_dContents[0]); } const char * CSphSource_ODBC::SqlFieldName ( int iIndex ) { return m_dColumns[iIndex].m_sName.cstr(); } DWORD CSphSource_ODBC::SqlColumnLength ( int ) { return 0; } bool CSphSource_ODBC::Setup ( const CSphSourceParams_ODBC & tParams ) { if ( !CSphSource_SQL::Setup ( tParams ) ) return false; // parse column buffers spec, if any if ( !tParams.m_sColBuffers.IsEmpty() ) { const char * p = tParams.m_sColBuffers.cstr(); while ( *p ) { // skip space while ( sphIsSpace(*p) ) p++; // expect eof or ident if ( !*p ) break; if ( !sphIsAlpha(*p) ) { m_sError.SetSprintf ( "identifier expected in sql_column_buffers near '%s'", p ); return false; } // get ident CSphString sCol; const char * pIdent = p; while ( sphIsAlpha(*p) ) p++; sCol.SetBinary ( pIdent, p-pIdent ); // skip space while ( sphIsSpace(*p) ) p++; // expect assignment if ( *p!='=' ) { m_sError.SetSprintf ( "'=' expected in sql_column_buffers near '%s'", p ); return false; } p++; // skip space while ( sphIsSpace(*p) ) p++; // expect number if (!( *p>='0' && *p<='9' )) { m_sError.SetSprintf ( "number expected in sql_column_buffers near '%s'", p ); return false; } // get value int iSize = 0; while ( *p>='0' && *p<='9' ) { iSize = 10*iSize + ( *p-'0' ); p++; } if ( *p=='K' ) { iSize *= 1024; p++; } else if ( *p=='M' ) { iSize *= 1048576; p++; } // hash value sCol.ToLower(); m_hColBuffers.Add ( iSize, sCol ); // skip space while ( sphIsSpace(*p) ) p++; // expect eof or comma if ( !*p ) break; if ( *p!=',' ) { m_sError.SetSprintf ( "comma expected in sql_column_buffers near '%s'", p ); return false; } p++; } } // ODBC specific params m_sOdbcDSN = tParams.m_sOdbcDSN; m_bWinAuth = tParams.m_bWinAuth; m_bUnicode = tParams.m_bUnicode; // build and store DSN for error reporting char sBuf [ 1024 ]; snprintf ( sBuf, sizeof(sBuf), "odbc%s", m_sSqlDSN.cstr()+3 ); m_sSqlDSN = sBuf; return true; } void CSphSource_ODBC::GetSqlError ( SQLSMALLINT iHandleType, SQLHANDLE hHandle ) { if ( !hHandle ) { m_sError.SetSprintf ( "invalid handle" ); return; } char szState[16]; char szMessageText[1024]; SQLINTEGER iError; SQLSMALLINT iLen; SQLGetDiagRec ( iHandleType, hHandle, 1, (SQLCHAR*)szState, &iError, (SQLCHAR*)szMessageText, 1024, &iLen ); m_sError = szMessageText; } ////////////////////////////////////////////////////////////////////////// void CSphSource_MSSQL::OdbcPostConnect () { const int MAX_LEN = 1024; char szDriver[MAX_LEN]; char szDriverAttrs[MAX_LEN]; SQLSMALLINT iDescLen = 0; SQLSMALLINT iAttrLen = 0; SQLSMALLINT iDir = SQL_FETCH_FIRST; CSphString sDriver; for ( ;; ) { SQLRETURN 
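// NOTE (sketch): the sql_column_buffers parser above takes a comma-separated
// "name = size" list with optional K/M suffixes (x1024 / x1048576); names are
// lowercased before being stored in m_hColBuffers. An illustrative config
// value (column names are examples, not taken from any shipped config):
//
//   sql_column_buffers = content = 1M, title = 64K, tags = 4096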
iRet = SQLDrivers ( m_hEnv, iDir, (SQLCHAR*)szDriver, MAX_LEN, &iDescLen, (SQLCHAR*)szDriverAttrs, MAX_LEN, &iAttrLen ); if ( iRet==SQL_NO_DATA ) break; iDir = SQL_FETCH_NEXT; if ( !strcmp ( szDriver, "SQL Native Client" ) || !strncmp ( szDriver, "SQL Server Native Client", strlen("SQL Server Native Client") ) ) { sDriver = szDriver; break; } } if ( sDriver.IsEmpty() ) sDriver = "SQL Server"; if ( m_bWinAuth && m_tParams.m_sUser.IsEmpty () ) { m_sOdbcDSN.SetSprintf ( "DRIVER={%s};SERVER={%s};Database={%s};Trusted_Connection=yes", sDriver.cstr (), m_tParams.m_sHost.cstr (), m_tParams.m_sDB.cstr () ); } else if ( m_bWinAuth ) { m_sOdbcDSN.SetSprintf ( "DRIVER={%s};SERVER={%s};UID={%s};PWD={%s};Database={%s};Trusted_Connection=yes", sDriver.cstr (), m_tParams.m_sHost.cstr (), m_tParams.m_sUser.cstr (), m_tParams.m_sPass.cstr (), m_tParams.m_sDB.cstr () ); } else { m_sOdbcDSN.SetSprintf ( "DRIVER={%s};SERVER={%s};UID={%s};PWD={%s};Database={%s}", sDriver.cstr (), m_tParams.m_sHost.cstr (), m_tParams.m_sUser.cstr (), m_tParams.m_sPass.cstr (), m_tParams.m_sDB.cstr () ); } } #endif ///////////////////////////////////////////////////////////////////////////// // MERGER HELPERS ///////////////////////////////////////////////////////////////////////////// void CSphDocMVA::Read ( CSphReader & tReader ) { m_iDocID = tReader.GetDocid(); if ( m_iDocID ) { ARRAY_FOREACH ( i, m_dMVA ) { DWORD iValues = tReader.GetDword(); m_dMVA[i].Resize ( iValues ); if ( iValues ) tReader.GetBytes ( m_dMVA[i].Begin(), iValues*sizeof(DWORD) ); } } } void CSphDocMVA::Write ( CSphWriter & tWriter ) { tWriter.PutDocid ( m_iDocID ); ARRAY_FOREACH ( i, m_dMVA ) { m_dOffsets[i] = ( DWORD )tWriter.GetPos() / sizeof( DWORD ); int iValues = m_dMVA[i].GetLength(); tWriter.PutDword ( iValues ); if ( iValues ) tWriter.PutBytes ( m_dMVA[i].Begin(), iValues*sizeof(DWORD) ); } } ///////////////////////////////////////////////////////////////////////////// void sphSetQuiet ( bool bQuiet ) { g_bSphQuiet = bQuiet; } static inline float GetPercent ( int64_t a, int64_t b ) { if ( b==0 ) return 100.0f; int64_t r = a*100000/b; return float(r)/1000; } const char * CSphIndexProgress::BuildMessage() const { static char sBuf[256]; switch ( m_ePhase ) { case PHASE_COLLECT: snprintf ( sBuf, sizeof(sBuf), "collected %d docs, %.1f MB", m_iDocuments, float(m_iBytes)/1000000.0f ); break; case PHASE_SORT: snprintf ( sBuf, sizeof(sBuf), "sorted %.1f Mhits, %.1f%% done", float(m_iHits)/1000000, GetPercent ( m_iHits, m_iHitsTotal ) ); break; case PHASE_COLLECT_MVA: snprintf ( sBuf, sizeof(sBuf), "collected "INT64_FMT" attr values", m_iAttrs ); break; case PHASE_SORT_MVA: snprintf ( sBuf, sizeof(sBuf), "sorted %.1f Mvalues, %.1f%% done", float(m_iAttrs)/1000000, GetPercent ( m_iAttrs, m_iAttrsTotal ) ); break; case PHASE_MERGE: snprintf ( sBuf, sizeof(sBuf), "merged %.1f Kwords", float(m_iWords)/1000 ); break; case PHASE_PREREAD: snprintf ( sBuf, sizeof(sBuf), "read %.1f of %.1f MB, %.1f%% done", float(m_iBytes)/1000000.0f, float(m_iBytesTotal)/1000000.0f, GetPercent ( m_iBytes, m_iBytesTotal ) ); break; case PHASE_PRECOMPUTE: snprintf ( sBuf, sizeof(sBuf), "indexing attributes, %d.%d%% done", m_iDone/10, m_iDone%10 ); break; default: assert ( 0 && "internal error: unhandled progress phase" ); snprintf ( sBuf, sizeof(sBuf), "(progress-phase-%d)", m_ePhase ); break; } sBuf[sizeof(sBuf)-1] = '\0'; return sBuf; } ///////////////////////////////////////////////////////////////////////////// int sphDictCmp ( const char * pStr1, int iLen1, const char * pStr2, 
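// NOTE: GetPercent() above does the division in 64-bit integer math first
// (a*100000/b) and converts to float only at the end, giving 0.001% resolution
// without intermediate rounding; e.g. a=1, b=3 gives r=33333, i.e. 33.333f,
// and b==0 is special-cased to 100%.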
int iLen2 ) { assert ( pStr1 && pStr2 ); assert ( iLen1 && iLen2 ); const int iCmpLen = Min ( iLen1, iLen2 ); return strncmp ( pStr1, pStr2, iCmpLen ); } int sphDictCmpStrictly ( const char * pStr1, int iLen1, const char * pStr2, int iLen2 ) { assert ( pStr1 && pStr2 ); assert ( iLen1 && iLen2 ); const int iCmpLen = Min ( iLen1, iLen2 ); const int iCmpRes = strncmp ( pStr1, pStr2, iCmpLen ); return iCmpRes==0 ? iLen1-iLen2 : iCmpRes; } WordDictInfo_t::WordDictInfo_t () { m_uOff = 0; m_iDocs = 0; m_iHits = 0; m_iDoclistHint = 0; } WordReaderContext_t::WordReaderContext_t() { m_sWord[0] = '\0'; m_iLen = 0; } CWordlist::CWordlist () : m_dCheckpoints ( 0 ) { m_iCheckpointsPos = 0; m_iSize = 0; m_iMaxChunk = 0; m_bWordDict = false; m_pWords = NULL; } CWordlist::~CWordlist () { Reset(); } void CWordlist::Reset () { m_tFile.Close (); m_pBuf.Reset (); m_dCheckpoints.Reset ( 0 ); SafeDeleteArray ( m_pWords ); } bool CWordlist::ReadCP ( CSphAutofile & tFile, DWORD uVer, bool bWordDict, CSphString & sError ) { assert ( ( uVer>=21 && bWordDict ) || !bWordDict ); assert ( m_iCheckpointsPos>0 ); assert ( m_iSize-m_iCheckpointsPos=0 ); m_pWords = new BYTE[iArenaSize]; assert ( m_pWords ); BYTE * pWord = m_pWords; ARRAY_FOREACH ( i, m_dCheckpoints ) { m_dCheckpoints[i].m_sWord = (char *)pWord; const int iLen = tReader.GetDword(); assert ( iLen>0 ); assert ( iLen+1+(pWord-m_pWords)<=iArenaSize ); tReader.GetBytes ( pWord, iLen ); pWord[iLen] = '\0'; pWord += iLen+1; m_dCheckpoints[i].m_iWordlistOffset = tReader.GetOffset(); } } else if ( uVer>=11 ) { // read v.14 checkpoints ARRAY_FOREACH ( i, m_dCheckpoints ) { m_dCheckpoints[i].m_iWordID = (SphWordID_t)tReader.GetOffset(); m_dCheckpoints[i].m_iWordlistOffset = tReader.GetOffset(); } } else { // convert v.10 checkpoints ARRAY_FOREACH ( i, m_dCheckpoints ) { #if USE_64BIT m_dCheckpoints[i].m_iWordID = tReader.GetOffset(); #else m_dCheckpoints[i].m_iWordID = tReader.GetDword(); #endif m_dCheckpoints[i].m_iWordlistOffset = tReader.GetDword(); } } SphOffset_t uMaxChunk = 0; ARRAY_FOREACH ( i, m_dCheckpoints ) { SphOffset_t uNextOffset = ( i+1 )==m_dCheckpoints.GetLength() ? m_iSize : m_dCheckpoints[i+1].m_iWordlistOffset; uMaxChunk = Max ( uMaxChunk, uNextOffset - m_dCheckpoints[i].m_iWordlistOffset ); } assert ( uMaxChunk0 ); for ( ;; ) { // unpack next word // must be in sync with DictEnd()! BYTE uPack = *pBuf++; if ( !uPack ) return NULL; // wordlist chunk is over int iMatch, iDelta; if ( uPack & 0x80 ) { iDelta = ( ( uPack>>4 ) & 7 ) + 1; iMatch = uPack & 15; } else { iDelta = uPack & 127; iMatch = *pBuf++; } assert ( iMatch+iDelta<(int)sizeof(tCtx.m_sWord)-1 ); assert ( iMatch<=(int)strlen ( (char *)tCtx.m_sWord ) ); memcpy ( tCtx.m_sWord + iMatch, pBuf, iDelta ); pBuf += iDelta; tCtx.m_iLen = iMatch + iDelta; tCtx.m_sWord[tCtx.m_iLen] = '\0'; // list is sorted, so if there was no match, there's no such word int iCmpRes = bStarMode ? sphDictCmp ( pStr, iLen, (char*)tCtx.m_sWord, tCtx.m_iLen ) : sphDictCmpStrictly ( pStr, iLen, (char*)tCtx.m_sWord, tCtx.m_iLen ); if ( iCmpRes<0 ) return NULL; const SphOffset_t uOff = sphUnzipOffset ( pBuf ); const int iDocs = sphUnzipInt ( pBuf ); const int iHits = sphUnzipInt ( pBuf ); BYTE uHint = ( iDocs>=DOCLIST_HINT_THRESH ) ? *pBuf++ : 0; // it matches?! 
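// NOTE (worked example): keyword-dict entries above are front-coded. If the
// previous word was "test" and the next one is "tests", they share iMatch=4
// leading bytes and add iDelta=1 new byte, so the short one-byte form applies:
//
//   uPack = 0x80 | ((iDelta-1)<<4) | iMatch = 0x80 | 0x00 | 0x04 = 0x84, then 's'
//
// Larger deltas/matches use the two-byte form (7-bit delta, then a match byte).
// Each word is followed by the zipped doclist offset, doc count, hit count,
// plus one hint byte once iDocs>=DOCLIST_HINT_THRESH.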
if ( iCmpRes==0 && ( !bStarMode || iLen<=tCtx.m_iLen ) ) { tWord.m_sWord = (char*)tCtx.m_sWord; tWord.m_uOff = uOff; tWord.m_iDocs = iDocs; tWord.m_iHits = iHits; tWord.m_iDoclistHint = DoclistHintUnpack ( iDocs, uHint ); return pBuf; } } } bool CWordlist::GetWord ( const BYTE * pBuf, SphWordID_t iWordID, WordDictInfo_t & tWord ) const { SphWordID_t iLastID = 0; SphOffset_t uLastOff = 0; for ( ;; ) { // unpack next word ID const SphWordID_t iDeltaWord = sphUnzipWordid ( pBuf ); // FIXME! slow with 32bit wordids if ( iDeltaWord==0 ) // wordlist chunk is over return false; iLastID += iDeltaWord; // list is sorted, so if there was no match, there's no such word if ( iLastID>iWordID ) return false; // unpack next offset const SphOffset_t iDeltaOffset = sphUnzipOffset ( pBuf ); uLastOff += iDeltaOffset; // unpack doc/hit count const int iDocs = sphUnzipInt ( pBuf ); const int iHits = sphUnzipInt ( pBuf ); assert ( iDeltaOffset ); assert ( iDocs ); assert ( iHits ); // it matches?! if ( iLastID==iWordID ) { sphUnzipWordid ( pBuf ); // might be 0 at checkpoint const SphOffset_t iDoclistLen = sphUnzipOffset ( pBuf ); tWord.m_uOff = uLastOff; tWord.m_iDocs = iDocs; tWord.m_iHits = iHits; tWord.m_iDoclistHint = (int)iDoclistLen; return true; } } } const BYTE * CWordlist::AcquireDict ( const CSphWordlistCheckpoint * pCheckpoint, int iFD, BYTE * pDictBuf ) const { assert ( pCheckpoint ); assert ( m_dCheckpoints.GetLength() ); assert ( pCheckpoint>=m_dCheckpoints.Begin() && pCheckpoint<=&m_dCheckpoints.Last() ); assert ( pCheckpoint->m_iWordlistOffset>0 && pCheckpoint->m_iWordlistOffset<=m_iSize ); assert ( m_pBuf.IsEmpty() || pCheckpoint->m_iWordlistOffset<(int64_t)m_pBuf.GetLength() ); const BYTE * pBuf = NULL; if ( !m_pBuf.IsEmpty() ) pBuf = m_pBuf.GetWritePtr()+pCheckpoint->m_iWordlistOffset; else { assert ( pDictBuf ); SphOffset_t iChunkLength = 0; // not the end? if ( pCheckpoint < &m_dCheckpoints.Last() ) iChunkLength = pCheckpoint[1].m_iWordlistOffset - pCheckpoint->m_iWordlistOffset; else iChunkLength = m_iSize - pCheckpoint->m_iWordlistOffset; if ( (int)sphPread ( iFD, pDictBuf, (size_t)iChunkLength, pCheckpoint->m_iWordlistOffset )==iChunkLength ) pBuf = pDictBuf; } return pBuf; } void CWordlist::GetPrefixedWords ( const char * sWord, int iWordLen, CSphVector & dPrefixedWords, BYTE * pDictBuf, int iFD ) const { assert ( iWordLen>0 ); // empty index? if ( !m_dCheckpoints.GetLength() ) return; const CSphWordlistCheckpoint * pCheckpoint = FindCheckpoint ( sWord, iWordLen, 0, true ); while ( pCheckpoint ) { // decode wordlist chunk const BYTE * pBuf = AcquireDict ( pCheckpoint, iFD, pDictBuf ); assert ( pBuf ); WordReaderContext_t tReaderCtx; while ( pBuf ) { WordDictInfo_t tResWord; pBuf = GetWord ( pBuf, sWord, iWordLen, tResWord, true, tReaderCtx ); if ( pBuf ) { assert ( !tResWord.m_sWord.IsEmpty() ); CSphNamedInt & tPrefixed = dPrefixedWords.Add(); tPrefixed.m_sName = tResWord.m_sWord; // OPTIMIZE? swap mb? tPrefixed.m_iValue = tResWord.m_iDocs; } } pCheckpoint++; if ( pCheckpoint > &m_dCheckpoints.Last() ) break; if ( sphDictCmp ( sWord, iWordLen, pCheckpoint->m_sWord, strlen ( pCheckpoint->m_sWord ) )<0 ) break; } } int CSphStrHashFunc::Hash ( const CSphString & sKey ) { return sKey.IsEmpty() ? 
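// NOTE: GetPrefixedWords() above drives wildcard expansion: find the first
// checkpoint that can hold the prefix, decode that dictionary chunk (straight
// from the mmap'd buffer, or sphPread() into the caller-supplied pDictBuf),
// collect every word matching the prefix together with its document count,
// then step to the next checkpoint until its first word no longer shares the
// prefix (sphDictCmp()<0).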
0 : sphCRC32 ( (const BYTE *)sKey.cstr() ); } ////////////////////////////////////////////////////////////////////////// // CSphQueryResultMeta ////////////////////////////////////////////////////////////////////////// CSphQueryResultMeta::CSphQueryResultMeta () : m_iQueryTime ( 0 ) , m_iCpuTime ( 0 ) , m_iMultiplier ( 1 ) , m_iMatches ( 0 ) , m_iTotalMatches ( 0 ) { } void CSphQueryResultMeta::AddStat ( const CSphString & sWord, int64_t iDocs, int64_t iHits, bool bExpanded ) { CSphString sFixed; const CSphString * pFixed = &sWord; if ( sWord.cstr()[0]==MAGIC_WORD_HEAD ) { sFixed = sWord; *(char *)( sFixed.cstr() ) = '*'; pFixed = &sFixed; } else if ( sWord.cstr()[0]==MAGIC_WORD_HEAD_NONSTEMMED ) { if ( !bExpanded ) { sFixed = sWord; *(char *)( sFixed.cstr() ) = '='; pFixed = &sFixed; } else { sFixed = sWord.SubString ( 1, sWord.Length()-1 ); pFixed = &sFixed; } } WordStat_t * pStats = m_hWordStats ( *pFixed ); if ( !pStats ) { CSphQueryResultMeta::WordStat_t tStats; tStats.m_iDocs = iDocs; tStats.m_iHits = iHits; tStats.m_bExpanded = bExpanded; m_hWordStats.Add ( tStats, *pFixed ); } else { pStats->m_iDocs += iDocs; pStats->m_iHits += iHits; pStats->m_bExpanded |= bExpanded; } } CSphQueryResultMeta::CSphQueryResultMeta ( const CSphQueryResultMeta & tMeta ) { *this = tMeta; } CSphQueryResultMeta & CSphQueryResultMeta::operator= ( const CSphQueryResultMeta & tMeta ) { m_iQueryTime = tMeta.m_iQueryTime; m_iCpuTime = tMeta.m_iCpuTime; m_iMultiplier = tMeta.m_iMultiplier; m_iMatches = tMeta.m_iMatches; m_iTotalMatches = tMeta.m_iTotalMatches; m_sError = tMeta.m_sError; m_sWarning = tMeta.m_sWarning; m_hWordStats = tMeta.m_hWordStats; return *this; } // // $Id: sphinx.cpp 3134 2012-03-01 19:34:23Z tomat $ // sphinx-2.0.4-release/src/sphinxstemru.cpp0000644000176700017710000000466711711621267020030 0ustar deogardeogar// // $Id: sphinxstemru.cpp 3087 2012-01-30 23:07:35Z shodan $ // // // Copyright (c) 2001-2012, Andrew Aksyonoff // Copyright (c) 2008-2012, Sphinx Technologies Inc // All rights reserved // // This program is free software; you can redistribute it and/or modify // it under the terms of the GNU General Public License. 
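// NOTE (sketch): the two alphabet tables below feed the same stemmer template
// (sphinxstemru.inl) for two encodings. The windows-1251 table stores each
// lowercase Cyrillic letter as its single byte (A=0xE0 .. YA=0xFF); the UTF-8
// table packs the letter's two UTF-8 bytes into one unsigned short with the
// first byte in the low half, e.g. U+0430 (bytes D0 B0) becomes 0xB0D0U and
// U+044F (bytes D1 8F) becomes 0x8FD1U, which is why LOC_CHAR_TYPE switches
// from unsigned char to unsigned short between the two includes.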
You should have // received a copy of the GPL license along with this program; if you // did not, you can find it at http://www.gnu.org/ // #include "sphinx.h" #include ///////////////////////////////////////////////////////////////////////////// // SBCS windows-1251 implementation ///////////////////////////////////////////////////////////////////////////// struct RussianAlphabetWin1251_t { enum { A = 0xE0, B = 0xE1, V = 0xE2, G = 0xE3, D = 0xE4, E = 0xE5, YO = 0xB8, ZH = 0xE6, Z = 0xE7, I = 0xE8, IY = 0xE9, K = 0xEA, L = 0xEB, M = 0xEC, N = 0xED, O = 0xEE, P = 0xEF, R = 0xF0, S = 0xF1, T = 0xF2, U = 0xF3, F = 0xF4, H = 0xF5, TS = 0xF6, CH = 0xF7, SH = 0xF8, SCH = 0xF9, TVY = 0xFA, // TVYordiy znak Y = 0xFB, MYA = 0xFC, // MYAgkiy znak EE = 0xFD, YU = 0xFE, YA = 0xFF }; }; #define LOC_CHAR_TYPE unsigned char #define LOC_PREFIX(_a) _a##_cp1251 #define RUS RussianAlphabetWin1251_t #include "sphinxstemru.inl" ///////////////////////////////////////////////////////////////////////////// // UTF-8 implementation ///////////////////////////////////////////////////////////////////////////// struct RussianAlphabetUTF8_t { enum { A = 0xB0D0U, B = 0xB1D0U, V = 0xB2D0U, G = 0xB3D0U, D = 0xB4D0U, E = 0xB5D0U, YO = 0x91D1U, ZH = 0xB6D0U, Z = 0xB7D0U, I = 0xB8D0U, IY = 0xB9D0U, K = 0xBAD0U, L = 0xBBD0U, M = 0xBCD0U, N = 0xBDD0U, O = 0xBED0U, P = 0xBFD0U, R = 0x80D1U, S = 0x81D1U, T = 0x82D1U, U = 0x83D1U, F = 0x84D1U, H = 0x85D1U, TS = 0x86D1U, CH = 0x87D1U, SH = 0x88D1U, SCH = 0x89D1U, TVY = 0x8AD1U, // TVYordiy znak Y = 0x8BD1U, MYA = 0x8CD1U, // MYAgkiy znak EE = 0x8DD1U, YU = 0x8ED1U, YA = 0x8FD1U }; }; #define LOC_CHAR_TYPE unsigned short #define LOC_PREFIX(_a) _a##_utf8 #define RUS RussianAlphabetUTF8_t #include "sphinxstemru.inl" // NOLINT 2nd include ///////////////////////////////////////////////////////////////////////////// void stem_ru_init () { stem_ru_init_cp1251 (); stem_ru_init_utf8 (); } // // $Id: sphinxstemru.cpp 3087 2012-01-30 23:07:35Z shodan $ // sphinx-2.0.4-release/src/sphinxql.y0000644000176700017710000005065711674105325016613 0ustar deogardeogar%{ #if USE_WINDOWS #pragma warning(push,1) #pragma warning(disable:4702) // unreachable code #endif %} %lex-param { SqlParser_c * pParser } %parse-param { SqlParser_c * pParser } %pure-parser %error-verbose %token TOK_IDENT %token TOK_ATIDENT %token TOK_CONST_INT %token TOK_CONST_FLOAT %token TOK_CONST_MVA %token TOK_QUOTED_STRING %token TOK_USERVAR %token TOK_SYSVAR %token TOK_CONST_STRINGS %token TOK_AS %token TOK_ASC %token TOK_ATTACH %token TOK_AVG %token TOK_BEGIN %token TOK_BETWEEN %token TOK_BY %token TOK_CALL %token TOK_COLLATION %token TOK_COMMIT %token TOK_COMMITTED %token TOK_COUNT %token TOK_CREATE %token TOK_DELETE %token TOK_DESC %token TOK_DESCRIBE %token TOK_DISTINCT %token TOK_DIV %token TOK_DROP %token TOK_FALSE %token TOK_FLOAT %token TOK_FLUSH %token TOK_FROM %token TOK_FUNCTION %token TOK_GLOBAL %token TOK_GROUP %token TOK_ID %token TOK_IN %token TOK_INDEX %token TOK_INSERT %token TOK_INT %token TOK_INTO %token TOK_ISOLATION %token TOK_LEVEL %token TOK_LIMIT %token TOK_MATCH %token TOK_MAX %token TOK_META %token TOK_MIN %token TOK_MOD %token TOK_NAMES %token TOK_NULL %token TOK_OPTION %token TOK_ORDER %token TOK_RAND %token TOK_READ %token TOK_REPEATABLE %token TOK_REPLACE %token TOK_RETURNS %token TOK_ROLLBACK %token TOK_RTINDEX %token TOK_SELECT %token TOK_SERIALIZABLE %token TOK_SET %token TOK_SESSION %token TOK_SHOW %token TOK_SONAME %token TOK_START %token TOK_STATUS %token TOK_SUM %token TOK_TABLES %token 
TOK_TO %token TOK_TRANSACTION %token TOK_TRUE %token TOK_UNCOMMITTED %token TOK_UPDATE %token TOK_VALUES %token TOK_VARIABLES %token TOK_WARNINGS %token TOK_WEIGHT %token TOK_WHERE %token TOK_WITHIN %type named_const_list %type udf_type %left TOK_OR %left TOK_AND %left '|' %left '&' %left '=' TOK_NE %left '<' '>' TOK_LTE TOK_GTE %left '+' '-' %left '*' '/' '%' TOK_DIV TOK_MOD %nonassoc TOK_NOT %nonassoc TOK_NEG %{ // some helpers #include // for FLT_MAX %} %% request: statement { pParser->PushQuery(); } | multi_stmt_list | multi_stmt_list ';' ; statement: insert_into | delete_from | set_stmt | set_global_stmt | transact_op | call_proc | describe | show_tables | update | show_variables | show_collation | create_function | drop_function | attach_index | flush_rtindex | set_transaction | select_sysvar ; ////////////////////////////////////////////////////////////////////////// multi_stmt_list: multi_stmt { pParser->PushQuery(); } | multi_stmt_list ';' multi_stmt { pParser->PushQuery(); } ; multi_stmt: select_from | show_stmt ; select_from: TOK_SELECT select_items_list TOK_FROM ident_list opt_where_clause opt_group_clause opt_group_order_clause opt_order_clause opt_limit_clause opt_option_clause { pParser->m_pStmt->m_eStmt = STMT_SELECT; pParser->m_pQuery->m_sIndexes.SetBinary ( pParser->m_pBuf+$4.m_iStart, $4.m_iEnd-$4.m_iStart ); } ; select_items_list: select_item | select_items_list ',' select_item ; select_item: '*' { pParser->AddItem ( &$1 ); } | select_expr opt_alias ; opt_alias: // empty | TOK_IDENT { pParser->AliasLastItem ( &$1 ); } | TOK_AS TOK_IDENT { pParser->AliasLastItem ( &$2 ); } ; select_expr: expr { pParser->AddItem ( &$1 ); } | TOK_AVG '(' expr ')' { pParser->AddItem ( &$3, SPH_AGGR_AVG, &$1, &$4 ); } | TOK_MAX '(' expr ')' { pParser->AddItem ( &$3, SPH_AGGR_MAX, &$1, &$4 ); } | TOK_MIN '(' expr ')' { pParser->AddItem ( &$3, SPH_AGGR_MIN, &$1, &$4 ); } | TOK_SUM '(' expr ')' { pParser->AddItem ( &$3, SPH_AGGR_SUM, &$1, &$4 ); } | TOK_COUNT '(' '*' ')' { if ( !pParser->AddItem ( "count(*)", &$1, &$4 ) ) YYERROR; } | TOK_WEIGHT '(' ')' { if ( !pParser->AddItem ( "weight()", &$1, &$3 ) ) YYERROR; } | TOK_COUNT '(' TOK_DISTINCT TOK_IDENT ')' { if ( !pParser->AddDistinct ( &$4, &$1, &$5 ) ) YYERROR; } ; ident_list: TOK_IDENT | ident_list ',' TOK_IDENT { $$ = $1; $$.m_iEnd = $3.m_iEnd; } ; opt_where_clause: // empty | where_clause ; where_clause: TOK_WHERE where_expr ; where_expr: where_item | where_expr TOK_AND where_item ; where_item: TOK_MATCH '(' TOK_QUOTED_STRING ')' { if ( !pParser->SetMatch($3) ) YYERROR; } | expr_ident '=' const_int { CSphFilterSettings * pFilter = pParser->AddValuesFilter ( $1 ); if ( !pFilter ) YYERROR; pFilter->m_dValues.Add ( $3.m_iValue ); } | expr_ident TOK_NE const_int { CSphFilterSettings * pFilter = pParser->AddValuesFilter ( $1 ); if ( !pFilter ) YYERROR; pFilter->m_dValues.Add ( $3.m_iValue ); pFilter->m_bExclude = true; } | expr_ident TOK_IN '(' const_list ')' { CSphFilterSettings * pFilter = pParser->AddValuesFilter ( $1 ); if ( !pFilter ) YYERROR; pFilter->m_dValues = *$4.m_pValues.Ptr(); pFilter->m_dValues.Sort(); } | expr_ident TOK_NOT TOK_IN '(' const_list ')' { CSphFilterSettings * pFilter = pParser->AddValuesFilter ( $1 ); if ( !pFilter ) YYERROR; pFilter->m_dValues = *$5.m_pValues.Ptr(); pFilter->m_bExclude = true; pFilter->m_dValues.Sort(); } | expr_ident TOK_IN TOK_USERVAR { if ( !pParser->AddUservarFilter ( $1.m_sValue, $3.m_sValue, false ) ) YYERROR; } | expr_ident TOK_NOT TOK_IN TOK_USERVAR { if ( !pParser->AddUservarFilter ( 
$1.m_sValue, $4.m_sValue, true ) ) YYERROR; } | expr_ident TOK_BETWEEN const_int TOK_AND const_int { if ( !pParser->AddUintRangeFilter ( $1.m_sValue, $3.m_iValue, $5.m_iValue ) ) YYERROR; } | expr_ident '>' const_int { if ( !pParser->AddUintRangeFilter ( $1.m_sValue, $3.m_iValue+1, UINT_MAX ) ) YYERROR; } | expr_ident '<' const_int { if ( !pParser->AddUintRangeFilter ( $1.m_sValue, 0, $3.m_iValue-1 ) ) YYERROR; } | expr_ident TOK_GTE const_int { if ( !pParser->AddUintRangeFilter ( $1.m_sValue, $3.m_iValue, UINT_MAX ) ) YYERROR; } | expr_ident TOK_LTE const_int { if ( !pParser->AddUintRangeFilter ( $1.m_sValue, 0, $3.m_iValue ) ) YYERROR; } | expr_ident '=' const_float | expr_ident TOK_NE const_float | expr_ident '>' const_float | expr_ident '<' const_float { yyerror ( pParser, "only >=, <=, and BETWEEN floating-point filter types are supported in this version" ); YYERROR; } | expr_ident TOK_BETWEEN const_float TOK_AND const_float { if ( !pParser->AddFloatRangeFilter ( $1.m_sValue, $3.m_fValue, $5.m_fValue ) ) YYERROR; } | expr_ident TOK_GTE const_float { if ( !pParser->AddFloatRangeFilter ( $1.m_sValue, $3.m_fValue, FLT_MAX ) ) YYERROR; } | expr_ident TOK_LTE const_float { if ( !pParser->AddFloatRangeFilter ( $1.m_sValue, -FLT_MAX, $3.m_fValue ) ) YYERROR; } ; expr_ident: TOK_IDENT | TOK_ATIDENT { if ( !pParser->SetOldSyntax() ) YYERROR; } | TOK_COUNT '(' '*' ')' { $$.m_sValue = "@count"; if ( !pParser->SetNewSyntax() ) YYERROR; } | TOK_WEIGHT '(' ')' { $$.m_sValue = "@weight"; if ( !pParser->SetNewSyntax() ) YYERROR; } | TOK_ID { $$.m_sValue = "@id"; if ( !pParser->SetNewSyntax() ) YYERROR; } ; const_int: TOK_CONST_INT { $$.m_iInstype = TOK_CONST_INT; $$.m_iValue = $1.m_iValue; } | '-' TOK_CONST_INT { $$.m_iInstype = TOK_CONST_INT; $$.m_iValue = -$2.m_iValue; } ; const_float: TOK_CONST_FLOAT { $$.m_iInstype = TOK_CONST_FLOAT; $$.m_fValue = $1.m_fValue; } | '-' TOK_CONST_FLOAT { $$.m_iInstype = TOK_CONST_FLOAT; $$.m_fValue = -$2.m_fValue; } ; const_list: const_int { assert ( !$$.m_pValues.Ptr() ); $$.m_pValues = new RefcountedVector_c (); $$.m_pValues->Add ( $1.m_iValue ); } | const_list ',' const_int { $$.m_pValues->Add ( $3.m_iValue ); } ; opt_group_clause: // empty | group_clause ; group_clause: TOK_GROUP TOK_BY expr_ident { pParser->m_pQuery->m_eGroupFunc = SPH_GROUPBY_ATTR; pParser->m_pQuery->m_sGroupBy = $3.m_sValue; } ; opt_group_order_clause: // empty | group_order_clause ; group_order_clause: TOK_WITHIN TOK_GROUP TOK_ORDER TOK_BY order_items_list { pParser->m_pQuery->m_sSortBy.SetBinary ( pParser->m_pBuf+$5.m_iStart, $5.m_iEnd-$5.m_iStart ); } ; opt_order_clause: // empty | order_clause ; order_clause: TOK_ORDER TOK_BY order_items_list { pParser->m_pQuery->m_sOrderBy.SetBinary ( pParser->m_pBuf+$3.m_iStart, $3.m_iEnd-$3.m_iStart ); } | TOK_ORDER TOK_BY TOK_RAND '(' ')' { pParser->m_pQuery->m_sOrderBy = "@random"; } ; order_items_list: order_item | order_items_list ',' order_item { $$ = $1; $$.m_iEnd = $3.m_iEnd; } ; order_item: expr_ident | expr_ident TOK_ASC { $$ = $1; $$.m_iEnd = $2.m_iEnd; } | expr_ident TOK_DESC { $$ = $1; $$.m_iEnd = $2.m_iEnd; } ; opt_limit_clause: // empty | limit_clause ; limit_clause: TOK_LIMIT TOK_CONST_INT { pParser->m_pQuery->m_iOffset = 0; pParser->m_pQuery->m_iLimit = $2.m_iValue; } | TOK_LIMIT TOK_CONST_INT ',' TOK_CONST_INT { pParser->m_pQuery->m_iOffset = $2.m_iValue; pParser->m_pQuery->m_iLimit = $4.m_iValue; } ; opt_option_clause: // empty | option_clause ; option_clause: TOK_OPTION option_list ; option_list: option_item | option_list ',' 
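// NOTE (sketch): an illustrative statement accepted by the select_from /
// where_item / group / order / limit rules above (index and column names are
// made up, not taken from any shipped config):
//
//   SELECT id, price, COUNT(*) FROM products
//   WHERE MATCH('laptop') AND price BETWEEN 100 AND 500
//   GROUP BY vendor_id
//   WITHIN GROUP ORDER BY price ASC
//   ORDER BY WEIGHT() DESC
//   LIMIT 0, 20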
option_item ; option_item: TOK_IDENT '=' TOK_IDENT { if ( !pParser->AddOption ( $1, $3 ) ) YYERROR; } | TOK_IDENT '=' TOK_CONST_INT { if ( !pParser->AddOption ( $1, $3 ) ) YYERROR; } | TOK_IDENT '=' '(' named_const_list ')' { if ( !pParser->AddOption ( $1, pParser->GetNamedVec ( $4 ) ) ) YYERROR; pParser->FreeNamedVec ( $4 ); } | TOK_IDENT '=' TOK_IDENT '(' TOK_QUOTED_STRING ')' { if ( !pParser->AddOption ( $1, $4, $5.m_sValue ) ) YYERROR; } | TOK_IDENT '=' TOK_QUOTED_STRING { if ( !pParser->AddOption ( $1, $3 ) ) YYERROR; } ; named_const_list: named_const { $$ = pParser->AllocNamedVec (); pParser->AddConst ( $$, $1 ); } | named_const_list ',' named_const { pParser->AddConst( $$, $3 ); } ; named_const: TOK_IDENT '=' const_int { $$.m_sValue = $1.m_sValue; $$.m_iValue = $3.m_iValue; } ; ////////////////////////////////////////////////////////////////////////// expr: TOK_IDENT | TOK_ATIDENT { if ( !pParser->SetOldSyntax() ) YYERROR; } | TOK_ID { if ( !pParser->SetNewSyntax() ) YYERROR; } | TOK_CONST_INT | TOK_CONST_FLOAT | TOK_USERVAR | '-' expr %prec TOK_NEG { $$ = $1; $$.m_iEnd = $2.m_iEnd; } | TOK_NOT expr { $$ = $1; $$.m_iEnd = $2.m_iEnd; } | expr '+' expr { $$ = $1; $$.m_iEnd = $3.m_iEnd; } | expr '-' expr { $$ = $1; $$.m_iEnd = $3.m_iEnd; } | expr '*' expr { $$ = $1; $$.m_iEnd = $3.m_iEnd; } | expr '/' expr { $$ = $1; $$.m_iEnd = $3.m_iEnd; } | expr '<' expr { $$ = $1; $$.m_iEnd = $3.m_iEnd; } | expr '>' expr { $$ = $1; $$.m_iEnd = $3.m_iEnd; } | expr '&' expr { $$ = $1; $$.m_iEnd = $3.m_iEnd; } | expr '|' expr { $$ = $1; $$.m_iEnd = $3.m_iEnd; } | expr '%' expr { $$ = $1; $$.m_iEnd = $3.m_iEnd; } | expr TOK_DIV expr { $$ = $1; $$.m_iEnd = $3.m_iEnd; } | expr TOK_MOD expr { $$ = $1; $$.m_iEnd = $3.m_iEnd; } | expr TOK_LTE expr { $$ = $1; $$.m_iEnd = $3.m_iEnd; } | expr TOK_GTE expr { $$ = $1; $$.m_iEnd = $3.m_iEnd; } | expr '=' expr { $$ = $1; $$.m_iEnd = $3.m_iEnd; } | expr TOK_NE expr { $$ = $1; $$.m_iEnd = $3.m_iEnd; } | expr TOK_AND expr { $$ = $1; $$.m_iEnd = $3.m_iEnd; } | expr TOK_OR expr { $$ = $1; $$.m_iEnd = $3.m_iEnd; } | '(' expr ')' { $$ = $1; $$.m_iEnd = $3.m_iEnd; } | function ; function: TOK_IDENT '(' arglist ')' { $$ = $1; $$.m_iEnd = $4.m_iEnd; } | TOK_IN '(' arglist ')' { $$ = $1; $$.m_iEnd = $4.m_iEnd; } // handle exception from 'ident' rule | TOK_IDENT '(' ')' { $$ = $1; $$.m_iEnd = $3.m_iEnd } | TOK_MIN '(' expr ',' expr ')' { $$ = $1; $$.m_iEnd = $6.m_iEnd } // handle clash with aggregate functions | TOK_MAX '(' expr ',' expr ')' { $$ = $1; $$.m_iEnd = $6.m_iEnd } ; arglist: arg | arglist ',' arg ; arg: expr | TOK_QUOTED_STRING ; ////////////////////////////////////////////////////////////////////////// show_stmt: TOK_SHOW show_variable ; show_variable: TOK_WARNINGS { pParser->m_pStmt->m_eStmt = STMT_SHOW_WARNINGS; } | TOK_STATUS { pParser->m_pStmt->m_eStmt = STMT_SHOW_STATUS; } | TOK_META { pParser->m_pStmt->m_eStmt = STMT_SHOW_META; } ; ////////////////////////////////////////////////////////////////////////// set_value: TOK_IDENT | TOK_NULL | TOK_QUOTED_STRING | TOK_CONST_INT | TOK_CONST_FLOAT ; set_stmt: TOK_SET TOK_IDENT '=' boolean_value { pParser->SetStatement ( $2, SET_LOCAL ); pParser->m_pStmt->m_iSetValue = $4.m_iValue; } | TOK_SET TOK_IDENT '=' set_string_value { pParser->SetStatement ( $2, SET_LOCAL ); pParser->m_pStmt->m_sSetValue = $4.m_sValue; } | TOK_SET TOK_IDENT '=' TOK_NULL { pParser->SetStatement ( $2, SET_LOCAL ); pParser->m_pStmt->m_bSetNull = true; } | TOK_SET TOK_NAMES set_value { pParser->m_pStmt->m_eStmt = STMT_DUMMY; } | TOK_SET 
TOK_SYSVAR '=' set_value { pParser->m_pStmt->m_eStmt = STMT_DUMMY; } ; set_global_stmt: TOK_SET TOK_GLOBAL TOK_USERVAR '=' '(' const_list ')' { pParser->SetStatement ( $3, SET_GLOBAL_UVAR ); pParser->m_pStmt->m_dSetValues = *$6.m_pValues.Ptr(); } | TOK_SET TOK_GLOBAL TOK_IDENT '=' set_string_value { pParser->SetStatement ( $3, SET_GLOBAL_SVAR ); pParser->m_pStmt->m_sSetValue = $5.m_sValue; } ; set_string_value: TOK_IDENT | TOK_QUOTED_STRING ; boolean_value: TOK_TRUE { $$.m_iValue = 1; } | TOK_FALSE { $$.m_iValue = 0; } | const_int { $$.m_iValue = $1.m_iValue; if ( $$.m_iValue!=0 && $$.m_iValue!=1 ) { yyerror ( pParser, "only 0 and 1 could be used as boolean values" ); YYERROR; } } ; ////////////////////////////////////////////////////////////////////////// transact_op: TOK_COMMIT { pParser->m_pStmt->m_eStmt = STMT_COMMIT; } | TOK_ROLLBACK { pParser->m_pStmt->m_eStmt = STMT_ROLLBACK; } | start_transaction { pParser->m_pStmt->m_eStmt = STMT_BEGIN; } ; start_transaction: TOK_BEGIN | TOK_START TOK_TRANSACTION ; ////////////////////////////////////////////////////////////////////////// insert_into: insert_or_replace TOK_INTO TOK_IDENT opt_column_list TOK_VALUES insert_rows_list { // everything else is pushed directly into parser within the rules pParser->m_pStmt->m_sIndex = $3.m_sValue; } ; insert_or_replace: TOK_INSERT { pParser->m_pStmt->m_eStmt = STMT_INSERT; } | TOK_REPLACE { pParser->m_pStmt->m_eStmt = STMT_REPLACE; } ; opt_column_list: // empty | '(' column_list ')' ; column_list: expr_ident { if ( !pParser->AddSchemaItem ( &$1 ) ) { yyerror ( pParser, "unknown field" ); YYERROR; } } | column_list ',' expr_ident { if ( !pParser->AddSchemaItem ( &$3 ) ) { yyerror ( pParser, "unknown field" ); YYERROR; } } ; insert_rows_list: insert_row | insert_rows_list ',' insert_row ; insert_row: '(' insert_vals_list ')' { if ( !pParser->m_pStmt->CheckInsertIntegrity() ) { yyerror ( pParser, "wrong number of values here" ); YYERROR; } } ; insert_vals_list: insert_val { AddInsval ( pParser->m_pStmt->m_dInsertValues, $1 ); } | insert_vals_list ',' insert_val { AddInsval ( pParser->m_pStmt->m_dInsertValues, $3 ); } ; insert_val: const_int { $$.m_iInstype = TOK_CONST_INT; $$.m_iValue = $1.m_iValue; } | const_float { $$.m_iInstype = TOK_CONST_FLOAT; $$.m_fValue = $1.m_fValue; } | TOK_QUOTED_STRING { $$.m_iInstype = TOK_QUOTED_STRING; $$.m_sValue = $1.m_sValue; } | '(' const_list ')' { $$.m_iInstype = TOK_CONST_MVA; $$.m_pValues = $2.m_pValues; } | '(' ')' { $$.m_iInstype = TOK_CONST_MVA; } ; ////////////////////////////////////////////////////////////////////////// delete_from: TOK_DELETE TOK_FROM TOK_IDENT TOK_WHERE TOK_ID '=' const_int { pParser->m_pStmt->m_eStmt = STMT_DELETE; pParser->m_pStmt->m_sIndex = $3.m_sValue; pParser->m_pStmt->m_dDeleteIds.Add ( $7.m_iValue ); } | TOK_DELETE TOK_FROM TOK_IDENT TOK_WHERE TOK_ID TOK_IN '(' const_list ')' { pParser->m_pStmt->m_eStmt = STMT_DELETE; pParser->m_pStmt->m_sIndex = $3.m_sValue; for ( int i=0; i<$8.m_pValues.Ptr()->GetLength(); i++ ) pParser->m_pStmt->m_dDeleteIds.Add ( (*$8.m_pValues.Ptr())[i] ); } ; ////////////////////////////////////////////////////////////////////////// call_proc: TOK_CALL TOK_IDENT '(' call_args_list opt_call_opts_list ')' { pParser->m_pStmt->m_eStmt = STMT_CALL; pParser->m_pStmt->m_sCallProc = $2.m_sValue; } ; call_args_list: call_arg { AddInsval ( pParser->m_pStmt->m_dInsertValues, $1 ); } | call_args_list ',' call_arg { AddInsval ( pParser->m_pStmt->m_dInsertValues, $3 ); } ; call_arg: insert_val | '(' const_string_list ')' { 
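// NOTE (sketch): statement shapes accepted by the set_global_stmt / transact_op
// / insert_into / delete_from rules above; index, variable and column names are
// illustrative only:
//
//   SET GLOBAL @banned = (101, 102, 103)
//   BEGIN
//   INSERT INTO rt (id, title, price) VALUES (1, 'hello', 9.5)
//   COMMIT
//   DELETE FROM rt WHERE id IN (1, 2, 3)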
$$.m_iInstype = TOK_CONST_STRINGS; } ; const_string_list: TOK_QUOTED_STRING { // FIXME? for now, one such array per CALL statement, tops if ( pParser->m_pStmt->m_dCallStrings.GetLength() ) { yyerror ( pParser, "unexpected constant string list" ); YYERROR; } pParser->m_pStmt->m_dCallStrings.Add ( $1.m_sValue ); } | const_string_list ',' TOK_QUOTED_STRING { pParser->m_pStmt->m_dCallStrings.Add ( $3.m_sValue ); } ; opt_call_opts_list: // empty | ',' call_opts_list ; call_opts_list: call_opt { assert ( pParser->m_pStmt->m_dCallOptNames.GetLength()==1 ); assert ( pParser->m_pStmt->m_dCallOptValues.GetLength()==1 ); } | call_opts_list ',' call_opt ; call_opt: insert_val opt_as call_opt_name { pParser->m_pStmt->m_dCallOptNames.Add ( $3.m_sValue ); AddInsval ( pParser->m_pStmt->m_dCallOptValues, $1 ); } ; opt_as: // empty | TOK_AS ; call_opt_name: TOK_IDENT | TOK_LIMIT { $$.m_sValue = "limit"; } ; ////////////////////////////////////////////////////////////////////////// describe: describe_tok TOK_IDENT { pParser->m_pStmt->m_eStmt = STMT_DESC; pParser->m_pStmt->m_sIndex = $2.m_sValue; } ; describe_tok: TOK_DESCRIBE | TOK_DESC ; ////////////////////////////////////////////////////////////////////////// show_tables: TOK_SHOW TOK_TABLES { pParser->m_pStmt->m_eStmt = STMT_SHOW_TABLES; } ; ////////////////////////////////////////////////////////////////////////// update: TOK_UPDATE ident_list TOK_SET update_items_list where_clause { if ( !pParser->UpdateStatement ( &$2 ) ) YYERROR; } ; update_items_list: update_item | update_items_list ',' update_item ; update_item: TOK_IDENT '=' const_int { pParser->UpdateAttr ( $1.m_sValue, &$3 ); } | TOK_IDENT '=' const_float { pParser->UpdateAttr ( $1.m_sValue, &$3, SPH_ATTR_FLOAT); } | TOK_IDENT '=' '(' const_list ')' { pParser->UpdateMVAAttr ( $1.m_sValue, $4 ); } | TOK_IDENT '=' '(' ')' // special case () means delete mva { SqlNode_t tNoValues; pParser->UpdateMVAAttr ( $1.m_sValue, tNoValues ); } ; ////////////////////////////////////////////////////////////////////////// show_variables: TOK_SHOW opt_scope TOK_VARIABLES { pParser->m_pStmt->m_eStmt = STMT_SHOW_VARIABLES; } ; show_collation: TOK_SHOW TOK_COLLATION { pParser->m_pStmt->m_eStmt = STMT_DUMMY; } ; set_transaction: TOK_SET opt_scope TOK_TRANSACTION TOK_ISOLATION TOK_LEVEL isolation_level { pParser->m_pStmt->m_eStmt = STMT_DUMMY; } ; opt_scope: | TOK_GLOBAL | TOK_SESSION ; isolation_level: TOK_READ TOK_UNCOMMITTED | TOK_READ TOK_COMMITTED | TOK_REPEATABLE TOK_READ | TOK_SERIALIZABLE ; ////////////////////////////////////////////////////////////////////////// create_function: TOK_CREATE TOK_FUNCTION TOK_IDENT TOK_RETURNS udf_type TOK_SONAME TOK_QUOTED_STRING { SqlStmt_t & tStmt = *pParser->m_pStmt; tStmt.m_eStmt = STMT_CREATE_FUNC; tStmt.m_sUdfName = $3.m_sValue; tStmt.m_sUdfLib = $7.m_sValue; tStmt.m_eUdfType = (ESphAttr) $5; } ; udf_type: TOK_INT { $$ = SPH_ATTR_INTEGER; } | TOK_FLOAT { $$ = SPH_ATTR_FLOAT; } ; drop_function: TOK_DROP TOK_FUNCTION TOK_IDENT { SqlStmt_t & tStmt = *pParser->m_pStmt; tStmt.m_eStmt = STMT_DROP_FUNC; tStmt.m_sUdfName = $3.m_sValue; } ; //////////////////////////////////////////////////////////// attach_index: TOK_ATTACH TOK_INDEX TOK_IDENT TOK_TO TOK_RTINDEX TOK_IDENT { SqlStmt_t & tStmt = *pParser->m_pStmt; tStmt.m_eStmt = STMT_ATTACH_INDEX; tStmt.m_sIndex = $3.m_sValue; tStmt.m_sSetName = $6.m_sValue; } ; ////////////////////////////////////////////////////////////////////////// flush_rtindex: TOK_FLUSH TOK_RTINDEX TOK_IDENT { SqlStmt_t & tStmt = *pParser->m_pStmt; 
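// NOTE (sketch): shapes accepted by the update / attach_index / create_function
// / call_proc rules above, again with illustrative identifiers:
//
//   UPDATE rt SET price = 10, tags = (1, 2, 3) WHERE id = 5
//   ATTACH INDEX plain_idx TO RTINDEX rt_idx
//   CREATE FUNCTION myudf RETURNS INT SONAME 'myudf.so'
//   CALL myproc ('hello', 5 AS limit)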
tStmt.m_eStmt = STMT_FLUSH_RTINDEX; tStmt.m_sIndex = $3.m_sValue; } ; ////////////////////////////////////////////////////////////////////////// select_sysvar: TOK_SELECT TOK_SYSVAR opt_limit_clause { pParser->m_pStmt->m_eStmt = STMT_DUMMY; } ; %% #if USE_WINDOWS #pragma warning(pop) #endif sphinx-2.0.4-release/src/sphinxsearch.cpp0000644000176700017710000051441211720460762017751 0ustar deogardeogar// // $Id: sphinxsearch.cpp 3111 2012-02-20 15:08:34Z klirichek $ // // // Copyright (c) 2001-2012, Andrew Aksyonoff // Copyright (c) 2008-2012, Sphinx Technologies Inc // All rights reserved // // This program is free software; you can redistribute it and/or modify // it under the terms of the GNU General Public License. You should have // received a copy of the GPL license along with this program; if you // did not, you can find it at http://www.gnu.org/ // #include "sphinx.h" #include "sphinxsearch.h" #include "sphinxquery.h" #include "sphinxint.h" #include ////////////////////////////////////////////////////////////////////////// // EXTENDED MATCHING V2 ////////////////////////////////////////////////////////////////////////// typedef Hitman_c<8> HITMAN; #define SPH_TREE_DUMP 0 #define SPH_BM25_K1 1.2f #define SPH_BM25_SCALE 1000 struct QwordsHash_fn { static inline int Hash ( const CSphString & sKey ) { return sphCRC32 ( (const BYTE *)sKey.cstr() ); } }; void ISphQword::CollectHitMask() { if ( m_bAllFieldsKnown ) return; SeekHitlist ( m_iHitlistPos ); for ( Hitpos_t uHit = GetNextHit(); uHit!=EMPTY_HIT; uHit = GetNextHit() ) m_dQwordFields.Set ( HITMAN::GetField ( uHit ) ); m_bAllFieldsKnown = true; } /// match in the stream struct ExtDoc_t { SphDocID_t m_uDocid; CSphRowitem * m_pDocinfo; ///< for inline storage only SphOffset_t m_uHitlistOffset; DWORD m_uDocFields; float m_fTFIDF; }; /// word in the query struct ExtQword_t { CSphString m_sWord; ///< word CSphString m_sDictWord; ///< word as processed by dict int m_iDocs; ///< matching documents int m_iHits; ///< matching hits float m_fIDF; ///< IDF value int m_iQueryPos; ///< position in the query bool m_bExpanded; ///< added by prefix expansion bool m_bExcluded; ///< excluded by the query (eg. 
bb in (aa AND NOT bb)) }; /// query words set typedef CSphOrderedHash < ExtQword_t, CSphString, QwordsHash_fn, 256 > ExtQwordsHash_t; /// generic match streamer class ExtNode_i { public: ExtNode_i (); virtual ~ExtNode_i () { SafeDeleteArray ( m_pDocinfo ); } static ExtNode_i * Create ( const XQNode_t * pNode, const ISphQwordSetup & tSetup ); static ExtNode_i * Create ( const XQKeyword_t & tWord, const XQNode_t * pNode, const ISphQwordSetup & tSetup ); static ExtNode_i * Create ( ISphQword * pQword, const XQNode_t * pNode, const ISphQwordSetup & tSetup ); virtual void Reset ( const ISphQwordSetup & tSetup ) = 0; virtual const ExtDoc_t * GetDocsChunk ( SphDocID_t * pMaxID ) = 0; virtual const ExtHit_t * GetHitsChunk ( const ExtDoc_t * pDocs, SphDocID_t uMaxID ) = 0; virtual void GetQwords ( ExtQwordsHash_t & hQwords ) = 0; virtual void SetQwordsIDF ( const ExtQwordsHash_t & hQwords ) = 0; virtual bool GotHitless () = 0; virtual int GetDocsCount () { return INT_MAX; } void DebugIndent ( int iLevel ) { while ( iLevel-- ) printf ( " " ); } virtual void DebugDump ( int iLevel ) { DebugIndent ( iLevel ); printf ( "ExtNode\n" ); } public: static const int MAX_DOCS = 512; static const int MAX_HITS = 512; int m_iAtomPos; ///< we now need it on this level for tricks like expanded keywords within phrases protected: ExtDoc_t m_dDocs[MAX_DOCS]; ExtHit_t m_dHits[MAX_HITS]; public: SphDocID_t m_uMaxID; int m_iStride; ///< docinfo stride (for inline mode only) protected: CSphRowitem * m_pDocinfo; ///< docinfo storage (for inline mode only) void AllocDocinfo ( const ISphQwordSetup & tSetup ) { if ( tSetup.m_iInlineRowitems ) { m_iStride = tSetup.m_iInlineRowitems; m_pDocinfo = new CSphRowitem [ MAX_DOCS*m_iStride ]; } } protected: inline const ExtDoc_t * ReturnDocsChunk ( int iCount, SphDocID_t * pMaxID ) { assert ( iCount>=0 && iCountm_iDocs; } virtual void DebugDump ( int iLevel ) { DebugIndent ( iLevel ); printf ( "ExtTerm: %s at: %d ", m_pQword->m_sWord.cstr(), m_pQword->m_iAtomPos ); if ( m_dQueriedFields.TestAll(true) ) { printf ( "(all)\n" ); } else { bool bFirst = true; printf ( "in: " ); for ( int iField = 0; iField < CSphSmallBitvec::iTOTALBITS; iField++ ) { if ( m_dQueriedFields.Test ( iField ) ) { if ( !bFirst ) printf ( ", " ); printf ( "%d", iField ); bFirst = false; } } printf ( "\n" ); } } protected: ISphQword * m_pQword; ExtDoc_t * m_pHitDoc; ///< points to entry in m_dDocs which GetHitsChunk() currently emits hits for SphDocID_t m_uHitsOverFor; ///< there are no more hits for matches block starting with this ID CSphSmallBitvec m_dQueriedFields; ///< accepted fields mask bool m_bHasWideFields; ///< whether fields mask for this term refer to fields 32+ float m_fIDF; ///< IDF for this term (might be 0.0f for non-1st occurences in query) int64_t m_iMaxTimer; ///< work until this timestamp CSphString * m_pWarning; const bool m_bNotWeighted; public: static volatile bool m_bInterruptNow; ///< may be set from outside to indicate the globally received sigterm }; /// Immediately interrupt current operation void sphInterruptNow() { ExtTerm_c::m_bInterruptNow = true; } volatile bool ExtTerm_c::m_bInterruptNow = false; /// single keyword streamer with artificial hitlist class ExtTermHitless_c: public ExtTerm_c { public: ExtTermHitless_c ( ISphQword * pQword, const CSphSmallBitvec& uFields, const ISphQwordSetup & tSetup, bool bNotWeighted ) : ExtTerm_c ( pQword, uFields, tSetup, bNotWeighted ) , m_uFieldPos ( 0 ) {} virtual void Reset ( const ISphQwordSetup & ) { m_uFieldPos = 0; } virtual const 
ExtHit_t * GetHitsChunk ( const ExtDoc_t * pDocs, SphDocID_t uMaxID ); virtual bool GotHitless () { return true; } protected: DWORD m_uFieldPos; }; ////////////////////////////////////////////////////////////////////////// /// per-document zone information (span start/end positions) struct ZoneInfo_t { CSphVector m_dStarts; CSphVector m_dEnds; }; /// zone hash key, zoneid+docid struct ZoneKey_t { int m_iZone; SphDocID_t m_uDocid; explicit ZoneKey_t ( int iZone=0, SphDocID_t uDocid=0 ) : m_iZone ( iZone ) , m_uDocid ( uDocid ) {} bool operator == ( const ZoneKey_t & rhs ) const { return m_iZone==rhs.m_iZone && m_uDocid==rhs.m_uDocid; } }; /// zone hashing function struct ZoneHash_fn { static inline int Hash ( const ZoneKey_t & tKey ) { return (DWORD)tKey.m_uDocid ^ ( tKey.m_iZone<<16 ); } }; /// zone hash typedef CSphOrderedHash < ZoneInfo_t, ZoneKey_t, ZoneHash_fn, 4096 > ZoneHash_c; /// single keyword streamer, with term position filtering template < TermPosFilter_e T > class ExtTermPos_c : public ExtTerm_c { public: ExtTermPos_c ( ISphQword * pQword, const XQNode_t * pNode, const ISphQwordSetup & tSetup ); virtual void Reset ( const ISphQwordSetup & tSetup ); virtual const ExtDoc_t * GetDocsChunk ( SphDocID_t * pMaxID ); virtual const ExtHit_t * GetHitsChunk ( const ExtDoc_t * pDocs, SphDocID_t uMaxID ); virtual bool GotHitless () { return false; } protected: inline bool IsAcceptableHit ( const ExtHit_t * pHit ) const; protected: int m_iMaxFieldPos; SphDocID_t m_uTermMaxID; const ExtDoc_t * m_pRawDocs; ///< chunk start as returned by raw GetDocsChunk() (need to store it for raw GetHitsChunk() calls) const ExtDoc_t * m_pRawDoc; ///< current position in raw docs chunk const ExtHit_t * m_pRawHit; ///< current position in raw hits chunk SphDocID_t m_uLastID; enum { COPY_FILTERED, COPY_TRAILING, COPY_DONE } m_eState; ///< internal GetHitsChunk() state (are we copying from my hits, or passing trailing raw hits, or done) ExtDoc_t m_dMyDocs[MAX_DOCS]; ///< all documents within the required pos range ExtHit_t m_dMyHits[MAX_HITS]; ///< all hits within the required pos range ExtHit_t m_dFilteredHits[MAX_HITS]; ///< hits from requested subset of the documents (for GetHitsChunk()) SphDocID_t m_uDoneFor; ISphZoneCheck * m_pZoneChecker; ///< zone-limited searches query ranker about zones mutable CSphVector m_dZones; ///< zone ids for this particular term mutable SphDocID_t m_uLastZonedId; mutable int m_iCheckFrom; }; /// multi-node binary-operation streamer traits class ExtTwofer_c : public ExtNode_i { public: ExtTwofer_c ( ExtNode_i * pFirst, ExtNode_i * pSecond, const ISphQwordSetup & tSetup ); ~ExtTwofer_c (); virtual void Reset ( const ISphQwordSetup & tSetup ); virtual void GetQwords ( ExtQwordsHash_t & hQwords ); virtual void SetQwordsIDF ( const ExtQwordsHash_t & hQwords ); virtual bool GotHitless () { return m_pChildren[0]->GotHitless() || m_pChildren[1]->GotHitless(); } void DebugDumpT ( const char * sName, int iLevel ) { DebugIndent ( iLevel ); printf ( "%s:\n", sName ); m_pChildren[0]->DebugDump ( iLevel+1 ); m_pChildren[1]->DebugDump ( iLevel+1 ); } void SetNodePos ( WORD uPosLeft, WORD uPosRight ) { m_dNodePos[0] = uPosLeft; m_dNodePos[1] = uPosRight; m_bPosAware = true; } protected: ExtNode_i * m_pChildren[2]; const ExtDoc_t * m_pCurDoc[2]; const ExtHit_t * m_pCurHit[2]; WORD m_dNodePos[2]; bool m_bPosAware; SphDocID_t m_uMatchedDocid; }; /// A-and-B streamer class ExtAnd_c : public ExtTwofer_c { public: ExtAnd_c ( ExtNode_i * pFirst, ExtNode_i * pSecond, const ISphQwordSetup & tSetup ) : 
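// NOTE (sketch): query evaluation is a tree of these streamers; the query from
// the ExtQword_t comment above, "aa AND NOT bb", becomes roughly
//
//   ExtAndNot_c
//    |-- ExtTerm_c("aa")
//    '-- ExtTerm_c("bb")        // its ExtQword_t carries m_bExcluded
//
// and every node streams matches in fixed-size blocks through GetDocsChunk()
// and GetHitsChunk() (MAX_DOCS / MAX_HITS = 512 entries per chunk).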
ExtTwofer_c ( pFirst, pSecond, tSetup ) {} virtual const ExtDoc_t * GetDocsChunk ( SphDocID_t * pMaxID ); virtual const ExtHit_t * GetHitsChunk ( const ExtDoc_t * pDocs, SphDocID_t uMaxID ); void DebugDump ( int iLevel ) { DebugDumpT ( "ExtAnd", iLevel ); } }; /// A-or-B streamer class ExtOr_c : public ExtTwofer_c { public: ExtOr_c ( ExtNode_i * pFirst, ExtNode_i * pSecond, const ISphQwordSetup & tSetup ) : ExtTwofer_c ( pFirst, pSecond, tSetup ) {} virtual const ExtDoc_t * GetDocsChunk ( SphDocID_t * pMaxID ); virtual const ExtHit_t * GetHitsChunk ( const ExtDoc_t * pDocs, SphDocID_t uMaxID ); void DebugDump ( int iLevel ) { DebugDumpT ( "ExtOr", iLevel ); } }; /// A-and-not-B streamer class ExtAndNot_c : public ExtTwofer_c { public: ExtAndNot_c ( ExtNode_i * pFirst, ExtNode_i * pSecond, const ISphQwordSetup & tSetup ); virtual const ExtDoc_t * GetDocsChunk ( SphDocID_t * pMaxID ); virtual const ExtHit_t * GetHitsChunk ( const ExtDoc_t * pDocs, SphDocID_t uMaxID ); virtual void Reset ( const ISphQwordSetup & tSetup ); void DebugDump ( int iLevel ) { DebugDumpT ( "ExtAndNot", iLevel ); } protected: bool m_bPassthrough; }; /// generic operator over N nodes class ExtNWayT : public ExtNode_i { public: ExtNWayT ( const CSphVector & dNodes, DWORD uDupeMask, const XQNode_t & tNode, const ISphQwordSetup & tSetup ); ~ExtNWayT (); virtual void Reset ( const ISphQwordSetup & tSetup ); virtual void GetQwords ( ExtQwordsHash_t & hQwords ); virtual void SetQwordsIDF ( const ExtQwordsHash_t & hQwords ); virtual bool GotHitless () { return false; } protected: ExtNode_i * m_pNode; ///< my and-node for all the terms const ExtDoc_t * m_pDocs; ///< current docs chunk from and-node SphDocID_t m_uDocsMaxID; ///< max id in current docs chunk const ExtHit_t * m_pHits; ///< current hits chunk from and-node const ExtDoc_t * m_pDoc; ///< current doc from and-node const ExtHit_t * m_pHit; ///< current hit from and-node const ExtDoc_t * m_pMyDoc; ///< current doc for hits getter const ExtHit_t * m_pMyHit; ///< current hit for hits getter SphDocID_t m_uLastDocID; ///< last emitted hit ExtHit_t m_dMyHits[MAX_HITS]; ///< buffer for all my phrase hits; inherited m_dHits will receive filtered results SphDocID_t m_uMatchedDocid; ///< doc currently in process SphDocID_t m_uHitsOverFor; ///< there are no more hits for matches block starting with this ID protected: inline void ConstructNode ( const CSphVector & dNodes, const CSphVector & dPositions, const ISphQwordSetup & tSetup ) { assert ( m_pNode==NULL ); WORD uLPos = dPositions[0]; ExtNode_i * pCur = dNodes[uLPos++]; // ++ for zero-based to 1-based ExtAnd_c * pCurEx = NULL; DWORD uLeaves = dNodes.GetLength(); WORD uRPos; for ( DWORD i=1; iSetNodePos ( uLPos, uRPos ); uLPos = 0; } m_pNode = pCur; } }; struct ExtNodeTF_fn { bool IsLess ( ExtNode_i * pA, ExtNode_i * pB ) const { return pA->GetDocsCount() < pB->GetDocsCount(); } }; struct ExtNodeTFExt_fn { const CSphVector & m_dNodes; explicit ExtNodeTFExt_fn ( const CSphVector & dNodes ) : m_dNodes ( dNodes ) {} ExtNodeTFExt_fn ( const ExtNodeTFExt_fn & rhs ) : m_dNodes ( rhs.m_dNodes ) {} bool IsLess ( WORD uA, WORD uB ) const { return m_dNodes[uA]->GetDocsCount() < m_dNodes[uB]->GetDocsCount(); } private: const ExtNodeTFExt_fn & operator = ( const ExtNodeTFExt_fn & ) { return *this; } }; template < class FSM > class ExtNWay_c : public ExtNWayT, private FSM { public: ExtNWay_c ( const CSphVector & dNodes, DWORD uDupeMask, const XQNode_t & tNode, const ISphQwordSetup & tSetup ) : ExtNWayT ( dNodes, uDupeMask, tNode, tSetup ) 
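// ---- editor's note: illustrative sketch, not part of the original source ----
// ExtNodeTF_fn / ExtNodeTFExt_fn above order sub-nodes by ascending document
// count, and ConstructNode() then folds them into a chain of two-way AND
// nodes, so the rarest (cheapest) stream drives the intersection.  This is a
// simplified restatement of that pattern; Node, AndNode and BuildAndChain are
// hypothetical stand-ins for ExtNode_i, ExtAnd_c and ConstructNode.
#include <algorithm>
#include <cstddef>
#include <vector>

struct Node
{
    int m_iDocs = 0;                            // document frequency of this stream
    virtual ~Node () {}
    virtual int GetDocsCount () const { return m_iDocs; }
};

struct AndNode : Node
{
    Node * m_pLeft;
    Node * m_pRight;

    AndNode ( Node * pLeft, Node * pRight )
        : m_pLeft ( pLeft )
        , m_pRight ( pRight )
    {
        // an AND can never match more documents than its rarer child
        m_iDocs = std::min ( pLeft->GetDocsCount(), pRight->GetDocsCount() );
    }
};

static Node * BuildAndChain ( std::vector<Node*> dChildren )
{
    if ( dChildren.empty() )
        return nullptr;

    // rarest first, which is exactly what the TF sort functors are for
    std::sort ( dChildren.begin(), dChildren.end(),
        [] ( const Node * pA, const Node * pB ) { return pA->GetDocsCount()<pB->GetDocsCount(); } );

    // left-leaning chain: ((rarest AND next) AND next) ...
    Node * pCur = dChildren[0];
    for ( std::size_t i=1; i<dChildren.size(); i++ )
        pCur = new AndNode ( pCur, dChildren[i] );
    return pCur;
}
// ---- end editor's note ------------------------------------------------------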
, FSM ( dNodes, uDupeMask, tNode, tSetup ) { bool bTerms = FSM::bTermsTree; // workaround MSVC const condition warning CSphVector dPositions ( dNodes.GetLength() ); ARRAY_FOREACH ( i, dPositions ) dPositions[i] = (WORD) i; if ( bTerms ) dPositions.Sort ( ExtNodeTFExt_fn ( dNodes ) ); ConstructNode ( dNodes, dPositions, tSetup ); } public: virtual const ExtDoc_t * GetDocsChunk ( SphDocID_t * pMaxID ); virtual const ExtHit_t * GetHitsChunk ( const ExtDoc_t * pDocs, SphDocID_t uMaxID ); virtual void DebugDump ( int iLevel ) { DebugIndent ( iLevel ); printf ( "%s\n", FSM::GetName() ); m_pNode->DebugDump ( iLevel+1 ); } private: bool EmitTail ( int & iHit ); ///< the "trickiest part" extracted in order to process the proximity also }; class FSMphrase { protected: FSMphrase ( const CSphVector & dQwords, DWORD uDupeMask, const XQNode_t & tNode, const ISphQwordSetup & tSetup ); inline void ResetFSM() { m_uExpPos = 0; m_uExpQpos = 0; } bool HitFSM ( const ExtHit_t* pHit, ExtHit_t* dTarget ); inline static const char* GetName() { return "ExtPhrase"; } static const bool bTermsTree = true; ///< we work with ExtTerm nodes protected: DWORD m_uExpQpos; CSphVector m_dQposDelta; ///< next expected qpos delta for each existing qpos (for skipped stopwords case) DWORD m_uMinQpos; DWORD m_uMaxQpos; DWORD m_uExpPos; DWORD m_uLeaves; ///< number of keywords (might be different from qpos delta because of stops and overshorts) }; /// exact phrase streamer typedef ExtNWay_c < FSMphrase > ExtPhrase_c; /// proximity streamer class FSMproximity { protected: FSMproximity ( const CSphVector & dQwords, DWORD uDupeMask, const XQNode_t & tNode, const ISphQwordSetup & tSetup ); inline void ResetFSM() { m_uExpPos = 0; m_uWords = 0; m_iMinQindex = -1; ARRAY_FOREACH ( i, m_dProx ) m_dProx[i] = UINT_MAX; } bool HitFSM ( const ExtHit_t* pHit, ExtHit_t* dTarget ); inline static const char* GetName() { return "ExtProximity"; } static const bool bTermsTree = true; ///< we work with ExtTerm nodes protected: int m_iMaxDistance; DWORD m_uWordsExpected; DWORD m_uMinQpos; DWORD m_uQLen; DWORD m_uExpPos; CSphVector m_dProx; // proximity hit position for i-th word CSphVector m_dDeltas; // used for weight calculation DWORD m_uWords; int m_iMinQindex; }; /// exact phrase streamer typedef ExtNWay_c ExtProximity_c; /// proximity streamer class FSMmultinear { protected: FSMmultinear ( const CSphVector & dNodes, DWORD uDupeMask, const XQNode_t & tNode, const ISphQwordSetup & tSetup ); inline void ResetFSM() { m_iRing = m_uLastP = m_uPrelastP = 0; } bool HitFSM ( const ExtHit_t* pHit, ExtHit_t* dTarget ); inline static const char* GetName() { return "ExtMultinear"; } static const bool bTermsTree = true; ///< we work with generic (not just ExtTerm) nodes protected: int m_iNear; ///< the NEAR distance DWORD m_uPrelastP; DWORD m_uPrelastML; DWORD m_uPrelastSL; DWORD m_uPrelastW; DWORD m_uLastP; ///< position of the last hit DWORD m_uLastML; ///< the length of the previous hit DWORD m_uLastSL; ///< the length of the previous hit in Query DWORD m_uLastW; ///< last weight DWORD m_uWordsExpected; ///< now many hits we're expect DWORD m_uWeight; ///< weight accum DWORD m_uFirstHit; ///< hitpos of the beginning of the match chain WORD m_uFirstNpos; ///< N-position of the head of the chain WORD m_uFirstQpos; ///< Q-position of the head of the chain (for twofers) CSphVector m_dNpos; ///< query positions for multinear CSphVector m_dRing; ///< ring buffer for multihit data int m_iRing; ///< the head of the ring bool m_bTwofer; ///< if we have 2- or N-way 
NEAR private: inline int RingTail() const { return ( m_iRing + m_dNpos.GetLength() - 1 ) % m_uWordsExpected; } inline void Add2Ring ( const ExtHit_t* pHit ) { if ( !m_bTwofer ) m_dRing [ RingTail() ] = *pHit; } inline void ShiftRing() { if ( ++m_iRing==(int)m_uWordsExpected ) m_iRing=0; } }; /// exact phrase streamer typedef ExtNWay_c ExtMultinear_c; /// quorum streamer class ExtQuorum_c : public ExtNode_i { public: ExtQuorum_c ( CSphVector & dQwords, DWORD uDupeMask, const XQNode_t & tNode, const ISphQwordSetup & tSetup ); virtual ~ExtQuorum_c (); virtual void Reset ( const ISphQwordSetup & tSetup ); virtual const ExtDoc_t * GetDocsChunk ( SphDocID_t * pMaxID ); virtual const ExtHit_t * GetHitsChunk ( const ExtDoc_t * pDocs, SphDocID_t uMaxID ); virtual void GetQwords ( ExtQwordsHash_t & hQwords ); virtual void SetQwordsIDF ( const ExtQwordsHash_t & hQwords ); virtual bool GotHitless () { return false; } protected: int m_iThresh; ///< keyword count threshold CSphVector m_dChildren; ///< my children nodes (simply ExtTerm_c for now) CSphVector m_pCurDoc; ///< current positions into children doclists CSphVector m_pCurHit; ///< current positions into children hitlists DWORD m_uMask; ///< mask of nodes that count toward threshold DWORD m_uMaskEnd; ///< index of the last bit in mask bool m_bDone; ///< am i done SphDocID_t m_uMatchedDocid; ///< current docid for hitlist emission private: DWORD m_uInitialMask; ///< backup mask for Reset() CSphVector m_dInitialChildren; ///< my children nodes (simply ExtTerm_c for now) }; /// A-B-C-in-this-order streamer class ExtOrder_c : public ExtNode_i { public: ExtOrder_c ( const CSphVector & dChildren, const ISphQwordSetup & tSetup ); ~ExtOrder_c (); virtual void Reset ( const ISphQwordSetup & tSetup ); virtual const ExtDoc_t * GetDocsChunk ( SphDocID_t * pMaxID ); virtual const ExtHit_t * GetHitsChunk ( const ExtDoc_t * pDocs, SphDocID_t ); virtual void GetQwords ( ExtQwordsHash_t & hQwords ); virtual void SetQwordsIDF ( const ExtQwordsHash_t & hQwords ); virtual bool GotHitless () { return false; } protected: CSphVector m_dChildren; CSphVector m_pDocsChunk; ///< last document chunk (for hit fetching) CSphVector m_pDocs; ///< current position in document chunk CSphVector m_pHits; ///< current position in hits chunk CSphVector m_dMaxID; ///< max DOCID from the last chunk ExtHit_t m_dMyHits[MAX_HITS]; ///< buffer for all my phrase hits; inherited m_dHits will receive filtered results bool m_bDone; SphDocID_t m_uHitsOverFor; protected: int GetNextHit ( SphDocID_t uDocid ); ///< get next hit within given document, and return its child-id int GetMatchingHits ( SphDocID_t uDocid, ExtHit_t * pHitbuf, int iLimit ); ///< process candidate hits and stores actual matches while we can }; /// same-text-unit streamer /// (aka, A and B within same sentence, or same paragraph) class ExtUnit_c : public ExtNode_i { public: ExtUnit_c ( ExtNode_i * pFirst, ExtNode_i * pSecond, const CSphSmallBitvec& dFields, const ISphQwordSetup & tSetup, const char * sUnit ); ~ExtUnit_c (); virtual const ExtDoc_t * GetDocsChunk ( SphDocID_t * pMaxID ); virtual const ExtHit_t * GetHitsChunk ( const ExtDoc_t * pDocs, SphDocID_t uMaxID ); virtual void Reset ( const ISphQwordSetup & tSetup ); virtual void GetQwords ( ExtQwordsHash_t & hQwords ); virtual void SetQwordsIDF ( const ExtQwordsHash_t & hQwords ); public: virtual bool GotHitless () { return false; } virtual void DebugDump ( int iLevel ) { DebugIndent ( iLevel ); printf ( "ExtSentence\n" ); m_pArg1->DebugDump ( iLevel+1 ); 
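// ---- editor's note: illustrative sketch, not part of the original source ----
// ExtQuorum_c above matches a document when at least m_iThresh of its child
// keyword streams contain that document, and m_uMask marks which children may
// count toward the threshold (duplicate keywords are masked out).  The 32-word
// quorum limit mentioned later in this file comes from tracking children in a
// single 32-bit mask, as in this simplified counting step; the function and
// parameter names here are hypothetical.
#include <cstdint>
#include <vector>

static bool QuorumMatches ( uint64_t uCandidateDoc,
                            const std::vector<uint64_t> & dChildCurDoc,   // current docid per child stream
                            uint32_t uCountMask,                          // children allowed to count (dupes masked out)
                            int iThresh )
{
    int iHave = 0;
    for ( std::size_t i=0; i<dChildCurDoc.size() && i<32; i++ )
        if ( ( uCountMask & ( 1u<<i ) ) && dChildCurDoc[i]==uCandidateDoc )
            iHave++;
    return iHave>=iThresh;
}
// ---- end editor's note ------------------------------------------------------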
m_pArg2->DebugDump ( iLevel+1 ); } protected: inline const ExtDoc_t * ReturnDocsChunk ( int iDocs, int iMyHit, SphDocID_t * pMaxID ) { assert ( iMyHitGetQwords ( hQwords ); } virtual void SetQwordsIDF ( const ExtQwordsHash_t & hQwords ); virtual bool InitState ( const CSphQueryContext &, CSphString & ) { return true; } public: // FIXME? hide and friend? virtual SphZoneHit_e IsInZone ( int iZone, const ExtHit_t * pHit ); public: CSphMatch m_dMatches[ExtNode_i::MAX_DOCS]; ///< exposed for caller DWORD m_uPayloadMask; ///< exposed for ranker state functors int m_iQwords; ///< exposed for ranker state functors int m_iMaxQuerypos; ///< exposed for ranker state functors protected: int m_iInlineRowitems; ExtNode_i * m_pRoot; const ExtDoc_t * m_pDoclist; const ExtHit_t * m_pHitlist; SphDocID_t m_uMaxID; ExtDoc_t m_dMyDocs[ExtNode_i::MAX_DOCS]; ///< my local documents pool; for filtering CSphMatch m_dMyMatches[ExtNode_i::MAX_DOCS]; ///< my local matches pool; for filtering CSphMatch m_tTestMatch; const CSphIndex * m_pIndex; ///< this is he who'll do my filtering! CSphQueryContext * m_pCtx; protected: CSphVector m_dZones; CSphVector m_dZoneStartTerm; CSphVector m_dZoneEndTerm; CSphVector m_dZoneStart; CSphVector m_dZoneEnd; CSphVector m_dZoneMax; ///< last docid we (tried) to cache CSphVector m_dZoneMin; ///< first docid we (tried) to cache ZoneHash_c m_hZoneInfo; }; STATIC_ASSERT ( ( 8*8*sizeof(DWORD) )>=SPH_MAX_FIELDS, PAYLOAD_MASK_OVERFLOW ); static const bool WITH_BM25 = true; template < bool USE_BM25 = false > class ExtRanker_WeightSum_c : public ExtRanker_c { protected: int m_iWeights; const int * m_pWeights; public: ExtRanker_WeightSum_c ( const XQQuery_t & tXQ, const ISphQwordSetup & tSetup ) : ExtRanker_c ( tXQ, tSetup ) {} virtual int GetMatches (); virtual bool InitState ( const CSphQueryContext & tCtx, CSphString & ) { m_iWeights = tCtx.m_iWeights; m_pWeights = tCtx.m_dWeights; return true; } }; class ExtRanker_None_c : public ExtRanker_c { public: ExtRanker_None_c ( const XQQuery_t & tXQ, const ISphQwordSetup & tSetup ) : ExtRanker_c ( tXQ, tSetup ) {} virtual int GetMatches (); }; template < typename STATE > class ExtRanker_T : public ExtRanker_c { protected: STATE m_tState; public: ExtRanker_T ( const XQQuery_t & tXQ, const ISphQwordSetup & tSetup ) : ExtRanker_c ( tXQ, tSetup ) {} virtual int GetMatches (); virtual bool InitState ( const CSphQueryContext & tCtx, CSphString & sError ) { return m_tState.Init ( tCtx.m_iWeights, &tCtx.m_dWeights[0], this, sError ); } }; ////////////////////////////////////////////////////////////////////////// static inline void CopyExtDocinfo ( ExtDoc_t & tDst, const ExtDoc_t & tSrc, CSphRowitem ** ppRow, int iStride ) { if ( tSrc.m_pDocinfo ) { assert ( ppRow && *ppRow ); memcpy ( *ppRow, tSrc.m_pDocinfo, iStride*sizeof(CSphRowitem) ); tDst.m_pDocinfo = *ppRow; *ppRow += iStride; } else tDst.m_pDocinfo = NULL; } static inline void CopyExtDoc ( ExtDoc_t & tDst, const ExtDoc_t & tSrc, CSphRowitem ** ppRow, int iStride ) { tDst = tSrc; CopyExtDocinfo ( tDst, tSrc, ppRow, iStride ); } ExtNode_i::ExtNode_i () : m_iAtomPos(0) , m_uMaxID(0) , m_iStride(0) , m_pDocinfo(NULL) { m_dDocs[0].m_uDocid = DOCID_MAX; m_dHits[0].m_uDocid = DOCID_MAX; } static ISphQword * CreateQueryWord ( const XQKeyword_t & tWord, const ISphQwordSetup & tSetup ) { BYTE sTmp [ 3*SPH_MAX_WORD_LEN + 16 ]; strncpy ( (char*)sTmp, tWord.m_sWord.cstr(), sizeof(sTmp) ); sTmp[sizeof(sTmp)-1] = '\0'; ISphQword * pWord = tSetup.QwordSpawn ( tWord ); pWord->m_sWord = tWord.m_sWord; 
pWord->m_iWordID = tSetup.m_pDict->GetWordID ( sTmp ); pWord->m_sDictWord = (char*)sTmp; pWord->m_bExpanded = tWord.m_bExpanded; tSetup.QwordSetup ( pWord ); if ( tWord.m_bFieldStart && tWord.m_bFieldEnd ) pWord->m_iTermPos = TERM_POS_FIELD_STARTEND; else if ( tWord.m_bFieldStart ) pWord->m_iTermPos = TERM_POS_FIELD_START; else if ( tWord.m_bFieldEnd ) pWord->m_iTermPos = TERM_POS_FIELD_END; else pWord->m_iTermPos = 0; pWord->m_iAtomPos = tWord.m_iAtomPos; return pWord; } static bool KeywordsEqual ( const XQNode_t * pA, const XQNode_t * pB ) { // we expected a keyword here but got composite node; lets drill down until first real keyword while ( pA->m_dChildren.GetLength() ) pA = pA->m_dChildren[0]; while ( pB->m_dChildren.GetLength() ) pB = pB->m_dChildren[0]; // actually check keywords assert ( pA->m_dWords.GetLength() ); assert ( pB->m_dWords.GetLength() ); return pA->m_dWords[0].m_sWord==pB->m_dWords[0].m_sWord; } static DWORD CalcDupeMask ( const CSphVector & dChildren ) { DWORD uDupeMask = 0; ARRAY_FOREACH ( i, dChildren ) { int iValue = 1; for ( int j = i+1; j & dQwordsHit ) { DWORD uDupeMask = 0; ARRAY_FOREACH ( i, dQwordsHit ) { int iValue = 1; for ( int j = i + 1; j < dQwordsHit.GetLength(); j++ ) if ( dQwordsHit[i]->m_iWordID==dQwordsHit[j]->m_iWordID ) { iValue = 0; break; } uDupeMask |= iValue << i; } return uDupeMask; } template < typename T, bool NEED_MASK > static ExtNode_i * CreateMultiNode ( const XQNode_t * pQueryNode, const ISphQwordSetup & tSetup, bool bNeedsHitlist ) { /////////////////////////////////// // virtually plain (expanded) node /////////////////////////////////// if ( pQueryNode->m_dChildren.GetLength() ) { CSphVector dNodes; ARRAY_FOREACH ( i, pQueryNode->m_dChildren ) { dNodes.Add ( ExtNode_i::Create ( pQueryNode->m_dChildren[i], tSetup ) ); assert ( dNodes.Last()->m_iAtomPos>=0 ); } // compute dupe mask (needed for quorum only) // FIXME! this check will fail with wordforms and stuff; sorry, no wordforms vs expand vs quorum support for now! DWORD uDupeMask = NEED_MASK ? CalcDupeMask ( pQueryNode->m_dChildren ) : 0; ExtNode_i * pResult = new T ( dNodes, uDupeMask, *pQueryNode, tSetup ); if ( pQueryNode->GetCount() ) return tSetup.m_pNodeCache->CreateProxy ( pResult, pQueryNode, tSetup ); return pResult; // FIXME! sorry, no hitless vs expand vs phrase support for now! 
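// ---- editor's note: illustrative sketch, not part of the original source ----
// Stand-alone restatement of the CalcDupeMask() idea used just above: walk the
// children and set bit i only when child i has no later duplicate with the
// same word id, so each distinct keyword contributes exactly one bit toward
// the quorum threshold.  uint64_t word ids stand in for the original word id type.
#include <cstdint>
#include <vector>

static uint32_t CalcDupeMaskSketch ( const std::vector<uint64_t> & dWordIDs )
{
    uint32_t uDupeMask = 0;
    for ( std::size_t i=0; i<dWordIDs.size() && i<32; i++ )
    {
        int iValue = 1;
        for ( std::size_t j=i+1; j<dWordIDs.size(); j++ )
            if ( dWordIDs[i]==dWordIDs[j] )
            {
                iValue = 0;             // a later duplicate will carry the bit instead
                break;
            }
        uDupeMask |= uint32_t ( iValue )<<i;
    }
    return uDupeMask;
}
// ---- end editor's note ------------------------------------------------------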
} ////////////////////// // regular plain node ////////////////////// ExtNode_i * pResult = NULL; CSphVector dQwordsHit; // have hits CSphVector dQwords; // don't have hits // partition phrase words const CSphVector & dWords = pQueryNode->m_dWords; ARRAY_FOREACH ( i, dWords ) { ISphQword * pWord = CreateQueryWord ( dWords[i], tSetup ); if ( pWord->m_bHasHitlist || !bNeedsHitlist ) dQwordsHit.Add ( pWord ); else dQwords.Add ( pWord ); } // see if we can create the node if ( dQwordsHit.GetLength()<2 ) { ARRAY_FOREACH ( i, dQwords ) SafeDelete ( dQwords[i] ); ARRAY_FOREACH ( i, dQwordsHit ) SafeDelete ( dQwordsHit[i] ); if ( tSetup.m_pWarning ) tSetup.m_pWarning->SetSprintf ( "can't create phrase node, hitlists unavailable (hitlists=%d, nodes=%d)", dQwordsHit.GetLength(), dWords.GetLength() ); return NULL; } else { // at least two words have hitlists, creating phrase node assert ( pQueryNode ); assert ( pQueryNode->m_dWords.GetLength() ); assert ( pQueryNode->GetOp()==SPH_QUERY_PHRASE || pQueryNode->GetOp()==SPH_QUERY_PROXIMITY || pQueryNode->GetOp()==SPH_QUERY_QUORUM ); // create nodes CSphVector dNodes; ARRAY_FOREACH ( i, dQwordsHit ) { dNodes.Add ( ExtNode_i::Create ( dQwordsHit[i], pQueryNode, tSetup ) ); dNodes.Last()->m_iAtomPos = dQwordsHit[i]->m_iAtomPos; } // compute dupe mask (needed for quorum only) DWORD uDupeMask = NEED_MASK ? CalcDupeMask ( dQwordsHit ) : 0; pResult = new T ( dNodes, uDupeMask, *pQueryNode, tSetup ); } // AND result with the words that had no hitlist if ( dQwords.GetLength() ) { ExtNode_i * pNode = ExtNode_i::Create ( dQwords[0], pQueryNode, tSetup ); for ( int i=1; iGetCount() ) return tSetup.m_pNodeCache->CreateProxy ( pResult, pQueryNode, tSetup ); return pResult; } static ExtNode_i * CreateOrderNode ( const XQNode_t * pNode, const ISphQwordSetup & tSetup ) { if ( pNode->m_dChildren.GetLength()<2 ) { if ( tSetup.m_pWarning ) tSetup.m_pWarning->SetSprintf ( "order node requires at least two children" ); return NULL; } CSphVector dChildren; ARRAY_FOREACH ( i, pNode->m_dChildren ) { ExtNode_i * pChild = ExtNode_i::Create ( pNode->m_dChildren[i], tSetup ); if ( pChild->GotHitless() ) { if ( tSetup.m_pWarning ) tSetup.m_pWarning->SetSprintf ( "failed to create order node, hitlist unavailable" ); ARRAY_FOREACH ( j, dChildren ) SafeDelete ( dChildren[j] ); return NULL; } dChildren.Add ( pChild ); } ExtNode_i * pResult = new ExtOrder_c ( dChildren, tSetup ); if ( pNode->GetCount() ) return tSetup.m_pNodeCache->CreateProxy ( pResult, pNode, tSetup ); return pResult; } ExtNode_i * ExtNode_i::Create ( const XQKeyword_t & tWord, const XQNode_t * pNode, const ISphQwordSetup & tSetup ) { return Create ( CreateQueryWord ( tWord, tSetup ), pNode, tSetup ); }; ExtNode_i * ExtNode_i::Create ( ISphQword * pQword, const XQNode_t * pNode, const ISphQwordSetup & tSetup ) { assert ( pQword ); if ( pNode->m_dSpec.m_iFieldMaxPos ) pQword->m_iTermPos = TERM_POS_FIELD_LIMIT; if ( pNode->m_dSpec.m_dZones.GetLength() ) pQword->m_iTermPos = TERM_POS_ZONES; if ( !pQword->m_bHasHitlist ) { if ( tSetup.m_pWarning && pQword->m_iTermPos ) tSetup.m_pWarning->SetSprintf ( "hitlist unavailable, position limit ignored" ); return new ExtTermHitless_c ( pQword, pNode->m_dSpec.m_dFieldMask, tSetup, pNode->m_bNotWeighted ); } switch ( pQword->m_iTermPos ) { case TERM_POS_FIELD_STARTEND: return new ExtTermPos_c ( pQword, pNode, tSetup ); case TERM_POS_FIELD_START: return new ExtTermPos_c ( pQword, pNode, tSetup ); case TERM_POS_FIELD_END: return new ExtTermPos_c ( pQword, pNode, tSetup ); case 
TERM_POS_FIELD_LIMIT: return new ExtTermPos_c ( pQword, pNode, tSetup ); case TERM_POS_ZONES: return new ExtTermPos_c ( pQword, pNode, tSetup ); default: return new ExtTerm_c ( pQword, pNode->m_dSpec.m_dFieldMask, tSetup, pNode->m_bNotWeighted ); } } ExtNode_i * ExtNode_i::Create ( const XQNode_t * pNode, const ISphQwordSetup & tSetup ) { // empty node? if ( pNode->IsEmpty() ) return NULL; if ( pNode->m_dWords.GetLength() || pNode->m_bVirtuallyPlain ) { const int iWords = pNode->m_bVirtuallyPlain ? pNode->m_dChildren.GetLength() : pNode->m_dWords.GetLength(); if ( iWords==1 ) { if ( pNode->m_bVirtuallyPlain ) return Create ( pNode->m_dChildren[0], tSetup ); else return Create ( pNode->m_dWords[0], pNode, tSetup ); } switch ( pNode->GetOp() ) { case SPH_QUERY_PHRASE: return CreateMultiNode ( pNode, tSetup, true ); case SPH_QUERY_PROXIMITY: return CreateMultiNode ( pNode, tSetup, true ); case SPH_QUERY_NEAR: return CreateMultiNode ( pNode, tSetup, true ); case SPH_QUERY_QUORUM: { assert ( pNode->m_dWords.GetLength()==0 || pNode->m_dChildren.GetLength()==0 ); int iQuorumCount = pNode->m_dWords.GetLength()+pNode->m_dChildren.GetLength(); if ( pNode->m_iOpArg>=iQuorumCount ) { // threshold is too high if ( tSetup.m_pWarning ) tSetup.m_pWarning->SetSprintf ( "quorum threshold too high (words=%d, thresh=%d); replacing quorum operator with AND operator", iQuorumCount, pNode->m_iOpArg ); } else if ( iQuorumCount>32 ) { // right now quorum can only handle 32 words if ( tSetup.m_pWarning ) tSetup.m_pWarning->SetSprintf ( "too many words (%d) for quorum; replacing with an AND", iQuorumCount ); } else // everything is ok; create quorum node return CreateMultiNode ( pNode, tSetup, false ); // couldn't create quorum, make an AND node instead CSphVector dTerms; dTerms.Reserve ( iQuorumCount ); ARRAY_FOREACH ( i, pNode->m_dWords ) dTerms.Add ( Create ( pNode->m_dWords[i], pNode, tSetup ) ); ARRAY_FOREACH ( i, pNode->m_dChildren ) dTerms.Add ( Create ( pNode->m_dChildren[i], tSetup ) ); // make not simple, but optimized AND node. dTerms.Sort ( ExtNodeTF_fn() ); ExtNode_i * pCur = dTerms[0]; for ( int i=1; iGetCount() ) return tSetup.m_pNodeCache->CreateProxy ( pCur, pNode, tSetup ); return pCur; } default: assert ( 0 && "unexpected plain node type" ); return NULL; } } else { int iChildren = pNode->m_dChildren.GetLength (); assert ( iChildren>0 ); // special case, operator BEFORE if ( pNode->GetOp()==SPH_QUERY_BEFORE ) return CreateOrderNode ( pNode, tSetup ); // special case, AND over terms (internally reordered for speed) bool bAndTerms = ( pNode->GetOp()==SPH_QUERY_AND ); for ( int i=0; im_dChildren[i]; bAndTerms = ( pChildren->m_dWords.GetLength()==1 ); } if ( bAndTerms ) { CSphVector dTerms; for ( int i=0; im_dChildren[i]; dTerms.Add ( ExtNode_i::Create ( pChild, tSetup ) ); } dTerms.Sort ( ExtNodeTF_fn() ); ExtNode_i * pCur = dTerms[0]; for ( int i=1; iGetCount() ) return tSetup.m_pNodeCache->CreateProxy ( pCur, pNode, tSetup ); return pCur; } // Multinear could be also non-plain, so here is the second entry for it. 
if ( pNode->GetOp()==SPH_QUERY_NEAR ) return CreateMultiNode ( pNode, tSetup, true ); // generic create ExtNode_i * pCur = NULL; for ( int i=0; im_dChildren[i], tSetup ); if ( !pNext ) continue; if ( !pCur ) { pCur = pNext; continue; } switch ( pNode->GetOp() ) { case SPH_QUERY_OR: pCur = new ExtOr_c ( pCur, pNext, tSetup ); break; case SPH_QUERY_AND: pCur = new ExtAnd_c ( pCur, pNext, tSetup ); break; case SPH_QUERY_ANDNOT: pCur = new ExtAndNot_c ( pCur, pNext, tSetup ); break; case SPH_QUERY_SENTENCE: pCur = new ExtUnit_c ( pCur, pNext, pNode->m_dSpec.m_dFieldMask, tSetup, MAGIC_WORD_SENTENCE ); break; case SPH_QUERY_PARAGRAPH: pCur = new ExtUnit_c ( pCur, pNext, pNode->m_dSpec.m_dFieldMask, tSetup, MAGIC_WORD_PARAGRAPH ); break; default: assert ( 0 && "internal error: unhandled op in ExtNode_i::Create()" ); break; } } if ( pNode->GetCount() ) return tSetup.m_pNodeCache->CreateProxy ( pCur, pNode, tSetup ); return pCur; } } ////////////////////////////////////////////////////////////////////////// ExtTerm_c::ExtTerm_c ( ISphQword * pQword, const CSphSmallBitvec & dFields, const ISphQwordSetup & tSetup, bool bNotWeighted ) : m_pQword ( pQword ) , m_pWarning ( tSetup.m_pWarning ) , m_bNotWeighted ( bNotWeighted ) { m_iAtomPos = pQword->m_iAtomPos; m_pHitDoc = NULL; m_uHitsOverFor = 0; m_dQueriedFields = dFields; m_bHasWideFields = false; if ( tSetup.m_pIndex && tSetup.m_pIndex->GetMatchSchema().m_dFields.GetLength()>32 ) for ( int i=1; i<8; i++ ) if ( m_dQueriedFields.m_dFieldsMask[i] ) m_bHasWideFields = true; m_iMaxTimer = tSetup.m_iMaxTimer; AllocDocinfo ( tSetup ); } ExtTerm_c::ExtTerm_c ( ISphQword * pQword, const ISphQwordSetup & tSetup ) : m_pQword ( pQword ) , m_pWarning ( tSetup.m_pWarning ) , m_bNotWeighted ( true ) { m_iAtomPos = pQword->m_iAtomPos; m_pHitDoc = NULL; m_uHitsOverFor = 0; m_dQueriedFields.Set(); m_bHasWideFields = tSetup.m_pIndex && ( tSetup.m_pIndex->GetMatchSchema().m_dFields.GetLength()>32 ); m_iMaxTimer = tSetup.m_iMaxTimer; AllocDocinfo ( tSetup ); } void ExtTerm_c::Reset ( const ISphQwordSetup & tSetup ) { m_pHitDoc = NULL; m_uHitsOverFor = 0; m_iMaxTimer = tSetup.m_iMaxTimer; m_pQword->Reset (); tSetup.QwordSetup ( m_pQword ); } const ExtDoc_t * ExtTerm_c::GetDocsChunk ( SphDocID_t * pMaxID ) { if ( !m_pQword->m_iDocs ) return NULL; m_uMaxID = 0; // max_query_time if ( m_iMaxTimer>0 && sphMicroTimer()>=m_iMaxTimer ) { if ( m_pWarning ) *m_pWarning = "query time exceeded max_query_time"; return NULL; } // interrupt by sitgerm if ( m_bInterruptNow ) { if ( m_pWarning ) *m_pWarning = "Server shutdown in progress"; return NULL; } int iDoc = 0; CSphRowitem * pDocinfo = m_pDocinfo; while ( iDocGetNextDoc ( pDocinfo ); if ( !tMatch.m_iDocID ) { m_pQword->m_iDocs = 0; break; } if ( !m_bHasWideFields ) { // fields 0-31 can be quickly checked right here, right now if (!( m_pQword->m_dQwordFields.m_dFieldsMask[0] & m_dQueriedFields.m_dFieldsMask[0] )) continue; } else { // fields 32+ need to be checked with CollectHitMask() and stuff m_pQword->CollectHitMask(); if (!( m_pQword->m_dQwordFields.Test ( m_dQueriedFields ) )) continue; } ExtDoc_t & tDoc = m_dDocs[iDoc++]; tDoc.m_uDocid = tMatch.m_iDocID; tDoc.m_pDocinfo = pDocinfo; tDoc.m_uHitlistOffset = m_pQword->m_iHitlistPos; tDoc.m_uDocFields = m_pQword->m_dQwordFields.GetMask32() & m_dQueriedFields.GetMask32(); // OPTIMIZE: only needed for phrase node tDoc.m_fTFIDF = float(m_pQword->m_uMatchHits) / float(m_pQword->m_uMatchHits+SPH_BM25_K1) * m_fIDF; pDocinfo += m_iStride; } m_pHitDoc = NULL; return ReturnDocsChunk ( 
iDoc, pMaxID ); } const ExtHit_t * ExtTerm_c::GetHitsChunk ( const ExtDoc_t * pMatched, SphDocID_t uMaxID ) { if ( !pMatched ) return NULL; SphDocID_t uFirstMatch = pMatched->m_uDocid; // aim to the right document ExtDoc_t * pDoc = m_pHitDoc; m_pHitDoc = NULL; if ( !pDoc ) { // if we already emitted hits for this matches block, do not do that again if ( uFirstMatch==m_uHitsOverFor ) return NULL; // early reject whole block if ( pMatched->m_uDocid > m_uMaxID ) return NULL; if ( m_uMaxID && m_dDocs[0].m_uDocid > uMaxID ) return NULL; // find match pDoc = m_dDocs; do { while ( pDoc->m_uDocid < pMatched->m_uDocid ) pDoc++; if ( pDoc->m_uDocid==DOCID_MAX ) { m_uHitsOverFor = uFirstMatch; return NULL; // matched docs block is over for me, gimme another one } while ( pMatched->m_uDocid < pDoc->m_uDocid ) pMatched++; if ( pMatched->m_uDocid==DOCID_MAX ) { m_uHitsOverFor = uFirstMatch; return NULL; // matched doc block did not yet begin for me, gimme another one } } while ( pDoc->m_uDocid!=pMatched->m_uDocid ); // setup hitlist reader m_pQword->SeekHitlist ( pDoc->m_uHitlistOffset ); } // hit emission int iHit = 0; while ( iHitGetNextHit(); if ( uHit==EMPTY_HIT ) { // no more hits; get next acceptable document pDoc++; do { while ( pDoc->m_uDocid < pMatched->m_uDocid ) pDoc++; if ( pDoc->m_uDocid==DOCID_MAX ) { pDoc = NULL; break; } // matched docs block is over for me, gimme another one while ( pMatched->m_uDocid < pDoc->m_uDocid ) pMatched++; if ( pMatched->m_uDocid==DOCID_MAX ) { pDoc = NULL; break; } // matched doc block did not yet begin for me, gimme another one } while ( pDoc->m_uDocid!=pMatched->m_uDocid ); if ( !pDoc ) break; assert ( pDoc->m_uDocid==pMatched->m_uDocid ); // setup hitlist reader m_pQword->SeekHitlist ( pDoc->m_uHitlistOffset ); continue; } if (!( m_dQueriedFields.Test ( HITMAN::GetField ( uHit ) ) )) continue; ExtHit_t & tHit = m_dHits[iHit++]; tHit.m_uDocid = pDoc->m_uDocid; tHit.m_uHitpos = uHit; tHit.m_uQuerypos = (WORD) m_iAtomPos; // assume less that 64K words per query tHit.m_uWeight = tHit.m_uMatchlen = tHit.m_uSpanlen = 1; } m_pHitDoc = pDoc; if ( iHit==0 || iHit=0 && iHitm_sWord ) ) return; m_fIDF = -1.0f; ExtQword_t tInfo; tInfo.m_sWord = m_pQword->m_sWord; tInfo.m_sDictWord = m_pQword->m_sDictWord; tInfo.m_iDocs = m_pQword->m_iDocs; tInfo.m_iHits = m_pQword->m_iHits; tInfo.m_iQueryPos = m_pQword->m_iAtomPos; tInfo.m_fIDF = -1.0f; // suppress gcc 4.2.3 warning tInfo.m_bExpanded = m_pQword->m_bExpanded; tInfo.m_bExcluded = m_pQword->m_bExcluded; hQwords.Add ( tInfo, m_pQword->m_sWord ); } void ExtTerm_c::SetQwordsIDF ( const ExtQwordsHash_t & hQwords ) { if ( m_fIDF<0.0f ) { assert ( hQwords ( m_pQword->m_sWord ) ); m_fIDF = hQwords ( m_pQword->m_sWord )->m_fIDF; } } ////////////////////////////////////////////////////////////////////////// const ExtHit_t * ExtTermHitless_c::GetHitsChunk ( const ExtDoc_t * pMatched, SphDocID_t uMaxID ) { if ( !pMatched ) return NULL; SphDocID_t uFirstMatch = pMatched->m_uDocid; // aim to the right document ExtDoc_t * pDoc = m_pHitDoc; m_pHitDoc = NULL; if ( !pDoc ) { // if we already emitted hits for this matches block, do not do that again if ( uFirstMatch==m_uHitsOverFor ) return NULL; // early reject whole block if ( pMatched->m_uDocid > m_uMaxID ) return NULL; if ( m_uMaxID && m_dDocs[0].m_uDocid > uMaxID ) return NULL; // find match pDoc = m_dDocs; do { while ( pDoc->m_uDocid < pMatched->m_uDocid ) pDoc++; if ( pDoc->m_uDocid==DOCID_MAX ) { m_uHitsOverFor = uFirstMatch; return NULL; // matched docs block is over for me, gimme 
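// ---- editor's note: illustrative sketch, not part of the original source ----
// The per-document partial weight that ExtTerm_c::GetDocsChunk() stores into
// ExtDoc_t::m_fTFIDF above is a BM25-style saturation of the in-document term
// frequency scaled by the term IDF: tf/(tf+k1) * idf.  k1 is a parameter here
// rather than the SPH_BM25_K1 constant used by the original code, and the
// function name is hypothetical.
static float TermPartialWeight ( int iMatchHits, float fIDF, float fK1 )
{
    float fTF = float ( iMatchHits );
    return fTF / ( fTF + fK1 ) * fIDF;      // grows with tf but saturates toward fIDF
}
// usage (hypothetical): tDoc.m_fTFIDF = TermPartialWeight ( iMatchHits, fIDF, fK1 );
// ---- end editor's note ------------------------------------------------------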
another one } while ( pMatched->m_uDocid < pDoc->m_uDocid ) pMatched++; if ( pMatched->m_uDocid==DOCID_MAX ) { m_uHitsOverFor = uFirstMatch; return NULL; // matched doc block did not yet begin for me, gimme another one } } while ( pDoc->m_uDocid!=pMatched->m_uDocid ); m_uFieldPos = 0; } // hit emission int iHit = 0; for ( ;; ) { if ( ( m_uFieldPos<32 && ( pDoc->m_uDocFields & ( 1 << m_uFieldPos ) ) ) // not necessary && m_dQueriedFields.Test ( m_uFieldPos ) ) { // emit hit ExtHit_t & tHit = m_dHits[iHit++]; tHit.m_uDocid = pDoc->m_uDocid; tHit.m_uHitpos = HITMAN::Create ( m_uFieldPos, -1 ); tHit.m_uQuerypos = (WORD) m_iAtomPos; tHit.m_uWeight = tHit.m_uMatchlen = tHit.m_uSpanlen = 1; if ( iHit==MAX_HITS-1 ) break; } if ( m_uFieldPos < CSphSmallBitvec::iTOTALBITS-1 ) { m_uFieldPos++; continue; } // field mask is empty, get next document pDoc++; do { while ( pDoc->m_uDocid < pMatched->m_uDocid ) pDoc++; if ( pDoc->m_uDocid==DOCID_MAX ) { pDoc = NULL; break; } // matched docs block is over for me, gimme another one while ( pMatched->m_uDocid < pDoc->m_uDocid ) pMatched++; if ( pMatched->m_uDocid==DOCID_MAX ) { pDoc = NULL; break; } // matched doc block did not yet begin for me, gimme another one } while ( pDoc->m_uDocid!=pMatched->m_uDocid ); if ( !pDoc ) break; m_uFieldPos = 0; } m_pHitDoc = pDoc; if ( iHit==0 || iHit=0 && iHit ExtTermPos_c::ExtTermPos_c ( ISphQword * pQword, const XQNode_t * pNode, const ISphQwordSetup & tSetup ) : ExtTerm_c ( pQword, pNode->m_dSpec.m_dFieldMask, tSetup, pNode->m_bNotWeighted ) , m_iMaxFieldPos ( pNode->m_dSpec.m_iFieldMaxPos ) , m_uTermMaxID ( 0 ) , m_pRawDocs ( NULL ) , m_pRawDoc ( NULL ) , m_pRawHit ( NULL ) , m_uLastID ( 0 ) , m_eState ( COPY_DONE ) , m_uDoneFor ( 0 ) , m_pZoneChecker ( tSetup.m_pZoneChecker ) , m_dZones ( pNode->m_dSpec.m_dZones ) , m_uLastZonedId ( 0 ) , m_iCheckFrom ( 0 ) { m_dMyDocs[0].m_uDocid = DOCID_MAX; m_dMyHits[0].m_uDocid = DOCID_MAX; m_dFilteredHits[0].m_uDocid = DOCID_MAX; AllocDocinfo ( tSetup ); } template < TermPosFilter_e T > void ExtTermPos_c::Reset ( const ISphQwordSetup & tSetup ) { ExtTerm_c::Reset(tSetup); m_uTermMaxID = 0; m_pRawDocs = NULL; m_pRawDoc = NULL; m_pRawHit = NULL; m_uLastID = 0; m_eState = COPY_DONE; m_uDoneFor = 0; m_uLastZonedId = 0; m_iCheckFrom = 0; m_dMyDocs[0].m_uDocid = DOCID_MAX; m_dMyHits[0].m_uDocid = DOCID_MAX; m_dFilteredHits[0].m_uDocid = DOCID_MAX; } template<> inline bool ExtTermPos_c::IsAcceptableHit ( const ExtHit_t * pHit ) const { return HITMAN::GetPos ( pHit->m_uHitpos )<=m_iMaxFieldPos; } template<> inline bool ExtTermPos_c::IsAcceptableHit ( const ExtHit_t * pHit ) const { return HITMAN::GetPos ( pHit->m_uHitpos )==1; } template<> inline bool ExtTermPos_c::IsAcceptableHit ( const ExtHit_t * pHit ) const { return HITMAN::IsEnd ( pHit->m_uHitpos ); } template<> inline bool ExtTermPos_c::IsAcceptableHit ( const ExtHit_t * pHit ) const { return HITMAN::GetPos ( pHit->m_uHitpos )==1 && HITMAN::IsEnd ( pHit->m_uHitpos ); } template<> inline bool ExtTermPos_c::IsAcceptableHit ( const ExtHit_t * pHit ) const { assert ( m_pZoneChecker ); if ( m_uLastZonedId!=pHit->m_uDocid ) m_iCheckFrom = 0; m_uLastZonedId = pHit->m_uDocid; // only check zones that actually match this document for ( int i=m_iCheckFrom; iIsInZone ( m_dZones[i], pHit ); switch ( eState ) { case SPH_ZONE_FOUND: return true; case SPH_ZONE_NO_DOCUMENT: Swap ( m_dZones[i], m_dZones[m_iCheckFrom] ); m_iCheckFrom++; break; default: break; } } return false; } template < TermPosFilter_e T > const ExtDoc_t * 
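// ---- editor's note: illustrative sketch, not part of the original source ----
// The non-zone IsAcceptableHit() specializations above reduce to these simple
// predicates over the in-field token position and the end-of-field flag that
// HITMAN packs into each hit position.  HitInfo is a flattened, hypothetical
// stand-in for the packed hit position value.
struct HitInfo
{
    int  m_iFieldPos;       // 1-based token position inside its field
    bool m_bEndOfField;     // set on the last token of the field
};

static bool AcceptFieldLimit ( const HitInfo & tHit, int iMaxFieldPos ) { return tHit.m_iFieldPos<=iMaxFieldPos; }              // TERM_POS_FIELD_LIMIT
static bool AcceptFieldStart ( const HitInfo & tHit )                   { return tHit.m_iFieldPos==1; }                         // TERM_POS_FIELD_START
static bool AcceptFieldEnd ( const HitInfo & tHit )                     { return tHit.m_bEndOfField; }                          // TERM_POS_FIELD_END
static bool AcceptFieldStartEnd ( const HitInfo & tHit )                { return tHit.m_iFieldPos==1 && tHit.m_bEndOfField; }   // TERM_POS_FIELD_STARTEND
// ---- end editor's note ------------------------------------------------------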
ExtTermPos_c::GetDocsChunk ( SphDocID_t * pMaxID ) { // fetch more docs if needed if ( !m_pRawDocs ) { m_pRawDocs = ExtTerm_c::GetDocsChunk ( &m_uTermMaxID ); if ( !m_pRawDocs ) return NULL; m_pRawDoc = m_pRawDocs; m_pRawHit = NULL; m_uLastID = 0; } // filter the hits, and build the documents list int iMyDoc = 0; int iMyHit = 0; const ExtDoc_t * pDoc = m_pRawDoc; // just a shortcut const ExtHit_t * pHit = m_pRawHit; const SphDocID_t uSkipID = m_uLastID; SphDocID_t uLastID = m_uLastID = 0; CSphRowitem * pDocinfo = m_pDocinfo; for ( ;; ) { // try to fetch more hits for current raw docs block if we're out if ( !pHit || pHit->m_uDocid==DOCID_MAX ) pHit = ExtTerm_c::GetHitsChunk ( m_pRawDocs, m_uTermMaxID ); // did we touch all the hits we had? if so, we're fully done with // current raw docs block, and should start a new one if ( !pHit ) { m_pRawDocs = ExtTerm_c::GetDocsChunk ( &m_uTermMaxID ); if ( !m_pRawDocs ) // no more incoming documents? bail break; pDoc = m_pRawDocs; pHit = NULL; continue; } // skip all tail hits hits from documents below or same ID as uSkipID // scan until next acceptable hit while ( pHit->m_uDocid < pDoc->m_uDocid || ( uSkipID && pHit->m_uDocid<=uSkipID ) ) // skip leftovers pHit++; while ( ( pHit->m_uDocid!=DOCID_MAX || ( uSkipID && pHit->m_uDocid<=uSkipID ) ) && !IsAcceptableHit ( pHit ) ) // skip unneeded hits pHit++; if ( pHit->m_uDocid==DOCID_MAX || ( uSkipID && pHit->m_uDocid<=uSkipID ) ) // check for eof continue; // find and emit new document while ( pDoc->m_uDocidm_uDocid ) pDoc++; // FIXME? unsafe in broken cases assert ( pDoc->m_uDocid==pHit->m_uDocid ); assert ( iMyDocm_uDocid ) CopyExtDoc ( m_dMyDocs[iMyDoc++], *pDoc, &pDocinfo, m_iStride ); uLastID = pDoc->m_uDocid; // copy acceptable hits for this document while ( iMyHitm_uDocid==uLastID ) { if ( IsAcceptableHit ( pHit ) ) m_dMyHits[iMyHit++] = *pHit; pHit++; } if ( iMyHit==MAX_HITS-1 ) { // there is no more space for acceptable hits; but further calls to GetHits() *might* produce some // we need to memorize the trailing document id m_uLastID = uLastID; break; } } m_pRawDoc = pDoc; m_pRawHit = pHit; assert ( iMyDoc>=0 && iMyDoc=0 && iMyHit const ExtHit_t * ExtTermPos_c::GetHitsChunk ( const ExtDoc_t * pDocs, SphDocID_t uMaxID ) { if ( m_eState==COPY_DONE ) { // this request completed in full if ( m_uDoneFor==pDocs->m_uDocid || !m_uDoneFor ) return NULL; // old request completed in full, but we have a new hits subchunk request now // even though there were no new docs requests in the meantime! m_eState = COPY_FILTERED; } m_uDoneFor = pDocs->m_uDocid; // regular case // copy hits for requested docs from my hits to filtered hits, and return those int iFilteredHits = 0; if ( m_eState==COPY_FILTERED ) { const ExtHit_t * pMyHit = m_dMyHits; for ( ;; ) { // skip hits that the caller is not interested in while ( pMyHit->m_uDocid < pDocs->m_uDocid ) pMyHit++; // out of acceptable hits? if ( pMyHit->m_uDocid==DOCID_MAX ) { // do we have a trailing document? if yes, we should also copy trailing hits m_eState = m_uLastID ? COPY_TRAILING : COPY_DONE; break; } // skip docs that i do not have while ( pDocs->m_uDocid < pMyHit->m_uDocid ) pDocs++; // out of requested docs? 
over and out if ( pDocs->m_uDocid==DOCID_MAX ) { m_eState = COPY_DONE; break; } // copy matching hits while ( iFilteredHitsm_uDocid==pMyHit->m_uDocid ) m_dFilteredHits[iFilteredHits++] = *pMyHit++; // paranoid check that we're not out of bounds assert ( iFilteredHits<=MAX_HITS-1 && pDocs->m_uDocid!=pMyHit->m_uDocid ); } } // trailing hits case // my hits did not have enough space, so we should pass raw hits for the last doc while ( m_eState==COPY_TRAILING && m_uLastID && iFilteredHitsm_uDocid==DOCID_MAX ) m_pRawHit = ExtTerm_c::GetHitsChunk ( m_pRawDocs, Min ( uMaxID, m_uTermMaxID ) ); // no more hits for current chunk if ( !m_pRawHit ) { m_eState = COPY_DONE; break; } // copy while we can while ( m_pRawHit->m_uDocid==m_uLastID && iFilteredHitsm_uDocid!=m_uLastID && m_pRawHit->m_uDocid!=DOCID_MAX ) m_eState = COPY_DONE; // in any case, this chunk is over break; } m_dFilteredHits[iFilteredHits].m_uDocid = DOCID_MAX; return iFilteredHits ? m_dFilteredHits : NULL; } ////////////////////////////////////////////////////////////////////////// ExtTwofer_c::ExtTwofer_c ( ExtNode_i * pFirst, ExtNode_i * pSecond, const ISphQwordSetup & tSetup ) { m_pChildren[0] = pFirst; m_pChildren[1] = pSecond; m_pCurHit[0] = NULL; m_pCurHit[1] = NULL; m_pCurDoc[0] = NULL; m_pCurDoc[1] = NULL; m_dNodePos[0] = 0; m_dNodePos[1] = 0; m_bPosAware = false; m_uMatchedDocid = 0; m_iAtomPos = ( pFirst && pFirst->m_iAtomPos ) ? pFirst->m_iAtomPos : 0; if ( pSecond && pSecond->m_iAtomPos && pSecond->m_iAtomPosm_iAtomPos; AllocDocinfo ( tSetup ); } ExtTwofer_c::~ExtTwofer_c () { SafeDelete ( m_pChildren[0] ); SafeDelete ( m_pChildren[1] ); } void ExtTwofer_c::Reset ( const ISphQwordSetup & tSetup ) { m_pChildren[0]->Reset ( tSetup ); m_pChildren[1]->Reset ( tSetup ); m_pCurHit[0] = NULL; m_pCurHit[1] = NULL; m_pCurDoc[0] = NULL; m_pCurDoc[1] = NULL; m_uMatchedDocid = 0; } void ExtTwofer_c::GetQwords ( ExtQwordsHash_t & hQwords ) { m_pChildren[0]->GetQwords ( hQwords ); m_pChildren[1]->GetQwords ( hQwords ); } void ExtTwofer_c::SetQwordsIDF ( const ExtQwordsHash_t & hQwords ) { m_pChildren[0]->SetQwordsIDF ( hQwords ); m_pChildren[1]->SetQwordsIDF ( hQwords ); } ////////////////////////////////////////////////////////////////////////// const ExtDoc_t * ExtAnd_c::GetDocsChunk ( SphDocID_t * pMaxID ) { m_uMaxID = 0; const ExtDoc_t * pCur0 = m_pCurDoc[0]; const ExtDoc_t * pCur1 = m_pCurDoc[1]; int iDoc = 0; CSphRowitem * pDocinfo = m_pDocinfo; for ( ;; ) { // if any of the pointers is empty, *and* there is no data yet, process next child chunk // if there is data, we can't advance, because child hitlist offsets would be lost if ( !pCur0 || !pCur1 ) { if ( iDoc!=0 ) break; if ( !pCur0 ) pCur0 = m_pChildren[0]->GetDocsChunk ( NULL ); if ( !pCur1 ) pCur1 = m_pChildren[1]->GetDocsChunk ( NULL ); if ( !pCur0 || !pCur1 ) { m_pCurDoc[0] = NULL; m_pCurDoc[1] = NULL; return NULL; } } // find common matches assert ( pCur0 && pCur1 ); while ( iDocm_uDocid < pCur1->m_uDocid ) pCur0++; if ( pCur0->m_uDocid==DOCID_MAX ) { pCur0 = NULL; break; } while ( pCur1->m_uDocid < pCur0->m_uDocid ) pCur1++; if ( pCur1->m_uDocid==DOCID_MAX ) { pCur1 = NULL; break; } if ( pCur0->m_uDocid!=pCur1->m_uDocid ) continue; // emit it ExtDoc_t & tDoc = m_dDocs[iDoc++]; tDoc.m_uDocid = pCur0->m_uDocid; tDoc.m_uDocFields = pCur0->m_uDocFields | pCur1->m_uDocFields; // not necessary tDoc.m_uHitlistOffset = -1; tDoc.m_fTFIDF = pCur0->m_fTFIDF + pCur1->m_fTFIDF; CopyExtDocinfo ( tDoc, *pCur0, &pDocinfo, m_iStride ); // skip it pCur0++; if ( pCur0->m_uDocid==DOCID_MAX 
) pCur0 = NULL; pCur1++; if ( pCur1->m_uDocid==DOCID_MAX ) pCur1 = NULL; if ( !pCur0 || !pCur1 ) break; } } m_pCurDoc[0] = pCur0; m_pCurDoc[1] = pCur1; return ReturnDocsChunk ( iDoc, pMaxID ); } const ExtHit_t * ExtAnd_c::GetHitsChunk ( const ExtDoc_t * pDocs, SphDocID_t uMaxID ) { const ExtHit_t * pCur0 = m_pCurHit[0]; const ExtHit_t * pCur1 = m_pCurHit[1]; if ( m_uMatchedDocid < pDocs->m_uDocid ) m_uMatchedDocid = 0; int iHit = 0; WORD uNodePos0 = m_dNodePos[0]; WORD uNodePos1 = m_dNodePos[1]; while ( iHitm_uDocid==m_uMatchedDocid ) || ( pCur1 && pCur1->m_uDocid==m_uMatchedDocid ) ) ) { // merge, while possible if ( pCur0 && pCur1 && pCur0->m_uDocid==m_uMatchedDocid && pCur1->m_uDocid==m_uMatchedDocid ) while ( iHitm_uHitpos < pCur1->m_uHitpos ) || ( pCur0->m_uHitpos==pCur1->m_uHitpos && pCur0->m_uQuerypos>pCur1->m_uQuerypos ) ) { m_dHits[iHit] = *pCur0++; if ( uNodePos0!=0 ) m_dHits[iHit++].m_uNodepos = uNodePos0; else iHit++; if ( pCur0->m_uDocid!=m_uMatchedDocid ) break; } else { m_dHits[iHit] = *pCur1++; if ( uNodePos1!=0 ) m_dHits[iHit++].m_uNodepos = uNodePos1; else iHit++; if ( pCur1->m_uDocid!=m_uMatchedDocid ) break; } } // copy tail, while possible, unless the other child is at the end of a hit block if ( pCur0 && pCur0->m_uDocid==m_uMatchedDocid && !( pCur1 && pCur1->m_uDocid==DOCID_MAX ) ) { while ( pCur0->m_uDocid==m_uMatchedDocid && iHitm_uDocid==m_uMatchedDocid && !( pCur0 && pCur0->m_uDocid==DOCID_MAX ) ) { while ( pCur1->m_uDocid==m_uMatchedDocid && iHitm_uDocid!=m_uMatchedDocid && pCur0->m_uDocid!=DOCID_MAX ) && ( pCur1 && pCur1->m_uDocid!=m_uMatchedDocid && pCur1->m_uDocid!=DOCID_MAX ) ) m_uMatchedDocid = 0; // warmup if needed if ( !pCur0 || pCur0->m_uDocid==DOCID_MAX ) pCur0 = m_pChildren[0]->GetHitsChunk ( pDocs, uMaxID ); if ( !pCur1 || pCur1->m_uDocid==DOCID_MAX ) pCur1 = m_pChildren[1]->GetHitsChunk ( pDocs, uMaxID ); // one of the hitlists is over if ( !pCur0 || !pCur1 ) { if ( !pCur0 && !pCur1 ) break; // both are over, we're done // one is over, but we still need to copy the other one m_uMatchedDocid = pCur0 ? 
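// ---- editor's note: illustrative sketch, not part of the original source ----
// The heart of ExtAnd_c::GetDocsChunk() above: advance whichever cursor is
// behind until the docids meet, emit the common docid, then advance both.
// This sketch works on whole sorted vectors and leaves out the chunking,
// DOCID_MAX sentinels, field masks and docinfo copying of the original.
#include <cstdint>
#include <vector>

static std::vector<uint64_t> IntersectDocs ( const std::vector<uint64_t> & dA,
                                             const std::vector<uint64_t> & dB )
{
    std::vector<uint64_t> dOut;
    std::size_t iA = 0, iB = 0;
    while ( iA<dA.size() && iB<dB.size() )
    {
        if ( dA[iA]<dB[iB] )
            iA++;                           // A is behind; catch up
        else if ( dB[iB]<dA[iA] )
            iB++;                           // B is behind; catch up
        else
        {
            dOut.push_back ( dA[iA] );      // common document, emit it once
            iA++;
            iB++;
        }
    }
    return dOut;
}
// ---- end editor's note ------------------------------------------------------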
pCur0->m_uDocid : pCur1->m_uDocid; assert ( m_uMatchedDocid!=DOCID_MAX ); continue; } // find matching doc assert ( pCur1 && pCur0 ); while ( !m_uMatchedDocid ) { while ( pCur0->m_uDocid < pCur1->m_uDocid ) pCur0++; if ( pCur0->m_uDocid==DOCID_MAX ) break; while ( pCur1->m_uDocid < pCur0->m_uDocid ) pCur1++; if ( pCur1->m_uDocid==DOCID_MAX ) break; if ( pCur0->m_uDocid==pCur1->m_uDocid ) m_uMatchedDocid = pCur0->m_uDocid; } } m_pCurHit[0] = pCur0; m_pCurHit[1] = pCur1; assert ( iHit>=0 && iHitm_uSpanlen + pRight->m_uHitpos - pLeft->m_uHitpos; } ////////////////////////////////////////////////////////////////////////// const ExtDoc_t * ExtOr_c::GetDocsChunk ( SphDocID_t * pMaxID ) { m_uMaxID = 0; const ExtDoc_t * pCur0 = m_pCurDoc[0]; const ExtDoc_t * pCur1 = m_pCurDoc[1]; DWORD uTouched = 0; int iDoc = 0; CSphRowitem * pDocinfo = m_pDocinfo; while ( iDocm_uDocid==DOCID_MAX ) { if ( uTouched & 1 ) break; // it was touched, so we can't advance, because child hitlist offsets would be lost pCur0 = m_pChildren[0]->GetDocsChunk ( NULL ); } if ( !pCur1 || pCur1->m_uDocid==DOCID_MAX ) { if ( uTouched & 2 ) break; // it was touched, so we can't advance, because child hitlist offsets would be lost pCur1 = m_pChildren[1]->GetDocsChunk ( NULL ); } // check if we're over if ( !pCur0 && !pCur1 ) break; // merge lists while we can, copy tail while if we can not if ( pCur0 && pCur1 ) { // merge lists if we have both of them while ( iDocm_uDocid < pCur1->m_uDocid && iDocm_uDocid==DOCID_MAX ) { pCur0 = NULL; break; } // copy min docids from 2nd child while ( pCur1->m_uDocid < pCur0->m_uDocid && iDocm_uDocid==DOCID_MAX ) { pCur1 = NULL; break; } // copy min docids from both children assert ( pCur0->m_uDocid && pCur0->m_uDocid!=DOCID_MAX ); assert ( pCur1->m_uDocid && pCur1->m_uDocid!=DOCID_MAX ); while ( pCur0->m_uDocid==pCur1->m_uDocid && pCur0->m_uDocid!=DOCID_MAX && iDocm_uDocFields | pCur1->m_uDocFields; // not necessary m_dDocs[iDoc].m_fTFIDF = pCur0->m_fTFIDF + pCur1->m_fTFIDF; CopyExtDocinfo ( m_dDocs[iDoc], *pCur0, &pDocinfo, m_iStride ); iDoc++; pCur0++; pCur1++; uTouched |= 3; } if ( pCur0->m_uDocid==DOCID_MAX ) { pCur0 = NULL; break; } if ( pCur1->m_uDocid==DOCID_MAX ) { pCur1 = NULL; break; } } } else { // copy tail if we don't have both lists const ExtDoc_t * pList = pCur0 ? 
pCur0 : pCur1; if ( pList->m_uDocid!=DOCID_MAX && iDocm_uDocid!=DOCID_MAX && iDocm_uDocid==DOCID_MAX ) pList = NULL; if ( pCur0 ) pCur0 = pList; else pCur1 = pList; } } m_pCurDoc[0] = pCur0; m_pCurDoc[1] = pCur1; return ReturnDocsChunk ( iDoc, pMaxID ); } const ExtHit_t * ExtOr_c::GetHitsChunk ( const ExtDoc_t * pDocs, SphDocID_t uMaxID ) { const ExtHit_t * pCur0 = m_pCurHit[0]; const ExtHit_t * pCur1 = m_pCurHit[1]; int iHit = 0; while ( iHitm_uDocid==m_uMatchedDocid ) || ( pCur1 && pCur1->m_uDocid==m_uMatchedDocid ) ) ) { // merge, while possible if ( pCur0 && pCur1 && pCur0->m_uDocid==m_uMatchedDocid && pCur1->m_uDocid==m_uMatchedDocid ) while ( iHitm_uHitpos < pCur1->m_uHitpos ) { m_dHits[iHit++] = *pCur0++; if ( pCur0->m_uDocid!=m_uMatchedDocid ) break; } else { m_dHits[iHit++] = *pCur1++; if ( pCur1->m_uDocid!=m_uMatchedDocid ) break; } } // a pretty tricky bit // one of the nodes might have run out of current hits chunk (rather hits at all) // so we need to get the next hits chunk NOW, check for that condition, and keep merging // simply going to tail hits copying is incorrect, it could copy in wrong order // example, word A, pos 1, 2, 3, hit chunk ends, 4, 5, 6, word B, pos 7, 8, 9 if ( !pCur0 || pCur0->m_uDocid==DOCID_MAX ) { pCur0 = m_pChildren[0]->GetHitsChunk ( pDocs, uMaxID ); if ( pCur0 && pCur0->m_uDocid==m_uMatchedDocid ) continue; } if ( !pCur1 || pCur1->m_uDocid==DOCID_MAX ) { pCur1 = m_pChildren[1]->GetHitsChunk ( pDocs, uMaxID ); if ( pCur1 && pCur1->m_uDocid==m_uMatchedDocid ) continue; } // copy tail, while possible if ( pCur0 && pCur0->m_uDocid==m_uMatchedDocid ) { while ( pCur0->m_uDocid==m_uMatchedDocid && iHitm_uDocid==m_uMatchedDocid ); while ( pCur1->m_uDocid==m_uMatchedDocid && iHitm_uDocid!=m_uMatchedDocid ) && ( pCur1 && pCur1->m_uDocid!=m_uMatchedDocid ) ) m_uMatchedDocid = 0; // warmup if needed if ( !pCur0 || pCur0->m_uDocid==DOCID_MAX ) pCur0 = m_pChildren[0]->GetHitsChunk ( pDocs, uMaxID ); if ( !pCur1 || pCur1->m_uDocid==DOCID_MAX ) pCur1 = m_pChildren[1]->GetHitsChunk ( pDocs, uMaxID ); if ( !pCur0 && !pCur1 ) break; m_uMatchedDocid = ( pCur0 && pCur1 ) ? Min ( pCur0->m_uDocid, pCur1->m_uDocid ) : ( pCur0 ? 
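// ---- editor's note: illustrative sketch, not part of the original source ----
// Union counterpart of the AND sketch earlier, mirroring ExtOr_c::GetDocsChunk()
// above: copy whichever docid is smaller, and emit a docid present in both
// streams only once.  The original additionally sums the m_fTFIDF partials and
// ORs the per-document field masks when both children match; weights are
// omitted here.
#include <cstdint>
#include <vector>

static std::vector<uint64_t> UnionDocs ( const std::vector<uint64_t> & dA,
                                         const std::vector<uint64_t> & dB )
{
    std::vector<uint64_t> dOut;
    std::size_t iA = 0, iB = 0;
    while ( iA<dA.size() || iB<dB.size() )
    {
        if ( iB==dB.size() || ( iA<dA.size() && dA[iA]<dB[iB] ) )
            dOut.push_back ( dA[iA++] );    // only A holds this docid (so far)
        else if ( iA==dA.size() || dB[iB]<dA[iA] )
            dOut.push_back ( dB[iB++] );    // only B holds this docid (so far)
        else
        {
            dOut.push_back ( dA[iA] );      // both children match, emit once
            iA++;
            iB++;
        }
    }
    return dOut;
}
// ---- end editor's note ------------------------------------------------------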
pCur0->m_uDocid : pCur1->m_uDocid ); } m_pCurHit[0] = pCur0; m_pCurHit[1] = pCur1; assert ( iHit>=0 && iHitGetDocsChunk ( pMaxID ); // otherwise, do some removals m_uMaxID = 0; const ExtDoc_t * pCur0 = m_pCurDoc[0]; const ExtDoc_t * pCur1 = m_pCurDoc[1]; int iDoc = 0; CSphRowitem * pDocinfo = m_pDocinfo; while ( iDocm_uDocid==DOCID_MAX ) { // there were matches; we can not pull more because that'd fuckup hitlists if ( iDoc ) break; // no matches so far; go pull pCur0 = m_pChildren[0]->GetDocsChunk ( NULL ); if ( !pCur0 ) break; } // pull more docs from reject, if nedeed if ( !pCur1 || pCur1->m_uDocid==DOCID_MAX ) pCur1 = m_pChildren[1]->GetDocsChunk ( NULL ); // if there's nothing to filter against, simply copy leftovers if ( !pCur1 ) { assert ( pCur0 ); while ( pCur0->m_uDocid!=DOCID_MAX && iDocm_uDocid==DOCID_MAX ) m_bPassthrough = true; break; } // perform filtering assert ( pCur0 ); assert ( pCur1 ); for ( ;; ) { assert ( iDocm_uDocid!=DOCID_MAX ); assert ( pCur1->m_uDocid!=DOCID_MAX ); // copy accepted until min rejected id while ( pCur0->m_uDocid < pCur1->m_uDocid && iDocm_uDocid==DOCID_MAX || iDoc==MAX_DOCS-1 ) break; // skip rejected until min accepted id while ( pCur1->m_uDocid < pCur0->m_uDocid ) pCur1++; if ( pCur1->m_uDocid==DOCID_MAX ) break; // skip both while ids match while ( pCur0->m_uDocid==pCur1->m_uDocid && pCur0->m_uDocid!=DOCID_MAX ) { pCur0++; pCur1++; } if ( pCur0->m_uDocid==DOCID_MAX || pCur1->m_uDocid==DOCID_MAX ) break; } } m_pCurDoc[0] = pCur0; m_pCurDoc[1] = pCur1; return ReturnDocsChunk ( iDoc, pMaxID ); } const ExtHit_t * ExtAndNot_c::GetHitsChunk ( const ExtDoc_t * pDocs, SphDocID_t uMaxID ) { return m_pChildren[0]->GetHitsChunk ( pDocs, uMaxID ); }; void ExtAndNot_c::Reset ( const ISphQwordSetup & tSetup ) { m_bPassthrough = false; ExtTwofer_c::Reset ( tSetup ); } ////////////////////////////////////////////////////////////////////////// ExtNWayT::ExtNWayT ( const CSphVector & dNodes, DWORD, const XQNode_t &, const ISphQwordSetup & tSetup ) : m_pNode ( NULL ) , m_pDocs ( NULL ) , m_pHits ( NULL ) , m_pDoc ( NULL ) , m_pHit ( NULL ) , m_pMyDoc ( NULL ) , m_pMyHit ( NULL ) , m_uLastDocID ( 0 ) , m_uMatchedDocid ( 0 ) , m_uHitsOverFor ( 0 ) { assert ( dNodes.GetLength()>1 ); m_iAtomPos = dNodes[0]->m_iAtomPos; m_dMyHits[0].m_uDocid = DOCID_MAX; AllocDocinfo ( tSetup ); } ExtNWayT::~ExtNWayT () { SafeDelete ( m_pNode ); } void ExtNWayT::Reset ( const ISphQwordSetup & tSetup ) { m_pNode->Reset ( tSetup ); m_pDocs = NULL; m_pHits = NULL; m_pDoc = NULL; m_pHit = NULL; m_pMyDoc = NULL; m_pMyHit = NULL; m_uLastDocID = 0; m_uMatchedDocid = 0; m_uHitsOverFor = 0; m_dMyHits[0].m_uDocid = DOCID_MAX; } void ExtNWayT::GetQwords ( ExtQwordsHash_t & hQwords ) { assert ( m_pNode ); m_pNode->GetQwords ( hQwords ); } void ExtNWayT::SetQwordsIDF ( const ExtQwordsHash_t & hQwords ) { assert ( m_pNode ); m_pNode->SetQwordsIDF ( hQwords ); } template < class FSM > const ExtDoc_t * ExtNWay_c::GetDocsChunk ( SphDocID_t * pMaxID ) { m_uMaxID = 0; // initial warmup if ( !m_pDoc ) { if ( !m_pDocs ) m_pDocs = m_pNode->GetDocsChunk ( &m_uDocsMaxID ); if ( !m_pDocs ) return NULL; // no more docs m_pDoc = m_pDocs; } // shortcuts const ExtDoc_t * pDoc = m_pDoc; const ExtHit_t * pHit = m_pHit; FSM::ResetFSM(); // skip leftover hits while ( m_uLastDocID ) { if ( !pHit || pHit->m_uDocid==DOCID_MAX ) { pHit = m_pHits = m_pNode->GetHitsChunk ( m_pDocs, m_uDocsMaxID ); if ( !pHit ) break; } while ( pHit->m_uDocid==m_uLastDocID ) pHit++; if ( pHit->m_uDocid!=DOCID_MAX && 
pHit->m_uDocid!=m_uLastDocID ) m_uLastDocID = 0; } // search for matches int iDoc = 0; int iHit = 0; CSphRowitem * pDocinfo = m_pDocinfo; while ( iHitm_uDocid==DOCID_MAX ) { // grab more hits pHit = m_pHits = m_pNode->GetHitsChunk ( m_pDocs, m_uDocsMaxID ); if ( m_pHits ) continue; m_uMatchedDocid = 0; // no more hits for current docs chunk; grab more docs pDoc = m_pDocs = m_pNode->GetDocsChunk ( &m_uDocsMaxID ); if ( !m_pDocs ) break; // we got docs, there must be hits pHit = m_pHits = m_pNode->GetHitsChunk ( m_pDocs, m_uDocsMaxID ); assert ( pHit ); continue; } // check if the incoming hit is out of bounds, or affects min pos if ( pHit->m_uDocid!=m_uMatchedDocid ) { m_uMatchedDocid = pHit->m_uDocid; FSM::ResetFSM(); continue; } if ( FSM::HitFSM ( pHit, &m_dMyHits[iHit] ) ) { // emit document, if it's new if ( pHit->m_uDocid!=m_uLastDocID ) { assert ( pDoc->m_uDocid<=pHit->m_uDocid ); while ( pDoc->m_uDocid < pHit->m_uDocid ) pDoc++; assert ( pDoc->m_uDocid==pHit->m_uDocid ); m_dDocs[iDoc].m_uDocid = pHit->m_uDocid; m_dDocs[iDoc].m_uDocFields = 1<< ( HITMAN::GetField ( pHit->m_uHitpos ) ); // non necessary m_dDocs[iDoc].m_uHitlistOffset = -1; m_dDocs[iDoc].m_fTFIDF = pDoc->m_fTFIDF; CopyExtDocinfo ( m_dDocs[iDoc], *pDoc, &pDocinfo, m_iStride ); iDoc++; m_uLastDocID = pHit->m_uDocid; } iHit++; } // go on pHit++; } // reset current positions for hits chunk getter m_pMyDoc = m_dDocs; m_pMyHit = m_dMyHits; // save shortcuts m_pDoc = pDoc; m_pHit = pHit; assert ( iHit>=0 && iHit const ExtHit_t * ExtNWay_c::GetHitsChunk ( const ExtDoc_t * pDocs, SphDocID_t uMaxID ) { // if we already emitted hits for this matches block, do not do that again SphDocID_t uFirstMatch = pDocs->m_uDocid; if ( uFirstMatch==m_uHitsOverFor ) return NULL; // early reject whole block if ( pDocs->m_uDocid > m_uMaxID ) return NULL; if ( m_uMaxID && m_dDocs[0].m_uDocid > uMaxID ) return NULL; // shortcuts const ExtDoc_t * pMyDoc = m_pMyDoc; const ExtHit_t * pMyHit = m_pMyHit; assert ( pMyDoc ); assert ( pMyHit ); // filter and copy hits from m_dMyHits int iHit = 0; while ( iHitm_uDocid < pDocs->m_uDocid ) pMyDoc++; if ( pMyDoc->m_uDocid==DOCID_MAX ) break; while ( pDocs->m_uDocid < pMyDoc->m_uDocid ) pDocs++; if ( pDocs->m_uDocid==DOCID_MAX ) break; } while ( pDocs->m_uDocid!=pMyDoc->m_uDocid ); if ( pDocs->m_uDocid!=pMyDoc->m_uDocid ) { assert ( pMyDoc->m_uDocid==DOCID_MAX || pDocs->m_uDocid==DOCID_MAX ); break; } assert ( pDocs->m_uDocid==pMyDoc->m_uDocid ); assert ( pDocs->m_uDocid!=0 ); assert ( pDocs->m_uDocid!=DOCID_MAX ); m_uMatchedDocid = pDocs->m_uDocid; } // skip until we have to while ( pMyHit->m_uDocid < m_uMatchedDocid ) pMyHit++; // copy while we can if ( pMyHit->m_uDocid!=DOCID_MAX ) { assert ( pMyHit->m_uDocid==m_uMatchedDocid ); assert ( m_uMatchedDocid!=0 && m_uMatchedDocid!=DOCID_MAX ); while ( pMyHit->m_uDocid==m_uMatchedDocid && iHitm_uDocid!=m_uMatchedDocid && pMyHit->m_uDocid!=DOCID_MAX ) { // it's simply next document in the line; switch to it m_uMatchedDocid = 0; pMyDoc++; } else if ( pMyHit->m_uDocid==DOCID_MAX && !m_pHit ) { // it's the end break; } else if ( pMyHit->m_uDocid==DOCID_MAX && m_pHit && iHitm_uDocid ); assert ( m_uMatchedDocid==m_uLastDocID ); assert ( !m_pDoc || m_uMatchedDocid==m_pDoc->m_uDocid ); m_pMyDoc = pMyDoc; if ( EmitTail(iHit) ) m_uHitsOverFor = uFirstMatch; pMyDoc = m_pMyDoc; } } // save shortcuts m_pMyDoc = pMyDoc; m_pMyHit = pMyHit; assert ( iHit>=0 && iHit bool ExtNWay_c::EmitTail ( int & iHit ) { const ExtHit_t * pHit = m_pHit; const ExtDoc_t * pMyDoc = m_pMyDoc; bool 
bTailFinished = false; while ( iHitm_uDocid==DOCID_MAX ) { pHit = m_pHits = m_pNode->GetHitsChunk ( m_pDocs, m_uDocsMaxID ); if ( !pHit ) { m_uMatchedDocid = 0; pMyDoc++; break; } } // stop and finish on the first new id if ( pHit->m_uDocid!=m_uMatchedDocid ) { // reset hits getter; this docs chunk from above is finally over bTailFinished = true; m_uMatchedDocid = 0; pMyDoc++; break; } if ( FSM::HitFSM ( pHit, &m_dHits[iHit] ) ) iHit++; pHit++; } // save shortcut m_pHit = pHit; m_pMyDoc = pMyDoc; return bTailFinished; } ////////////////////////////////////////////////////////////////////////// FSMphrase::FSMphrase ( const CSphVector & dQwords, DWORD, const XQNode_t & , const ISphQwordSetup & ) : m_uExpQpos ( 0 ) , m_uExpPos ( 0 ) , m_uLeaves ( dQwords.GetLength() ) { m_uMinQpos = dQwords[0]->m_iAtomPos; m_uMaxQpos = dQwords.Last()->m_iAtomPos; m_dQposDelta.Resize ( m_uMaxQpos-m_uMinQpos+1 ); ARRAY_FOREACH ( i, m_dQposDelta ) m_dQposDelta[i] = -INT_MAX; for ( int i=1; i<(int)m_uLeaves; i++ ) m_dQposDelta [ dQwords[i-1]->m_iAtomPos - dQwords[0]->m_iAtomPos ] = dQwords[i]->m_iAtomPos - dQwords[i-1]->m_iAtomPos; } inline bool FSMphrase::HitFSM ( const ExtHit_t* pHit, ExtHit_t* dTarget ) { // unexpected too-low position? must be duplicate keywords for the previous one ("aaa bbb aaa ccc" case); just skip them if ( HITMAN::GetLCS ( pHit->m_uHitpos )m_uHitpos )!=m_uExpPos ) { // stream position out of sequence; reset expected positions if ( pHit->m_uQuerypos==m_uMinQpos ) { m_uExpPos = HITMAN::GetLCS ( pHit->m_uHitpos ) + m_dQposDelta[0]; m_uExpQpos = pHit->m_uQuerypos + m_dQposDelta[0]; } else m_uExpPos = m_uExpQpos = 0; return false; } // scan all hits with matching stream position // duplicate stream positions occur when there are duplicate query words // stream position is as expected; let's check query position if ( pHit->m_uQuerypos!=m_uExpQpos ) { // unexpected query position // do nothing; there might be other words in same (!) expected position following, with proper query positions // (eg. if the query words are repeated) if ( pHit->m_uQuerypos==m_uMinQpos ) { m_uExpPos = pHit->m_uHitpos + m_dQposDelta[0]; m_uExpQpos = pHit->m_uQuerypos + m_dQposDelta[0]; } return false; } if ( m_uExpQpos!=m_uMaxQpos ) { // intermediate expected position; keep looking assert ( pHit->m_uQuerypos==m_uExpQpos ); int iDelta = m_dQposDelta [ pHit->m_uQuerypos - m_uMinQpos ]; m_uExpPos += iDelta; m_uExpQpos += iDelta; // FIXME! what if there *more* hits with current pos following? return false; } // expected position which concludes the phrase; emit next match assert ( pHit->m_uQuerypos==m_uExpQpos ); DWORD uSpanlen = m_uMaxQpos - m_uMinQpos; // emit directly into m_dHits, this is no need to disturb m_dMyHits here. 
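// ---- editor's note: illustrative sketch, not part of the original source ----
// FSMphrase::HitFSM() around this point checks every incoming hit of the
// merged keyword stream against an expected (position, query position) pair,
// advancing by the precomputed qpos deltas and emitting a match only when the
// final keyword lands exactly where the phrase requires.  This is a heavily
// simplified version for adjacent keywords (all deltas equal to one); the
// original also copes with skipped stopwords and repeated keywords.  PhraseFSM
// and its members are hypothetical names.
#include <cstdint>

class PhraseFSM
{
    int      m_iWords;          // number of keywords in the phrase (at least two assumed)
    int      m_iExpQword;       // 0-based index of the next expected keyword (0 == idle)
    uint32_t m_uExpPos;         // position where that keyword must appear (0 == idle)

public:
    explicit PhraseFSM ( int iWords )
        : m_iWords ( iWords ), m_iExpQword ( 0 ), m_uExpPos ( 0 )
    {}

    // feed one hit: iQword is the 0-based keyword index, uPos its 1-based position;
    // returns true when this hit completes the phrase
    bool Push ( int iQword, uint32_t uPos )
    {
        if ( iQword!=m_iExpQword || uPos!=m_uExpPos )
        {
            // out of sequence; the first keyword may always start a new chain
            m_iExpQword = ( iQword==0 ) ? 1 : 0;
            m_uExpPos = ( iQword==0 ) ? uPos+1 : 0;
            return false;
        }
        if ( m_iExpQword==m_iWords-1 )
        {
            m_iExpQword = 0;            // phrase complete; reset for the next match
            m_uExpPos = 0;
            return true;
        }
        m_iExpQword++;                  // keep walking the phrase
        m_uExpPos = uPos+1;
        return false;
    }
};
// ---- end editor's note ------------------------------------------------------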
dTarget->m_uDocid = pHit->m_uDocid; dTarget->m_uHitpos = HITMAN::GetLCS ( pHit->m_uHitpos ) - uSpanlen; dTarget->m_uQuerypos = (WORD) m_uMinQpos; dTarget->m_uMatchlen = dTarget->m_uSpanlen = (WORD)( uSpanlen + 1 ); dTarget->m_uWeight = m_uLeaves; m_uExpPos = m_uExpQpos = 0; return true; } ////////////////////////////////////////////////////////////////////////// FSMproximity::FSMproximity ( const CSphVector & dQwords, DWORD, const XQNode_t & tNode, const ISphQwordSetup & ) : m_iMaxDistance ( tNode.m_iOpArg ) , m_uWordsExpected ( dQwords.GetLength() ) , m_uExpPos ( 0 ) { assert ( m_iMaxDistance>0 ); m_uMinQpos = dQwords[0]->m_iAtomPos; m_uQLen = dQwords.Last()->m_iAtomPos - m_uMinQpos; m_dProx.Resize ( m_uQLen+1 ); m_dDeltas.Resize ( m_uQLen+1 ); } inline bool FSMproximity::HitFSM ( const ExtHit_t* pHit, ExtHit_t* dTarget ) { // walk through the hitlist and update context int iQindex = pHit->m_uQuerypos - m_uMinQpos; DWORD uHitpos = HITMAN::GetLCS ( pHit->m_uHitpos ); // check if the word is new if ( m_dProx[iQindex]==UINT_MAX ) m_uWords++; // update the context m_dProx[iQindex] = uHitpos; // check if the incoming hit is out of bounds, or affects min pos if ( uHitpos>=m_uExpPos // out of expected bounds || iQindex==m_iMinQindex ) // or simply affects min pos { m_iMinQindex = iQindex; int iMinPos = uHitpos - m_uQLen - m_iMaxDistance; ARRAY_FOREACH ( i, m_dProx ) if ( m_dProx[i]!=UINT_MAX ) { if ( (int)m_dProx[i]<=iMinPos ) { m_dProx[i] = UINT_MAX; m_uWords--; continue; } if ( m_dProx[i]m_uHitpos - m_dProx[m_iMinQindex] - m_uQLen ) DWORD uMax = 0; ARRAY_FOREACH ( i, m_dProx ) { m_dDeltas[i] = m_dProx[i] - i; uMax = Max ( uMax, m_dProx[i] ); } m_dDeltas.Sort (); DWORD uWeight = 0; int iLast = -INT_MAX; ARRAY_FOREACH ( i, m_dDeltas ) { if ( m_dDeltas[i]==iLast ) uWeight++; else uWeight = 1; iLast = m_dDeltas[i]; } // emit hit dTarget->m_uDocid = pHit->m_uDocid; dTarget->m_uHitpos = Hitpos_t ( m_dProx[m_iMinQindex] ); // !COMMIT strictly speaking this is creation from LCS not value dTarget->m_uQuerypos = (WORD) m_uMinQpos; dTarget->m_uSpanlen = dTarget->m_uMatchlen = (WORD)( uMax-m_dProx[m_iMinQindex]+1 ); dTarget->m_uWeight = uWeight; // remove current min, and force recompue m_dProx[m_iMinQindex] = UINT_MAX; m_iMinQindex = -1; m_uWords--; m_uExpPos = 0; return true; } ////////////////////////////////////////////////////////////////////////// FSMmultinear::FSMmultinear ( const CSphVector & dNodes, DWORD, const XQNode_t & tNode, const ISphQwordSetup & ) : m_iNear ( tNode.m_iOpArg ) , m_uWordsExpected ( dNodes.GetLength() ) { if ( m_uWordsExpected==2 ) m_bTwofer = true; else { m_dNpos.Reserve ( m_uWordsExpected ); m_dRing.Resize ( m_uWordsExpected ); m_bTwofer = false; } assert ( m_iNear>0 ); } inline bool FSMmultinear::HitFSM ( const ExtHit_t* pHit, ExtHit_t* dTarget ) { // walk through the hitlist and update context DWORD uHitpos = HITMAN::GetLCS ( pHit->m_uHitpos ); WORD uNpos = pHit->m_uNodepos; WORD uQpos = pHit->m_uQuerypos; // skip dupe hit (may be emitted by OR node, for example) if ( m_uLastP==uHitpos ) { // check if the hit is subset of another one if ( m_uPrelastP && m_uLastML < pHit->m_uMatchlen ) { // roll back pre-last to check agains this new hit. 
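// (the same hit position arrived again, e.g. from an OR subnode, but with a longer
// matched span than the link we already accepted; restore the saved pre-last state
// below so this longer hit is re-evaluated as if it were the first one at this position)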
m_uLastML = m_uPrelastML; m_uLastSL = m_uPrelastSL; m_uFirstHit = m_uLastP = m_uPrelastP; m_uWeight = m_uWeight - m_uLastW + m_uPrelastW; } else return false; } // probably new chain if ( m_uLastP==0 || ( m_uLastP + m_uLastML + m_iNear )<=uHitpos ) { m_uFirstHit = m_uLastP = uHitpos; m_uLastML = pHit->m_uMatchlen; m_uLastSL = pHit->m_uSpanlen; m_uWeight = m_uLastW = pHit->m_uWeight; if ( m_bTwofer ) { m_uFirstQpos = uQpos; m_uFirstNpos = uNpos; } else { m_dNpos.Resize(1); m_dNpos[0] = uNpos; Add2Ring ( pHit ); } return false; } // this hit (with such querypos) already was there. Skip the hit. if ( m_bTwofer ) { // special case for twofer: hold the overlapping if ( ( m_uFirstHit + m_uLastML )>uHitpos && ( m_uFirstHit + m_uLastML )<( uHitpos + pHit->m_uMatchlen ) && m_uLastML!=pHit->m_uMatchlen ) { m_uFirstHit = m_uLastP = uHitpos; m_uLastML = pHit->m_uMatchlen; m_uLastSL = pHit->m_uSpanlen; m_uWeight = m_uLastW = pHit->m_uWeight; m_uFirstQpos = uQpos; m_uFirstNpos = uNpos; return false; } if ( uNpos==m_uFirstNpos ) { if ( m_uLastP < uHitpos ) { m_uPrelastML = m_uLastML; m_uPrelastSL = m_uLastSL; m_uPrelastP = m_uLastP; m_uPrelastW = pHit->m_uWeight; m_uFirstHit = m_uLastP = uHitpos; m_uLastML = pHit->m_uMatchlen; m_uLastSL = pHit->m_uSpanlen; m_uWeight = m_uLastW = m_uPrelastW; m_uFirstQpos = uQpos; m_uFirstNpos = uNpos; } return false; } } else { if ( uNpos < m_dNpos[0] ) { m_uFirstQpos = Min ( m_uFirstQpos, uQpos ); m_dNpos.Insert ( 0, uNpos ); } else if ( uNpos > m_dNpos.Last() ) { m_uFirstQpos = Min ( m_uFirstQpos, uQpos ); m_dNpos.Add ( uNpos ); } else if ( uNpos!=m_dNpos[0] && uNpos!=m_dNpos.Last() ) { int iEnd = m_dNpos.GetLength(); int iStart = 0; int iMid = -1; while ( iEnd-iStart>1 ) { iMid = ( iStart + iEnd ) / 2; if ( uNpos==m_dNpos[iMid] ) { const ExtHit_t& dHit = m_dRing[m_iRing]; // last addition same as the first. So, we can shift if ( uNpos==dHit.m_uNodepos ) { m_uWeight -= dHit.m_uWeight; m_uFirstHit = HITMAN::GetLCS ( dHit.m_uHitpos ); ShiftRing(); // last addition same as the first. So, we can shift } else if ( uNpos==m_dRing [ RingTail() ].m_uNodepos ) m_uWeight -= m_dRing [ RingTail() ].m_uWeight; else return false; } if ( uNposm_uWeight; m_uLastML = pHit->m_uMatchlen; m_uLastSL = pHit->m_uSpanlen; Add2Ring ( pHit ); // finally got the whole chain - emit it! // warning: we don't support overlapping in generic chains. 
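// the chain is considered complete either in the two-argument case (m_bTwofer, where any
// second in-window hit closes it) or, for the generic N-ary case, once every expected node
// position has been collected in m_dNpos; the emitted hit then spans from m_uFirstHit to
// the end of the last link (uHitpos - m_uFirstHit + m_uLastML positions).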
if ( m_bTwofer || (int)m_uWordsExpected==m_dNpos.GetLength() ) { dTarget->m_uDocid = pHit->m_uDocid; dTarget->m_uHitpos = Hitpos_t ( m_uFirstHit ); // !COMMIT strictly speaking this is creation from LCS not value dTarget->m_uMatchlen = (WORD)( uHitpos - m_uFirstHit + m_uLastML ); dTarget->m_uWeight = m_uWeight; m_uPrelastP = 0; if ( m_bTwofer ) // for exactly 2 words allow overlapping - so, just shift the chain, not reset it { dTarget->m_uQuerypos = Min ( m_uFirstQpos, pHit->m_uQuerypos ); dTarget->m_uSpanlen = 2; m_uFirstHit = m_uLastP = uHitpos; m_uWeight = pHit->m_uWeight; m_uFirstQpos = pHit->m_uQuerypos; } else { dTarget->m_uQuerypos = Min ( m_uFirstQpos, pHit->m_uQuerypos ); dTarget->m_uSpanlen = (WORD) m_dNpos.GetLength(); m_uLastP = 0; } return true; } m_uLastP = uHitpos; return false; } ////////////////////////////////////////////////////////////////////////// ExtQuorum_c::ExtQuorum_c ( CSphVector & dQwords, DWORD uDupeMask, const XQNode_t & tNode, const ISphQwordSetup & ) { assert ( tNode.GetOp()==SPH_QUERY_QUORUM ); m_iThresh = tNode.m_iOpArg; m_bDone = false; assert ( dQwords.GetLength()>1 ); // use TERM instead assert ( dQwords.GetLength()<=32 ); // internal masks are 32 bits assert ( m_iThresh>=1 ); // 1 is also OK; it's a bit different from just OR assert ( m_iThresh0 ) m_iAtomPos = dQwords[0]->m_iAtomPos; ARRAY_FOREACH ( i, dQwords ) { m_dInitialChildren.Add ( dQwords[i] ); m_pCurDoc.Add ( NULL ); m_pCurHit.Add ( NULL ); } m_dChildren = m_dInitialChildren; m_uMask = m_uInitialMask = uDupeMask; m_uMaskEnd = dQwords.GetLength() - 1; m_uMatchedDocid = 0; } ExtQuorum_c::~ExtQuorum_c () { ARRAY_FOREACH ( i, m_dInitialChildren ) SafeDelete ( m_dInitialChildren[i] ); } void ExtQuorum_c::Reset ( const ISphQwordSetup & tSetup ) { m_bDone = false; m_pCurDoc.Resize ( m_dInitialChildren.GetLength() ); m_pCurHit.Resize ( m_dInitialChildren.GetLength() ); m_dChildren.Resize ( m_dInitialChildren.GetLength() ); ARRAY_FOREACH ( i, m_dInitialChildren ) { m_dChildren[i] = m_dInitialChildren[i]; m_pCurDoc[i] = NULL; m_pCurHit[i] = NULL; } m_uMask = m_uInitialMask; m_uMaskEnd = m_dChildren.GetLength() - 1; m_uMatchedDocid = 0; ARRAY_FOREACH ( i, m_dChildren ) m_dChildren[i]->Reset ( tSetup ); } void ExtQuorum_c::GetQwords ( ExtQwordsHash_t & hQwords ) { ARRAY_FOREACH ( i, m_dChildren ) m_dChildren[i]->GetQwords ( hQwords ); } void ExtQuorum_c::SetQwordsIDF ( const ExtQwordsHash_t & hQwords ) { ARRAY_FOREACH ( i, m_dChildren ) m_dChildren[i]->SetQwordsIDF ( hQwords ); } const ExtDoc_t * ExtQuorum_c::GetDocsChunk ( SphDocID_t * pMaxID ) { // warmup ARRAY_FOREACH ( i, m_pCurDoc ) if ( !m_pCurDoc[i] || m_pCurDoc[i]->m_uDocid==DOCID_MAX ) { m_pCurDoc[i] = m_dChildren[i]->GetDocsChunk ( NULL ); if ( m_pCurDoc[i] ) continue; if ( m_dChildren.GetLength()==m_iThresh ) { m_bDone = true; break; } // replace i-th bit with the last one m_uMask &= ~( 1UL<> m_uMaskEnd ) & 1 ) << i; // set i-th bit to end bit m_uMaskEnd--; m_dChildren.RemoveFast ( i ); m_pCurDoc.RemoveFast ( i ); m_pCurHit.RemoveFast ( i ); i--; } // early out if ( m_bDone ) return NULL; // main loop DWORD uTouched = 0; // bitmask of children that actually produced matches this time int iDoc = 0; bool bDone = false; CSphRowitem * pDocinfo = m_pDocinfo; while ( iDocm_uDocid && m_pCurDoc[i]->m_uDocid!=DOCID_MAX ); if ( m_pCurDoc[i]->m_uDocid < tCand.m_uDocid ) { tCand = *m_pCurDoc[i]; iCandMatches = (m_uMask >> i) & 1; } else if ( m_pCurDoc[i]->m_uDocid==tCand.m_uDocid ) { tCand.m_uDocFields |= m_pCurDoc[i]->m_uDocFields; // non necessary 
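// note: every child that matched this docid contributes its fields/TFIDF to the candidate
// (continued just below), but only children whose bit is set in m_uMask add to iCandMatches
// and thus count toward the m_iThresh quorum; the mask arrives as uDupeMask in the ctor,
// apparently so that duplicated keywords are not counted more than once.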
tCand.m_fTFIDF += m_pCurDoc[i]->m_fTFIDF; iCandMatches += (m_uMask >> i) & 1; } } // submit match if ( iCandMatches>=m_iThresh ) CopyExtDoc ( m_dDocs[iDoc++], tCand, &pDocinfo, m_iStride ); // advance children ARRAY_FOREACH ( i, m_pCurDoc ) if ( m_pCurDoc[i]->m_uDocid==tCand.m_uDocid ) { if ( iCandMatches>=m_iThresh ) uTouched |= ( 1UL<m_uDocid!=DOCID_MAX ) continue; if ( uTouched & ( 1UL<GetDocsChunk ( NULL ); if ( m_pCurDoc[i] ) continue; if ( m_dChildren.GetLength()==m_iThresh ) { bDone = m_bDone = true; break; } // replace i-th bit with the last one m_uMask &= ~( 1UL<> m_uMaskEnd ) & 1 ) << i; // set i-th bit to end bit m_uMaskEnd--; uTouched &= ~(1UL<> (m_dChildren.GetLength()-1) ) & 1UL ) << i; m_dChildren.RemoveFast ( i ); m_pCurDoc.RemoveFast ( i ); m_pCurHit.RemoveFast ( i ); i--; } } return ReturnDocsChunk ( iDoc, pMaxID ); } const ExtHit_t * ExtQuorum_c::GetHitsChunk ( const ExtDoc_t * pDocs, SphDocID_t uMaxID ) { // warmup ARRAY_FOREACH ( i, m_pCurHit ) if ( !m_pCurHit[i] || m_pCurHit[i]->m_uDocid==DOCID_MAX ) m_pCurHit[i] = m_dChildren[i]->GetHitsChunk ( pDocs, uMaxID ); // main loop int iHit = 0; while ( iHitm_uDocid!=DOCID_MAX ); m_uMatchedDocid = Min ( m_uMatchedDocid, m_pCurHit[i]->m_uDocid ); } if ( m_uMatchedDocid==DOCID_MAX ) break; } // emit that id while possible // OPTIMIZE: full linear scan for min pos and emission, eww int iMinChild = -1; DWORD uMinPos = UINT_MAX; ARRAY_FOREACH ( i, m_pCurHit ) if ( m_pCurHit[i] && m_pCurHit[i]->m_uDocid==m_uMatchedDocid ) if ( HITMAN::GetLCS ( m_pCurHit[i]->m_uHitpos ) < uMinPos ) { uMinPos = HITMAN::GetLCS ( m_pCurHit[i]->m_uHitpos ); // !COMMIT bench/fix, is LCS right here? iMinChild = i; } if ( iMinChild<0 ) { m_uMatchedDocid = 0; continue; } m_dHits[iHit++] = *m_pCurHit[iMinChild]; m_pCurHit[iMinChild]++; if ( m_pCurHit[iMinChild]->m_uDocid==DOCID_MAX ) m_pCurHit[iMinChild] = m_dChildren[iMinChild]->GetHitsChunk ( pDocs, uMaxID ); } assert ( iHit>=0 && iHit & dChildren, const ISphQwordSetup & tSetup ) : m_dChildren ( dChildren ) , m_bDone ( false ) , m_uHitsOverFor ( 0 ) { int iChildren = dChildren.GetLength(); assert ( iChildren>=2 ); m_pDocs.Resize ( iChildren ); m_pHits.Resize ( iChildren ); m_pDocsChunk.Resize ( iChildren ); m_dMaxID.Resize ( iChildren ); m_dMyHits[0].m_uDocid = DOCID_MAX; if ( dChildren.GetLength()>0 ) m_iAtomPos = dChildren[0]->m_iAtomPos; ARRAY_FOREACH ( i, dChildren ) { assert ( m_dChildren[i] ); m_pDocs[i] = NULL; m_pHits[i] = NULL; } AllocDocinfo ( tSetup ); } void ExtOrder_c::Reset ( const ISphQwordSetup & tSetup ) { m_bDone = false; m_uHitsOverFor = 0; m_dMyHits[0].m_uDocid = DOCID_MAX; ARRAY_FOREACH ( i, m_dChildren ) { assert ( m_dChildren[i] ); m_dChildren[i]->Reset ( tSetup ); m_pDocs[i] = NULL; m_pHits[i] = NULL; } } ExtOrder_c::~ExtOrder_c () { ARRAY_FOREACH ( i, m_dChildren ) SafeDelete ( m_dChildren[i] ); } int ExtOrder_c::GetNextHit ( SphDocID_t uDocid ) { // OPTIMIZE! implement PQ instead of full-scan DWORD uMinPos = UINT_MAX; int iChild = -1; ARRAY_FOREACH ( i, m_dChildren ) { // is this child over? if ( !m_pHits[i] ) continue; // skip until proper hit while ( m_pHits[i]->m_uDocid < uDocid ) m_pHits[i]++; // hit-chunk over? request next one, and rescan if ( m_pHits[i]->m_uDocid==DOCID_MAX ) { m_pHits[i] = m_dChildren[i]->GetHitsChunk ( m_pDocsChunk[i], m_dMaxID[i] ); i--; continue; } // is this our man at all? if ( m_pHits[i]->m_uDocid==uDocid ) { // is he the best we can get? 
if ( HITMAN::GetLCS ( m_pHits[i]->m_uHitpos ) < uMinPos ) { uMinPos = HITMAN::GetLCS ( m_pHits[i]->m_uHitpos ); iChild = i; } } } return iChild; } int ExtOrder_c::GetMatchingHits ( SphDocID_t uDocid, ExtHit_t * pHitbuf, int iLimit ) { // my trackers CSphVector dAccLongest; CSphVector dAccRecent; int iPosLongest = 0; // needed to handle cases such as "a b c" << a int iPosRecent = 0; int iField = -1; dAccLongest.Reserve ( m_dChildren.GetLength() ); dAccRecent.Reserve ( m_dChildren.GetLength() ); // while there's enough space in the buffer int iMyHit = 0; while ( iMyHit+m_dChildren.GetLength()m_uDocid==uDocid ); // most recent subseq must never be longer assert ( dAccRecent.GetLength()<=dAccLongest.GetLength() ); // handle that hit! int iHitField = HITMAN::GetField ( pHit->m_uHitpos ); int iHitPos = HITMAN::GetPos ( pHit->m_uHitpos ); if ( iHitField!=iField ) { // new field; reset both trackers dAccLongest.Resize ( 0 ); dAccRecent.Resize ( 0 ); // initial seeding, if needed if ( iChild==0 ) { dAccLongest.Add ( *pHit ); iPosLongest = iHitPos + pHit->m_uSpanlen; iField = iHitField; } } else if ( iChild==dAccLongest.GetLength() && iHitPos>=iPosLongest ) { // it fits longest tracker dAccLongest.Add ( *pHit ); iPosLongest = iHitPos + pHit->m_uSpanlen; // fully matched subsequence if ( dAccLongest.GetLength()==m_dChildren.GetLength() ) { // flush longest tracker into buffer, and keep it terminated ARRAY_FOREACH ( i, dAccLongest ) pHitbuf[iMyHit++] = dAccLongest[i]; // reset both trackers dAccLongest.Resize ( 0 ); dAccRecent.Resize ( 0 ); iPosRecent = iPosLongest; } } else if ( iChild==0 ) { // it restarts most-recent tracker dAccRecent.Resize ( 0 ); dAccRecent.Add ( *pHit ); iPosRecent = iHitPos + pHit->m_uSpanlen; if ( !dAccLongest.GetLength() ) { dAccLongest.Add ( *pHit ); iPosLongest = iHitPos + pHit->m_uSpanlen; } } else if ( iChild==dAccRecent.GetLength() && iHitPos>=iPosRecent ) { // it fits most-recent tracker dAccRecent.Add ( *pHit ); iPosRecent = iHitPos + pHit->m_uSpanlen; // maybe most-recent just became longest too? if ( dAccRecent.GetLength()==dAccLongest.GetLength() ) { dAccLongest.SwapData ( dAccRecent ); dAccRecent.Resize ( 0 ); iPosLongest = iPosRecent; } } // advance hit stream m_pHits[iChild]++; } assert ( iMyHit>=0 && iMyHitGetDocsChunk ( &m_dMaxID[i] ); if ( !m_pDocs[i] ) { m_bDone = true; return NULL; } } // match, while there's enough space in buffers CSphRowitem * pDocinfo = m_pDocinfo; int iDoc = 0; int iMyHit = 0; while ( iDocm_uDocid; assert ( uDocid!=DOCID_MAX ); for ( int i=1; im_uDocid < uDocid ) m_pDocs[i]++; // block end marker? pull next block and keep scanning if ( m_pDocs[i]->m_uDocid==DOCID_MAX ) { m_pDocs[i] = m_pDocsChunk[i] = m_dChildren[i]->GetDocsChunk ( &m_dMaxID[i] ); if ( !m_pDocs[i] ) { m_bDone = true; return ReturnDocsChunk ( iDoc, pMaxID ); } continue; } // too big id? 
its out next candidate if ( m_pDocs[i]->m_uDocid > uDocid ) { uDocid = m_pDocs[i]->m_uDocid; i = 0; continue; } assert ( m_pDocs[i]->m_uDocid==uDocid ); i++; } #ifndef NDEBUG assert ( uDocid!=DOCID_MAX ); ARRAY_FOREACH ( i, m_dChildren ) { assert ( m_pDocs[i] ); assert ( m_pDocs[i]->m_uDocid==uDocid ); } #endif // prefetch hits ARRAY_FOREACH ( i, m_dChildren ) { if ( !m_pHits[i] ) m_pHits[i] = m_dChildren[i]->GetHitsChunk ( m_pDocsChunk[i], m_dMaxID[i] ); // every document comes with at least one hit // and we did not yet process current candidate's hits // so we MUST have hits at this point no matter what assert ( m_pHits[i] ); } // match and save hits int iGotHits = GetMatchingHits ( uDocid, m_dMyHits+iMyHit, MAX_HITS-1-iMyHit ); if ( iGotHits ) { CopyExtDoc ( m_dDocs[iDoc++], *m_pDocs[0], &pDocinfo, m_iStride ); iMyHit += iGotHits; } // advance doc stream m_pDocs[0]++; if ( m_pDocs[0]->m_uDocid==DOCID_MAX ) { m_pDocs[0] = m_pDocsChunk[0] = m_dChildren[0]->GetDocsChunk ( &m_dMaxID[0] ); if ( !m_pDocs[0] ) { m_bDone = true; break; } } } return ReturnDocsChunk ( iDoc, pMaxID ); } const ExtHit_t * ExtOrder_c::GetHitsChunk ( const ExtDoc_t * pDocs, SphDocID_t ) { if ( pDocs->m_uDocid==m_uHitsOverFor ) return NULL; // copy accumulated hits while we can SphDocID_t uFirstMatch = pDocs->m_uDocid; const ExtHit_t * pMyHits = m_dMyHits; int iHit = 0; for ( ;; ) { while ( pDocs->m_uDocid!=pMyHits->m_uDocid ) { while ( pDocs->m_uDocid < pMyHits->m_uDocid ) pDocs++; if ( pDocs->m_uDocid==DOCID_MAX ) break; while ( pMyHits->m_uDocid < pDocs->m_uDocid ) pMyHits++; if ( pMyHits->m_uDocid==DOCID_MAX ) break; } if ( pDocs->m_uDocid==DOCID_MAX || pMyHits->m_uDocid==DOCID_MAX ) break; assert ( pDocs->m_uDocid==pMyHits->m_uDocid ); while ( pDocs->m_uDocid==pMyHits->m_uDocid ) m_dHits[iHit++] = *pMyHits++; assert ( iHitm_uDocid==DOCID_MAX ) { // ...all of them! setup the next run to check for trailing hits m_dMyHits[0].m_uDocid = DOCID_MAX; } else { // ...but not all of them! 
we ran out of docs earlier; hence, trailing hits are of no interest m_uHitsOverFor = uFirstMatch; } } else { // we did not copy any hits; check for trailing ones as the last resort if ( pDocs->m_uDocid!=DOCID_MAX ) { iHit = GetMatchingHits ( pDocs->m_uDocid, m_dHits, MAX_HITS-1 ); } if ( !iHit ) { // actually, not *only* in this case, also in partial buffer case // but for simplicity, lets just run one extra GetHitsChunk() iteration m_uHitsOverFor = uFirstMatch; } } // all done assert ( iHitGetQwords ( hQwords ); } void ExtOrder_c::SetQwordsIDF ( const ExtQwordsHash_t & hQwords ) { ARRAY_FOREACH ( i, m_dChildren ) m_dChildren[i]->SetQwordsIDF ( hQwords ); } ////////////////////////////////////////////////////////////////////////// ExtUnit_c::ExtUnit_c ( ExtNode_i * pFirst, ExtNode_i * pSecond, const CSphSmallBitvec& uFields, const ISphQwordSetup & tSetup, const char * sUnit ) { m_pArg1 = pFirst; m_pArg2 = pSecond; XQKeyword_t tDot; tDot.m_sWord = sUnit; m_pDot = new ExtTerm_c ( CreateQueryWord ( tDot, tSetup ), uFields, tSetup, true ); m_uHitsOverFor = 0; m_uTailDocid = 0; m_uTailSentenceEnd = 0; m_pDocs1 = NULL; m_pDocs2 = NULL; m_pDotDocs = NULL; m_pDoc1 = NULL; m_pDoc2 = NULL; m_pDotDoc = NULL; m_pHit1 = NULL; m_pHit2 = NULL; m_pDotHit = NULL; m_dMyHits[0].m_uDocid = DOCID_MAX; } ExtUnit_c::~ExtUnit_c () { SafeDelete ( m_pArg1 ); SafeDelete ( m_pArg2 ); } void ExtUnit_c::Reset ( const ISphQwordSetup & tSetup ) { m_pArg1->Reset ( tSetup ); m_pArg2->Reset ( tSetup ); m_pDot->Reset ( tSetup ); m_dMyHits[0].m_uDocid = DOCID_MAX; } void ExtUnit_c::GetQwords ( ExtQwordsHash_t & hQwords ) { m_pArg1->GetQwords ( hQwords ); m_pArg2->GetQwords ( hQwords ); } void ExtUnit_c::SetQwordsIDF ( const ExtQwordsHash_t & hQwords ) { m_pArg1->SetQwordsIDF ( hQwords ); m_pArg2->SetQwordsIDF ( hQwords ); } /// skips hits until their docids are less than the given limit static inline void SkipHitsLtDocid ( const ExtHit_t * (*ppHits), SphDocID_t uMatch, ExtNode_i * pNode, const ExtDoc_t * pDocs ) { for ( ;; ) { const ExtHit_t * pHit = *ppHits; if ( !pHit || pHit->m_uDocid==DOCID_MAX ) { pHit = *ppHits = pNode->GetHitsChunk ( pDocs, DOCID_MAX ); // OPTIMIZE? use that max? if ( !pHit ) return; } while ( pHit->m_uDocid < uMatch ) pHit++; *ppHits = pHit; if ( pHit->m_uDocid!=DOCID_MAX ) return; } } /// skips hits within current document while their position is less or equal than the given limit /// returns true if a matching hit (with big enough position, and in current document) was found /// returns false otherwise static inline bool SkipHitsLtePos ( const ExtHit_t * (*ppHits), Hitpos_t uPos, ExtNode_i * pNode, const ExtDoc_t * pDocs ) { SphDocID_t uDocid = (*ppHits)->m_uDocid; for ( ;; ) { const ExtHit_t * pHit = *ppHits; if ( !pHit || pHit->m_uDocid==DOCID_MAX ) { pHit = *ppHits = pNode->GetHitsChunk ( pDocs, DOCID_MAX ); // OPTIMIZE? use that max? 
if ( !pHit ) return false; } while ( pHit->m_uDocid==uDocid && pHit->m_uHitpos<=uPos ) pHit++; *ppHits = pHit; if ( pHit->m_uDocid!=DOCID_MAX ) return ( pHit->m_uDocid==uDocid ); } } int ExtUnit_c::FilterHits ( int iMyHit, DWORD uSentenceEnd, SphDocID_t uDocid, int * pDoc ) { while ( iMyHitm_uDocid==uDocid && m_pHit1->m_uHitposm_uDocid==uDocid && m_pHit2->m_uHitposm_uDocid==uDocid && m_pHit2->m_uDocid==uDocid ) continue; // no more in-sentence hits, but perhaps more sentences in this document else break; // document is over } // register document as matching if ( pDoc ) { ExtDoc_t & tDoc = m_dDocs [ (*pDoc)++ ]; tDoc.m_uDocid = m_pDoc1->m_uDocid; tDoc.m_uDocFields = m_pDoc1->m_uDocFields | m_pDoc2->m_uDocFields; // non necessary tDoc.m_uHitlistOffset = -1; tDoc.m_fTFIDF = m_pDoc1->m_fTFIDF + m_pDoc2->m_fTFIDF; tDoc.m_pDocinfo = NULL; // no inline support, sorry pDoc = NULL; // just once } if ( bValid1 && ( !bValid2 || m_pHit1->m_uHitpos < m_pHit2->m_uHitpos ) ) { m_dMyHits[iMyHit++] = *m_pHit1++; if ( m_pHit1->m_uDocid==DOCID_MAX ) m_pHit1 = m_pArg1->GetHitsChunk ( m_pDocs1, 0 ); } else { m_dMyHits[iMyHit++] = *m_pHit2++; if ( m_pHit2->m_uDocid==DOCID_MAX ) m_pHit2 = m_pArg2->GetHitsChunk ( m_pDocs2, 0 ); } } else { // no sentence matched yet // let's check the next hit pair assert ( m_pHit1->m_uDocid==uDocid ); assert ( m_pHit2->m_uDocid==uDocid ); assert ( m_pDotHit->m_uDocid==uDocid ); // our current hit pair locations DWORD uMin = Min ( m_pHit1->m_uHitpos, m_pHit2->m_uHitpos ); DWORD uMax = Max ( m_pHit1->m_uHitpos, m_pHit2->m_uHitpos ); // skip all dots beyond the min location if ( !SkipHitsLtePos ( &m_pDotHit, uMin, m_pDot, m_pDotDocs ) ) { // we have a match! // moreover, no more dots past min location in current document // copy hits until next document uSentenceEnd = UINT_MAX; continue; } // does the first post-pair-start dot separate our hit pair? if ( m_pDotHit->m_uHitpos < uMax ) { // yes, got an "A dot B" case // rewind candidate hits past this dot, break if current document is over if ( !SkipHitsLtePos ( &m_pHit1, m_pDotHit->m_uHitpos, m_pArg1, m_pDocs1 ) ) break; if ( !SkipHitsLtePos ( &m_pHit2, m_pDotHit->m_uHitpos, m_pArg2, m_pDocs2 ) ) break; continue; } else { // we have a match! // copy hits until next dot if ( !SkipHitsLtePos ( &m_pDotHit, uMax, m_pDot, m_pDotDocs ) ) uSentenceEnd = UINT_MAX; // correction, no next dot, so make it "next document" else uSentenceEnd = m_pDotHit->m_uHitpos; assert ( uSentenceEnd ); } } } m_uTailSentenceEnd = uSentenceEnd; // just in case tail hits loop will happen return iMyHit; } void ExtUnit_c::SkipTailHits () { m_uTailDocid = 0; m_pDoc1++; m_pDoc2++; } const ExtDoc_t * ExtUnit_c::GetDocsChunk ( SphDocID_t * pMaxID ) { // SENTENCE operator is essentially AND on steroids // that also takes relative dot positions into account // // when document matches both args but not the dot, it degenerates into AND // we immediately lookup and copy matching document hits anyway, though // this is suboptimal (because these hits might never be required at all) // but this is expected to be rare case, so let's keep code simple // // when document matches both args and the dot, we need to filter the hits // only those left/right pairs that are not (!) 
separated by a dot should match int iDoc = 0; int iMyHit = 0; if ( m_uTailDocid ) SkipTailHits(); while ( iMyHitm_uDocid==DOCID_MAX ) { m_pDoc1 = m_pDocs1 = m_pArg1->GetDocsChunk ( NULL ); if ( !m_pDoc1 ) break; // node is over } if ( !m_pDoc2 || m_pDoc2->m_uDocid==DOCID_MAX ) { m_pDoc2 = m_pDocs2 = m_pArg2->GetDocsChunk ( NULL ); if ( !m_pDoc2 ) break; // node is over } // find next candidate match while ( m_pDoc1->m_uDocid!=m_pDoc2->m_uDocid && m_pDoc1->m_uDocid!=DOCID_MAX && m_pDoc2->m_uDocid!=DOCID_MAX ) { while ( m_pDoc1->m_uDocid < m_pDoc2->m_uDocid && m_pDoc2->m_uDocid!=DOCID_MAX ) m_pDoc1++; while ( m_pDoc1->m_uDocid > m_pDoc2->m_uDocid && m_pDoc1->m_uDocid!=DOCID_MAX ) m_pDoc2++; } // got our candidate that matches AND? SphDocID_t uDocid = m_pDoc1->m_uDocid; if ( m_pDoc1->m_uDocid==DOCID_MAX || m_pDoc2->m_uDocid==DOCID_MAX ) continue; // yes, now fetch more dots docs, if needed // note how NULL is accepted here, "A and B but no dots" case is valid! if ( !m_pDotDoc || m_pDotDoc->m_uDocid==DOCID_MAX ) m_pDotDoc = m_pDotDocs = m_pDot->GetDocsChunk ( NULL ); // skip preceding docs while ( m_pDotDoc && m_pDotDoc->m_uDocid < uDocid ) { while ( m_pDotDoc->m_uDocid < uDocid ) m_pDotDoc++; if ( m_pDotDoc->m_uDocid==DOCID_MAX ) m_pDotDoc = m_pDotDocs = m_pDot->GetDocsChunk ( NULL ); } // we will need document hits on both routes below SkipHitsLtDocid ( &m_pHit1, uDocid, m_pArg1, m_pDocs1 ); SkipHitsLtDocid ( &m_pHit2, uDocid, m_pArg2, m_pDocs2 ); assert ( m_pHit1->m_uDocid==uDocid ); assert ( m_pHit2->m_uDocid==uDocid ); DWORD uSentenceEnd = 0; if ( !m_pDotDoc || m_pDotDoc->m_uDocid!=uDocid ) { // no dots in current document? // just copy all hits until next document uSentenceEnd = UINT_MAX; } else { // got both hits and dots // rewind to relevant dots hits, then do sentence boundary detection SkipHitsLtDocid ( &m_pDotHit, uDocid, m_pDot, m_pDotDocs ); } // do those hits iMyHit = FilterHits ( iMyHit, uSentenceEnd, uDocid, &iDoc ); // out of matching hits buffer? gotta return docs chunk now, then if ( iMyHit==MAX_HITS-1 ) { // mark a possibility of some trailing hits for current dot, if any if ( ( m_pHit1 && m_pHit1->m_uDocid==uDocid ) || ( m_pHit2 && m_pHit2->m_uDocid==uDocid ) ) { m_uTailDocid = uDocid; // yep, do check that tail } else { SkipTailHits(); // nope, both hit lists are definitely over } return ReturnDocsChunk ( iDoc, iMyHit, pMaxID ); } // all hits copied; do the next candidate m_pDoc1++; m_pDoc2++; } return ReturnDocsChunk ( iDoc, iMyHit, pMaxID ); } const ExtHit_t * ExtUnit_c::GetHitsChunk ( const ExtDoc_t * pDocs, SphDocID_t ) { SphDocID_t uFirstMatch = pDocs->m_uDocid; // current hits chunk already returned if ( m_uHitsOverFor==uFirstMatch ) { // and there are no trailing hits? bail if ( !m_uTailDocid ) return NULL; // and there might be trailing hits for the last document? try and loop them int iMyHit = FilterHits ( 0, m_uTailSentenceEnd, m_uTailDocid, NULL ); if ( !iMyHit ) { // no trailing hits were there actually m_uTailDocid = 0; m_pDoc1++; m_pDoc2++; return NULL; } // ok, we got some trailing hits! 
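// (these come from the state GetDocsChunk() saved when m_dMyHits filled up mid-document:
// m_uTailDocid and m_uTailSentenceEnd record where filtering stopped, and the FilterHits()
// call above simply resumed from that saved sentence boundary)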
// check whether we might have even more if (!( iMyHit==MAX_HITS-1 && m_pHit1 && m_pHit1->m_uDocid==m_uTailDocid && m_pHit2 && m_pHit2->m_uDocid==m_uTailDocid )) { // nope, both hit lists are definitely over now m_uTailDocid = 0; m_pDoc1++; m_pDoc2++; } // return those trailing hits assert ( iMyHitm_uDocid!=pDocs->m_uDocid ) { while ( pDocs->m_uDocid < pMyHit->m_uDocid && pDocs->m_uDocid!=DOCID_MAX ) pDocs++; if ( pDocs->m_uDocid==DOCID_MAX ) break; while ( pMyHit->m_uDocid < pDocs->m_uDocid ) pMyHit++; } // out of hits if ( pMyHit->m_uDocid==DOCID_MAX || pDocs->m_uDocid==DOCID_MAX ) { // there still might be trailing hits // if so, they will be handled on next entry m_uHitsOverFor = uFirstMatch; if ( pDocs->m_uDocid==DOCID_MAX && m_uTailDocid ) SkipTailHits(); break; } // copy while ( pMyHit->m_uDocid==pDocs->m_uDocid ) m_dHits[iHit++] = *pMyHit++; if ( pMyHit->m_uDocid==DOCID_MAX ) { m_uHitsOverFor = uFirstMatch; break; } } assert ( iHit>=0 && iHitDebugDump(0); #endif m_pDoclist = NULL; m_pHitlist = NULL; m_uMaxID = 0; m_uPayloadMask = 0; m_iQwords = 0; m_pIndex = tSetup.m_pIndex; m_pCtx = tSetup.m_pCtx; m_dZones = tXQ.m_dZones; m_dZoneStart.Resize ( m_dZones.GetLength() ); m_dZoneEnd.Resize ( m_dZones.GetLength() ); m_dZoneMax.Resize ( m_dZones.GetLength() ); m_dZoneMin.Resize ( m_dZones.GetLength() ); m_dZoneMax.Fill ( 0 ); m_dZoneMin.Fill ( DOCID_MAX ); ARRAY_FOREACH ( i, m_dZones ) { XQKeyword_t tDot; tDot.m_sWord.SetSprintf ( "%c%s", MAGIC_CODE_ZONE, m_dZones[i].cstr() ); m_dZoneStartTerm.Add ( new ExtTerm_c ( CreateQueryWord ( tDot, tSetup ), tSetup ) ); m_dZoneStart[i] = NULL; tDot.m_sWord.SetSprintf ( "%c/%s", MAGIC_CODE_ZONE, m_dZones[i].cstr() ); m_dZoneEndTerm.Add ( new ExtTerm_c ( CreateQueryWord ( tDot, tSetup ), tSetup ) ); m_dZoneEnd[i] = NULL; } } ExtRanker_c::~ExtRanker_c () { SafeDelete ( m_pRoot ); ARRAY_FOREACH ( i, m_dZones ) { SafeDelete ( m_dZoneStartTerm[i] ); SafeDelete ( m_dZoneEndTerm[i] ); } } void ExtRanker_c::Reset ( const ISphQwordSetup & tSetup ) { if ( m_pRoot ) m_pRoot->Reset ( tSetup ); ARRAY_FOREACH ( i, m_dZones ) { m_dZoneStartTerm[i]->Reset ( tSetup ); m_dZoneEndTerm[i]->Reset ( tSetup ); m_dZoneStart[i] = NULL; m_dZoneEnd[i] = NULL; } m_dZoneMax.Fill ( 0 ); m_dZoneMin.Fill ( DOCID_MAX ); m_hZoneInfo.Reset(); } const ExtDoc_t * ExtRanker_c::GetFilteredDocs () { for ( ;; ) { // get another chunk m_uMaxID = 0; const ExtDoc_t * pCand = m_pRoot->GetDocsChunk ( &m_uMaxID ); if ( !pCand ) return NULL; // create matches, and filter them int iDocs = 0; while ( pCand->m_uDocid!=DOCID_MAX ) { m_tTestMatch.m_iDocID = pCand->m_uDocid; if ( pCand->m_pDocinfo ) memcpy ( m_tTestMatch.m_pDynamic, pCand->m_pDocinfo, m_iInlineRowitems*sizeof(CSphRowitem) ); if ( m_pIndex->EarlyReject ( m_pCtx, m_tTestMatch ) ) { pCand++; continue; } m_dMyDocs[iDocs] = *pCand; m_tTestMatch.m_iWeight = (int)( (pCand->m_fTFIDF+0.5f)*SPH_BM25_SCALE ); // FIXME! bench bNeedBM25 Swap ( m_tTestMatch, m_dMyMatches[iDocs] ); iDocs++; pCand++; } // clean up zone hash if ( m_uMaxID!=DOCID_MAX ) { ARRAY_FOREACH ( i, m_dZoneMin ) { SphDocID_t uMinDocid = m_dZoneMin[i]; if ( uMinDocid==DOCID_MAX ) continue; Verify ( m_hZoneInfo.IterateStart ( ZoneKey_t ( i, uMinDocid ) ) ); uMinDocid = DOCID_MAX; do { ZoneKey_t tKey = m_hZoneInfo.IterateGetKey(); if ( tKey.m_iZone!=i || tKey.m_uDocid>m_uMaxID ) { uMinDocid = ( tKey.m_iZone==i ) ? 
tKey.m_uDocid : DOCID_MAX; break; } m_hZoneInfo.Delete ( tKey ); } while ( m_hZoneInfo.IterateNext() ); m_dZoneMin[i] = uMinDocid; } } if ( iDocs ) { m_dMyDocs[iDocs].m_uDocid = DOCID_MAX; return m_dMyDocs; } } } void ExtRanker_c::SetQwordsIDF ( const ExtQwordsHash_t & hQwords ) { m_iQwords = hQwords.GetLength (); m_iMaxQuerypos = 0; hQwords.IterateStart(); while ( hQwords.IterateNext() ) m_iMaxQuerypos = Max ( m_iMaxQuerypos, hQwords.IterateGet().m_iQueryPos ); if ( m_pRoot ) m_pRoot->SetQwordsIDF ( hQwords ); } SphZoneHit_e ExtRanker_c::IsInZone ( int iZone, const ExtHit_t * pHit ) { // quick route, we have current docid cached ZoneKey_t tKey ( iZone, pHit->m_uDocid ); // OPTIMIZE? allow 2-component hash keys maybe? ZoneInfo_t * pZone = m_hZoneInfo ( tKey ); if ( pZone ) { // remove end markers that might mess up ordering Hitpos_t uPos = HITMAN::GetLCS ( pHit->m_uHitpos ); int iSpan = FindSpan ( pZone->m_dStarts, uPos ); return ( iSpan>=0 && uPos<=pZone->m_dEnds[iSpan] ) ? SPH_ZONE_FOUND : SPH_ZONE_NO_SPAN; } // is there any zone info for this document at all? if ( pHit->m_uDocid<=m_dZoneMax[iZone] ) return SPH_ZONE_NO_DOCUMENT; // long route, read in zone info for all (!) the documents until next requested // that's because we might be queried out of order // current chunk const ExtDoc_t * pStart = m_dZoneStart[iZone]; const ExtDoc_t * pEnd = m_dZoneEnd[iZone]; // now keep caching spans until we see current id while ( pHit->m_uDocid > m_dZoneMax[iZone] ) { // get more docs if needed if ( ( !pStart && m_dZoneMax[iZone]!=DOCID_MAX ) || pStart->m_uDocid==DOCID_MAX ) { pStart = m_dZoneStartTerm[iZone]->GetDocsChunk ( NULL ); if ( !pStart ) { m_dZoneMax[iZone] = DOCID_MAX; return SPH_ZONE_NO_DOCUMENT; } } if ( ( !pEnd && m_dZoneMax[iZone]!=DOCID_MAX ) || pEnd->m_uDocid==DOCID_MAX ) { pEnd = m_dZoneEndTerm[iZone]->GetDocsChunk ( NULL ); if ( !pEnd ) { m_dZoneMax[iZone] = DOCID_MAX; return SPH_ZONE_NO_DOCUMENT; } } assert ( pStart && pEnd ); // skip zone starts past already cached stuff while ( pStart->m_uDocid<=m_dZoneMax[iZone] ) pStart++; if ( pStart->m_uDocid==DOCID_MAX ) continue; // skip zone ends until a match with start while ( pEnd->m_uDocidm_uDocid ) pEnd++; if ( pEnd->m_uDocid==DOCID_MAX ) continue; // handle mismatching start/end ids // (this must never happen normally, but who knows what data we're fed) assert ( pStart->m_uDocid!=DOCID_MAX ); assert ( pEnd->m_uDocid!=DOCID_MAX ); assert ( pStart->m_uDocid<=pEnd->m_uDocid ); if ( pStart->m_uDocid!=pEnd->m_uDocid ) { while ( pStart->m_uDocid < pEnd->m_uDocid ) pStart++; if ( pStart->m_uDocid==DOCID_MAX ) continue; } // first matching uncached docid found! assert ( pStart->m_uDocid==pEnd->m_uDocid ); assert ( pStart->m_uDocid > m_dZoneMax[iZone] ); // but maybe we don't need docid this big just yet? if ( pStart->m_uDocid > pHit->m_uDocid ) { // store current in-chunk positions m_dZoneStart[iZone] = pStart; m_dZoneEnd[iZone] = pEnd; // no zone info for all those precending documents (including requested one) m_dZoneMax[iZone] = pStart->m_uDocid-1; return SPH_ZONE_NO_DOCUMENT; } // cache all matching docs from current chunks below requested docid // (there might be more matching docs, but we are lazy and won't cache them upfront) ExtDoc_t dCache [ ExtNode_i::MAX_DOCS ]; int iCache = 0; while ( pStart->m_uDocid<=pHit->m_uDocid ) { // match if ( pStart->m_uDocid==pEnd->m_uDocid ) { dCache[iCache++] = *pStart; pStart++; pEnd++; continue; } // mismatch! 
// this must not really happen, starts/ends must be in sync // but let's be graceful anyway, and just skip to next match if ( pStart->m_uDocid==DOCID_MAX || pEnd->m_uDocid==DOCID_MAX ) break; while ( pStart->m_uDocid < pEnd->m_uDocid ) pStart++; if ( pStart->m_uDocid==DOCID_MAX ) break; while ( pEnd->m_uDocid < pStart->m_uDocid ) pEnd++; if ( pEnd->m_uDocid==DOCID_MAX ) break; } // should have found at least one id to cache assert ( iCache ); assert ( iCache < ExtNode_i::MAX_DOCS ); dCache[iCache].m_uDocid = DOCID_MAX; // do caching const ExtHit_t * pStartHits = m_dZoneStartTerm[iZone]->GetHitsChunk ( dCache, DOCID_MAX ); const ExtHit_t * pEndHits = m_dZoneEndTerm[iZone]->GetHitsChunk ( dCache, DOCID_MAX ); // loop documents one by one while ( pStartHits && pEndHits ) { // load all hits for current document SphDocID_t uCur = pStartHits->m_uDocid; tKey.m_uDocid = uCur; pZone = m_hZoneInfo.AddUnique ( ZoneInfo_t(), tKey ); // load all the start hits for it while ( pStartHits ) { while ( pStartHits->m_uDocid==uCur ) { pZone->m_dStarts.Add ( pStartHits->m_uHitpos ); pStartHits++; } if ( pStartHits->m_uDocid!=DOCID_MAX ) break; pStartHits = m_dZoneStartTerm[iZone]->GetHitsChunk ( dCache, DOCID_MAX ); } // load all the end hits for it assert ( pEndHits->m_uDocid==uCur ); while ( pEndHits ) { while ( pEndHits->m_uDocid==uCur ) { pZone->m_dEnds.Add ( pEndHits->m_uHitpos ); pEndHits++; } if ( pEndHits->m_uDocid!=DOCID_MAX ) break; pEndHits = m_dZoneEndTerm[iZone]->GetHitsChunk ( dCache, DOCID_MAX ); } // data sanity checks assert ( pZone->m_dStarts.GetLength()==pZone->m_dEnds.GetLength() ); // update cache status m_dZoneMax[iZone] = uCur; m_dZoneMin[iZone] = Min ( m_dZoneMin[iZone], uCur ); } } // store current in-chunk positions m_dZoneStart[iZone] = pStart; m_dZoneEnd[iZone] = pEnd; // cached a bunch of spans, try our check again tKey.m_uDocid = pHit->m_uDocid; pZone = m_hZoneInfo ( tKey ); if ( pZone ) { // remove end markers that might mess up ordering Hitpos_t uPos = HITMAN::GetLCS ( pHit->m_uHitpos ); int iSpan = FindSpan ( pZone->m_dStarts, uPos ); return ( iSpan>=0 && uPos<=pZone->m_dEnds[iSpan] ) ? SPH_ZONE_FOUND : SPH_ZONE_NO_SPAN; } return SPH_ZONE_NO_DOCUMENT; } ////////////////////////////////////////////////////////////////////////// template < bool USE_BM25 > int ExtRanker_WeightSum_c::GetMatches () { if ( !m_pRoot ) return 0; const ExtDoc_t * pDoc = m_pDoclist; int iMatches = 0; while ( iMatchesm_uDocid==DOCID_MAX ) pDoc = GetFilteredDocs (); if ( !pDoc ) { m_pDoclist = NULL; return iMatches; } DWORD uRank = 0; DWORD uMask = pDoc->m_uDocFields; if ( !uMask ) { // possible if we have more than 32 fields // honestly loading all hits etc is cumbersome, so let's just fake it uRank = 1; } else { // just sum weights over the lowest 32 fields int iWeights = Min ( m_iWeights, 32 ); for ( int i=0; im_uDocFields & (1<m_uDocid==DOCID_MAX ) pDoc = GetFilteredDocs (); if ( !pDoc ) { m_pDoclist = NULL; return iMatches; } Swap ( m_dMatches[iMatches], m_dMyMatches[pDoc-m_dMyDocs] ); // OPTIMIZE? 
can avoid this swap and simply return m_dMyMatches (though in lesser chunks) m_dMatches[iMatches].m_iWeight = 1; iMatches++; pDoc++; } m_pDoclist = pDoc; return iMatches; } ////////////////////////////////////////////////////////////////////////// template < typename STATE > int ExtRanker_T::GetMatches () { if ( !m_pRoot ) return 0; int iMatches = 0; const ExtHit_t * pHlist = m_pHitlist; const ExtDoc_t * pDocs = m_pDoclist; // warmup if necessary if ( !pHlist ) { if ( !pDocs ) pDocs = GetFilteredDocs (); if ( !pDocs ) return iMatches; pHlist = m_pRoot->GetHitsChunk ( pDocs, m_uMaxID ); if ( !pHlist ) return iMatches; } // main matching loop const ExtDoc_t * pDoc = pDocs; for ( SphDocID_t uCurDocid=0; iMatchesm_uDocid==uCurDocid ) m_tState.Update ( pHlist++ ); // if hits block is over, get next block, but do *not* flush current doc if ( pHlist->m_uDocid==DOCID_MAX ) { assert ( pDocs ); pHlist = m_pRoot->GetHitsChunk ( pDocs, m_uMaxID ); if ( pHlist ) continue; } // otherwise (new match or no next hits block), flush current doc if ( uCurDocid ) { assert ( uCurDocid==pDoc->m_uDocid ); Swap ( m_dMatches[iMatches], m_dMyMatches[pDoc-m_dMyDocs] ); m_dMatches[iMatches].m_iWeight = m_tState.Finalize ( m_dMatches[iMatches] ); iMatches++; } // boundary checks if ( !pHlist ) { // there are no more hits for current docs block; do we have a next one? assert ( pDocs ); pDoc = pDocs = GetFilteredDocs (); // we don't, so bail out if ( !pDocs ) break; // we do, get some hits pHlist = m_pRoot->GetHitsChunk ( pDocs, m_uMaxID ); assert ( pHlist ); // fresh docs block, must have hits } // skip until next good doc/hit pair assert ( pDoc->m_uDocid<=pHlist->m_uDocid ); while ( pDoc->m_uDocidm_uDocid ) pDoc++; assert ( pDoc->m_uDocid==pHlist->m_uDocid ); uCurDocid = pHlist->m_uDocid; } m_pDoclist = pDocs; m_pHitlist = pHlist; return iMatches; } ////////////////////////////////////////////////////////////////////////// #if USE_WINDOWS #pragma warning(disable:4127) // conditional expr is const for MSVC #endif template < bool USE_BM25, bool HANDLE_DUPES > struct RankerState_Proximity_fn { BYTE m_uLCS[SPH_MAX_FIELDS]; BYTE m_uCurLCS; int m_iExpDelta; int m_iFields; const int * m_pWeights; DWORD m_uLcsTailPos; DWORD m_uLcsTailQposMask; DWORD m_uCurQposMask; DWORD m_uCurPos; bool Init ( int iFields, const int * pWeights, ExtRanker_c *, CSphString & ) { memset ( m_uLCS, 0, sizeof(m_uLCS) ); m_uCurLCS = 0; m_iExpDelta = -INT_MAX; m_iFields = iFields; m_pWeights = pWeights; m_uLcsTailPos = 0; m_uLcsTailQposMask = 0; m_uCurQposMask = 0; m_uCurPos = 0; return true; } void Update ( const ExtHit_t * pHlist ) { if ( !HANDLE_DUPES ) { // all query keywords are unique // simpler path (just do the delta) int iDelta = HITMAN::GetLCS ( pHlist->m_uHitpos ) - pHlist->m_uQuerypos; if ( iDelta==m_iExpDelta ) m_uCurLCS = m_uCurLCS + BYTE(pHlist->m_uWeight); else m_uCurLCS = BYTE(pHlist->m_uWeight); DWORD uField = HITMAN::GetField ( pHlist->m_uHitpos ); if ( m_uCurLCS>m_uLCS[uField] ) m_uLCS[uField] = m_uCurLCS; m_iExpDelta = iDelta + pHlist->m_uSpanlen - 1; // !COMMIT why spanlen?? } else { // keywords are duplicated in the query // so there might be multiple qpos entries sharing the same hitpos DWORD uPos = HITMAN::GetLCS ( pHlist->m_uHitpos ); DWORD uField = HITMAN::GetField ( pHlist->m_uHitpos ); if ( uPos!=m_uCurPos ) { // next new and shiny hitpos in line // FIXME!? what do we do with longer spans? keep looking? reset? if ( m_uCurLCS<2 ) { m_uLcsTailPos = m_uCurPos; m_uLcsTailQposMask = m_uCurQposMask; m_uCurLCS = 1; // FIXME!? 
can this ever be different? ("a b" c) maybe.. } m_uCurQposMask = 0; m_uCurPos = uPos; if ( m_uLCS[uField] < pHlist->m_uWeight ) m_uLCS[uField] = BYTE(pHlist->m_uWeight); } // add that qpos to current qpos mask (for the current hitpos) m_uCurQposMask |= ( 1UL << pHlist->m_uQuerypos ); // and check if that results in a better lcs match now int iDelta = m_uCurPos - m_uLcsTailPos; if ( ( m_uCurQposMask >> iDelta ) & m_uLcsTailQposMask ) { // cool, it matched! m_uLcsTailQposMask = ( 1UL << pHlist->m_uQuerypos ); // our lcs span now ends with a specific qpos m_uLcsTailPos = m_uCurPos; // and in a specific position m_uCurLCS = BYTE ( m_uCurLCS + pHlist->m_uWeight ); // and it's longer m_uCurQposMask = 0; // and we should avoid matching subsequent hits on the same hitpos // update per-field vector if ( m_uCurLCS>m_uLCS[uField] ) m_uLCS[uField] = m_uCurLCS; } } } DWORD Finalize ( const CSphMatch & tMatch ) { m_uCurLCS = 0; m_iExpDelta = -1; if ( HANDLE_DUPES ) { m_uLcsTailPos = 0; m_uLcsTailQposMask = 0; m_uCurQposMask = 0; m_uCurPos = 0; } DWORD uRank = 0; for ( int i=0; im_iMaxQuerypos; return true; } void Update ( const ExtHit_t * pHlist ) { // upd LCS DWORD uField = HITMAN::GetField ( pHlist->m_uHitpos ); int iDelta = HITMAN::GetLCS ( pHlist->m_uHitpos ) - pHlist->m_uQuerypos; if ( iDelta==m_iExpDelta && HITMAN::GetLCS ( pHlist->m_uHitpos )>=m_uMinExpPos ) { m_uCurLCS = m_uCurLCS + BYTE(pHlist->m_uWeight); if ( HITMAN::IsEnd ( pHlist->m_uHitpos ) && (int)pHlist->m_uQuerypos==m_iMaxQuerypos && HITMAN::GetPos ( pHlist->m_uHitpos )==m_iMaxQuerypos ) m_uExactHit |= ( 1UL << HITMAN::GetField ( pHlist->m_uHitpos ) ); } else { m_uCurLCS = BYTE(pHlist->m_uWeight); if ( HITMAN::GetPos ( pHlist->m_uHitpos )==1 ) { m_uHeadHit |= ( 1UL << HITMAN::GetField ( pHlist->m_uHitpos ) ); if ( HITMAN::IsEnd ( pHlist->m_uHitpos ) && m_iMaxQuerypos==1 ) m_uExactHit |= ( 1UL << HITMAN::GetField ( pHlist->m_uHitpos ) ); } } if ( m_uCurLCS>m_uLCS[uField] ) m_uLCS[uField] = m_uCurLCS; m_iExpDelta = iDelta + pHlist->m_uSpanlen - 1; m_uMinExpPos = HITMAN::GetLCS ( pHlist->m_uHitpos ) + 1; } DWORD Finalize ( const CSphMatch & tMatch ) { m_uCurLCS = 0; m_iExpDelta = -1; DWORD uRank = 0; for ( int i=0; i>i)&1) + ((m_uExactHit>>i)&1) )*m_pWeights[i]; m_uLCS[i] = 0; } m_uHeadHit = 0; m_uExactHit = 0; return tMatch.m_iWeight + uRank*SPH_BM25_SCALE; } }; template < bool USE_BM25 > struct RankerState_ProximityPayload_fn : public RankerState_Proximity_fn { DWORD m_uPayloadRank; DWORD m_uPayloadMask; bool Init ( int iFields, const int * pWeights, ExtRanker_c * pRanker, CSphString & sError ) { RankerState_Proximity_fn::Init ( iFields, pWeights, pRanker, sError ); m_uPayloadRank = 0; m_uPayloadMask = pRanker->m_uPayloadMask; return true; } void Update ( const ExtHit_t * pHlist ) { DWORD uField = HITMAN::GetField ( pHlist->m_uHitpos ); if ( ( 1<m_uPayloadRank += HITMAN::GetPos ( pHlist->m_uHitpos ) * this->m_pWeights[uField]; else RankerState_Proximity_fn::Update ( pHlist ); } DWORD Finalize ( const CSphMatch & tMatch ) { // as usual, redundant 'this' is just because gcc is stupid this->m_uCurLCS = 0; this->m_iExpDelta = -1; DWORD uRank = m_uPayloadRank; for ( int i=0; im_iFields; i++ ) { // no special care for payload fields as their LCS will be 0 anyway uRank += this->m_uLCS[i]*this->m_pWeights[i]; this->m_uLCS[i] = 0; } m_uPayloadRank = 0; return USE_BM25 ? 
tMatch.m_iWeight + uRank*SPH_BM25_SCALE : uRank; } }; ////////////////////////////////////////////////////////////////////////// struct RankerState_MatchAny_fn : public RankerState_Proximity_fn { int m_iPhraseK; BYTE m_uMatchMask[SPH_MAX_FIELDS]; bool Init ( int iFields, const int * pWeights, ExtRanker_c * pRanker, CSphString & sError ) { RankerState_Proximity_fn::Init ( iFields, pWeights, pRanker, sError ); m_iPhraseK = 0; for ( int i=0; im_iQwords; memset ( m_uMatchMask, 0, sizeof(m_uMatchMask) ); return true; } void Update ( const ExtHit_t * pHlist ) { RankerState_Proximity_fn::Update ( pHlist ); m_uMatchMask [ HITMAN::GetField ( pHlist->m_uHitpos ) ] |= ( 1<<(pHlist->m_uQuerypos-1) ); } DWORD Finalize ( const CSphMatch & ) { m_uCurLCS = 0; m_iExpDelta = -1; DWORD uRank = 0; for ( int i=0; im_uHitpos ) ]; } DWORD Finalize ( const CSphMatch & ) { DWORD uRes = m_uRank; m_uRank = 0; return uRes; } }; ////////////////////////////////////////////////////////////////////////// struct RankerState_Fieldmask_fn { DWORD m_uRank; bool Init ( int, const int *, ExtRanker_c *, CSphString & ) { m_uRank = 0; return true; } void Update ( const ExtHit_t * pHlist ) { m_uRank |= 1UL << HITMAN::GetField ( pHlist->m_uHitpos ); } DWORD Finalize ( const CSphMatch & ) { DWORD uRes = m_uRank; m_uRank = 0; return uRes; } }; ////////////////////////////////////////////////////////////////////////// // EXPRESSION RANKER ////////////////////////////////////////////////////////////////////////// /// ranker state that computes weight dynamically based on user supplied expression (formula) struct RankerState_Expr_fn { public: // per-field and per-document stuff BYTE m_uLCS[SPH_MAX_FIELDS]; BYTE m_uMatchMask[SPH_MAX_FIELDS]; BYTE m_uCurLCS; int m_iExpDelta; int m_iFields; const int * m_pWeights; DWORD m_uDocBM25; DWORD m_uMatchedFields; int m_iCurrentField; DWORD m_uHitCount[SPH_MAX_FIELDS]; DWORD m_uWordCount[SPH_MAX_FIELDS]; CSphVector m_dIDF; float m_dTFIDF[SPH_MAX_FIELDS]; int m_iMinHitPos[SPH_MAX_FIELDS]; int m_iMinBestSpanPos[SPH_MAX_FIELDS]; int m_iMaxQuerypos; DWORD m_uExactHit; CSphBitvec m_tKeywordMask; DWORD m_uDocWordCount; int m_iMaxWindowHits[SPH_MAX_FIELDS]; const char * m_sExpr; ISphExpr * m_pExpr; ESphAttr m_eExprType; const CSphSchema * m_pSchema; // per-query stuff int m_iMaxLCS; int m_iQueryWordCount; public: // internal state, and factor settings CSphVector m_dWindow; int m_iWindowSize; public: RankerState_Expr_fn (); ~RankerState_Expr_fn (); bool Init ( int iFields, const int * pWeights, ExtRanker_c * pRanker, CSphString & sError ); void Update ( const ExtHit_t * pHlist ); DWORD Finalize ( const CSphMatch & tMatch ); }; /// extra expression ranker node types enum ExprRankerNode_e { // field level factors XRANK_LCS, XRANK_USER_WEIGHT, XRANK_HIT_COUNT, XRANK_WORD_COUNT, XRANK_TF_IDF, XRANK_MIN_HIT_POS, XRANK_MIN_BEST_SPAN_POS, XRANK_EXACT_HIT, XRANK_MAX_WINDOW_HITS, // document level factors XRANK_BM25, XRANK_MAX_LCS, XRANK_FIELD_MASK, XRANK_QUERY_WORD_COUNT, XRANK_DOC_WORD_COUNT, // field aggregation functions XRANK_SUM }; /// generic field factor template < typename T > struct Expr_FieldFactor_c : public ISphExpr { const int * m_pIndex; const T * m_pData; Expr_FieldFactor_c ( const int * pIndex, const T * pData ) : m_pIndex ( pIndex ) , m_pData ( pData ) {} float Eval ( const CSphMatch & ) const { return (float) m_pData [ *m_pIndex ]; } int IntEval ( const CSphMatch & ) const { return (int) m_pData [ *m_pIndex ]; } }; /// bitmask field factor specialization template<> struct Expr_FieldFactor_c : 
public ISphExpr { const int * m_pIndex; const DWORD * m_pData; Expr_FieldFactor_c ( const int * pIndex, const DWORD * pData ) : m_pIndex ( pIndex ) , m_pData ( pData ) {} float Eval ( const CSphMatch & ) const { return (float)( (*m_pData) >> (*m_pIndex) ); } int IntEval ( const CSphMatch & ) const { return (int)( (*m_pData) >> (*m_pIndex) ); } }; /// generic per-document int factor struct Expr_IntPtr_c : public ISphExpr { DWORD * m_pVal; explicit Expr_IntPtr_c ( DWORD * pVal ) : m_pVal ( pVal ) {} float Eval ( const CSphMatch & ) const { return (float)*m_pVal; } int IntEval ( const CSphMatch & ) const { return (int)*m_pVal; } }; /// function that sums sub-expressions over matched fields struct Expr_Sum_c : public ISphExpr { RankerState_Expr_fn * m_pState; ISphExpr * m_pArg; Expr_Sum_c ( RankerState_Expr_fn * pState, ISphExpr * pArg ) : m_pState ( pState ) , m_pArg ( pArg ) {} float Eval ( const CSphMatch & tMatch ) const { m_pState->m_iCurrentField = 0; float fRes = 0; DWORD uMask = m_pState->m_uMatchedFields; while ( uMask ) { if ( uMask & 1 ) fRes += m_pArg->Eval ( tMatch ); uMask >>= 1; m_pState->m_iCurrentField++; } return fRes; } int IntEval ( const CSphMatch & tMatch ) const { m_pState->m_iCurrentField = 0; int iRes = 0; DWORD uMask = m_pState->m_uMatchedFields; while ( uMask ) { if ( uMask & 1 ) iRes += m_pArg->IntEval ( tMatch ); uMask >>= 1; m_pState->m_iCurrentField++; } return iRes; } }; // FIXME! cut/pasted from sphinxexpr; remove dupe struct Expr_GetIntConst_c : public ISphExpr { int m_iValue; explicit Expr_GetIntConst_c ( int iValue ) : m_iValue ( iValue ) {} virtual float Eval ( const CSphMatch & ) const { return (float) m_iValue; } // no assert() here cause generic float Eval() needs to work even on int-evaluator tree virtual int IntEval ( const CSphMatch & ) const { return m_iValue; } virtual int64_t Int64Eval ( const CSphMatch & ) const { return m_iValue; } }; /// hook that exposes field-level factors, document-level factors, and matched field SUM() function to generic expressions class ExprRankerHook_c : public ISphExprHook { public: RankerState_Expr_fn * m_pState; const char * m_sCheckError; bool m_bCheckInFieldAggr; public: explicit ExprRankerHook_c ( RankerState_Expr_fn * pState ) : m_pState ( pState ) , m_sCheckError ( NULL ) , m_bCheckInFieldAggr ( false ) {} int IsKnownIdent ( const char * sIdent ) { // OPTIMIZE? hash this some nice long winter night? 
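// these are the factor names a user-supplied ranking expression (SPH_RANK_EXPR) may reference;
// for instance, a formula roughly like "sum(lcs*user_weight)*1000+bm25" rebuilds the classic
// proximity_bm25 style of weighting out of the field-level and document-level factors handled below.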
if ( !strcasecmp ( sIdent, "lcs" ) ) return XRANK_LCS; if ( !strcasecmp ( sIdent, "user_weight" ) ) return XRANK_USER_WEIGHT; if ( !strcasecmp ( sIdent, "hit_count" ) ) return XRANK_HIT_COUNT; if ( !strcasecmp ( sIdent, "word_count" ) ) return XRANK_WORD_COUNT; if ( !strcasecmp ( sIdent, "tf_idf" ) ) return XRANK_TF_IDF; if ( !strcasecmp ( sIdent, "min_hit_pos" ) ) return XRANK_MIN_HIT_POS; if ( !strcasecmp ( sIdent, "min_best_span_pos" ) ) return XRANK_MIN_BEST_SPAN_POS; if ( !strcasecmp ( sIdent, "exact_hit" ) ) return XRANK_EXACT_HIT; if ( !strcasecmp ( sIdent, "bm25" ) ) return XRANK_BM25; if ( !strcasecmp ( sIdent, "max_lcs" ) ) return XRANK_MAX_LCS; if ( !strcasecmp ( sIdent, "field_mask" ) ) return XRANK_FIELD_MASK; if ( !strcasecmp ( sIdent, "query_word_count" ) ) return XRANK_QUERY_WORD_COUNT; if ( !strcasecmp ( sIdent, "doc_word_count" ) ) return XRANK_DOC_WORD_COUNT; return -1; } int IsKnownFunc ( const char * sFunc ) { if ( !strcasecmp ( sFunc, "sum" ) ) return XRANK_SUM; if ( !strcasecmp ( sFunc, "max_window_hits" ) ) return XRANK_MAX_WINDOW_HITS; return -1; } ISphExpr * CreateNode ( int iID, ISphExpr * pLeft ) { int * pCF = &m_pState->m_iCurrentField; // just a shortcut switch ( iID ) { case XRANK_LCS: return new Expr_FieldFactor_c ( pCF, m_pState->m_uLCS ); case XRANK_USER_WEIGHT: return new Expr_FieldFactor_c ( pCF, m_pState->m_pWeights ); case XRANK_HIT_COUNT: return new Expr_FieldFactor_c ( pCF, m_pState->m_uHitCount ); case XRANK_WORD_COUNT: return new Expr_FieldFactor_c ( pCF, m_pState->m_uWordCount ); case XRANK_TF_IDF: return new Expr_FieldFactor_c ( pCF, m_pState->m_dTFIDF ); case XRANK_MIN_HIT_POS: return new Expr_FieldFactor_c ( pCF, m_pState->m_iMinHitPos ); case XRANK_MIN_BEST_SPAN_POS: return new Expr_FieldFactor_c ( pCF, m_pState->m_iMinBestSpanPos ); case XRANK_EXACT_HIT: return new Expr_FieldFactor_c ( pCF, &m_pState->m_uExactHit ); case XRANK_MAX_WINDOW_HITS: { CSphMatch tDummy; m_pState->m_iWindowSize = pLeft->IntEval ( tDummy ); // must be constant; checked in GetReturnType() return new Expr_FieldFactor_c ( pCF, m_pState->m_iMaxWindowHits ); } case XRANK_BM25: return new Expr_IntPtr_c ( &m_pState->m_uDocBM25 ); case XRANK_MAX_LCS: return new Expr_GetIntConst_c ( m_pState->m_iMaxLCS ); case XRANK_FIELD_MASK: return new Expr_IntPtr_c ( &m_pState->m_uMatchedFields ); case XRANK_QUERY_WORD_COUNT: return new Expr_GetIntConst_c ( m_pState->m_iQueryWordCount ); case XRANK_DOC_WORD_COUNT: return new Expr_IntPtr_c ( &m_pState->m_uDocWordCount ); case XRANK_SUM: return new Expr_Sum_c ( m_pState, pLeft ); default: return NULL; } } ESphAttr GetIdentType ( int iID ) { switch ( iID ) { case XRANK_LCS: // field-level case XRANK_USER_WEIGHT: case XRANK_HIT_COUNT: case XRANK_WORD_COUNT: case XRANK_MIN_HIT_POS: case XRANK_MIN_BEST_SPAN_POS: case XRANK_EXACT_HIT: case XRANK_MAX_WINDOW_HITS: case XRANK_BM25: // doc-level case XRANK_MAX_LCS: case XRANK_FIELD_MASK: case XRANK_QUERY_WORD_COUNT: case XRANK_DOC_WORD_COUNT: return SPH_ATTR_INTEGER; case XRANK_TF_IDF: return SPH_ATTR_FLOAT; default: assert ( 0 ); return SPH_ATTR_INTEGER; } } ESphAttr GetReturnType ( int iID, const CSphVector & dArgs, bool bAllConst, CSphString & sError ) { switch ( iID ) { case XRANK_SUM: if ( dArgs.GetLength()!=1 ) { sError = "SUM() requires 1 argument"; return SPH_ATTR_NONE; } return dArgs[0]; case XRANK_MAX_WINDOW_HITS: if ( dArgs.GetLength()!=1 || dArgs[0]!=SPH_ATTR_INTEGER || !bAllConst ) { sError = "MAX_WINDOW_HITS() requires 1 constant int argument"; return SPH_ATTR_NONE; } return 
SPH_ATTR_INTEGER; default: sError.SetSprintf ( "internal error: unknown hook function (id=%d)", iID ); } return SPH_ATTR_NONE; } void CheckEnter ( int iID ) { if ( !m_sCheckError ) switch ( iID ) { case XRANK_LCS: case XRANK_USER_WEIGHT: case XRANK_HIT_COUNT: case XRANK_WORD_COUNT: case XRANK_TF_IDF: case XRANK_MIN_HIT_POS: case XRANK_MIN_BEST_SPAN_POS: case XRANK_EXACT_HIT: case XRANK_MAX_WINDOW_HITS: if ( !m_bCheckInFieldAggr ) m_sCheckError = "field factors must only occur withing field aggregates in ranking expression"; break; case XRANK_SUM: if ( m_bCheckInFieldAggr ) m_sCheckError = "field aggregates can not be nested in ranking expression"; else m_bCheckInFieldAggr = true; break; default: assert ( iID>=0 ); return; } } void CheckExit ( int iID ) { if ( !m_sCheckError && iID==XRANK_SUM ) { assert ( m_bCheckInFieldAggr ); m_bCheckInFieldAggr = false; } } }; /// ctor RankerState_Expr_fn::RankerState_Expr_fn () : m_pWeights ( NULL ) , m_sExpr ( NULL ) , m_pExpr ( NULL ) , m_iMaxLCS ( 0 ) , m_iQueryWordCount ( 0 ) {} /// dtor RankerState_Expr_fn::~RankerState_Expr_fn () { SafeRelease ( m_pExpr ); } /// initialize ranker state bool RankerState_Expr_fn::Init ( int iFields, const int * pWeights, ExtRanker_c * pRanker, CSphString & sError ) { // cleanup factors memset ( m_uLCS, 0, sizeof(m_uLCS) ); memset ( m_uMatchMask, 0, sizeof(m_uMatchMask) ); m_uCurLCS = 0; m_iExpDelta = -INT_MAX; m_iFields = iFields; m_pWeights = pWeights; m_uDocBM25 = 0; m_uMatchedFields = 0; m_iCurrentField = 0; memset ( m_uHitCount, 0, sizeof(m_uHitCount) ); memset ( m_uWordCount, 0, sizeof(m_uWordCount) ); memset ( m_dTFIDF, 0, sizeof(m_dTFIDF) ); memset ( m_iMinHitPos, 0, sizeof(m_iMinHitPos) ); memset ( m_iMinBestSpanPos, 0, sizeof(m_iMinBestSpanPos) ); memset ( m_iMaxWindowHits, 0, sizeof(m_iMaxWindowHits) ); m_iMaxQuerypos = pRanker->m_iMaxQuerypos; m_uExactHit = 0; m_uDocWordCount = 0; m_iWindowSize = 1; // compute query level constants // max_lcs, aka m_iMaxLCS (for matchany ranker emulation) gets computed here // query_word_count, aka m_iQueryWordCount is set elsewhere (in SetQwordsIDF()) m_iMaxLCS = 0; for ( int i=0; im_iQwords; // parse expression bool bUsesWeight; ExprRankerHook_c tHook ( this ); m_pExpr = sphExprParse ( m_sExpr, *m_pSchema, &m_eExprType, &bUsesWeight, sError, NULL, &tHook ); if ( !m_pExpr ) return false; if ( m_eExprType!=SPH_ATTR_INTEGER && m_eExprType!=SPH_ATTR_FLOAT ) { sError = "ranking expression must evaluate to integer or float"; return false; } if ( bUsesWeight ) { sError = "ranking expression must not refer to WEIGHT()"; return false; } if ( tHook.m_sCheckError ) { sError = tHook.m_sCheckError; return false; } // all seems ok return true; } /// process next hit, update factors void RankerState_Expr_fn::Update ( const ExtHit_t * pHlist ) { const DWORD uField = HITMAN::GetField ( pHlist->m_uHitpos ); const int iPos = HITMAN::GetPos ( pHlist->m_uHitpos ); // update LCS int iDelta = HITMAN::GetLCS ( pHlist->m_uHitpos ) - pHlist->m_uQuerypos; if ( iDelta==m_iExpDelta ) { m_uCurLCS = m_uCurLCS + BYTE(pHlist->m_uWeight); if ( HITMAN::IsEnd ( pHlist->m_uHitpos ) && (int)pHlist->m_uQuerypos==m_iMaxQuerypos && iPos==m_iMaxQuerypos ) m_uExactHit |= ( 1UL << uField ); } else { m_uCurLCS = BYTE(pHlist->m_uWeight); if ( iPos==1 && HITMAN::IsEnd ( pHlist->m_uHitpos ) && m_iMaxQuerypos==1 ) m_uExactHit |= ( 1UL << uField ); } if ( m_uCurLCS>m_uLCS[uField] ) { m_uLCS[uField] = m_uCurLCS; m_iMinBestSpanPos[uField] = iPos - m_uCurLCS + 1; } m_iExpDelta = iDelta + pHlist->m_uSpanlen - 1; // update 
other stuff m_uMatchMask[uField] |= ( 1<<(pHlist->m_uQuerypos-1) ); m_uMatchedFields |= ( 1UL<m_uQuerypos ) ) { m_uHitCount[uField]++; m_uWordCount[uField] |= ( 1<m_uQuerypos ); m_uDocWordCount |= ( 1<m_uQuerypos ); } m_dTFIDF[uField] += m_dIDF [ pHlist->m_uQuerypos ]; if ( !m_iMinHitPos[uField] ) m_iMinHitPos[uField] = iPos; // update hit window, max_window_hits factor if ( m_dWindow.GetLength() ) { // sorted_remove_if ( _1 + winsize <= hitpos ) ) int i = 0; while ( im_uHitpos ) i++; for ( int j=0; jm_uHitpos ); m_iMaxWindowHits[uField] = Max ( m_iMaxWindowHits[uField], m_dWindow.GetLength() ); } /// finish document processing, compute weight from factors DWORD RankerState_Expr_fn::Finalize ( const CSphMatch & tMatch ) { // finishing touches m_uDocBM25 = tMatch.m_iWeight; for ( int i=0; iIntEval ( tMatch ) : (DWORD)m_pExpr->Eval ( tMatch ); // cleanup // OPTIMIZE? quick full wipe? (using dwords/sse/whatever) m_uCurLCS = 0; m_iExpDelta = -1; for ( int i=0; i { public: ExtRanker_Expr_c ( const XQQuery_t & tXQ, const ISphQwordSetup & tSetup, const char * sExpr, const CSphSchema & tSchema ) : ExtRanker_T ( tXQ, tSetup ) { // tricky bit, we stash the pointer to expr here, but it will be parsed // somewhat later during InitState() call, when IDFs etc are computed m_tState.m_sExpr = sExpr; m_tState.m_pSchema = &tSchema; } void SetQwordsIDF ( const ExtQwordsHash_t & hQwords ) { // this sets m_iMaxQuerypos, setups terms etc ExtRanker_T::SetQwordsIDF ( hQwords ); // setup our own custom stuff, begin with IDFs m_tState.m_dIDF.Resize ( m_iMaxQuerypos+1 ); ARRAY_FOREACH ( i, m_tState.m_dIDF ) m_tState.m_dIDF[i] = 0.0f; m_tState.m_iQueryWordCount = 0; m_tState.m_tKeywordMask.Init ( m_iMaxQuerypos+1 ); hQwords.IterateStart(); while ( hQwords.IterateNext() ) { const int iPos = hQwords.IterateGet().m_iQueryPos; m_tState.m_dIDF [ iPos ] = hQwords.IterateGet().m_fIDF; m_tState.m_tKeywordMask.BitSet ( iPos ); // tricky bit // for query_word_count, we only want to count keywords that are not (!) excluded by the query // that is, in (aa NOT bb) case, we want a value of 1, not 2 if ( !hQwords.IterateGet().m_bExcluded ) m_tState.m_iQueryWordCount++; } } }; ////////////////////////////////////////////////////////////////////////// // RANKER FACTORY ////////////////////////////////////////////////////////////////////////// static void CheckQueryWord ( const char * szWord, CSphQueryResult * pResult, const CSphIndexSettings & tSettings, bool bStar ) { if ( ( !tSettings.m_iMinPrefixLen && !tSettings.m_iMinInfixLen ) || !bStar || !szWord ) return; int iLen = strlen ( szWord ); bool bHeadStar = szWord[0]=='*'; bool bTailStar = szWord[iLen-1]=='*'; int iLenWOStars = iLen - ( bHeadStar ? 1 : 0 ) - ( bTailStar ? 1 : 0 ); if ( bHeadStar || bTailStar ) { if ( tSettings.m_iMinInfixLen > 0 && iLenWOStars < tSettings.m_iMinInfixLen ) pResult->m_sWarning.SetSprintf ( "Query word length is less than min infix length. word: '%s' ", szWord ); else if ( tSettings.m_iMinPrefixLen > 0 && iLenWOStars < tSettings.m_iMinPrefixLen ) pResult->m_sWarning.SetSprintf ( "Query word length is less than min prefix length. 
word: '%s' ", szWord ); } } static void CheckExtendedQuery ( const XQNode_t * pNode, CSphQueryResult * pResult, const CSphIndexSettings & tSettings, bool bStar ) { ARRAY_FOREACH ( i, pNode->m_dWords ) CheckQueryWord ( pNode->m_dWords[i].m_sWord.cstr(), pResult, tSettings, bStar ); ARRAY_FOREACH ( i, pNode->m_dChildren ) CheckExtendedQuery ( pNode->m_dChildren[i], pResult, tSettings, bStar ); } struct ExtQwordOrderbyQueryPos_t { bool IsLess ( const ExtQword_t * pA, const ExtQword_t * pB ) const { return pA->m_iQueryPos < pB->m_iQueryPos; } }; static bool HasQwordDupes ( XQNode_t * pNode, SmallStringHash_T & hQwords ) { ARRAY_FOREACH ( i, pNode->m_dChildren ) if ( HasQwordDupes ( pNode->m_dChildren[i], hQwords ) ) return true; ARRAY_FOREACH ( i, pNode->m_dWords ) if ( !hQwords.Add ( 1, pNode->m_dWords[i].m_sWord ) ) return true; return false; } static bool HasQwordDupes ( XQNode_t * pNode ) { SmallStringHash_T hQwords; return HasQwordDupes ( pNode, hQwords ); } ISphRanker * sphCreateRanker ( const XQQuery_t & tXQ, const CSphQuery * pQuery, CSphQueryResult * pResult, const ISphQwordSetup & tTermSetup, const CSphQueryContext & tCtx ) { // shortcut const CSphIndex * pIndex = tTermSetup.m_pIndex; // check the keywords CheckExtendedQuery ( tXQ.m_pRoot, pResult, pIndex->GetSettings(), pIndex->IsStarEnabled() ); // fill payload mask DWORD uPayloadMask = 0; ARRAY_FOREACH ( i, pIndex->GetMatchSchema().m_dFields ) uPayloadMask |= pIndex->GetMatchSchema().m_dFields[i].m_bPayload << i; bool bSingleWord = tXQ.m_pRoot->m_dChildren.GetLength()==0 && tXQ.m_pRoot->m_dWords.GetLength()==1; bool bGotDupes = HasQwordDupes ( tXQ.m_pRoot ); // setup eval-tree ExtRanker_c * pRanker = NULL; switch ( pQuery->m_eRanker ) { case SPH_RANK_PROXIMITY_BM25: if ( uPayloadMask ) pRanker = new ExtRanker_T < RankerState_ProximityPayload_fn > ( tXQ, tTermSetup ); else if ( bSingleWord ) pRanker = new ExtRanker_WeightSum_c ( tXQ, tTermSetup ); else if ( bGotDupes ) pRanker = new ExtRanker_T < RankerState_Proximity_fn > ( tXQ, tTermSetup ); else pRanker = new ExtRanker_T < RankerState_Proximity_fn > ( tXQ, tTermSetup ); break; case SPH_RANK_BM25: pRanker = new ExtRanker_WeightSum_c ( tXQ, tTermSetup ); break; case SPH_RANK_NONE: pRanker = new ExtRanker_None_c ( tXQ, tTermSetup ); break; case SPH_RANK_WORDCOUNT: pRanker = new ExtRanker_T < RankerState_Wordcount_fn > ( tXQ, tTermSetup ); break; case SPH_RANK_PROXIMITY: if ( bSingleWord ) pRanker = new ExtRanker_WeightSum_c<> ( tXQ, tTermSetup ); else if ( bGotDupes ) pRanker = new ExtRanker_T < RankerState_Proximity_fn > ( tXQ, tTermSetup ); else pRanker = new ExtRanker_T < RankerState_Proximity_fn > ( tXQ, tTermSetup ); break; case SPH_RANK_MATCHANY: pRanker = new ExtRanker_T < RankerState_MatchAny_fn > ( tXQ, tTermSetup ); break; case SPH_RANK_FIELDMASK: pRanker = new ExtRanker_T < RankerState_Fieldmask_fn > ( tXQ, tTermSetup ); break; case SPH_RANK_SPH04: pRanker = new ExtRanker_T < RankerState_ProximityBM25Exact_fn > ( tXQ, tTermSetup ); break; case SPH_RANK_EXPR: pRanker = new ExtRanker_Expr_c ( tXQ, tTermSetup, pQuery->m_sRankerExpr.cstr(), pIndex->GetMatchSchema() ); break; default: pResult->m_sWarning.SetSprintf ( "unknown ranking mode %d; using default", (int)pQuery->m_eRanker ); if ( bGotDupes ) pRanker = new ExtRanker_T < RankerState_Proximity_fn > ( tXQ, tTermSetup ); else pRanker = new ExtRanker_T < RankerState_Proximity_fn > ( tXQ, tTermSetup ); break; } assert ( pRanker ); pRanker->m_uPayloadMask = uPayloadMask; // setup IDFs ExtQwordsHash_t hQwords; 
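	// note on the IDF setup just below: for each query keyword with a non-zero doc count n,
	// the code computes a probabilistic IDF, fIDF = ln((N-n+1)/n) / (2*iQwords*ln(1+N)),
	// where N is the collection doc count clamped to at least n. Since |ln((N-n+1)/n)| <= ln(1+N),
	// each per-keyword IDF stays within [-1/(2*iQwords), 1/(2*iQwords)], so the sum over all
	// query keywords is bounded by [-0.5, 0.5] no matter how long the query is.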
pRanker->GetQwords ( hQwords ); const int iQwords = hQwords.GetLength (); const CSphSourceStats & tSourceStats = pIndex->GetStats(); CSphVector dWords; dWords.Reserve ( hQwords.GetLength() ); hQwords.IterateStart (); while ( hQwords.IterateNext() ) { ExtQword_t & tWord = hQwords.IterateGet (); // build IDF float fIDF = 0.0f; if ( tWord.m_iDocs ) { const int iTotalClamped = Max ( tSourceStats.m_iTotalDocuments, tWord.m_iDocs ); float fLogTotal = logf ( float ( 1+iTotalClamped ) ); fIDF = logf ( float ( iTotalClamped-tWord.m_iDocs+1 ) / float ( tWord.m_iDocs ) ) / ( 2*iQwords*fLogTotal ); } tWord.m_fIDF = fIDF; dWords.Add ( &tWord ); } dWords.Sort ( ExtQwordOrderbyQueryPos_t() ); ARRAY_FOREACH ( i, dWords ) { const ExtQword_t * pWord = dWords[i]; pResult->AddStat ( pWord->m_sDictWord, pWord->m_iDocs, pWord->m_iHits, pWord->m_bExpanded ); } pRanker->SetQwordsIDF ( hQwords ); if ( !pRanker->InitState ( tCtx, pResult->m_sError ) ) SafeDelete ( pRanker ); return pRanker; } ////////////////////////////////////////////////////////////////////////// /// HIT MARKER ////////////////////////////////////////////////////////////////////////// void CSphHitMarker::Mark ( CSphVector & dMarked ) { const ExtHit_t * pHits = NULL; const ExtDoc_t * pDocs = NULL; SphDocID_t uMaxID = 0; pDocs = m_pRoot->GetDocsChunk ( &uMaxID ); if ( !pDocs ) return; for ( ;; ) { pHits = m_pRoot->GetHitsChunk ( pDocs, uMaxID ); if ( !pHits ) break; for ( ; pHits->m_uDocid!=DOCID_MAX; pHits++ ) { SphHitMark_t tMark; tMark.m_uPosition = HITMAN::GetPos ( pHits->m_uHitpos ); tMark.m_uSpan = pHits->m_uMatchlen; dMarked.Add ( tMark ); } } } CSphHitMarker::~CSphHitMarker () { SafeDelete ( m_pRoot ); } CSphHitMarker * CSphHitMarker::Create ( const XQNode_t * pRoot, const ISphQwordSetup & tSetup ) { ExtNode_i * pNode = ExtNode_i::Create ( pRoot, tSetup ); if ( pNode ) { CSphHitMarker * pMarker = new CSphHitMarker; pMarker->m_pRoot = pNode; return pMarker; } return NULL; } ////////////////////////////////////////////////////////////////////////// // INTRA-BATCH CACHING ////////////////////////////////////////////////////////////////////////// /// container that does intra-batch query-sub-tree caching /// actually carries the cached data, NOT to be recreated frequently (see thin wrapper below) class NodeCacheContainer_t { private: friend class ExtNodeCached_t; friend class CSphQueryNodeCache; private: int m_iRefCount; bool m_StateOk; const ISphQwordSetup * m_pSetup; CSphVector m_Docs; CSphVector m_Hits; CSphVector m_InlineAttrs; int m_iAtomPos; // minimal position from original donor, used for shifting CSphQueryNodeCache * m_pNodeCache; public: NodeCacheContainer_t () : m_iRefCount ( 1 ) , m_StateOk ( true ) , m_pSetup ( NULL ) , m_iAtomPos ( 0 ) , m_pNodeCache ( NULL ) {} public: void Release() { if ( --m_iRefCount<=0 ) Invalidate(); } ExtNode_i * CreateCachedWrapper ( ExtNode_i* pChild, const XQNode_t * pRawChild, const ISphQwordSetup & tSetup ); private: bool WarmupCache ( ExtNode_i * pChild, int iQWords ); void Invalidate(); }; /// cached node wrapper to be injected into actual search trees /// (special container actually carries all the data and does the work, see blow) class ExtNodeCached_t : public ExtNode_i { friend class NodeCacheContainer_t; NodeCacheContainer_t * m_pNode; ExtDoc_t * m_pHitDoc; ///< points to entry in m_dDocs which GetHitsChunk() currently emits hits for SphDocID_t m_uHitsOverFor; ///< there are no more hits for matches block starting with this ID CSphString * m_pWarning; int64_t m_iMaxTimer; ///< work until this 
timestamp int m_iHitIndex; ///< store the current position in m_Hits for GetHitsChunk() int m_iDocIndex; ///< store the current position in m_Docs for GetDocsChunk() ExtNode_i * m_pChild; ///< pointer to donor for the sake of AtomPos procession int m_iQwords; ///< number of tokens in parent query void StepForwardToHitsFor ( SphDocID_t uDocId ); // creation possible ONLY via NodeCacheContainer_t explicit ExtNodeCached_t ( NodeCacheContainer_t * pNode, ExtNode_i * pChild ) : m_pNode ( pNode ) , m_pHitDoc ( NULL ) , m_uHitsOverFor ( 0 ) , m_pWarning ( NULL ) , m_iMaxTimer ( 0 ) , m_iHitIndex ( 0 ) , m_iDocIndex ( 0 ) , m_pChild ( pChild ) , m_iQwords ( 0 ) { m_iAtomPos = pChild->m_iAtomPos; } public: virtual ~ExtNodeCached_t () { SafeDelete ( m_pChild ); SafeRelease ( m_pNode ); } virtual void Reset ( const ISphQwordSetup & tSetup ) { if ( m_pChild ) m_pChild->Reset ( tSetup ); m_iHitIndex = 0; m_iDocIndex = 0; m_uHitsOverFor = 0; m_pHitDoc = NULL; m_iMaxTimer = 0; m_iMaxTimer = tSetup.m_iMaxTimer; m_pWarning = tSetup.m_pWarning; } virtual const ExtDoc_t * GetDocsChunk ( SphDocID_t * pMaxID ); virtual const ExtHit_t * GetHitsChunk ( const ExtDoc_t * pMatched, SphDocID_t uMaxID ); virtual void GetQwords ( ExtQwordsHash_t & hQwords ) { if ( m_pChild ) m_pChild->GetQwords ( hQwords ); } virtual void SetQwordsIDF ( const ExtQwordsHash_t & hQwords ) { m_iQwords = hQwords.GetLength(); if ( m_pNode->m_pSetup ) { if ( m_pChild ) m_pChild->SetQwordsIDF ( hQwords ); m_pNode->WarmupCache ( m_pChild, m_iQwords ); } } virtual bool GotHitless () { return ( m_pChild ) ? m_pChild->GotHitless() : false; } }; ////////////////////////////////////////////////////////////////////////// ExtNode_i * NodeCacheContainer_t::CreateCachedWrapper ( ExtNode_i * pChild, const XQNode_t * pRawChild, const ISphQwordSetup & tSetup ) { if ( !m_StateOk ) return pChild; // wow! we have a virgin! 
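	// descriptive note: on the first wrapper built for an empty container, the refcount below is
	// reset to the number of occurrences of this subtree in the whole query (pRawChild->GetCount()),
	// so Release() only drops to zero, and the cached docs/hits are only invalidated, once every
	// one of those wrappers has released its reference.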
if ( !m_Docs.GetLength() ) { m_iRefCount = pRawChild->GetCount(); m_pSetup = &tSetup; } return new ExtNodeCached_t ( this, pChild ); } bool NodeCacheContainer_t::WarmupCache ( ExtNode_i * pChild, int iQwords ) { SphDocID_t pMaxID = 0; m_iAtomPos = pChild->m_iAtomPos; const ExtDoc_t * pChunk = pChild->GetDocsChunk ( &pMaxID ); int iStride = 0; assert ( m_pSetup ); if ( pChunk && pChunk->m_pDocinfo ) iStride = pChild->m_iStride; while ( pChunk ) { const ExtDoc_t * pChunkHits = pChunk; bool iHasDocs = false; for ( ; pChunk->m_uDocid!=DOCID_MAX; pChunk++ ) { m_Docs.Add ( *pChunk ); // exclude number or Qwords from FIDF m_Docs.Last().m_fTFIDF *= iQwords; m_pNodeCache->m_iMaxCachedDocs--; if ( iStride>0 ) { // since vector will relocate the data on resize, do NOT fill new m_pDocinfo right now int iLen = m_InlineAttrs.GetLength(); m_InlineAttrs.Resize ( iLen+iStride ); memcpy ( &m_InlineAttrs[iLen], pChunk->m_pDocinfo, iStride*sizeof(CSphRowitem) ); } iHasDocs = true; } const ExtHit_t * pHits = NULL; if ( iHasDocs ) { SphDocID_t uLastDocid = m_Docs.Last().m_uDocid; while ( ( pHits = pChild->GetHitsChunk ( pChunkHits, uLastDocid ) )!=NULL ) { for ( ; pHits->m_uDocid!=DOCID_MAX; pHits++ ) { m_Hits.Add ( *pHits ); m_pNodeCache->m_iMaxCachedHits--; } } } // too many values, stop caching if ( m_pNodeCache->m_iMaxCachedDocs<0 || m_pNodeCache->m_iMaxCachedHits<0 ) { Invalidate (); pChild->Reset ( *m_pSetup ); m_pSetup = NULL; return false; } pChunk = pChild->GetDocsChunk ( &pMaxID ); } if ( iStride ) ARRAY_FOREACH ( i, m_Docs ) m_Docs[i].m_pDocinfo = &m_InlineAttrs[i*iStride]; m_Docs.Add().m_uDocid = DOCID_MAX; m_Hits.Add().m_uDocid = DOCID_MAX; pChild->Reset ( *m_pSetup ); m_pSetup = NULL; return true; } void NodeCacheContainer_t::Invalidate() { m_pNodeCache->m_iMaxCachedDocs += m_Docs.GetLength(); m_pNodeCache->m_iMaxCachedHits += m_Docs.GetLength(); m_Docs.Reset(); m_InlineAttrs.Reset(); m_Hits.Reset(); m_StateOk = false; } ////////////////////////////////////////////////////////////////////////// void ExtNodeCached_t::StepForwardToHitsFor ( SphDocID_t uDocId ) { assert ( m_pNode ); assert ( m_pNode->m_StateOk ); CSphVector & dHits = m_pNode->m_Hits; int iEnd = dHits.GetLength()-1; if ( m_iHitIndex>=iEnd ) return; if ( dHits[m_iHitIndex].m_uDocid==uDocId ) return; // binary search for lower (most left) bound of the subset of values int iHitIndex = m_iHitIndex; // http://blog.gamedeff.com/?p=12 while ( iEnd-iHitIndex>1 ) { if ( uDocIddHits[iEnd].m_uDocid ) { m_iHitIndex = -1; return; } int iMid = iHitIndex + (iEnd-iHitIndex)/2; if ( dHits[iMid].m_uDocid>=uDocId ) iEnd = iMid; else iHitIndex = iMid; } m_iHitIndex = iEnd; } const ExtDoc_t * ExtNodeCached_t::GetDocsChunk ( SphDocID_t * pMaxID ) { if ( !m_pNode || !m_pChild ) return NULL; if ( !m_pNode->m_StateOk ) return m_pChild->GetDocsChunk ( pMaxID ); if ( m_iMaxTimer>0 && sphMicroTimer()>=m_iMaxTimer ) { if ( m_pWarning ) *m_pWarning = "query time exceeded max_query_time"; return NULL; } m_uMaxID = 0; int iDoc = Min ( m_iDocIndex+MAX_DOCS-1, m_pNode->m_Docs.GetLength()-1 ) - m_iDocIndex; memcpy ( &m_dDocs[0], &m_pNode->m_Docs[m_iDocIndex], sizeof(ExtDoc_t)*iDoc ); m_iDocIndex += iDoc; // funny trick based on the formula of FIDF calculation. 
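	// note on the "funny trick" above: WarmupCache() multiplied each cached doc's m_fTFIDF by the
	// donor query's keyword count (see the "exclude number of Qwords" step there), and the copy
	// loop below is meant to divide that factor back out using this reader's own m_iQwords, so the
	// TFIDF handed to the consumer matches the current query rather than the donor.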
for ( int i=0; im_StateOk ) return m_pChild->GetHitsChunk ( pMatched, uMaxID ); if ( !pMatched ) return NULL; SphDocID_t uFirstMatch = pMatched->m_uDocid; // aim to the right document ExtDoc_t * pDoc = m_pHitDoc; m_pHitDoc = NULL; if ( !pDoc ) { // if we already emitted hits for this matches block, do not do that again if ( uFirstMatch==m_uHitsOverFor ) return NULL; // early reject whole block if ( pMatched->m_uDocid > m_uMaxID ) return NULL; if ( m_uMaxID && m_dDocs[0].m_uDocid > uMaxID ) return NULL; // find match pDoc = m_dDocs; do { while ( pDoc->m_uDocid < pMatched->m_uDocid ) pDoc++; if ( pDoc->m_uDocid==DOCID_MAX ) { m_uHitsOverFor = uFirstMatch; return NULL; // matched docs block is over for me, gimme another one } while ( pMatched->m_uDocid < pDoc->m_uDocid ) pMatched++; if ( pMatched->m_uDocid==DOCID_MAX ) { m_uHitsOverFor = uFirstMatch; return NULL; // matched doc block did not yet begin for me, gimme another one } } while ( pDoc->m_uDocid!=pMatched->m_uDocid ); // setup hitlist reader StepForwardToHitsFor ( pDoc->m_uDocid ); } // hit emission int iHit = 0; while ( iHitm_Hits[m_iHitIndex]; if ( tCachedHit.m_uDocid==DOCID_MAX ) break; if ( tCachedHit.m_uDocid!=pDoc->m_uDocid ) { // no more hits; get next acceptable document pDoc++; do { while ( pDoc->m_uDocid < pMatched->m_uDocid ) pDoc++; if ( pDoc->m_uDocid==DOCID_MAX ) { pDoc = NULL; break; } // matched docs block is over for me, gimme another one while ( pMatched->m_uDocid < pDoc->m_uDocid ) pMatched++; if ( pMatched->m_uDocid==DOCID_MAX ) { pDoc = NULL; break; } // matched doc block did not yet begin for me, gimme another one } while ( pDoc->m_uDocid!=pMatched->m_uDocid ); if ( !pDoc ) break; assert ( pDoc->m_uDocid==pMatched->m_uDocid ); // setup hitlist reader StepForwardToHitsFor ( pDoc->m_uDocid ); continue; } m_iHitIndex++; m_dHits[iHit] = tCachedHit; m_dHits[iHit].m_uQuerypos = (WORD)( m_dHits[iHit].m_uQuerypos + m_iAtomPos - m_pNode->m_iAtomPos ); iHit++; } m_pHitDoc = pDoc; if ( iHit==0 || iHit=0 && iHit0 && iMaxCachedHits>0 && iMaxCachedDocs>0 ) { m_pPool = new NodeCacheContainer_t [ iCells ]; for ( int i=0; iGetOrder()>=0 ); return m_pPool [ pRawChild->GetOrder() ].CreateCachedWrapper ( pChild, pRawChild, tSetup ); } // // $Id: sphinxsearch.cpp 3111 2012-02-20 15:08:34Z klirichek $ // sphinx-2.0.4-release/src/sphinxstemru.inl0000644000176700017710000002447110563143003020012 0ustar deogardeogar// // $Id: sphinxstemru.inl 555 2007-02-09 19:05:39Z shodan $ // #undef LOC_TABLE_ENTRY #undef LOC_TABLE_INDEX #define LOC_TABLE_ENTRY LOC_PREFIX(stem_table_entry_) #define LOC_TABLE_INDEX LOC_PREFIX(stem_table_index_) struct LOC_TABLE_ENTRY { LOC_CHAR_TYPE suffix[8]; int remove, len; }; struct LOC_TABLE_INDEX { LOC_CHAR_TYPE first; int count; }; // TableStringN, where N is a number of chars #undef TS1 #undef TS2 #undef TS3 #undef TS4 #undef TS5 #define TS1(c1) { RUS::c1 } #define TS2(c1,c2) { RUS::c1, RUS::c2 } #define TS3(c1,c2,c3) { RUS::c1, RUS::c2, RUS::c3 } #define TS4(c1,c2,c3,c4) { RUS::c1, RUS::c2, RUS::c3, RUS::c4 } #define TS5(c1,c2,c3,c4,c5) { RUS::c1, RUS::c2, RUS::c3, RUS::c4, RUS::c5 } static LOC_TABLE_INDEX LOC_PREFIX(ru_adj_i)[] = { { RUS::E, 4 }, { RUS::I, 2 }, { RUS::IY, 4 }, { RUS::M, 7 }, { RUS::O, 2 }, { RUS::U, 2 }, { RUS::H, 2 }, { RUS::YU, 4 }, { RUS::YA, 4 }, }; static LOC_TABLE_ENTRY LOC_PREFIX(ru_adj)[] = { { TS2(E,E), 2 }, { TS2(I,E), 2 }, { TS2(Y,E), 2 }, { TS2(O,E), 2 }, { TS3(I,M,I), 3 }, { TS3(Y,M,I), 3 }, { TS2(E,IY), 2 }, { TS2(I,IY), 2 }, { TS2(Y,IY), 2 }, { TS2(O,IY), 2 }, { TS3(A,E,M), 0 
}, { TS3(U,E,M), 0 }, { TS3(YA,E,M), 0 }, { TS2(E,M), 2 }, { TS2(I,M), 2 }, { TS2(Y,M), 2 }, { TS2(O,M), 2 }, { TS3(E,G,O), 3 }, { TS3(O,G,O), 3 }, { TS3(E,M,U), 3 }, { TS3(O,M,U), 3 }, { TS2(I,H), 2 }, { TS2(Y,H), 2 }, { TS2(E,YU), 2 }, { TS2(O,YU), 2 }, { TS2(U,YU), 2 }, { TS2(YU,YU), 2 }, { TS2(A,YA), 2 }, { TS2(YA,YA), 2 } }; static LOC_TABLE_INDEX LOC_PREFIX(ru_part_i)[] = { { RUS::A, 3 }, { RUS::M, 1 }, { RUS::N, 3 }, { RUS::O, 3 }, { RUS::Y, 3 }, { RUS::SH, 4 }, { RUS::SCH, 5 } }; static LOC_TABLE_ENTRY LOC_PREFIX(ru_part)[] = { { TS4(A,N,N,A), 2 }, { TS4(E,N,N,A), 2 }, { TS4(YA,N,N,A), 2 }, { TS3(YA,E,M), 2 }, { TS3(A,N,N), 1 }, { TS3(E,N,N), 1 }, { TS3(YA,N,N), 1 }, { TS4(A,N,N,O), 2 }, { TS4(E,N,N,O), 2 }, { TS4(YA,N,N,O), 2 }, { TS4(A,N,N,Y), 2 }, { TS4(E,N,N,Y), 2 }, { TS4(YA,N,N,Y), 2 }, { TS3(A,V,SH), 2 }, { TS3(I,V,SH), 3 }, { TS3(Y,V,SH), 3 }, { TS3(YA,V,SH), 2 }, { TS3(A,YU,SCH), 2 }, { TS2(A,SCH), 1 }, { TS3(YA,YU,SCH), 2 }, { TS2(YA,SCH), 1 }, { TS3(U,YU,SCH), 3 } }; static LOC_TABLE_INDEX LOC_PREFIX(ru_verb_i)[] = { { RUS::A, 7 }, { RUS::E, 9 }, { RUS::I, 4 }, { RUS::IY, 4 }, { RUS::L, 4 }, { RUS::M, 5 }, { RUS::O, 7 }, { RUS::T, 9 }, { RUS::Y, 3 }, { RUS::MYA, 10 }, { RUS::YU, 4 }, { RUS::YA, 1 } }; static LOC_TABLE_ENTRY LOC_PREFIX(ru_verb)[] = { { TS3(A,L,A), 3 }, { TS3(A,N,A), 3 }, { TS3(YA,L,A), 3 }, { TS3(YA,N,A), 3 }, { TS3(I,L,A), 3 }, { TS3(Y,L,A), 3 }, { TS3(E,N,A), 3 }, { TS4(A,E,T,E), 4 }, { TS4(A,IY,T,E), 4 }, { TS3(MYA,T,E), 3 }, { TS4(U,E,T,E), 4 }, { TS4(YA,E,T,E), 4 }, { TS4(YA,IY,T,E), 4 }, { TS4(E,IY,T,E), 4 }, { TS4(U,IY,T,E), 4 }, { TS3(I,T,E), 3 }, { TS3(A,L,I), 3 }, { TS3(YA,L,I), 3 }, { TS3(I,L,I), 3 }, { TS3(Y,L,I), 3 }, { TS2(A,IY), 2 }, { TS2(YA,IY), 2 }, { TS2(E,IY), 2 }, { TS2(U,IY), 2 }, { TS2(A,L), 2 }, { TS2(YA,L), 2 }, { TS2(I,L), 2 }, { TS2(Y,L), 2 }, { TS3(A,E,M), 3 }, { TS3(YA,E,M), 3 }, { TS3(U,E,M), 3 }, { TS2(I,M), 2 }, { TS2(Y,M), 2 }, { TS3(A,L,O), 3 }, { TS3(A,N,O), 3 }, { TS3(YA,L,O), 3 }, { TS3(YA,N,O), 3 }, { TS3(I,L,O), 3 }, { TS3(Y,L,O), 3 }, { TS3(E,N,O), 3 }, { TS3(A,E,T), 3 }, { TS3(A,YU,T), 3 }, { TS3(YA,E,T), 3 }, { TS3(YA,YU,T), 3 }, { TS2(YA,T), 2 }, { TS3(U,E,T), 3 }, { TS3(U,YU,T), 3 }, { TS2(I,T), 2 }, { TS2(Y,T), 2 }, { TS3(A,N,Y), 3 }, { TS3(YA,N,Y), 3 }, { TS3(E,N,Y), 3 }, { TS4(A,E,SH,MYA), 4 }, { TS4(U,E,SH,MYA), 4 }, { TS4(YA,E,SH,MYA), 4 }, { TS3(A,T,MYA), 3 }, { TS3(E,T,MYA), 3 }, { TS3(I,T,MYA), 3 }, { TS3(U,T,MYA), 3 }, { TS3(Y,T,MYA), 3 }, { TS3(I,SH,MYA), 3 }, { TS3(YA,T,MYA), 3 }, { TS2(A,YU), 2 }, { TS2(U,YU), 2 }, { TS2(YA,YU), 2 }, { TS1(YU), 1 }, { TS2(U,YA), 2 } }; static LOC_TABLE_INDEX LOC_PREFIX(ru_dear_i)[] = { { RUS::K, 3 }, { RUS::A, 2 }, { RUS::V, 2 }, { RUS::E, 2 }, { RUS::I, 4 }, { RUS::IY, 2 }, { RUS::M, 4 }, { RUS::O, 2 }, { RUS::U, 2 }, { RUS::H, 2 }, { RUS::YU, 2 } }; static LOC_TABLE_ENTRY LOC_PREFIX(ru_dear)[] = { { TS3(CH,E,K), 3 }, { TS3(CH,O,K), 3 }, { TS3(N,O,K), 3 }, { TS3(CH,K,A), 3 }, { TS3(N, K,A), 3 }, { TS4(CH,K,O,V), 4 }, { TS4(N, K,O,V), 4 }, { TS3(CH,K,E), 3 }, { TS3(N, K,E), 3 }, { TS3(CH,K,I), 3 }, { TS3(N, K,I), 3 }, { TS5(CH,K,A,M,I), 5 }, { TS5(N, K,A,M,I), 5 }, { TS4(CH,K,O,IY), 4 }, { TS4(N, K,O,IY), 4 }, { TS4(CH,K,A,M), 4 }, { TS4(N, K,A,M), 4 }, { TS4(CH,K,O,M), 4 }, { TS4(N, K,O,M), 4 }, { TS3(CH,K,O), 3 }, { TS3(N, K,O), 3 }, { TS3(CH,K,U), 3 }, { TS3(N, K,U), 3 }, { TS4(CH,K,A,H), 4 }, { TS4(N, K,A,H), 4 }, { TS4(CH,K,O,YU), 4 }, { TS4(N, K,O,YU), 4 } }; static LOC_TABLE_INDEX LOC_PREFIX(ru_noun_i)[] = { { RUS::A, 1 }, { RUS::V, 2 }, { RUS::E, 3 }, { 
RUS::I, 6 }, { RUS::IY, 4 }, { RUS::M, 5 }, { RUS::O, 1 }, { RUS::U, 1 }, { RUS::H, 3 }, { RUS::Y, 1 }, { RUS::MYA, 1 }, { RUS::YU, 3 }, { RUS::YA, 3 } }; static LOC_TABLE_ENTRY LOC_PREFIX(ru_noun)[] = { { TS1(A), 1 }, { TS2(E,V), 2 }, { TS2(O,V), 2 }, { TS2(I,E), 2 }, { TS2(MYA,E), 2 }, { TS1(E), 1 }, { TS4(I,YA,M,I),4 }, { TS3(YA,M,I), 3 }, { TS3(A,M,I), 3 }, { TS2(E,I), 2 }, { TS2(I,I), 2 }, { TS1(I), 1 }, { TS3(I,E,IY), 3 }, { TS2(E,IY), 2 }, { TS2(O,IY), 2 }, { TS2(I,IY), 2 }, { TS3(I,YA,M), 3 }, { TS2(YA,M), 2 }, { TS3(I,E,M), 3 }, { TS2(A,M), 2 }, { TS2(O,M), 2 }, { TS1(O), 1 }, { TS1(U), 1 }, { TS2(A,H), 2 }, { TS3(I,YA,H), 3 }, { TS2(YA,H), 2 }, { TS1(Y), 1 }, { TS1(MYA), 1 }, { TS2(I,YU), 2 }, { TS2(MYA,YU), 2 }, { TS1(YU), 1 }, { TS2(I,YA), 2 }, { TS2(MYA,YA), 2 }, { TS1(YA), 1 } }; int stem_ru_table_i ( LOC_CHAR_TYPE * word, int len, LOC_TABLE_ENTRY * table, LOC_TABLE_INDEX * itable, int icount ) { int i, j, k, m; LOC_CHAR_TYPE l = word[--len]; for ( i=0, j=0; ik ) continue; for ( ; j>=0; k--, j-- ) if ( word[k]!=table[i].suffix[j] ) break; if ( j>=0 ) continue; return table[i].remove; } return 0; } j += itable[i].count; } return 0; } #undef STEM_RU_FUNC #define STEM_RU_FUNC(func,table) \ int func ( LOC_CHAR_TYPE * word, int len ) \ { \ return stem_ru_table ( word, len, LOC_PREFIX(table), \ sizeof(LOC_PREFIX(table))/sizeof(LOC_TABLE_ENTRY) ); \ } #undef STEM_RU_FUNC_I #define STEM_RU_FUNC_I(table) \ int LOC_PREFIX(stem_##table##_i) ( LOC_CHAR_TYPE * word, int len ) \ { \ return stem_ru_table_i ( word, len, LOC_PREFIX(table), LOC_PREFIX(table##_i), \ sizeof(LOC_PREFIX(table##_i))/sizeof(LOC_TABLE_INDEX) ); \ } STEM_RU_FUNC_I(ru_adj) STEM_RU_FUNC_I(ru_part) STEM_RU_FUNC_I(ru_dear) STEM_RU_FUNC_I(ru_verb) STEM_RU_FUNC_I(ru_noun) static int LOC_PREFIX(stem_ru_adjectival) ( LOC_CHAR_TYPE * word, int len ) { register int i = LOC_PREFIX(stem_ru_adj_i) ( word, len ); if ( i ) i += LOC_PREFIX(stem_ru_part_i) ( word, len-i ); return i; } static int LOC_PREFIX(stem_ru_verb_ov) ( LOC_CHAR_TYPE * word, int len ) { register int i = LOC_PREFIX(stem_ru_verb_i) ( word, len ); if ( i && (len>=i+2) && word[len-i-2] == RUS::O && word[len-i-1] == RUS::V ) return i+2; return i; } void LOC_PREFIX(stem_ru_init) () { int i; #undef STEM_RU_INIT_TABLE #define STEM_RU_INIT_TABLE(table) \ for ( i=0; i=2 ) { if ( C(2)==RUS::I || C(2)==RUS::Y || C(2)==RUS::YA ) BRK(2); if ( C(2)==RUS::A ) { if ( C(3)==RUS::V && C(4)==RUS::A ) BRK(4); BRK(2); } } if ( len>=3 && XSUFF3 ( RUS::V, RUS::SH, RUS::I ) && ( C(4)==RUS::A || C(4)==RUS::I || C(4)==RUS::Y || C(4)==RUS::YA ) ) BRK(4); if ( len>=5 && XSUFF5 ( RUS::V, RUS::SH, RUS::I, RUS::S, RUS::MYA ) && ( C(6)==RUS::A || C(6)==RUS::I || C(6)==RUS::Y || C(6)==RUS::YA ) ) BRK(6); CHK ( stem_ru_adjectival ); if ( len>=2 && ( XSUFF2 ( RUS::S, RUS::MYA ) || XSUFF2 ( RUS::S, RUS::YA ) ) ) { len -= 2; CHK ( stem_ru_adjectival ); CHK ( stem_ru_verb_ov ); } else { CHK ( stem_ru_verb_ov ); } CHK ( stem_ru_noun_i ); break; } if ( len && ( W(1,RUS::IY) || W(1,RUS::I) ) ) len--; if ( len-r2>=3 && XSUFF3 ( RUS::O, RUS::S, RUS::T ) ) len -= 3; else if ( len-r2>=4 && XSUFF4 ( RUS::O, RUS::S, RUS::T, RUS::MYA ) ) len -= 4; if ( len>=3 && XSUFF3 ( RUS::E, RUS::IY, RUS::SH ) ) len -= 3; else if ( len>=4 && XSUFF4 ( RUS::E, RUS::IY, RUS::SH, RUS::E ) ) len -= 4; if ( len>=2 && XSUFF2 ( RUS::N, RUS::N ) ) len--; if ( len && W(1,RUS::MYA) ) len--; *((unsigned char*)(word+len)) = '\0'; } // undefine externally defined stuff #undef LOC_CHAR_TYPE #undef LOC_PREFIX #undef RUS // // $Id: 
sphinxstemru.inl 555 2007-02-09 19:05:39Z shodan $ // sphinx-2.0.4-release/src/sphinxrt.cpp0000644000176700017710000054775311723552202017141 0ustar deogardeogar// // $Id: sphinxrt.cpp 3128 2012-03-01 01:44:34Z shodan $ // // // Copyright (c) 2001-2012, Andrew Aksyonoff // Copyright (c) 2008-2012, Sphinx Technologies Inc // All rights reserved // // This program is free software; you can redistribute it and/or modify // it under the terms of the GNU General Public License. You should have // received a copy of the GPL license along with this program; if you // did not, you can find it at http://www.gnu.org/ // #include "sphinx.h" #include "sphinxint.h" #include "sphinxrt.h" #include "sphinxsearch.h" #include "sphinxutils.h" #include #include #if USE_WINDOWS #include // for open(), close() #include #else #include #include #endif ////////////////////////////////////////////////////////////////////////// #define BINLOG_WRITE_BUFFER 256*1024 #define BINLOG_AUTO_FLUSH 1000000 #define BINLOG_RESTART_SIZE 128*1024*1024 #if USE_64BIT #define WORDID_MAX U64C(0xffffffffffffffff) #else #define WORDID_MAX 0xffffffffUL #endif // RT hitman typedef Hitman_c<8> HITMAN; ////////////////////////////////////////////////////////////////////////// #ifndef NDEBUG #define Verify(_expr) assert(_expr) #else #define Verify(_expr) _expr #endif ////////////////////////////////////////////////////////////////////////// // GLOBALS ////////////////////////////////////////////////////////////////////////// /// publicly exposed binlog interface ISphBinlog * g_pBinlog = NULL; /// actual binlog implementation class RtBinlog_c; static RtBinlog_c * g_pRtBinlog = NULL; /// protection from concurrent changes during binlog replay static bool g_bRTChangesAllowed = false; ////////////////////////////////////////////////////////////////////////// // !COMMIT cleanup extern ref to sphinx.cpp extern void sphSortDocinfos ( DWORD * pBuf, int iCount, int iStride ); // !COMMIT yes i am when debugging #ifndef NDEBUG #define PARANOID 1 #endif ////////////////////////////////////////////////////////////////////////// template < typename T, typename P > static inline void ZipT ( CSphVector < BYTE, P > * pOut, T uValue ) { do { BYTE bOut = (BYTE)( uValue & 0x7f ); uValue >>= 7; if ( uValue ) bOut |= 0x80; pOut->Add ( bOut ); } while ( uValue ); } #define SPH_MAX_KEYWORD_LEN (3*SPH_MAX_WORD_LEN+4) STATIC_ASSERT ( SPH_MAX_KEYWORD_LEN<255, MAX_KEYWORD_LEN_SHOULD_FITS_BYTE ); template < typename T > static inline const BYTE * UnzipT ( T * pValue, const BYTE * pIn ) { T uValue = 0; BYTE bIn; int iOff = 0; do { bIn = *pIn++; uValue += ( T ( bIn & 0x7f ) ) << iOff; iOff += 7; } while ( bIn & 0x80 ); *pValue = uValue; return pIn; } #define ZipDword ZipT #define ZipQword ZipT #define UnzipDword UnzipT #define UnzipQword UnzipT #if USE_64BIT #define ZipDocid ZipQword #define ZipWordid ZipQword #define UnzipDocid UnzipQword #define UnzipWordid UnzipQword #else #define ZipDocid ZipDword #define ZipWordid ZipDword #define UnzipDocid UnzipDword #define UnzipWordid UnzipDword #endif ////////////////////////////////////////////////////////////////////////// struct CmpHitPlain_fn { inline bool IsLess ( const CSphWordHit & a, const CSphWordHit & b ) { return ( a.m_iWordID struct RtDoc_T { DOCID m_uDocID; ///< my document id DWORD m_uDocFields; ///< fields mask DWORD m_uHits; ///< hit count DWORD m_uHit; ///< either index into segment hits, or the only hit itself (if hit count is 1) }; template < typename WORDID=SphWordID_t > struct RtWord_T { union { WORDID 
m_uWordID; ///< my keyword id const BYTE * m_sWord; }; DWORD m_uDocs; ///< document count (for stats and/or BM25) DWORD m_uHits; ///< hit count (for stats and/or BM25) DWORD m_uDoc; ///< index into segment docs }; typedef RtDoc_T<> RtDoc_t; typedef RtWord_T<> RtWord_t; struct RtWordCheckpoint_t { union { SphWordID_t m_iWordID; const char * m_sWord; }; int m_iOffset; }; class RtDiskKlist_t : public ISphNoncopyable { private: static const int MAX_SMALL_SIZE = 512; CSphVector < SphAttr_t > m_dLargeKlist; CSphOrderedHash < bool, SphDocID_t, IdentityHash_fn, MAX_SMALL_SIZE > m_hSmallKlist; mutable CSphRwlock m_tRwLargelock; mutable CSphRwlock m_tRwSmalllock; void NakedFlush(); // flush without lockers public: RtDiskKlist_t() { m_tRwLargelock.Init(); m_tRwSmalllock.Init(); } virtual ~RtDiskKlist_t() { m_tRwLargelock.Done(); m_tRwSmalllock.Done(); } void Reset (); void Flush() { if ( m_hSmallKlist.GetLength()==0 ) return; m_tRwSmalllock.WriteLock(); m_tRwLargelock.WriteLock(); NakedFlush(); m_tRwLargelock.Unlock(); m_tRwSmalllock.Unlock(); } void LoadFromFile ( const char * sFilename ); void SaveToFile ( const char * sFilename ); inline void Delete ( SphDocID_t uDoc ) { m_tRwSmalllock.WriteLock(); if ( !m_hSmallKlist.Exists ( uDoc ) ) m_hSmallKlist.Add ( true, uDoc ); if ( m_hSmallKlist.GetLength()>=MAX_SMALL_SIZE ) NakedFlush(); m_tRwSmalllock.Unlock(); } inline const SphAttr_t * GetKillList () const { return m_dLargeKlist.Begin(); } inline int GetKillListSize () const { return m_dLargeKlist.GetLength(); } inline bool KillListLock() const { return m_tRwLargelock.ReadLock(); } inline bool KillListUnlock() const { return m_tRwLargelock.Unlock(); } // NOT THREAD SAFE bool Exists ( SphDocID_t uDoc ) { return ( m_hSmallKlist.Exists ( uDoc ) || m_dLargeKlist.BinarySearch ( SphAttr_t(uDoc))!=NULL ); } }; void RtDiskKlist_t::Reset() { m_dLargeKlist.Reset(); m_hSmallKlist.Reset(); } void RtDiskKlist_t::NakedFlush() { if ( m_hSmallKlist.GetLength()==0 ) return; m_hSmallKlist.IterateStart(); while ( m_hSmallKlist.IterateNext() ) m_dLargeKlist.Add ( m_hSmallKlist.IterateGetKey() ); m_dLargeKlist.Uniq(); m_hSmallKlist.Reset(); } // is already id32<>id64 safe void RtDiskKlist_t::LoadFromFile ( const char * sFilename ) { m_tRwLargelock.WriteLock(); m_tRwSmalllock.WriteLock(); m_hSmallKlist.Reset(); m_tRwSmalllock.Unlock(); m_dLargeKlist.Reset(); CSphString sName, sError; sName.SetSprintf ( "%s.kill", sFilename ); if ( !sphIsReadable ( sName.cstr(), &sError ) ) { m_tRwLargelock.Unlock(); return; } CSphAutoreader rdKlist; if ( !rdKlist.Open ( sName, sError ) ) { m_tRwLargelock.Unlock(); return; } m_dLargeKlist.Resize ( rdKlist.GetDword() ); SphDocID_t uLastDocID = 0; ARRAY_FOREACH ( i, m_dLargeKlist ) { uLastDocID += ( SphDocID_t ) rdKlist.UnzipOffset(); m_dLargeKlist[i] = uLastDocID; }; m_tRwLargelock.Unlock(); } void RtDiskKlist_t::SaveToFile ( const char * sFilename ) { m_tRwLargelock.WriteLock(); m_tRwSmalllock.WriteLock(); NakedFlush(); m_tRwSmalllock.Unlock(); CSphWriter wrKlist; CSphString sName, sError; sName.SetSprintf ( "%s.kill", sFilename ); wrKlist.OpenFile ( sName.cstr(), sError ); wrKlist.PutDword ( m_dLargeKlist.GetLength() ); SphDocID_t uLastDocID = 0; ARRAY_FOREACH ( i, m_dLargeKlist ) { wrKlist.ZipOffset ( m_dLargeKlist[i] - uLastDocID ); uLastDocID = ( SphDocID_t ) m_dLargeKlist[i]; }; m_tRwLargelock.Unlock(); wrKlist.CloseFile (); } struct RtSegment_t { protected: static const int KLIST_ACCUM_THRESH = 32; public: static CSphStaticMutex m_tSegmentSeq; static int m_iSegments; ///< age tag 
sequence generator int m_iTag; ///< segment age tag CSphTightVector m_dWords; CSphVector m_dWordCheckpoints; CSphTightVector m_dDocs; CSphTightVector m_dHits; int m_iRows; ///< number of actually allocated rows int m_iAliveRows; ///< number of alive (non-killed) rows CSphVector m_dRows; ///< row data storage CSphVector m_dKlist; ///< sorted K-list bool m_bTlsKlist; ///< whether to apply TLS K-list during merge (must only be used by writer during Commit()) CSphTightVector m_dStrings; ///< strings storage CSphTightVector m_dMvas; ///< MVAs storage CSphVector m_dKeywordCheckpoints; RtSegment_t () { m_tSegmentSeq.Lock (); m_iTag = m_iSegments++; m_tSegmentSeq.Unlock (); m_iRows = 0; m_iAliveRows = 0; m_bTlsKlist = false; m_dStrings.Add ( 0 ); // dummy zero offset m_dMvas.Add ( 0 ); // dummy zero offset } int64_t GetUsedRam () const { // FIXME! gonna break on vectors over 2GB return m_dWords.GetLimit()*sizeof(m_dWords[0]) + m_dDocs.GetLimit()*sizeof(m_dDocs[0]) + m_dHits.GetLimit()*sizeof(m_dHits[0]) + m_dStrings.GetLimit()*sizeof(m_dStrings[0]) + m_dMvas.GetLimit()*sizeof(m_dMvas[0]) + m_dKeywordCheckpoints.GetLimit()*sizeof(m_dKeywordCheckpoints[0]); } int GetMergeFactor () const { return m_iRows; } const CSphRowitem * FindRow ( SphDocID_t uDocid ) const; const CSphRowitem * FindAliveRow ( SphDocID_t uDocid ) const; }; int RtSegment_t::m_iSegments = 0; CSphStaticMutex RtSegment_t::m_tSegmentSeq; const CSphRowitem * RtSegment_t::FindRow ( SphDocID_t uDocid ) const { // binary search through the rows int iStride = m_dRows.GetLength() / m_iRows; SphDocID_t uL = DOCINFO2ID ( m_dRows.Begin() ); SphDocID_t uR = DOCINFO2ID ( &m_dRows[m_dRows.GetLength()-iStride] ); if ( uDocid==uL ) return m_dRows.Begin(); if ( uDocid==uR ) return &m_dRows[m_dRows.GetLength()-iStride]; if ( uDocid
<uL || uDocid>
    uR ) return NULL; int iL = 0; int iR = m_iRows-1; while ( iR-iL>1 ) { int iM = iL + (iR-iL)/2; SphDocID_t uM = DOCINFO2ID ( &m_dRows[iM*iStride] ); if ( uDocid==uM ) return &m_dRows[iM*iStride]; else if ( uDocid>uM ) iL = iM; else iR = iM; } return NULL; } const CSphRowitem * RtSegment_t::FindAliveRow ( SphDocID_t uDocid ) const { if ( m_dKlist.BinarySearch ( uDocid ) ) return NULL; else return FindRow ( uDocid ); } ////////////////////////////////////////////////////////////////////////// struct RtDocWriter_t { CSphTightVector * m_pDocs; SphDocID_t m_uLastDocID; explicit RtDocWriter_t ( RtSegment_t * pSeg ) : m_pDocs ( &pSeg->m_dDocs ) , m_uLastDocID ( 0 ) {} void ZipDoc ( const RtDoc_t & tDoc ) { CSphTightVector * pDocs = m_pDocs; ZipDocid ( pDocs, tDoc.m_uDocID - m_uLastDocID ); m_uLastDocID = tDoc.m_uDocID; ZipDword ( pDocs, tDoc.m_uDocFields ); ZipDword ( pDocs, tDoc.m_uHits ); if ( tDoc.m_uHits==1 ) { ZipDword ( pDocs, tDoc.m_uHit & 0xffffffUL ); ZipDword ( pDocs, tDoc.m_uHit>>24 ); } else ZipDword ( pDocs, tDoc.m_uHit ); } DWORD ZipDocPtr () const { return m_pDocs->GetLength(); } void ZipRestart () { m_uLastDocID = 0; } }; template < typename DOCID = SphDocID_t > struct RtDocReader_T { typedef RtDoc_T RTDOC; const BYTE * m_pDocs; int m_iLeft; RTDOC m_tDoc; template < typename RTWORD > explicit RtDocReader_T ( const RtSegment_t * pSeg, const RTWORD & tWord ) { m_pDocs = ( pSeg->m_dDocs.Begin() ? pSeg->m_dDocs.Begin() + tWord.m_uDoc : NULL ); m_iLeft = tWord.m_uDocs; m_tDoc.m_uDocID = 0; } const RTDOC * UnzipDoc () { if ( !m_iLeft || !m_pDocs ) return NULL; const BYTE * pIn = m_pDocs; SphDocID_t uDeltaID; pIn = UnzipDocid ( &uDeltaID, pIn ); RTDOC & mtDoc = *(RTDOC*)&m_tDoc; mtDoc.m_uDocID += (DOCID) uDeltaID; DWORD uField; pIn = UnzipDword ( &uField, pIn ); m_tDoc.m_uDocFields = uField; pIn = UnzipDword ( &mtDoc.m_uHits, pIn ); if ( mtDoc.m_uHits==1 ) { DWORD a, b; pIn = UnzipDword ( &a, pIn ); pIn = UnzipDword ( &b, pIn ); mtDoc.m_uHit = a + ( b<<24 ); } else pIn = UnzipDword ( &mtDoc.m_uHit, pIn ); m_pDocs = pIn; m_iLeft--; return &mtDoc; } }; typedef RtDocReader_T<> RtDocReader_t; template < typename VECTOR > int sphPutBytes ( VECTOR * pOut, const void * pData, int iLen ) { int iOff = pOut->GetLength(); pOut->Resize ( iOff + iLen ); memcpy ( pOut->Begin()+iOff, pData, iLen ); return iOff; } struct RtWordWriter_t { CSphTightVector * m_pWords; CSphVector * m_pCheckpoints; CSphVector * m_dKeywordCheckpoints; CSphKeywordDeltaWriter m_tLastKeyword; SphWordID_t m_uLastWordID; DWORD m_uLastDoc; int m_iWords; bool m_bKeywordDict; int m_iWordsCheckpoint; RtWordWriter_t ( RtSegment_t * pSeg, bool bKeywordDict, int iWordsCheckpoint ) : m_pWords ( &pSeg->m_dWords ) , m_pCheckpoints ( &pSeg->m_dWordCheckpoints ) , m_dKeywordCheckpoints ( &pSeg->m_dKeywordCheckpoints ) , m_uLastWordID ( 0 ) , m_uLastDoc ( 0 ) , m_iWords ( 0 ) , m_bKeywordDict ( bKeywordDict ) , m_iWordsCheckpoint ( iWordsCheckpoint ) { assert ( !m_pWords->GetLength() ); assert ( !m_pCheckpoints->GetLength() ); assert ( !m_dKeywordCheckpoints->GetLength() ); } void ZipWord ( const RtWord_t & tWord ) { CSphTightVector * pWords = m_pWords; if ( ++m_iWords==m_iWordsCheckpoint ) { RtWordCheckpoint_t & tCheckpoint = m_pCheckpoints->Add(); if ( !m_bKeywordDict ) { tCheckpoint.m_iWordID = tWord.m_uWordID; } else { int iLen = tWord.m_sWord[0]; assert ( iLen && iLen-1Last() = '\0'; // checkpoint is NULL terminating string // reset keywords delta encoding m_tLastKeyword.Reset(); } tCheckpoint.m_iOffset = pWords->GetLength(); 
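	// note: a checkpoint is emitted on every m_iWordsCheckpoint-th keyword. It records either the
	// raw word id, or (for keyword dicts) an offset to a NUL-terminated copy of the keyword stashed
	// in the segment's checkpoint string storage, plus the byte offset into the zipped wordlist
	// where this block starts. Resetting the delta state right after (last wordid, last doc offset,
	// keyword delta encoder) lets a reader start decoding from any checkpoint, not just from the top.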
m_uLastWordID = 0; m_uLastDoc = 0; m_iWords = 1; } if ( !m_bKeywordDict ) { ZipWordid ( pWords, tWord.m_uWordID - m_uLastWordID ); } else { m_tLastKeyword.PutDelta ( *this, tWord.m_sWord+1, tWord.m_sWord[0] ); } ZipDword ( pWords, tWord.m_uDocs ); ZipDword ( pWords, tWord.m_uHits ); ZipDword ( pWords, tWord.m_uDoc - m_uLastDoc ); m_uLastWordID = tWord.m_uWordID; m_uLastDoc = tWord.m_uDoc; } void PutBytes ( const BYTE * pData, int iLen ) { sphPutBytes ( m_pWords, pData, iLen ); } }; template < typename WORDID = SphWordID_t > struct RtWordReader_T { typedef RtWord_T RTWORD; BYTE m_tPackedWord[SPH_MAX_KEYWORD_LEN+1]; const BYTE * m_pCur; const BYTE * m_pMax; RTWORD m_tWord; int m_iWords; bool m_bWordDict; int m_iWordsCheckpoint; RtWordReader_T ( const RtSegment_t * pSeg, bool bWordDict, int iWordsCheckpoint ) : m_iWords ( 0 ) , m_bWordDict ( bWordDict ) , m_iWordsCheckpoint ( iWordsCheckpoint ) { m_pCur = pSeg->m_dWords.Begin(); m_pMax = m_pCur + pSeg->m_dWords.GetLength(); m_tWord.m_uWordID = 0; m_tWord.m_uDoc = 0; if ( bWordDict ) m_tWord.m_sWord = m_tPackedWord; } const RTWORD * UnzipWord () { if ( ++m_iWords==m_iWordsCheckpoint ) { m_tWord.m_uDoc = 0; m_iWords = 1; if ( !m_bWordDict ) m_tWord.m_uWordID = 0; } if ( m_pCur>=m_pMax ) return NULL; const BYTE * pIn = m_pCur; DWORD uDeltaDoc; if ( m_bWordDict ) { BYTE iMatch, iDelta, uPacked; uPacked = *pIn++; if ( uPacked & 0x80 ) { iDelta = ( ( uPacked>>4 ) & 7 ) + 1; iMatch = uPacked & 15; } else { iDelta = uPacked & 127; iMatch = *pIn++; } m_tPackedWord[0] = iMatch+iDelta; memcpy ( m_tPackedWord+1+iMatch, pIn, iDelta ); pIn += iDelta; } else { SphWordID_t uDeltaID; pIn = UnzipWordid ( &uDeltaID, pIn ); m_tWord.m_uWordID += (WORDID) uDeltaID; } pIn = UnzipDword ( &m_tWord.m_uDocs, pIn ); pIn = UnzipDword ( &m_tWord.m_uHits, pIn ); pIn = UnzipDword ( &uDeltaDoc, pIn ); m_pCur = pIn; m_tWord.m_uDoc += uDeltaDoc; return &m_tWord; } }; typedef RtWordReader_T RtWordReader_t; struct RtHitWriter_t { CSphTightVector * m_pHits; DWORD m_uLastHit; explicit RtHitWriter_t ( RtSegment_t * pSeg ) : m_pHits ( &pSeg->m_dHits ) , m_uLastHit ( 0 ) {} void ZipHit ( DWORD uValue ) { ZipDword ( m_pHits, uValue - m_uLastHit ); m_uLastHit = uValue; } void ZipRestart () { m_uLastHit = 0; } DWORD ZipHitPtr () const { return m_pHits->GetLength(); } }; struct RtHitReader_t { const BYTE * m_pCur; DWORD m_iLeft; DWORD m_uLast; RtHitReader_t () : m_pCur ( NULL ) , m_iLeft ( 0 ) , m_uLast ( 0 ) {} template < typename RTDOC > explicit RtHitReader_t ( const RtSegment_t * pSeg, const RTDOC * pDoc ) { m_pCur = &pSeg->m_dHits [ pDoc->m_uHit ]; m_iLeft = pDoc->m_uHits; m_uLast = 0; } DWORD UnzipHit () { if ( !m_iLeft ) return 0; DWORD uValue; m_pCur = UnzipDword ( &uValue, m_pCur ); m_uLast += uValue; m_iLeft--; return m_uLast; } }; struct RtHitReader2_t : public RtHitReader_t { const BYTE * m_pBase; RtHitReader2_t () : m_pBase ( NULL ) {} void Seek ( SphOffset_t uOff, int iHits ) { m_pCur = m_pBase + uOff; m_iLeft = iHits; m_uLast = 0; } }; ////////////////////////////////////////////////////////////////////////// /// forward ref struct RtIndex_t; struct AccDocDup_t { SphDocID_t m_uDocid; int m_iDupCount; }; /// indexing accumulator class RtAccum_t { public: RtIndex_t * m_pIndex; ///< my current owner in this thread int m_iAccumDocs; CSphTightVector m_dAccum; CSphVector m_dAccumRows; CSphVector m_dAccumKlist; CSphTightVector m_dStrings; CSphTightVector m_dMvas; CSphVector m_dPerDocHitsCount; bool m_bKeywordDict; CSphDict * m_pDict; private: CSphDict * m_pRefDict; RtIndex_t * 
m_pRefIndex; CSphDict * m_pDictCloned; ISphRtDictWraper * m_pDictRt; public: explicit RtAccum_t ( bool bKeywordDict ); ~RtAccum_t(); void SetupDict ( RtIndex_t * pIndex, CSphDict * pDict, bool bKeywordDict ); void ResetDict (); void Sort (); void AddDocument ( ISphHits * pHits, const CSphMatch & tDoc, int iRowSize, const char ** ppStr, const CSphVector & dMvas ); RtSegment_t * CreateSegment ( int iRowSize, int iWordsCheckpoint ); void CleanupDuplacates ( int iRowSize ); }; /// TLS indexing accumulator (we disallow two uncommitted adds within one thread; and so need at most one) SphThreadKey_t g_tTlsAccumKey; /// binlog file view of the index /// everything that a given log file needs to know about an index struct BinlogIndexInfo_t { CSphString m_sName; ///< index name int64_t m_iMinTID; ///< min TID logged by this file int64_t m_iMaxTID; ///< max TID logged by this file int64_t m_iFlushedTID; ///< last flushed TID int64_t m_tmMin; ///< min TID timestamp int64_t m_tmMax; ///< max TID timestamp CSphIndex * m_pIndex; ///< replay only; associated index (might be NULL if we don't serve it anymore!) RtIndex_t * m_pRT; ///< replay only; RT index handle (might be NULL if N/A or non-RT) int64_t m_iPreReplayTID; ///< replay only; index TID at the beginning of this file replay BinlogIndexInfo_t () : m_iMinTID ( INT64_MAX ) , m_iMaxTID ( 0 ) , m_iFlushedTID ( 0 ) , m_tmMin ( INT64_MAX ) , m_tmMax ( 0 ) , m_pIndex ( NULL ) , m_pRT ( NULL ) , m_iPreReplayTID ( 0 ) {} }; /// binlog file descriptor /// file id (aka extension), plus a list of associated index infos struct BinlogFileDesc_t { int m_iExt; CSphVector m_dIndexInfos; BinlogFileDesc_t () : m_iExt ( 0 ) {} }; /// Bin Log Operation enum Blop_e { BLOP_COMMIT = 1, BLOP_UPDATE_ATTRS = 2, BLOP_ADD_INDEX = 3, BLOP_ADD_CACHE = 4, BLOP_TOTAL }; // forward declaration class BufferReader_t; class RtBinlog_c; struct RtIndex_t; class BinlogWriter_c : protected CSphWriter { public: BinlogWriter_c (); virtual ~BinlogWriter_c () {} virtual void Flush (); void Write (); void Fsync (); bool HasUnwrittenData () const { return m_iPoolUsed>0; } bool HasUnsyncedData () const { return m_iLastFsyncPos!=m_iLastWritePos; } void ResetCrc (); ///< restart checksumming void WriteCrc (); ///< finalize and write current checksum to output stream void SetBufferSize ( int iBufferSize ) { CSphWriter::SetBufferSize ( iBufferSize ); } bool OpenFile ( const CSphString & sName, CSphString & sErrorBuffer ) { return CSphWriter::OpenFile ( sName, sErrorBuffer ); } void SetFile ( int iFD, SphOffset_t * pSharedOffset ) { CSphWriter::SetFile ( iFD, pSharedOffset ); } void CloseFile ( bool bTruncate=false ) { CSphWriter::CloseFile ( bTruncate ); } SphOffset_t GetPos () const { return m_iPos; } void PutBytes ( const void * pData, int iSize ); void PutString ( const char * szString ); void PutDword ( DWORD uValue ) { PutBytes ( &uValue, sizeof(DWORD) ); } void ZipValue ( uint64_t uValue ); private: int64_t m_iLastWritePos; int64_t m_iLastFsyncPos; DWORD m_uCRC; }; class BinlogReader_c : protected CSphAutoreader { public: bool Open ( const CSphString & sFilename, CSphString & sError ) { return CSphAutoreader::Open ( sFilename, sError ); } void Close () { CSphAutoreader::Close(); } SphOffset_t GetFilesize () { return CSphAutoreader::GetFilesize(); } void GetBytes ( void * pData, int iSize ); CSphString GetString (); DWORD GetDword (); uint64_t UnzipValue (); bool GetErrorFlag () { return CSphAutoreader::GetErrorFlag(); } SphOffset_t GetPos () { return CSphAutoreader::GetPos(); } void ResetCrc 
(); bool CheckCrc ( const char * sOp, const char * sIndexName, int64_t iTid, int64_t iTxnPos ); private: DWORD m_uCRC; }; class RtBinlog_c : public ISphBinlog { public: RtBinlog_c (); ~RtBinlog_c (); void BinlogCommit ( int64_t * pTID, const char * sIndexName, const RtSegment_t * pSeg, const CSphVector & dKlist, bool bKeywordDict ); void BinlogUpdateAttributes ( int64_t * pTID, const char * sIndexName, const CSphAttrUpdate & tUpd ); void NotifyIndexFlush ( const char * sIndexName, int64_t iTID, bool bShutdown ); void Configure ( const CSphConfigSection & hSearchd, bool bTestMode ); void Replay ( const SmallStringHash_T & hIndexes, DWORD uReplayFlags, ProgressCallbackSimple_t * pfnProgressCallback ); void CreateTimerThread (); private: static const DWORD BINLOG_VERSION = 4; static const DWORD BINLOG_HEADER_MAGIC = 0x4c425053; /// magic 'SPBL' header that marks binlog file static const DWORD BLOP_MAGIC = 0x214e5854; /// magic 'TXN!' header that marks binlog entry static const DWORD BINLOG_META_MAGIC = 0x494c5053; /// magic 'SPLI' header that marks binlog meta int64_t m_iFlushTimeLeft; volatile int m_iFlushPeriod; enum OnCommitAction_e { ACTION_NONE, ACTION_FSYNC, ACTION_WRITE }; OnCommitAction_e m_eOnCommit; CSphMutex m_tWriteLock; // lock on operation int m_iLockFD; CSphString m_sWriterError; BinlogWriter_c m_tWriter; mutable CSphVector m_dLogFiles; // active log files CSphString m_sLogPath; SphThread_t m_tUpdateTread; bool m_bReplayMode; // replay mode indicator bool m_bDisabled; int m_iRestartSize; // binlog size restart threshold // replay stats mutable int m_iReplayedRows; private: static void DoAutoFlush ( void * pBinlog ); int GetWriteIndexID ( const char * sName, int64_t iTID, int64_t tmNow ); void LoadMeta (); void SaveMeta (); void LockFile ( bool bLock ); void DoCacheWrite (); void CheckDoRestart (); void CheckDoFlush (); void OpenNewLog ( int iLastState=0 ); int ReplayBinlog ( const SmallStringHash_T & hIndexes, DWORD uReplayFlags, int iBinlog ); bool ReplayCommit ( int iBinlog, DWORD uReplayFlags, BinlogReader_c & tReader ) const; bool ReplayUpdateAttributes ( int iBinlog, BinlogReader_c & tReader ) const; bool ReplayIndexAdd ( int iBinlog, const SmallStringHash_T & hIndexes, BinlogReader_c & tReader ) const; bool ReplayCacheAdd ( int iBinlog, BinlogReader_c & tReader ) const; }; /// RAM based index struct RtQword_t; struct RtIndex_t : public ISphRtIndex, public ISphNoncopyable, public ISphWordlist { private: static const DWORD META_HEADER_MAGIC = 0x54525053; ///< my magic 'SPRT' header static const DWORD META_VERSION = 5; ///< current version private: int m_iStride; CSphVector m_pSegments; CSphMutex m_tWriterMutex; mutable CSphRwlock m_tRwlock; int64_t m_iRamSize; CSphString m_sPath; bool m_bPathStripped; CSphVector m_pDiskChunks; int m_iLockFD; mutable RtDiskKlist_t m_tKlist; int64_t m_iSavedTID; int64_t m_iSavedRam; int64_t m_tmSaved; bool m_bKeywordDict; int m_iWordsCheckpoint; public: explicit RtIndex_t ( const CSphSchema & tSchema, const char * sIndexName, int64_t iRamSize, const char * sPath, bool bKeywordDict ); virtual ~RtIndex_t (); virtual bool AddDocument ( int iFields, const char ** ppFields, const CSphMatch & tDoc, bool bReplace, const char ** ppStr, const CSphVector & dMvas, CSphString & sError ); virtual bool AddDocument ( ISphHits * pHits, const CSphMatch & tDoc, const char ** ppStr, const CSphVector & dMvas, CSphString & sError ); virtual bool DeleteDocument ( const SphDocID_t * pDocs, int iDocs, CSphString & sError ); virtual void Commit (); virtual void 
RollBack (); void CommitReplayable ( RtSegment_t * pNewSeg, CSphVector & dAccKlist ); // FIXME? protect? virtual void CheckRamFlush (); virtual void ForceRamFlush ( bool bPeriodic=false ); virtual void ForceDiskChunk (); virtual bool AttachDiskIndex ( CSphIndex * pIndex, CSphString & sError ); private: /// acquire thread-local indexing accumulator /// returns NULL if another index already uses it in an open txn RtAccum_t * AcquireAccum ( CSphString * sError=NULL ); RtSegment_t * MergeSegments ( const RtSegment_t * pSeg1, const RtSegment_t * pSeg2, const CSphVector * pAccKlist ); const RtWord_t * CopyWord ( RtSegment_t * pDst, RtWordWriter_t & tOutWord, const RtSegment_t * pSrc, const RtWord_t * pWord, RtWordReader_t & tInWord, const CSphVector * pAccKlist ); void MergeWord ( RtSegment_t * pDst, const RtSegment_t * pSrc1, const RtWord_t * pWord1, const RtSegment_t * pSrc2, const RtWord_t * pWord2, RtWordWriter_t & tOut, const CSphVector * pAccKlist ); void CopyDoc ( RtSegment_t * pSeg, RtDocWriter_t & tOutDoc, RtWord_t * pWord, const RtSegment_t * pSrc, const RtDoc_t * pDoc ); void SaveMeta ( int iDiskChunks ); void SaveDiskHeader ( const char * sFilename, int iCheckpoints, SphOffset_t iCheckpointsPosition, DWORD uKillListSize, DWORD uMinMaxSize, bool bForceID32=false ) const; void SaveDiskData ( const char * sFilename ) const; template < typename DOCID, typename WORDID > void SaveDiskDataImpl ( const char * sFilename ) const; void SaveDiskChunk (); CSphIndex * LoadDiskChunk ( int iChunk ); bool LoadRamChunk ( DWORD uVersion ); bool SaveRamChunk (); virtual void GetPrefixedWords ( const char * sWord, int iWordLen, CSphVector & dPrefixedWords, BYTE * pDictBuf, int iFD ) const; public: #if USE_WINDOWS #pragma warning(push,1) #pragma warning(disable:4100) #endif virtual SphAttr_t * GetKillList () const { return NULL; } virtual int GetKillListSize () const { return 0; } virtual bool HasDocid ( SphDocID_t ) const { assert ( 0 ); return false; } virtual int Build ( const CSphVector & dSources, int iMemoryLimit, int iWriteBuffer ) { return 0; } virtual bool Merge ( CSphIndex * pSource, CSphVector & dFilters, bool bMergeKillLists ) { return false; } virtual bool Prealloc ( bool bMlock, bool bStripPath, CSphString & sWarning ); virtual void Dealloc () {} virtual bool Preread (); virtual void SetBase ( const char * sNewBase ) {} virtual bool Rename ( const char * sNewBase ) { return true; } virtual bool Lock () { return true; } virtual void Unlock () {} virtual bool Mlock () { return true; } virtual void PostSetup(); virtual bool IsRT() const { return true; } virtual int UpdateAttributes ( const CSphAttrUpdate & tUpd, int iIndex, CSphString & sError ); virtual bool SaveAttributes () { return true; } virtual DWORD GetAttributeStatus () const { return 0; } virtual void DebugDumpHeader ( FILE * fp, const char * sHeaderName, bool bConfig ) {} virtual void DebugDumpDocids ( FILE * fp ) {} virtual void DebugDumpHitlist ( FILE * fp, const char * sKeyword, bool bID ) {} virtual int DebugCheck ( FILE * fp ); #if USE_WINDOWS #pragma warning(pop) #endif public: virtual bool EarlyReject ( CSphQueryContext * pCtx, CSphMatch & ) const; virtual const CSphSourceStats & GetStats () const { return m_tStats; } virtual bool MultiQuery ( const CSphQuery * pQuery, CSphQueryResult * pResult, int iSorters, ISphMatchSorter ** ppSorters, const CSphVector * pExtraFilters, int iTag ) const; virtual bool MultiQueryEx ( int iQueries, const CSphQuery * ppQueries, CSphQueryResult ** ppResults, ISphMatchSorter ** ppSorters, const 
CSphVector * pExtraFilters, int iTag ) const; virtual bool GetKeywords ( CSphVector & dKeywords, const char * szQuery, bool bGetStats, CSphString & sError ) const; void CopyDocinfo ( CSphMatch & tMatch, const DWORD * pFound ) const; const CSphRowitem * FindDocinfo ( const RtSegment_t * pSeg, SphDocID_t uDocID ) const; bool RtQwordSetup ( RtQword_t * pQword, RtSegment_t * pSeg ) const; static bool RtQwordSetupSegment ( RtQword_t * pQword, RtSegment_t * pSeg, bool bSetup, bool bWordDict, int iWordsCheckpoint ); CSphDict * SetupExactDict ( CSphScopedPtr & tContainer, CSphDict * pPrevDict, ISphTokenizer * pTokenizer ) const; CSphDict * SetupStarDict ( CSphScopedPtr & tContainer, CSphDict * pPrevDict, ISphTokenizer * pTokenizer ) const; virtual const CSphSchema & GetMatchSchema () const { return m_tSchema; } virtual const CSphSchema & GetInternalSchema () const { return m_tSchema; } int64_t GetUsedRam () const; virtual void SetEnableStar ( bool bEnableStar ); bool IsWordDict () const { return m_bKeywordDict; } protected: CSphSourceStats m_tStats; }; #define SPH_RT_WORDS_PER_CHECKPOINT_v3 1024 #define SPH_RT_WORDS_PER_CHECKPOINT_v5 48 RtIndex_t::RtIndex_t ( const CSphSchema & tSchema, const char * sIndexName, int64_t iRamSize, const char * sPath, bool bKeywordDict ) : ISphRtIndex ( sIndexName, "rtindex" ) , m_iStride ( DOCINFO_IDSIZE + tSchema.GetRowSize() ) , m_iRamSize ( iRamSize ) , m_sPath ( sPath ) , m_bPathStripped ( false ) , m_iLockFD ( -1 ) , m_iSavedTID ( m_iTID ) , m_iSavedRam ( 0 ) , m_tmSaved ( sphMicroTimer() ) , m_bKeywordDict ( bKeywordDict ) , m_iWordsCheckpoint ( SPH_RT_WORDS_PER_CHECKPOINT_v5 ) { MEMORY ( SPH_MEM_IDX_RT ); m_tSchema = tSchema; #ifndef NDEBUG // check that index cols are static for ( int i=0; i=0 ) ::close ( m_iLockFD ); g_pBinlog->NotifyIndexFlush ( m_sIndexName.cstr(), m_iTID, true ); tmSave = sphMicroTimer() - tmSave; if ( tmSave>=1000 ) { sphInfo ( "rt: index %s: ramchunk saved in %d.%03d sec", m_sIndexName.cstr(), (int)(tmSave/1000000), (int)((tmSave/1000)%1000) ); } } #define SPH_THRESHOLD_SAVE_RAM ( 64*1024*1024 ) static int64_t g_iRtFlushPeriod = 10*60*60; // default period is 10 hours void RtIndex_t::CheckRamFlush () { int64_t tmSave = sphMicroTimer(); if ( m_iTID<=m_iSavedTID || ( tmSave-m_tmSaved )/1000000NotifyIndexFlush ( m_sIndexName.cstr(), m_iTID, false ); int64_t iWasTID = m_iSavedTID; int64_t iWasRam = m_iSavedRam; int64_t tmDelta = sphMicroTimer() - m_tmSaved; m_iSavedTID = m_iTID; m_iSavedRam = iUsedRam; m_tmSaved = sphMicroTimer(); m_tWriterMutex.Unlock(); tmSave = sphMicroTimer() - tmSave; sphInfo ( "rt: index %s: ramchunk saved ok (mode=%s, last TID="INT64_FMT", current TID="INT64_FMT", last ram=%d.%03d Mb, current ram=%d.%03d Mb, time delta=%d sec, took=%d.%03d sec)" , m_sIndexName.cstr(), bPeriodic ? 
"periodic" : "forced" , iWasTID, m_iTID, (int)(iWasRam/1024/1024), (int)((iWasRam/1024)%1000) , (int)(m_iSavedRam/1024/1024), (int)((m_iSavedRam/1024)%1000) , (int) (tmDelta/1000000), (int)(tmSave/1000000), (int)((tmSave/1000)%1000) ); } int64_t RtIndex_t::GetUsedRam () const { int64_t iTotal = 0; ARRAY_FOREACH ( i, m_pSegments ) iTotal += m_pSegments[i]->GetUsedRam(); return iTotal; } ////////////////////////////////////////////////////////////////////////// // INDEXING ////////////////////////////////////////////////////////////////////////// class CSphSource_StringVector : public CSphSource_Document { public: explicit CSphSource_StringVector ( int iFields, const char ** ppFields, const CSphSchema & tSchema ); virtual ~CSphSource_StringVector () {} virtual bool Connect ( CSphString & ); virtual void Disconnect (); virtual bool HasAttrsConfigured () { return false; } virtual bool IterateStart ( CSphString & ) { return true; } virtual bool IterateMultivaluedStart ( int, CSphString & ) { return false; } virtual bool IterateMultivaluedNext () { return false; } virtual bool IterateFieldMVAStart ( int, CSphString & ) { return false; } virtual bool IterateFieldMVANext () { return false; } virtual bool IterateKillListStart ( CSphString & ) { return false; } virtual bool IterateKillListNext ( SphDocID_t & ) { return false; } virtual BYTE ** NextDocument ( CSphString & ) { return m_dFields.Begin(); } protected: CSphVector m_dFields; CSphVector m_dHits; }; CSphSource_StringVector::CSphSource_StringVector ( int iFields, const char ** ppFields, const CSphSchema & tSchema ) : CSphSource_Document ( "$stringvector" ) { m_tSchema = tSchema; m_dFields.Resize ( 1+iFields ); for ( int i=0; i & dMvas, CSphString & sError ) { assert ( g_bRTChangesAllowed ); if ( !tDoc.m_iDocID ) return true; MEMORY ( SPH_MEM_IDX_RT ); if ( !bReplace ) { m_tRwlock.ReadLock (); ARRAY_FOREACH ( i, m_pSegments ) if ( FindDocinfo ( m_pSegments[i], tDoc.m_iDocID ) && !m_pSegments[i]->m_dKlist.BinarySearch ( tDoc.m_iDocID ) ) { m_tRwlock.Unlock (); sError.SetSprintf ( "duplicate id '"UINT64_FMT"'", (uint64_t)tDoc.m_iDocID ); return false; // already exists and not deleted; INSERT fails } m_tRwlock.Unlock (); } RtAccum_t * pAcc = AcquireAccum ( &sError ); if ( !pAcc ) return false; CSphScopedPtr pTokenizer ( m_pTokenizer->Clone ( false ) ); // avoid race CSphSource_StringVector tSrc ( iFields, ppFields, m_tSchema ); tSrc.Setup ( m_tSettings ); tSrc.SetTokenizer ( pTokenizer.Ptr() ); tSrc.SetDict ( pAcc->m_pDict ); if ( !tSrc.Connect ( m_sLastError ) ) return false; tSrc.m_tDocInfo.Clone ( tDoc, m_tSchema.GetRowSize() ); if ( !tSrc.IterateStart ( sError ) || !tSrc.IterateDocument ( sError ) ) return false; ISphHits * pHits = tSrc.IterateHits ( sError ); return AddDocument ( pHits, tDoc, ppStr, dMvas, sError ); } void AccumCleanup ( void * pArg ) { RtAccum_t * pAcc = (RtAccum_t *) pArg; SafeDelete ( pAcc ); } RtAccum_t * RtIndex_t::AcquireAccum ( CSphString * sError ) { RtAccum_t * pAcc = NULL; // check that no other index is holding the acc pAcc = (RtAccum_t*) sphThreadGet ( g_tTlsAccumKey ); if ( pAcc && pAcc->m_pIndex!=NULL && pAcc->m_pIndex!=this ) { if ( sError ) sError->SetSprintf ( "current txn is working with another index ('%s')", pAcc->m_pIndex->m_tSchema.m_sName.cstr() ); return NULL; } if ( !pAcc ) { pAcc = new RtAccum_t ( m_bKeywordDict ); sphThreadSet ( g_tTlsAccumKey, pAcc ); sphThreadOnExit ( AccumCleanup, pAcc ); } assert ( pAcc->m_pIndex==NULL || pAcc->m_pIndex==this ); pAcc->m_pIndex = this; pAcc->SetupDict ( this, 
m_pDict, m_bKeywordDict ); return pAcc; } bool RtIndex_t::AddDocument ( ISphHits * pHits, const CSphMatch & tDoc, const char ** ppStr, const CSphVector & dMvas, CSphString & sError ) { assert ( g_bRTChangesAllowed ); RtAccum_t * pAcc = AcquireAccum ( &sError ); if ( pAcc ) pAcc->AddDocument ( pHits, tDoc, m_tSchema.GetRowSize(), ppStr, dMvas ); return ( pAcc!=NULL ); } RtAccum_t::RtAccum_t ( bool bKeywordDict ) : m_pIndex ( NULL ) , m_iAccumDocs ( 0 ) , m_bKeywordDict ( bKeywordDict ) , m_pDict ( NULL ) , m_pRefDict ( NULL ) , m_pRefIndex ( NULL ) , m_pDictCloned ( NULL ) , m_pDictRt ( NULL ) { m_dStrings.Add ( 0 ); m_dMvas.Add ( 0 ); } RtAccum_t::~RtAccum_t() { SafeDelete ( m_pDictCloned ); SafeDelete ( m_pDictRt ); } void RtAccum_t::SetupDict ( RtIndex_t * pIndex, CSphDict * pDict, bool bKeywordDict ) { if ( pIndex!=m_pRefIndex || pDict!=m_pRefDict || bKeywordDict!=m_bKeywordDict ) { SafeDelete ( m_pDictCloned ); SafeDelete ( m_pDictRt ); m_pDict = NULL; m_pRefIndex = pIndex; m_pRefDict = pDict; m_bKeywordDict = bKeywordDict; } if ( !m_pDict ) { m_pDict = m_pRefDict; if ( m_pRefDict->HasState() ) { m_pDict = m_pDictCloned = m_pRefDict->Clone(); } if ( m_bKeywordDict ) { m_pDict = m_pDictRt = sphCreateRtKeywordsDictionaryWrapper ( m_pDict ); } } } void RtAccum_t::ResetDict () { assert ( !m_bKeywordDict || m_pDictRt ); if ( m_pDictRt ) { m_pDictRt->ResetKeywords(); } } void RtAccum_t::Sort () { if ( !m_bKeywordDict ) { m_dAccum.Sort ( CmpHitPlain_fn() ); } else { assert ( m_pDictRt ); const BYTE * pPackedKeywords = m_pDictRt->GetPackedKeywords(); m_dAccum.Sort ( CmpHitKeywords_fn ( pPackedKeywords ) ); } } void RtAccum_t::AddDocument ( ISphHits * pHits, const CSphMatch & tDoc, int iRowSize, const char ** ppStr, const CSphVector & dMvas ) { MEMORY ( SPH_MEM_IDX_RT_ACCUM ); // schedule existing copies for deletion m_dAccumKlist.Add ( tDoc.m_iDocID ); // reserve some hit space on first use if ( pHits && pHits->Length() && !m_dAccum.GetLength() ) m_dAccum.Reserve ( 128*1024 ); // accumulate row data; expect fully dynamic rows assert ( !tDoc.m_pStatic ); assert (!( !tDoc.m_pDynamic && iRowSize!=0 )); assert (!( tDoc.m_pDynamic && (int)tDoc.m_pDynamic[-1]!=iRowSize )); m_dAccumRows.Resize ( m_dAccumRows.GetLength() + DOCINFO_IDSIZE + iRowSize ); CSphRowitem * pRow = &m_dAccumRows [ m_dAccumRows.GetLength() - DOCINFO_IDSIZE - iRowSize ]; DOCINFOSETID ( pRow, tDoc.m_iDocID ); CSphRowitem * pAttrs = DOCINFO2ATTRS(pRow); for ( int i=0; iGetInternalSchema(); int iAttr = 0; for ( int i=0; i=1 ); m_dStrings.Resize ( iOff + iLenPacked + iLen ); memcpy ( &m_dStrings[iOff], dLen, iLenPacked ); memcpy ( &m_dStrings[iOff+iLenPacked], pStr, iLen ); sphSetRowAttr ( pAttrs, tColumn.m_tLocator, iOff ); } else { sphSetRowAttr ( pAttrs, tColumn.m_tLocator, 0 ); } } else if ( tColumn.m_eAttrType==SPH_ATTR_UINT32SET || tColumn.m_eAttrType==SPH_ATTR_UINT64SET ) { assert ( m_dMvas.GetLength() ); int iCount = dMvas[iMva]; if ( iCount ) { int iDst = m_dMvas.GetLength(); m_dMvas.Resize ( iDst+iCount+1 ); memcpy ( m_dMvas.Begin()+iDst, dMvas.Begin()+iMva, (iCount+1)*sizeof(dMvas[0]) ); sphSetRowAttr ( pAttrs, tColumn.m_tLocator, iDst ); } else { sphSetRowAttr ( pAttrs, tColumn.m_tLocator, 0 ); } iMva += iCount+1; } } // accumulate hits int iHits = 0; if ( pHits && pHits->Length() ) { iHits = pHits->Length(); m_dAccum.Reserve ( m_dAccum.GetLength()+iHits ); for ( const CSphWordHit * pHit = pHits->First(); pHit<=pHits->Last(); pHit++ ) m_dAccum.Add ( *pHit ); } m_dPerDocHitsCount.Add ( iHits ); m_iAccumDocs++; } // cook 
checkpoints - make NULL terminating strings from offsets static void FixupSegmentCheckpoints ( RtSegment_t * pSeg ) { assert ( pSeg && ( !pSeg->m_dWordCheckpoints.GetLength() || pSeg->m_dKeywordCheckpoints.GetLength() ) ); if ( !pSeg->m_dWordCheckpoints.GetLength() ) return; const char * pBase = (const char *)pSeg->m_dKeywordCheckpoints.Begin(); assert ( pBase ); ARRAY_FOREACH ( i, pSeg->m_dWordCheckpoints ) { const char * sWord = pBase + pSeg->m_dWordCheckpoints[i].m_iWordID; pSeg->m_dWordCheckpoints[i].m_sWord = sWord; } } RtSegment_t * RtAccum_t::CreateSegment ( int iRowSize, int iWordsCheckpoint ) { if ( !m_iAccumDocs ) return NULL; MEMORY ( SPH_MEM_IDX_RT_ACCUM ); RtSegment_t * pSeg = new RtSegment_t (); CSphWordHit tClosingHit; tClosingHit.m_iWordID = WORDID_MAX; tClosingHit.m_iDocID = DOCID_MAX; tClosingHit.m_iWordPos = EMPTY_HIT; m_dAccum.Add ( tClosingHit ); RtDoc_t tDoc; tDoc.m_uDocID = 0; tDoc.m_uDocFields = 0; tDoc.m_uHits = 0; tDoc.m_uHit = 0; RtWord_t tWord; tWord.m_uWordID = 0; tWord.m_uDocs = 0; tWord.m_uHits = 0; tWord.m_uDoc = 0; RtDocWriter_t tOutDoc ( pSeg ); RtWordWriter_t tOutWord ( pSeg, m_bKeywordDict, iWordsCheckpoint ); RtHitWriter_t tOutHit ( pSeg ); const BYTE * pPacketBase = NULL; if ( m_bKeywordDict ) pPacketBase = m_pDictRt->GetPackedKeywords(); Hitpos_t uEmbeddedHit = EMPTY_HIT; ARRAY_FOREACH ( i, m_dAccum ) { const CSphWordHit & tHit = m_dAccum[i]; // new keyword or doc; flush current doc if ( tHit.m_iWordID!=tWord.m_uWordID || tHit.m_iDocID!=tDoc.m_uDocID ) { if ( tDoc.m_uDocID ) { tWord.m_uDocs++; tWord.m_uHits += tDoc.m_uHits; if ( uEmbeddedHit ) { assert ( tDoc.m_uHits==1 ); tDoc.m_uHit = uEmbeddedHit; } tOutDoc.ZipDoc ( tDoc ); tDoc.m_uDocFields = 0; tDoc.m_uHits = 0; tDoc.m_uHit = tOutHit.ZipHitPtr(); } tDoc.m_uDocID = tHit.m_iDocID; tOutHit.ZipRestart (); uEmbeddedHit = 0; } // new keyword; flush current keyword if ( tHit.m_iWordID!=tWord.m_uWordID ) { tOutDoc.ZipRestart (); if ( tWord.m_uWordID ) { if ( m_bKeywordDict ) { const BYTE * pPackedWord = pPacketBase + tWord.m_uWordID; assert ( pPackedWord[0] && pPackedWord[0]+1GetPackedLen() ); tWord.m_sWord = pPackedWord; } tOutWord.ZipWord ( tWord ); } tWord.m_uWordID = tHit.m_iWordID; tWord.m_uDocs = 0; tWord.m_uHits = 0; tWord.m_uDoc = tOutDoc.ZipDocPtr(); } // just a new hit if ( !tDoc.m_uHits ) { uEmbeddedHit = tHit.m_iWordPos; } else { if ( uEmbeddedHit ) { tOutHit.ZipHit ( uEmbeddedHit ); uEmbeddedHit = 0; } tOutHit.ZipHit ( tHit.m_iWordPos ); } const int iField = HITMAN::GetField ( tHit.m_iWordPos ); if ( iField<32 ) tDoc.m_uDocFields |= ( 1UL<m_iRows = m_iAccumDocs; pSeg->m_iAliveRows = m_iAccumDocs; // copy and sort attributes int iStride = DOCINFO_IDSIZE + iRowSize; pSeg->m_dRows.SwapData ( m_dAccumRows ); pSeg->m_dStrings.SwapData ( m_dStrings ); pSeg->m_dMvas.SwapData ( m_dMvas ); sphSortDocinfos ( pSeg->m_dRows.Begin(), pSeg->m_dRows.GetLength()/iStride, iStride ); // done return pSeg; } struct AccumDocHits_t { SphDocID_t m_uDocid; int m_iDocIndex; int m_iHitIndex; int m_iHitCount; }; struct CmpDocHitIndex_t { inline bool IsLess ( const AccumDocHits_t & a, const AccumDocHits_t & b ) const { return ( a.m_uDocid dDocHits ( m_dPerDocHitsCount.GetLength() ); int iStride = DOCINFO_IDSIZE + iRowSize; int iHitIndex = 0; CSphRowitem * pRow = m_dAccumRows.Begin(); for ( int i=0; i=0; iHit-- ) { if ( !dDocHits[iHit].m_iHitCount ) continue; int iFrom = dDocHits[iHit].m_iHitIndex; int iCount = dDocHits[iHit].m_iHitCount; if ( iFrom+iCount=0; iDoc-- ) { int iDst = dDocHits[iDoc].m_iDocIndex*iStride; int 
iSrc = iDst+iStride; while ( iSrc * pAccKlist ) { RtDocReader_t tInDoc ( pSrc, *pWord ); RtDocWriter_t tOutDoc ( pDst ); RtWord_t tNewWord = *pWord; tNewWord.m_uDoc = tOutDoc.ZipDocPtr(); // if flag is there, acc must be there // however, NOT vice versa (newly created segments are unaffected by TLS klist) assert (!( pSrc->m_bTlsKlist && !pAccKlist )); #if 0 // index *must* be holding acc during merge assert ( !pAcc || pAcc->m_pIndex==this ); #endif // copy docs for ( ;; ) { const RtDoc_t * pDoc = tInDoc.UnzipDoc(); if ( !pDoc ) break; // apply klist bool bKill = ( pSrc->m_dKlist.BinarySearch ( pDoc->m_uDocID )!=NULL ); if ( !bKill && pSrc->m_bTlsKlist ) bKill = ( pAccKlist->BinarySearch ( pDoc->m_uDocID )!=NULL ); if ( bKill ) { tNewWord.m_uDocs--; tNewWord.m_uHits -= pDoc->m_uHits; continue; } // short route, single embedded hit if ( pDoc->m_uHits==1 ) { tOutDoc.ZipDoc ( *pDoc ); continue; } // long route, copy hits RtHitWriter_t tOutHit ( pDst ); RtHitReader_t tInHit ( pSrc, pDoc ); RtDoc_t tDoc = *pDoc; tDoc.m_uHit = tOutHit.ZipHitPtr(); // OPTIMIZE? decode+memcpy? for ( DWORD uValue=tInHit.UnzipHit(); uValue; uValue=tInHit.UnzipHit() ) tOutHit.ZipHit ( uValue ); // copy doc tOutDoc.ZipDoc ( tDoc ); } // append word to the dictionary if ( tNewWord.m_uDocs ) tOutWord.ZipWord ( tNewWord ); // move forward return tInWord.UnzipWord (); } void RtIndex_t::CopyDoc ( RtSegment_t * pSeg, RtDocWriter_t & tOutDoc, RtWord_t * pWord, const RtSegment_t * pSrc, const RtDoc_t * pDoc ) { pWord->m_uDocs++; pWord->m_uHits += pDoc->m_uHits; if ( pDoc->m_uHits==1 ) { tOutDoc.ZipDoc ( *pDoc ); return; } RtHitWriter_t tOutHit ( pSeg ); RtHitReader_t tInHit ( pSrc, pDoc ); RtDoc_t tDoc = *pDoc; tDoc.m_uHit = tOutHit.ZipHitPtr(); tOutDoc.ZipDoc ( tDoc ); // OPTIMIZE? decode+memcpy? for ( DWORD uValue=tInHit.UnzipHit(); uValue; uValue=tInHit.UnzipHit() ) tOutHit.ZipHit ( uValue ); } void RtIndex_t::MergeWord ( RtSegment_t * pSeg, const RtSegment_t * pSrc1, const RtWord_t * pWord1, const RtSegment_t * pSrc2, const RtWord_t * pWord2, RtWordWriter_t & tOut, const CSphVector * pAccKlist ) { assert ( ( !m_bKeywordDict && pWord1->m_uWordID==pWord2->m_uWordID ) || ( m_bKeywordDict && sphDictCmpStrictly ( (const char *)pWord1->m_sWord+1, *pWord1->m_sWord, (const char *)pWord2->m_sWord+1, *pWord2->m_sWord )==0 ) ); RtDocWriter_t tOutDoc ( pSeg ); RtWord_t tWord; if ( !m_bKeywordDict ) tWord.m_uWordID = pWord1->m_uWordID; else tWord.m_sWord = pWord1->m_sWord; tWord.m_uDocs = 0; tWord.m_uHits = 0; tWord.m_uDoc = tOutDoc.ZipDocPtr(); RtDocReader_t tIn1 ( pSrc1, *pWord1 ); RtDocReader_t tIn2 ( pSrc2, *pWord2 ); const RtDoc_t * pDoc1 = tIn1.UnzipDoc(); const RtDoc_t * pDoc2 = tIn2.UnzipDoc(); while ( pDoc1 || pDoc2 ) { if ( pDoc1 && pDoc2 && pDoc1->m_uDocID==pDoc2->m_uDocID ) { // dupe, must (!) 
be killed in the first segment, might be in both #if 0 assert ( pSrc1->m_dKlist.BinarySearch ( pDoc1->m_uDocID ) || ( pSrc1->m_bTlsKlist && pAcc && pAcc->m_dAccumKlist.BinarySearch ( pDoc1->m_uDocID ) ) ); #endif if ( !pSrc2->m_dKlist.BinarySearch ( pDoc2->m_uDocID ) && ( !pSrc1->m_bTlsKlist || !pSrc2->m_bTlsKlist || !pAccKlist->BinarySearch ( pDoc2->m_uDocID ) ) ) CopyDoc ( pSeg, tOutDoc, &tWord, pSrc2, pDoc2 ); pDoc1 = tIn1.UnzipDoc(); pDoc2 = tIn2.UnzipDoc(); } else if ( pDoc1 && ( !pDoc2 || pDoc1->m_uDocID < pDoc2->m_uDocID ) ) { // winner from the first segment if ( !pSrc1->m_dKlist.BinarySearch ( pDoc1->m_uDocID ) && ( !pSrc1->m_bTlsKlist || !pAccKlist->BinarySearch ( pDoc1->m_uDocID ) ) ) CopyDoc ( pSeg, tOutDoc, &tWord, pSrc1, pDoc1 ); pDoc1 = tIn1.UnzipDoc(); } else { // winner from the second segment assert ( pDoc2 && ( !pDoc1 || pDoc2->m_uDocID < pDoc1->m_uDocID ) ); if ( !pSrc2->m_dKlist.BinarySearch ( pDoc2->m_uDocID ) && ( !pSrc2->m_bTlsKlist || !pAccKlist->BinarySearch ( pDoc2->m_uDocID ) ) ) CopyDoc ( pSeg, tOutDoc, &tWord, pSrc2, pDoc2 ); pDoc2 = tIn2.UnzipDoc(); } } if ( tWord.m_uDocs ) tOut.ZipWord ( tWord ); } #if PARANOID static void CheckSegmentRows ( const RtSegment_t * pSeg, int iStride ) { const CSphVector & dRows = pSeg->m_dRows; // shortcut for ( int i=iStride; i DOCINFO2ID ( &dRows[i-iStride] ) ); } #endif template < typename DOCID = SphDocID_t > struct RtRowIterator_T : public ISphNoncopyable { protected: const CSphRowitem * m_pRow; const CSphRowitem * m_pRowMax; const DOCID * m_pKlist; const DOCID * m_pKlistMax; const DOCID * m_pTlsKlist; const DOCID * m_pTlsKlistMax; const int m_iStride; public: explicit RtRowIterator_T ( const RtSegment_t * pSeg, int iStride, bool bWriter, const CSphVector * pAccKlist ) : m_pRow ( pSeg->m_dRows.Begin() ) , m_pRowMax ( pSeg->m_dRows.Begin() + pSeg->m_dRows.GetLength() ) , m_pKlist ( NULL ) , m_pKlistMax ( NULL ) , m_pTlsKlist ( NULL ) , m_pTlsKlistMax ( NULL ) , m_iStride ( iStride ) { if ( pSeg->m_dKlist.GetLength() ) { m_pKlist = ( const DOCID* ) pSeg->m_dKlist.Begin(); m_pKlistMax = m_pKlist + pSeg->m_dKlist.GetLength(); } // FIXME? OPTIMIZE? must not scan tls (open txn) in readers; can implement lighter iterator // FIXME? OPTIMIZE? maybe we should just rely on the segment order and don't scan tls klist here if ( bWriter && pSeg->m_bTlsKlist && pAccKlist && pAccKlist->GetLength() ) { m_pTlsKlist = pAccKlist->Begin(); m_pTlsKlistMax = m_pTlsKlist + pAccKlist->GetLength(); } } const CSphRowitem * GetNextAliveRow () { // while there are rows and k-list entries while ( m_pRow(m_pRow); // check if segment k-list kills it while ( m_pKlist=m_pRowMax ) return NULL; // got it, and it's alive! 
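		// (the two lines below advance the cursor by one stride and return the
		// row that was just checked against both k-lists)
		//
		// For reference, callers drain this iterator the way the segment merge
		// further down does; a minimal sketch, where ProcessRow() is a
		// hypothetical consumer:
#if 0
		RtRowIterator_t tIt ( pSeg, iStride, true /*writer*/, pAccKlist );
		for ( const CSphRowitem * pRow = tIt.GetNextAliveRow(); pRow; pRow = tIt.GetNextAliveRow() )
			ProcessRow ( pRow );
#endif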
m_pRow += m_iStride; return m_pRow-m_iStride; } }; typedef RtRowIterator_T<> RtRowIterator_t; #ifdef PARANOID // sanity check in PARANOID mode template void VerifyEmptyStrings ( const CSphTightVector & dStorage, const CSphSchema & tSchema, const CSphRowitem * pRow ) { if ( dStorage.GetLength()>1 ) return; const DWORD * pAttr = DOCINFO2ATTRS_T(pRow); for ( int i=0; i & dDst ) { assert ( pSrc ); assert ( dDst.GetLength()>=1 ); const BYTE * pStr = NULL; const int iLen = sphUnpackStr ( pSrc, &pStr ); assert ( iLen>0 ); assert ( pStr ); const DWORD uOff = dDst.GetLength(); const DWORD uWriteLen = iLen + ( pStr - pSrc ); // actual length = strings content length + packed length of string dDst.Resize ( uOff + uWriteLen ); memcpy ( dDst.Begin() + uOff, pSrc, uWriteLen ); return uOff; } static DWORD CopyMva ( const DWORD * pSrc, CSphTightVector & dDst ) { assert ( pSrc ); assert ( dDst.GetLength()>=1 ); DWORD uCount = *pSrc; assert ( uCount ); DWORD iLen = dDst.GetLength(); dDst.Resize ( iLen+uCount+1 ); memcpy ( dDst.Begin()+iLen, pSrc, ( uCount+1 )*sizeof(DWORD) ); return iLen; } static void ExtractLocators ( const CSphSchema & tSchema, ESphAttr eAttrType, CSphVector & dLocators ) { for ( int i=0; i m_dLocators; public: explicit StorageStringWriter_t ( const CSphSchema & tSchema, CSphWriter & tDst ) : m_tDst ( tDst ) { ExtractLocators ( tSchema, SPH_ATTR_STRING, m_dLocators ); } const CSphVector & GetLocators () const { return m_dLocators; } void SetDocid ( SphDocID_t ) {} DWORD CopyAttr ( const BYTE * pSrc ) { assert ( m_tDst.GetPos()>0 && m_tDst.GetPos()<( I64C(1)<<32 ) ); // should be 32 bit offset const BYTE * pStr = NULL; const int iLen = sphUnpackStr ( pSrc, &pStr ); assert ( iLen && pStr ); DWORD uAttr = (DWORD)m_tDst.GetPos(); const int iWriteLen = iLen + ( pStr - pSrc ); m_tDst.PutBytes ( pSrc, iWriteLen ); return uAttr; } }; class StorageStringVector_t : ISphNoncopyable { private: CSphTightVector & m_dDst; CSphVector m_dLocators; public: explicit StorageStringVector_t ( const CSphSchema & tSchema, CSphTightVector & dDst ) : m_dDst ( dDst ) { ExtractLocators ( tSchema, SPH_ATTR_STRING, m_dLocators ); } const CSphVector & GetLocators () const { return m_dLocators; } void SetDocid ( SphDocID_t ) {} DWORD CopyAttr ( const BYTE * pSrc ) { assert ( m_dDst.GetLength()>0 && m_dDst.GetLength()<( I64C(1)<<32 ) ); // should be 32 bit offset return CopyPackedString ( pSrc, m_dDst ); } }; class StorageMvaWriter_t : ISphNoncopyable { private: CSphWriter & m_tDst; CSphVector m_dLocators; public: explicit StorageMvaWriter_t ( const CSphSchema & tSchema, CSphWriter & tDst ) : m_tDst ( tDst ) { ExtractLocators ( tSchema, SPH_ATTR_UINT32SET, m_dLocators ); ExtractLocators ( tSchema, SPH_ATTR_UINT64SET, m_dLocators ); } const CSphVector & GetLocators () const { return m_dLocators; } void SetDocid ( SphDocID_t uDocid ) { m_tDst.PutDocid ( uDocid ); } DWORD CopyAttr ( const DWORD * pSrc ) { assert ( m_tDst.GetPos()>0 && m_tDst.GetPos()<( I64C(1)<<32 ) ); // should be 32 bit offset DWORD uCount = *pSrc; assert ( uCount ); SphOffset_t uOff = m_tDst.GetPos(); assert ( ( uOff%sizeof(DWORD) )==0 ); m_tDst.PutBytes ( pSrc, ( uCount+1 )*sizeof(DWORD) ); return MVA_DOWNSIZE ( uOff/sizeof(DWORD) ); } }; class StorageMvaVector_t : ISphNoncopyable { private: CSphTightVector & m_dDst; CSphVector m_dLocators; public: explicit StorageMvaVector_t ( const CSphSchema & tSchema, CSphTightVector & dDst ) : m_dDst ( dDst ) { ExtractLocators ( tSchema, SPH_ATTR_UINT32SET, m_dLocators ); ExtractLocators ( tSchema, 
SPH_ATTR_UINT64SET, m_dLocators ); } const CSphVector & GetLocators () const { return m_dLocators; } void SetDocid ( SphDocID_t ) {} DWORD CopyAttr ( const DWORD * pSrc ) { assert ( m_dDst.GetLength()>0 && m_dDst.GetLength()<( I64C(1)<<32 ) ); // should be 32 bit offset return CopyMva ( pSrc, m_dDst ); } }; template void CopyFixupStorageAttrs ( const CSphTightVector & dSrc, STORAGE & tStorage, CSphRowitem * pRow ) { const CSphVector & dLocators = tStorage.GetLocators(); if ( !dLocators.GetLength() ) return; // store string\mva attr for this row SphDocID_t uDocid = DOCINFO2ID ( pRow ); DWORD * pAttr = DOCINFO2ATTRS_T( pRow ); bool bIdSet = false; ARRAY_FOREACH ( i, dLocators ) { const SphAttr_t uOff = sphGetRowAttr ( pAttr, dLocators[i] ); if ( !uOff ) continue; assert ( uOff && uOff * pAccKlist ) { if ( pSeg1->m_iTag > pSeg2->m_iTag ) Swap ( pSeg1, pSeg2 ); RtSegment_t * pSeg = new RtSegment_t (); //////////////////// // merge attributes //////////////////// // check that all the IDs are in proper asc order #if PARANOID CheckSegmentRows ( pSeg1, m_iStride ); CheckSegmentRows ( pSeg2, m_iStride ); #endif // just a shortcut CSphVector & dRows = pSeg->m_dRows; CSphTightVector & dStrings = pSeg->m_dStrings; CSphTightVector & dMvas = pSeg->m_dMvas; // we might need less because of dupes, but we can not know yet dRows.Reserve ( pSeg1->m_dRows.GetLength() + pSeg2->m_dRows.GetLength() ); // as each segment has dummy zero we reserve less assert ( pSeg1->m_dStrings.GetLength() + pSeg2->m_dStrings.GetLength()>=2 ); dStrings.Reserve ( pSeg1->m_dStrings.GetLength() + pSeg2->m_dStrings.GetLength() - 2 ); assert ( pSeg1->m_dMvas.GetLength() + pSeg2->m_dMvas.GetLength()>=2 ); dMvas.Reserve ( pSeg1->m_dMvas.GetLength() + pSeg2->m_dMvas.GetLength() - 2 ); StorageStringVector_t tStorageString ( m_tSchema, dStrings ); StorageMvaVector_t tStorageMva ( m_tSchema, dMvas ); RtRowIterator_t tIt1 ( pSeg1, m_iStride, true, pAccKlist ); RtRowIterator_t tIt2 ( pSeg2, m_iStride, true, pAccKlist ); const CSphRowitem * pRow1 = tIt1.GetNextAliveRow(); const CSphRowitem * pRow2 = tIt2.GetNextAliveRow(); while ( pRow1 || pRow2 ) { if ( !pRow2 || ( pRow1 && pRow2 && DOCINFO2ID(pRow1) ( pSeg1->m_dStrings, tStorageString, pDstRow ); CopyFixupStorageAttrs ( pSeg1->m_dMvas, tStorageMva, pDstRow ); pRow1 = tIt1.GetNextAliveRow(); } else { assert ( pRow2 ); assert ( !pRow1 || ( DOCINFO2ID(pRow1)!=DOCINFO2ID(pRow2) ) ); // all dupes must be killed and skipped by the iterator for ( int i=0; i ( pSeg2->m_dStrings, tStorageString, pDstRow ); CopyFixupStorageAttrs ( pSeg2->m_dMvas, tStorageMva, pDstRow ); pRow2 = tIt2.GetNextAliveRow(); } pSeg->m_iRows++; pSeg->m_iAliveRows++; } assert ( pSeg->m_iRows*m_iStride==pSeg->m_dRows.GetLength() ); #if PARANOID CheckSegmentRows ( pSeg, m_iStride ); #endif ////////////////// // merge keywords ////////////////// pSeg->m_dWords.Reserve ( pSeg1->m_dWords.GetLength() + pSeg2->m_dWords.GetLength() ); pSeg->m_dDocs.Reserve ( pSeg1->m_dDocs.GetLength() + pSeg2->m_dDocs.GetLength() ); pSeg->m_dHits.Reserve ( pSeg1->m_dHits.GetLength() + pSeg2->m_dHits.GetLength() ); RtWordWriter_t tOut ( pSeg, m_bKeywordDict, m_iWordsCheckpoint ); RtWordReader_t tIn1 ( pSeg1, m_bKeywordDict, m_iWordsCheckpoint ); RtWordReader_t tIn2 ( pSeg2, m_bKeywordDict, m_iWordsCheckpoint ); const RtWord_t * pWords1 = tIn1.UnzipWord (); const RtWord_t * pWords2 = tIn2.UnzipWord (); // merge while there are common words for ( ;; ) { while ( pWords1 && pWords2 ) { int iCmp = 0; if ( m_bKeywordDict ) { iCmp = sphDictCmpStrictly ( 
(const char *)pWords1->m_sWord+1, *pWords1->m_sWord, (const char *)pWords2->m_sWord+1, *pWords2->m_sWord ); } else { if ( pWords1->m_uWordIDm_uWordID ) iCmp = -1; else if ( pWords1->m_uWordID>pWords2->m_uWordID ) iCmp = 1; } if ( iCmp==0 ) break; if ( iCmp<0 ) pWords1 = CopyWord ( pSeg, tOut, pSeg1, pWords1, tIn1, pAccKlist ); else pWords2 = CopyWord ( pSeg, tOut, pSeg2, pWords2, tIn2, pAccKlist ); } if ( !pWords1 || !pWords2 ) break; assert ( pWords1 && pWords2 && ( ( !m_bKeywordDict && pWords1->m_uWordID==pWords2->m_uWordID ) || ( m_bKeywordDict && sphDictCmpStrictly ( (const char *)pWords1->m_sWord+1, *pWords1->m_sWord, (const char *)pWords2->m_sWord+1, *pWords2->m_sWord )==0 ) ) ); MergeWord ( pSeg, pSeg1, pWords1, pSeg2, pWords2, tOut, pAccKlist ); pWords1 = tIn1.UnzipWord(); pWords2 = tIn2.UnzipWord(); } // copy tails while ( pWords1 ) pWords1 = CopyWord ( pSeg, tOut, pSeg1, pWords1, tIn1, pAccKlist ); while ( pWords2 ) pWords2 = CopyWord ( pSeg, tOut, pSeg2, pWords2, tIn2, pAccKlist ); if ( m_bKeywordDict ) FixupSegmentCheckpoints ( pSeg ); assert ( pSeg->m_dRows.GetLength() ); assert ( pSeg->m_iRows ); assert ( pSeg->m_iAliveRows==pSeg->m_iRows ); return pSeg; } struct CmpSegments_fn { inline bool IsLess ( const RtSegment_t * a, const RtSegment_t * b ) { return a->GetMergeFactor() > b->GetMergeFactor(); } }; void RtIndex_t::Commit () { assert ( g_bRTChangesAllowed ); MEMORY ( SPH_MEM_IDX_RT ); RtAccum_t * pAcc = AcquireAccum(); if ( !pAcc ) return; // empty txn, just ignore if ( !pAcc->m_iAccumDocs && !pAcc->m_dAccumKlist.GetLength() ) { pAcc->m_pIndex = NULL; pAcc->m_iAccumDocs = 0; pAcc->m_dAccumRows.Resize ( 0 ); pAcc->m_dStrings.Resize ( 1 ); pAcc->m_dPerDocHitsCount.Resize ( 0 ); pAcc->ResetDict(); return; } // phase 0, build a new segment // accum and segment are thread local; so no locking needed yet // segment might be NULL if we're only killing rows this txn pAcc->CleanupDuplacates ( m_tSchema.GetRowSize() ); pAcc->Sort(); RtSegment_t * pNewSeg = pAcc->CreateSegment ( m_tSchema.GetRowSize(), m_iWordsCheckpoint ); assert ( !pNewSeg || pNewSeg->m_iRows>0 ); assert ( !pNewSeg || pNewSeg->m_iAliveRows>0 ); assert ( !pNewSeg || pNewSeg->m_bTlsKlist==false ); #if PARANOID if ( pNewSeg ) CheckSegmentRows ( pNewSeg, m_iStride ); #endif // clean up parts we no longer need pAcc->m_dAccum.Resize ( 0 ); pAcc->m_dAccumRows.Resize ( 0 ); pAcc->m_dStrings.Resize ( 1 ); // handle dummy zero offset pAcc->m_dPerDocHitsCount.Resize ( 0 ); pAcc->ResetDict(); // sort accum klist, too pAcc->m_dAccumKlist.Uniq (); // now on to the stuff that needs locking and recovery CommitReplayable ( pNewSeg, pAcc->m_dAccumKlist ); // done; cleanup accum pAcc->m_pIndex = NULL; pAcc->m_iAccumDocs = 0; pAcc->m_dAccumKlist.Reset(); } void RtIndex_t::CommitReplayable ( RtSegment_t * pNewSeg, CSphVector & dAccKlist ) { int iNewDocs = pNewSeg ? pNewSeg->m_iRows : 0; // phase 1, lock out other writers (but not readers yet) // concurrent readers are ok during merges, as existing segments won't be modified yet // however, concurrent writers are not Verify ( m_tWriterMutex.Lock() ); // first of all, binlog txn data for recovery g_pRtBinlog->BinlogCommit ( &m_iTID, m_sIndexName.cstr(), pNewSeg, dAccKlist, m_bKeywordDict ); // let merger know that existing segments are subject to additional, TLS K-list filter // safe despite the readers, flag must only be used by writer if ( dAccKlist.GetLength() ) ARRAY_FOREACH ( i, m_pSegments ) { // OPTIMIZE? 
only need to set the flag if TLS K-list *actually* affects segment assert ( m_pSegments[i]->m_bTlsKlist==false ); m_pSegments[i]->m_bTlsKlist = true; } // prepare new segments vector // create more new segments by merging as needed // do not (!) kill processed old segments just yet, as readers might still need them CSphVector dSegments; CSphVector dToKill; dSegments = m_pSegments; if ( pNewSeg ) dSegments.Add ( pNewSeg ); int64_t iRamFreed = 0; // enforce RAM usage limit int64_t iRamLeft = m_iRamSize; ARRAY_FOREACH ( i, dSegments ) iRamLeft = Max ( iRamLeft - dSegments[i]->GetUsedRam(), 0 ); // skip merging if no rows were added or no memory left bool bDump = ( iRamLeft==0 ); const int MAX_SEGMENTS = 32; const int MAX_PROGRESSION_SEGMENT = 8; while ( pNewSeg && iRamLeft>0 ) { dSegments.Sort ( CmpSegments_fn() ); // unconditionally merge if there's too much segments now // conditionally merge if smallest segment has grown too large // otherwise, we're done const int iLen = dSegments.GetLength(); if ( iLen < ( MAX_SEGMENTS - MAX_PROGRESSION_SEGMENT ) ) break; assert ( iLen>=2 ); // exit if progression is kept AND lesser MAX_SEGMENTS limit if ( dSegments[iLen-2]->GetMergeFactor() > dSegments[iLen-1]->GetMergeFactor()*2 && iLen < MAX_SEGMENTS ) break; // check whether we have enough RAM #define LOC_ESTIMATE1(_seg,_vec) \ (int)( ( (int64_t)_seg->_vec.GetLength() ) * _seg->m_iAliveRows / _seg->m_iRows ) #define LOC_ESTIMATE(_vec) \ ( LOC_ESTIMATE1 ( dSegments[iLen-1], _vec ) + LOC_ESTIMATE1 ( dSegments[iLen-2], _vec ) ) int64_t iEstimate = CSphTightVectorPolicy::Relimit ( 0, LOC_ESTIMATE ( m_dWords ) ) + CSphTightVectorPolicy::Relimit ( 0, LOC_ESTIMATE ( m_dDocs ) ) + CSphTightVectorPolicy::Relimit ( 0, LOC_ESTIMATE ( m_dHits ) ) + CSphTightVectorPolicy::Relimit ( 0, LOC_ESTIMATE ( m_dStrings ) + CSphTightVectorPolicy::Relimit ( 0, LOC_ESTIMATE ( m_dMvas ) ) + CSphTightVectorPolicy::Relimit ( 0, LOC_ESTIMATE ( m_dKeywordCheckpoints ) ) ); #undef LOC_ESTIMATE #undef LOC_ESTIMATE1 if ( iEstimate>iRamLeft ) { // dump case: can't merge any more AND segments count limit's reached bDump = ( ( iRamLeft + iRamFreed )<=iEstimate ) && ( iLen>=MAX_SEGMENTS ); break; } // do it RtSegment_t * pA = dSegments.Pop(); RtSegment_t * pB = dSegments.Pop(); dSegments.Add ( MergeSegments ( pA, pB, &dAccKlist ) ); dToKill.Add ( pA ); dToKill.Add ( pB ); iRamFreed += pA->GetUsedRam() + pB->GetUsedRam(); int64_t iMerged = dSegments.Last()->GetUsedRam(); iRamLeft -= Min ( iRamLeft, iMerged ); } // phase 2, obtain exclusive writer lock // we now have to update K-lists in (some of) the survived segments // and also swap in new segment list m_tRwlock.WriteLock (); // adjust for an incoming accumulator K-list int iTotalKilled = 0; if ( dAccKlist.GetLength() ) { #ifndef NDEBUG #if PARANOID // check that klist is sorted and unique for ( int i=1; iFindAliveRow ( uDocid )!=NULL ); bool bDiskKilled = m_tKlist.Exists ( uDocid ); // check disk chunks bool bKeep = false; if ( !bRamKilled || !bDiskKilled ) { for ( int j=m_pDiskChunks.GetLength()-1; j>=0 && !bKeep; j-- ) { if ( m_pDiskChunks[j]->HasDocid ( uDocid ) ) { // we just found the most recent chunk with our suspect docid // let's check whether it's already killed by subsequent chunks, or gets killed now SphAttr_t uRef = uDocid; bKeep = true; for ( int k=j+1; kGetKillList(), pIndex->GetKillList() + pIndex->GetKillListSize() - 1, uRef )==NULL ); } } } } if ( bRamKilled || bKeep ) iTotalKilled++; if ( bDiskKilled || !bKeep ) { Swap ( dAccKlist[i], dAccKlist[iDiskLiveKLen-1] ); 
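			// note: the Swap() above compacts dAccKlist in place: ids that are
			// already dead on disk, or not found alive in any disk chunk, are
			// moved past iDiskLiveKLen, so that only the leading
			// [0, iDiskLiveKLen) ids still need to be appended to the disk
			// k-list further below (as far as can be read from this code)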
iDiskLiveKLen--; i--; } } // update K-lists on survivors ARRAY_FOREACH ( iSeg, dSegments ) { RtSegment_t * pSeg = dSegments[iSeg]; if ( !pSeg->m_bTlsKlist ) continue; // should be fresh enough // this segment was not created by this txn // so we need to merge additional K-list from current txn into it CSphVector dKlistAddon; ARRAY_FOREACH ( j, dAccKlist ) { // tricky bit! // we can NOT append ids to segment k-list directly // because FindAliveRow() will binary search it // and it will expect a sorted list SphDocID_t uDocid = dAccKlist[j]; if ( pSeg->FindAliveRow ( uDocid ) ) dKlistAddon.Add ( uDocid ); } // now actually update it if ( dKlistAddon.GetLength() ) { // copy data, update counters ARRAY_FOREACH ( i, dKlistAddon ) pSeg->m_dKlist.Add ( dKlistAddon[i] ); pSeg->m_iAliveRows -= dKlistAddon.GetLength(); assert ( pSeg->m_iAliveRows>=0 ); // we did not check for existence in K-list, only in segment // so need to use Uniq(), not just Sort() pSeg->m_dKlist.Uniq (); } // mark as good pSeg->m_bTlsKlist = false; } // update disk K-list // after iDiskLiveKLen are ids already stored on disk - just skip them for ( int i=0; im_iAliveRows==0 ) { dToKill.Add ( pSeg ); dSegments.RemoveFast ( i ); i--; } } // go live! Swap ( m_pSegments, dSegments ); // we can kill retired segments now ARRAY_FOREACH ( i, dToKill ) SafeDelete ( dToKill[i] ); // update stats m_tStats.m_iTotalDocuments += iNewDocs - iTotalKilled; // phase 3, enable readers again // we might need to dump data to disk now // but during the dump, readers can still use RAM chunk data Verify ( m_tRwlock.Unlock() ); if ( bDump ) { SaveDiskChunk(); g_pBinlog->NotifyIndexFlush ( m_sIndexName.cstr(), m_iTID, false ); } // all done, enable other writers Verify ( m_tWriterMutex.Unlock() ); } void RtIndex_t::RollBack () { assert ( g_bRTChangesAllowed ); RtAccum_t * pAcc = AcquireAccum(); if ( !pAcc ) return; // clean up parts we no longer need pAcc->m_dAccum.Resize ( 0 ); pAcc->m_dAccumRows.Resize ( 0 ); // finish cleaning up and release accumulator pAcc->m_pIndex = NULL; pAcc->m_iAccumDocs = 0; pAcc->m_dAccumKlist.Reset(); } bool RtIndex_t::DeleteDocument ( const SphDocID_t * pDocs, int iDocs, CSphString & sError ) { assert ( g_bRTChangesAllowed ); MEMORY ( SPH_MEM_IDX_RT_ACCUM ); RtAccum_t * pAcc = AcquireAccum ( &sError ); if ( !pAcc ) return false; if ( !iDocs ) return true; assert ( pDocs && iDocs ); // !COMMIT should handle case when uDoc what inserted in current txn here while ( iDocs-- ) pAcc->m_dAccumKlist.Add ( *pDocs++ ); return true; } ////////////////////////////////////////////////////////////////////////// // LOAD/SAVE ////////////////////////////////////////////////////////////////////////// struct Checkpoint_t { uint64_t m_uWord; uint64_t m_uOffset; }; void RtIndex_t::ForceDiskChunk () { MEMORY ( SPH_MEM_IDX_RT ); Verify ( m_tWriterMutex.Lock() ); Verify ( m_tRwlock.WriteLock() ); SaveDiskChunk(); Verify ( m_tRwlock.Unlock() ); Verify ( m_tWriterMutex.Unlock() ); } // Here is the devil of saving id32 chunk from id64 binary daemon template < typename DOCID, typename WORDID > void RtIndex_t::SaveDiskDataImpl ( const char * sFilename ) const { typedef RtDoc_T RTDOC; typedef RtWord_T RTWORD; CSphString sName, sError; // FIXME!!! 
report collected (sError) errors CSphWriter wrHits, wrDocs, wrDict, wrRows; sName.SetSprintf ( "%s.spp", sFilename ); wrHits.OpenFile ( sName.cstr(), sError ); sName.SetSprintf ( "%s.spd", sFilename ); wrDocs.OpenFile ( sName.cstr(), sError ); sName.SetSprintf ( "%s.spi", sFilename ); wrDict.OpenFile ( sName.cstr(), sError ); sName.SetSprintf ( "%s.spa", sFilename ); wrRows.OpenFile ( sName.cstr(), sError ); BYTE bDummy = 1; wrDict.PutBytes ( &bDummy, 1 ); wrDocs.PutBytes ( &bDummy, 1 ); wrHits.PutBytes ( &bDummy, 1 ); // we don't have enough RAM to create new merged segments // and have to do N-way merge kinda in-place CSphVector*> pWordReaders; CSphVector*> pDocReaders; CSphVector pSegments; CSphVector pWords; CSphVector pDocs; pWordReaders.Reserve ( m_pSegments.GetLength() ); pDocReaders.Reserve ( m_pSegments.GetLength() ); pSegments.Reserve ( m_pSegments.GetLength() ); pWords.Reserve ( m_pSegments.GetLength() ); pDocs.Reserve ( m_pSegments.GetLength() ); // OPTIMIZE? somehow avoid new on iterators maybe? ARRAY_FOREACH ( i, m_pSegments ) pWordReaders.Add ( new RtWordReader_T ( m_pSegments[i], m_bKeywordDict, m_iWordsCheckpoint ) ); ARRAY_FOREACH ( i, pWordReaders ) pWords.Add ( pWordReaders[i]->UnzipWord() ); // loop keywords CSphVector dCheckpoints; CSphVector dKeywordCheckpoints; int iWords = 0; CSphKeywordDeltaWriter tLastWord; WORDID uLastWordID = 0; SphOffset_t uLastDocpos = 0; for ( ;; ) { // find keyword with min id const RTWORD * pWord = NULL; ARRAY_FOREACH ( i, pWords ) // OPTIMIZE? PQ or at least nulls removal here?! { if ( pWords[i] ) { if ( !pWord || ( !m_bKeywordDict && pWords[i]->m_uWordIDm_uWordID ) || ( m_bKeywordDict && sphDictCmpStrictly ( (const char *)pWords[i]->m_sWord+1, *pWords[i]->m_sWord, (const char *)pWord->m_sWord+1, *pWord->m_sWord )<0 ) ) { pWord = pWords[i]; } } } if ( !pWord ) break; // loop all segments that have this keyword assert ( pSegments.GetLength()==0 ); assert ( pDocReaders.GetLength()==0 ); assert ( pDocs.GetLength()==0 ); ARRAY_FOREACH ( i, pWords ) if ( pWords[i] && ( ( !m_bKeywordDict && pWords[i]->m_uWordID==pWord->m_uWordID ) || ( m_bKeywordDict && sphDictCmpStrictly ( (const char *)pWords[i]->m_sWord+1, *pWords[i]->m_sWord, (const char *)pWord->m_sWord+1, *pWord->m_sWord )==0 ) ) ) { pSegments.Add ( m_pSegments[i] ); pDocReaders.Add ( new RtDocReader_T ( m_pSegments[i], *pWords[i] ) ); const RTDOC * pDoc = pDocReaders.Last()->UnzipDoc(); while ( pDoc && m_pSegments[i]->m_dKlist.BinarySearch ( pDoc->m_uDocID ) ) pDoc = pDocReaders.Last()->UnzipDoc(); pDocs.Add ( pDoc ); } // loop documents SphOffset_t uDocpos = wrDocs.GetPos(); DOCID uLastDoc = 0; SphOffset_t uLastHitpos = 0; int iDocs = 0; int iHits = 0; for ( ;; ) { // find alive doc with min id int iMinReader = -1; ARRAY_FOREACH ( i, pDocs ) // OPTIMIZE? 
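		// (this inner scan picks the alive document with the smallest id among
		// all segments that carry the current keyword; together with the outer
		// keyword loop it forms an N-way merge that writes the .spd/.spp/.spi
		// files in sorted order straight from the RAM segments)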
{ if ( !pDocs[i] ) continue; assert ( !pSegments[i]->m_dKlist.BinarySearch ( pDocs[i]->m_uDocID ) ); if ( iMinReader<0 || pDocs[i]->m_uDocID < pDocs[iMinReader]->m_uDocID ) iMinReader = i; } if ( iMinReader<0 ) break; // write doclist entry const RTDOC * pDoc = pDocs[iMinReader]; // shortcut iDocs++; iHits += pDoc->m_uHits; wrDocs.ZipOffset ( pDoc->m_uDocID - uLastDoc ); wrDocs.ZipOffset ( wrHits.GetPos() - uLastHitpos ); wrDocs.ZipInt ( pDoc->m_uDocFields ); wrDocs.ZipInt ( pDoc->m_uHits ); uLastDoc = pDoc->m_uDocID; uLastHitpos = wrHits.GetPos(); // loop hits from most current segment if ( pDoc->m_uHits>1 ) { DWORD uLastHit = 0; RtHitReader_t tInHit ( pSegments[iMinReader], pDoc ); for ( DWORD uValue=tInHit.UnzipHit(); uValue; uValue=tInHit.UnzipHit() ) { wrHits.ZipInt ( uValue - uLastHit ); uLastHit = uValue; } } else { wrHits.ZipInt ( pDoc->m_uHit ); } wrHits.ZipInt ( 0 ); // fast forward readers DOCID uMinID = pDocs[iMinReader]->m_uDocID; ARRAY_FOREACH ( i, pDocs ) while ( pDocs[i] && ( pDocs[i]->m_uDocID<=uMinID || pSegments[i]->m_dKlist.BinarySearch ( pDocs[i]->m_uDocID ) ) ) pDocs[i] = pDocReaders[i]->UnzipDoc(); } // write dict entry if necessary if ( wrDocs.GetPos()!=uDocpos ) { wrDocs.ZipInt ( 0 ); // docs over if ( ( iWords%SPH_WORDLIST_CHECKPOINT )==0 ) { if ( iWords ) { SphOffset_t uOff = m_bKeywordDict ? 0 : uDocpos - uLastDocpos; wrDict.ZipInt ( 0 ); wrDict.ZipOffset ( uOff ); // store last hitlist length } // restart delta coding, once per SPH_WORDLIST_CHECKPOINT entries uLastDocpos = 0; uLastWordID = 0; tLastWord.Reset(); // begin new wordlist entry Checkpoint_t & tChk = dCheckpoints.Add (); tChk.m_uOffset = wrDict.GetPos(); if ( m_bKeywordDict ) { // copy word len + word itself to checkpoint storage tChk.m_uWord = sphPutBytes ( &dKeywordCheckpoints, pWord->m_sWord, pWord->m_sWord[0]+1 ); } else { tChk.m_uWord = pWord->m_uWordID; } } iWords++; if ( m_bKeywordDict ) { tLastWord.PutDelta ( wrDict, pWord->m_sWord+1, pWord->m_sWord[0] ); wrDict.ZipOffset ( uDocpos ); } else { assert ( pWord->m_uWordID!=uLastWordID ); wrDict.ZipOffset ( pWord->m_uWordID - uLastWordID ); uLastWordID = pWord->m_uWordID; assert ( uDocpos>uLastDocpos ); wrDict.ZipOffset ( uDocpos - uLastDocpos ); } wrDict.ZipInt ( iDocs ); wrDict.ZipInt ( iHits ); if ( m_bKeywordDict ) { BYTE uHint = sphDoclistHintPack ( iDocs, wrDocs.GetPos()-uLastDocpos ); if ( uHint ) wrDict.PutByte ( uHint ); } uLastDocpos = uDocpos; } // move words forward // because pWord contents will move forward too! WORDID uMinID = pWord->m_uWordID; char sMinWord[SPH_MAX_KEYWORD_LEN]; int iMinWordLen = 0; if ( m_bKeywordDict ) { iMinWordLen = pWord->m_sWord[0]; assert ( iMinWordLenm_sWord+1, iMinWordLen ); } ARRAY_FOREACH ( i, pWords ) { if ( pWords[i] && ( ( !m_bKeywordDict && pWords[i]->m_uWordID==uMinID ) || ( m_bKeywordDict && sphDictCmpStrictly ( (const char *)pWords[i]->m_sWord+1, pWords[i]->m_sWord[0], sMinWord, iMinWordLen )==0 ) ) ) { pWords[i] = pWordReaders[i]->UnzipWord(); } } // cleanup ARRAY_FOREACH ( i, pDocReaders ) SafeDelete ( pDocReaders[i] ); pSegments.Resize ( 0 ); pDocReaders.Resize ( 0 ); pDocs.Resize ( 0 ); } // write checkpoints SphOffset_t uOff = m_bKeywordDict ? 0 : wrDocs.GetPos() - uLastDocpos; // FIXME!!! 
don't write to wrDict if iWords==0 // however plain index becomes m_bIsEmpty and full scan does not work there // we'll get partly working RT ( RAM chunk works and disk chunks give empty result set ) wrDict.ZipInt ( 0 ); // indicate checkpoint wrDict.ZipOffset ( uOff ); // store last doclist length SphOffset_t iCheckpointsPosition = wrDict.GetPos(); if ( m_bKeywordDict ) { const char * pCheckpoints = (const char *)dKeywordCheckpoints.Begin(); ARRAY_FOREACH ( i, dCheckpoints ) { const char * pPacked = pCheckpoints + dCheckpoints[i].m_uWord; int iLen = *pPacked; assert ( iLen && (int)dCheckpoints[i].m_uWord+1+iLen<=dKeywordCheckpoints.GetLength() ); wrDict.PutDword ( iLen ); wrDict.PutBytes ( pPacked+1, iLen ); wrDict.PutOffset ( dCheckpoints[i].m_uOffset ); } } else { ARRAY_FOREACH ( i, dCheckpoints ) { wrDict.PutOffset ( dCheckpoints[i].m_uWord ); wrDict.PutOffset ( dCheckpoints[i].m_uOffset ); } } //////////////////// // write attributes //////////////////// // the new, template-param aligned iStride instead of index-wide int iStride = DWSIZEOF(DOCID) + m_tSchema.GetRowSize(); CSphVector*> pRowIterators ( m_pSegments.GetLength() ); ARRAY_FOREACH ( i, m_pSegments ) pRowIterators[i] = new RtRowIterator_T ( m_pSegments[i], iStride, false, NULL ); CSphVector pRows ( m_pSegments.GetLength() ); ARRAY_FOREACH ( i, pRowIterators ) pRows[i] = pRowIterators[i]->GetNextAliveRow(); // prepare to build min-max index for attributes too int iTotalDocs = 0; ARRAY_FOREACH ( i, m_pSegments ) iTotalDocs += m_pSegments[i]->m_iAliveRows; AttrIndexBuilder_t tMinMaxBuilder ( m_tSchema ); CSphVector dMinMaxBuffer ( tMinMaxBuilder.GetExpectedSize ( iTotalDocs ) ); tMinMaxBuilder.Prepare ( dMinMaxBuffer.Begin(), dMinMaxBuffer.Begin() + dMinMaxBuffer.GetLength() ); sName.SetSprintf ( "%s.sps", sFilename ); CSphWriter tStrWriter; tStrWriter.OpenFile ( sName.cstr(), sError ); tStrWriter.PutByte ( 0 ); // dummy byte, to reserve magic zero offset sName.SetSprintf ( "%s.spm", sFilename ); CSphWriter tMvaWriter; tMvaWriter.OpenFile ( sName.cstr(), sError ); tMvaWriter.PutDword ( 0 ); // dummy dword, to reserve magic zero offset CSphRowitem * pFixedRow = new CSphRowitem[iStride]; #ifndef NDEBUG int iStoredDocs = 0; #endif StorageStringWriter_t tStorageString ( m_tSchema, tStrWriter ); StorageMvaWriter_t tStorageMva ( m_tSchema, tMvaWriter ); for ( ;; ) { // find min row int iMinRow = -1; ARRAY_FOREACH ( i, pRows ) if ( pRows[i] ) if ( iMinRow<0 || DOCINFO2ID_T ( pRows[i] ) < DOCINFO2ID_T ( pRows[iMinRow] ) ) iMinRow = i; if ( iMinRow<0 ) break; #ifndef NDEBUG // verify that it's unique int iDupes = 0; ARRAY_FOREACH ( i, pRows ) if ( pRows[i] ) if ( DOCINFO2ID_T ( pRows[i] )==DOCINFO2ID_T ( pRows[iMinRow] ) ) iDupes++; assert ( iDupes==1 ); #endif const CSphRowitem * pRow = pRows[iMinRow]; // strings storage for stored row assert ( iMinRow ( pSegment->m_dStrings, m_tSchema, pRow ); #endif // collect min-max data tMinMaxBuilder.Collect ( pRow, pSegment->m_dMvas.Begin(), pSegment->m_dMvas.GetLength(), sError, false ); if ( pSegment->m_dStrings.GetLength()>1 || pSegment->m_dMvas.GetLength()>1 ) // should be more then dummy zero elements { // copy row content as we'll fix up its attrs ( string offset for now ) memcpy ( pFixedRow, pRow, iStride*sizeof(CSphRowitem) ); pRow = pFixedRow; CopyFixupStorageAttrs ( pSegment->m_dStrings, tStorageString, pFixedRow ); CopyFixupStorageAttrs ( pSegment->m_dMvas, tStorageMva, pFixedRow ); } // emit it wrRows.PutBytes ( pRow, iStride*sizeof(CSphRowitem) ); // fast forward pRows[iMinRow] = 
pRowIterators[iMinRow]->GetNextAliveRow(); #ifndef NDEBUG iStoredDocs++; #endif } SafeDeleteArray ( pFixedRow ); assert ( iStoredDocs==iTotalDocs ); tMinMaxBuilder.FinishCollect ( false ); if ( tMinMaxBuilder.GetActualSize() ) wrRows.PutBytes ( dMinMaxBuffer.Begin(), sizeof(DWORD) * tMinMaxBuilder.GetActualSize() ); tMvaWriter.CloseFile(); tStrWriter.CloseFile (); // write dummy kill-list files CSphWriter wrDummy; // dump killlist sName.SetSprintf ( "%s.spk", sFilename ); wrDummy.OpenFile ( sName.cstr(), sError ); m_tKlist.Flush(); m_tKlist.KillListLock(); DWORD uKlistSize = m_tKlist.GetKillListSize(); if ( uKlistSize ) wrDummy.PutBytes ( m_tKlist.GetKillList(), uKlistSize*sizeof ( SphAttr_t ) ); m_tKlist.Reset(); m_tKlist.KillListUnlock(); wrDummy.CloseFile (); // header SaveDiskHeader ( sFilename, dCheckpoints.GetLength(), iCheckpointsPosition, uKlistSize, iTotalDocs*iStride, m_bId32to64 ); // cleanup ARRAY_FOREACH ( i, pWordReaders ) SafeDelete ( pWordReaders[i] ); ARRAY_FOREACH ( i, pDocReaders ) SafeDelete ( pDocReaders[i] ); ARRAY_FOREACH ( i, pRowIterators ) SafeDelete ( pRowIterators[i] ); // done wrHits.CloseFile (); wrDocs.CloseFile (); wrDict.CloseFile (); wrRows.CloseFile (); } void RtIndex_t::SaveDiskData ( const char * sFilename ) const { if ( m_bId32to64 ) return SaveDiskDataImpl (sFilename); else return SaveDiskDataImpl (sFilename); } void RtIndex_t::SaveDiskHeader ( const char * sFilename, int iCheckpoints, SphOffset_t iCheckpointsPosition, DWORD uKillListSize, DWORD uMinMaxSize, bool bForceID32 ) const { static const DWORD INDEX_MAGIC_HEADER = 0x58485053; ///< my magic 'SPHX' header static const DWORD INDEX_FORMAT_VERSION = 26; ///< my format version CSphWriter tWriter; CSphString sName, sError; sName.SetSprintf ( "%s.sph", sFilename ); tWriter.OpenFile ( sName.cstr(), sError ); // format tWriter.PutDword ( INDEX_MAGIC_HEADER ); tWriter.PutDword ( INDEX_FORMAT_VERSION ); if ( bForceID32 ) tWriter.PutDword ( 0 ); // use-32bit else tWriter.PutDword ( USE_64BIT ); // use-64bit tWriter.PutDword ( SPH_DOCINFO_EXTERN ); // schema WriteSchema ( tWriter, m_tSchema ); // min docid tWriter.PutOffset ( 0 ); // wordlist checkpoints tWriter.PutOffset ( iCheckpointsPosition ); tWriter.PutDword ( iCheckpoints ); // stats tWriter.PutDword ( m_tStats.m_iTotalDocuments ); tWriter.PutOffset ( m_tStats.m_iTotalBytes ); // index settings tWriter.PutDword ( m_tSettings.m_iMinPrefixLen ); tWriter.PutDword ( m_tSettings.m_iMinInfixLen ); tWriter.PutByte ( m_tSettings.m_bHtmlStrip ? 1 : 0 ); tWriter.PutString ( m_tSettings.m_sHtmlIndexAttrs.cstr () ); tWriter.PutString ( m_tSettings.m_sHtmlRemoveElements.cstr () ); tWriter.PutByte ( m_tSettings.m_bIndexExactWords ? 
1 : 0 ); tWriter.PutDword ( m_tSettings.m_eHitless ); tWriter.PutDword ( SPH_HIT_FORMAT_PLAIN ); tWriter.PutByte ( 0 ); // m_bIndexSP, v.21+ tWriter.PutString ( CSphString() ); // m_sZonePrefix, v.22+ tWriter.PutDword ( 0 ); // m_iBoundaryStep, v.23+ tWriter.PutDword ( 1 ); // m_iStopwordStep, v.23+ // tokenizer SaveTokenizerSettings ( tWriter, m_pTokenizer ); // dictionary SaveDictionarySettings ( tWriter, m_pDict, m_bKeywordDict ); // kill-list size tWriter.PutDword ( uKillListSize ); // min-max count tWriter.PutDword ( uMinMaxSize ); // done tWriter.CloseFile (); } #if USE_WINDOWS #undef rename int rename ( const char * sOld, const char * sNew ) { if ( MoveFileEx ( sOld, sNew, MOVEFILE_REPLACE_EXISTING ) ) return 0; errno = GetLastError(); return -1; } #endif void RtIndex_t::SaveMeta ( int iDiskChunks ) { // sanity check if ( m_iLockFD<0 ) return; // write new meta CSphString sMeta, sMetaNew; sMeta.SetSprintf ( "%s.meta", m_sPath.cstr() ); sMetaNew.SetSprintf ( "%s.meta.new", m_sPath.cstr() ); CSphString sError; CSphWriter wrMeta; if ( !wrMeta.OpenFile ( sMetaNew, sError ) ) sphDie ( "failed to serialize meta: %s", sError.cstr() ); // !COMMIT handle this gracefully wrMeta.PutDword ( META_HEADER_MAGIC ); wrMeta.PutDword ( META_VERSION ); wrMeta.PutDword ( iDiskChunks ); wrMeta.PutDword ( m_tStats.m_iTotalDocuments ); wrMeta.PutOffset ( m_tStats.m_iTotalBytes ); // FIXME? need PutQword ideally wrMeta.PutOffset ( m_iTID ); // meta v.4, save disk index format and settings, too wrMeta.PutDword ( INDEX_FORMAT_VERSION ); WriteSchema ( wrMeta, m_tSchema ); SaveIndexSettings ( wrMeta, m_tSettings ); SaveTokenizerSettings ( wrMeta, m_pTokenizer ); SaveDictionarySettings ( wrMeta, m_pDict, m_bKeywordDict ); // meta v.5 wrMeta.PutDword ( m_iWordsCheckpoint ); wrMeta.CloseFile(); // FIXME? handle errors? // rename if ( ::rename ( sMetaNew.cstr(), sMeta.cstr() ) ) sphDie ( "failed to rename meta (src=%s, dst=%s, errno=%d, error=%s)", sMetaNew.cstr(), sMeta.cstr(), errno, strerror(errno) ); // !COMMIT handle this gracefully } void RtIndex_t::SaveDiskChunk () { if ( !m_pSegments.GetLength() ) return; MEMORY ( SPH_MEM_IDX_RT ); // dump it CSphString sNewChunk; sNewChunk.SetSprintf ( "%s.%d", m_sPath.cstr(), m_pDiskChunks.GetLength() ); SaveDiskData ( sNewChunk.cstr() ); // bring new disk chunk online CSphIndex * pDiskChunk = LoadDiskChunk ( m_pDiskChunks.GetLength() ); assert ( pDiskChunk ); // save updated meta SaveMeta ( m_pDiskChunks.GetLength()+1 ); m_iSavedTID = m_iTID; m_iSavedRam = 0; m_tmSaved = sphMicroTimer(); // FIXME! 
add binlog cleanup here once we have binlogs // get exclusive lock again, gotta reset RAM chunk now Verify ( m_tRwlock.WriteLock() ); ARRAY_FOREACH ( i, m_pSegments ) SafeDelete ( m_pSegments[i] ); m_pSegments.Reset(); m_pDiskChunks.Add ( pDiskChunk ); pDiskChunk->SetEnableStar ( m_bEnableStar ); Verify ( m_tRwlock.Unlock() ); } CSphIndex * RtIndex_t::LoadDiskChunk ( int iChunk ) { MEMORY ( SPH_MEM_IDX_DISK ); CSphString sChunk, sError, sWarning; sChunk.SetSprintf ( "%s.%d", m_sPath.cstr(), iChunk ); // !COMMIT handle errors gracefully instead of dying CSphIndex * pDiskChunk = sphCreateIndexPhrase ( m_sIndexName.cstr(), sChunk.cstr() ); if ( !pDiskChunk ) sphDie ( "disk chunk %s: alloc failed", sChunk.cstr() ); pDiskChunk->SetWordlistPreload ( m_bPreloadWordlist ); pDiskChunk->m_iExpansionLimit = m_iExpansionLimit; if ( !pDiskChunk->Prealloc ( false, m_bPathStripped, sWarning ) ) sphDie ( "disk chunk %s: prealloc failed: %s", sChunk.cstr(), pDiskChunk->GetLastError().cstr() ); if ( !pDiskChunk->Preread() ) sphDie ( "disk chunk %s: preread failed: %s", sChunk.cstr(), pDiskChunk->GetLastError().cstr() ); return pDiskChunk; } bool RtIndex_t::Prealloc ( bool, bool bStripPath, CSphString & ) { MEMORY ( SPH_MEM_IDX_RT ); // locking uber alles // in RT backend case, we just must be multi-threaded // so we simply lock here, and ignore Lock/Unlock hassle caused by forks assert ( m_iLockFD<0 ); CSphString sLock; sLock.SetSprintf ( "%s.lock", m_sPath.cstr() ); m_iLockFD = ::open ( sLock.cstr(), SPH_O_NEW, 0644 ); if ( m_iLockFD<0 ) { m_sLastError.SetSprintf ( "failed to open %s: %s", sLock.cstr(), strerror(errno) ); return false; } if ( !sphLockEx ( m_iLockFD, false ) ) { m_sLastError.SetSprintf ( "failed to lock %s: %s", sLock.cstr(), strerror(errno) ); ::close ( m_iLockFD ); return false; } ///////////// // load meta ///////////// // check if we have a meta file (kinda-header) CSphString sMeta; sMeta.SetSprintf ( "%s.meta", m_sPath.cstr() ); // no readable meta? 
no disk part yet if ( !sphIsReadable ( sMeta.cstr() ) ) return true; // opened and locked, lets read CSphAutoreader rdMeta; if ( !rdMeta.Open ( sMeta, m_sLastError ) ) return false; if ( rdMeta.GetDword()!=META_HEADER_MAGIC ) { m_sLastError.SetSprintf ( "invalid meta file %s", sMeta.cstr() ); return false; } DWORD uVersion = rdMeta.GetDword(); if ( uVersion==0 || uVersion>META_VERSION ) { m_sLastError.SetSprintf ( "%s is v.%d, binary is v.%d", sMeta.cstr(), uVersion, META_VERSION ); return false; } const int iDiskChunks = rdMeta.GetDword(); m_tStats.m_iTotalDocuments = rdMeta.GetDword(); m_tStats.m_iTotalBytes = rdMeta.GetOffset(); if ( uVersion>=2 ) m_iTID = rdMeta.GetOffset(); // tricky bit // we started saving settings into .meta from v.4 and up only // and those reuse disk format version, aka INDEX_FORMAT_VERSION // anyway, starting v.4, serialized settings take precedence over config // so different chunks can't have different settings any more if ( uVersion>=4 ) { CSphTokenizerSettings tTokenizerSettings; CSphDictSettings tDictSettings; CSphString sWarning; // load them settings DWORD uSettingsVer = rdMeta.GetDword(); ReadSchema ( rdMeta, m_tSchema, uSettingsVer, false ); LoadIndexSettings ( m_tSettings, rdMeta, uSettingsVer ); LoadTokenizerSettings ( rdMeta, tTokenizerSettings, uSettingsVer, sWarning ); LoadDictionarySettings ( rdMeta, tDictSettings, uSettingsVer, sWarning ); // meta v.5 dictionary if ( uVersion>=5 ) m_bKeywordDict = tDictSettings.m_bWordDict; // fixup them settings if ( m_bId32to64 ) tDictSettings.m_bCrc32 = true; if ( bStripPath ) { StripPath ( tTokenizerSettings.m_sSynonymsFile ); StripPath ( tDictSettings.m_sStopwords ); StripPath ( tDictSettings.m_sWordforms ); } // recreate tokenizer SafeDelete ( m_pTokenizer ); m_pTokenizer = ISphTokenizer::Create ( tTokenizerSettings, m_sLastError ); if ( !m_pTokenizer ) return false; // !COMMIT implement support for multiforms, eh? // ISphTokenizer * pTokenFilter = ISphTokenizer::CreateTokenFilter ( pTokenizer, pDict->GetMultiWordforms () ); // SetTokenizer ( pTokenFilter ? pTokenFilter : pTokenizer ); // recreate dictionary SafeDelete ( m_pDict ); m_pDict = sphCreateDictionaryCRC ( tDictSettings, m_pTokenizer, m_sLastError, m_sIndexName.cstr() ); if ( !m_pDict ) return false; // update schema m_iStride = DOCINFO_IDSIZE + m_tSchema.GetRowSize(); } // meta v.5 checkpoint freq m_iWordsCheckpoint = ( uVersion<5 ? SPH_RT_WORDS_PER_CHECKPOINT_v3 : SPH_RT_WORDS_PER_CHECKPOINT_v5 ); if ( uVersion>=5 ) { m_iWordsCheckpoint = rdMeta.GetDword(); } /////////////// // load chunks /////////////// m_bPathStripped = bStripPath; // load disk chunks, if any for ( int iChunk=0; iChunkSetEnableStar ( m_bEnableStar ); // tricky bit // outgoing match schema on disk chunk should be identical to our internal (!) 
schema
		if ( !m_tSchema.CompareTo ( m_pDiskChunks.Last()->GetMatchSchema(), m_sLastError ) )
			return false;
	}

	// load ram chunk
	bool bRamLoaded = LoadRamChunk ( uVersion );

	// set up values for on timer save
	m_iSavedTID = m_iTID;
	m_iSavedRam = GetUsedRam();
	m_tmSaved = sphMicroTimer();

	return bRamLoaded;
}


bool RtIndex_t::Preread ()
{
	// !COMMIT move disk chunks prereading here
	return true;
}


template < typename T > struct IsPodType { enum { Value = false }; };
// NOTE: the concrete types of the six POD specializations were stripped by the
// text extraction of this dump; the ones below are an assumption, chosen to
// cover the element types serialized by SaveRamChunk()/LoadRamChunk() below.
template<> struct IsPodType<char> { enum { Value = true }; };
template<> struct IsPodType<BYTE> { enum { Value = true }; };
template<> struct IsPodType<int> { enum { Value = true }; };
template<> struct IsPodType<DWORD> { enum { Value = true }; };
template<> struct IsPodType<uint64_t> { enum { Value = true }; };
template<> struct IsPodType<float> { enum { Value = true }; };


template < typename T, typename P >
static void SaveVector ( CSphWriter & tWriter, const CSphVector < T, P > & tVector )
{
	STATIC_ASSERT ( IsPodType<T>::Value, NON_POD_VECTORS_ARE_UNSERIALIZABLE );
	tWriter.PutDword ( tVector.GetLength() );
	if ( tVector.GetLength() )
		tWriter.PutBytes ( tVector.Begin(), tVector.GetLength()*sizeof(T) );
}


template < typename T, typename P >
static void LoadVector ( CSphReader & tReader, CSphVector < T, P > & tVector )
{
	STATIC_ASSERT ( IsPodType<T>::Value, NON_POD_VECTORS_ARE_UNSERIALIZABLE );
	tVector.Resize ( tReader.GetDword() ); // FIXME? sanitize?
	if ( tVector.GetLength() )
		tReader.GetBytes ( tVector.Begin(), tVector.GetLength()*sizeof(T) );
}


template < typename T, typename P >
static void SaveVector ( BinlogWriter_c & tWriter, const CSphVector < T, P > & tVector )
{
	STATIC_ASSERT ( IsPodType<T>::Value, NON_POD_VECTORS_ARE_UNSERIALIZABLE );
	tWriter.ZipValue ( tVector.GetLength() );
	if ( tVector.GetLength() )
		tWriter.PutBytes ( tVector.Begin(), tVector.GetLength()*sizeof(T) );
}


template < typename T, typename P >
static bool LoadVector ( BinlogReader_c & tReader, CSphVector < T, P > & tVector )
{
	STATIC_ASSERT ( IsPodType<T>::Value, NON_POD_VECTORS_ARE_UNSERIALIZABLE );
	tVector.Resize ( (int) tReader.UnzipValue() ); // FIXME? sanitize?
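	// The FIXME above is about trusting a length that was read back from the
	// binlog. A hedged sketch of what sanitizing could look like at this spot,
	// where iMaxSaneLen is a hypothetical upper bound, not an existing constant:
#if 0
	int iLen = (int) tReader.UnzipValue();
	if ( iLen<0 || iLen>iMaxSaneLen )
		return false; // refuse obviously corrupt counts instead of resizing
	tVector.Resize ( iLen );
#endif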
	if ( tVector.GetLength() )
		tReader.GetBytes ( tVector.Begin(), tVector.GetLength()*sizeof(T) );
	return !tReader.GetErrorFlag();
}


bool RtIndex_t::SaveRamChunk ()
{
	MEMORY ( SPH_MEM_IDX_RT );

	CSphString sChunk, sNewChunk;
	sChunk.SetSprintf ( "%s.ram", m_sPath.cstr() );
	sNewChunk.SetSprintf ( "%s.ram.new", m_sPath.cstr() );
	m_tKlist.SaveToFile ( m_sPath.cstr() );

	CSphWriter wrChunk;
	if ( !wrChunk.OpenFile ( sNewChunk, m_sLastError ) )
		return false;

	wrChunk.PutDword ( USE_64BIT );
	wrChunk.PutDword ( RtSegment_t::m_iSegments );
	wrChunk.PutDword ( m_pSegments.GetLength() );

	// no locks here, because it's only intended to be called from dtor
	ARRAY_FOREACH ( iSeg, m_pSegments )
	{
		const RtSegment_t * pSeg = m_pSegments[iSeg];
		wrChunk.PutDword ( pSeg->m_iTag );
		SaveVector ( wrChunk, pSeg->m_dWords );
		if ( m_bKeywordDict )
		{
			SaveVector ( wrChunk, pSeg->m_dKeywordCheckpoints );
		}

		const char * pCheckpoints = (const char *)pSeg->m_dKeywordCheckpoints.Begin();
		wrChunk.PutDword ( pSeg->m_dWordCheckpoints.GetLength() );
		ARRAY_FOREACH ( i, pSeg->m_dWordCheckpoints )
		{
			wrChunk.PutOffset ( pSeg->m_dWordCheckpoints[i].m_iOffset );
			if ( m_bKeywordDict )
			{
				wrChunk.PutOffset ( pSeg->m_dWordCheckpoints[i].m_sWord-pCheckpoints );
			} else
			{
				wrChunk.PutOffset ( pSeg->m_dWordCheckpoints[i].m_iWordID );
			}
		}
		SaveVector ( wrChunk, pSeg->m_dDocs );
		SaveVector ( wrChunk, pSeg->m_dHits );
		wrChunk.PutDword ( pSeg->m_iRows );
		wrChunk.PutDword ( pSeg->m_iAliveRows );
		SaveVector ( wrChunk, pSeg->m_dRows );
		SaveVector ( wrChunk, pSeg->m_dKlist );
		SaveVector ( wrChunk, pSeg->m_dStrings );
		SaveVector ( wrChunk, pSeg->m_dMvas );
	}

	wrChunk.CloseFile();
	if ( wrChunk.IsError() )
		return false;

	// rename
	if ( ::rename ( sNewChunk.cstr(), sChunk.cstr() ) )
		sphDie ( "failed to rename ram chunk (src=%s, dst=%s, errno=%d, error=%s)",
			sNewChunk.cstr(), sChunk.cstr(), errno, strerror(errno) ); // !COMMIT handle this gracefully

	return true;
}


bool RtIndex_t::LoadRamChunk ( DWORD uVersion )
{
	MEMORY ( SPH_MEM_IDX_RT );

	CSphString sChunk;
	sChunk.SetSprintf ( "%s.ram", m_sPath.cstr() );

	if ( !sphIsReadable ( sChunk.cstr(), &m_sLastError ) )
		return true;

	m_tKlist.LoadFromFile ( m_sPath.cstr() );

	CSphAutoreader rdChunk;
	if ( !rdChunk.Open ( sChunk, m_sLastError ) )
		return false;

	bool bId64 = ( rdChunk.GetDword()!=0 );
	if ( bId64!=USE_64BIT )
	{
#if USE_64BIT
// #if 0
		// TODO: may be do this param conditional and push it into the config?
		m_bId32to64 = true;
#else
		m_sLastError.SetSprintf ( "ram chunk dumped by %s binary; this binary is %s",
			bId64 ? "id64" : "id32",
			USE_64BIT ? "id64" : "id32" );
		return false;
#endif
	}

	int iSegmentSeq = rdChunk.GetDword();
	m_pSegments.Resize ( rdChunk.GetDword() ); // FIXME? sanitize
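	// the per-segment records read by the loop below mirror SaveRamChunk() above:
	// tag, m_dWords, optional m_dKeywordCheckpoints (keyword dict only), the word
	// checkpoint array, m_dDocs, m_dHits, the row counters, m_dRows, m_dKlist,
	// m_dStrings, and (format v.3+) m_dMvas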
	ARRAY_FOREACH ( iSeg, m_pSegments )
	{
		RtSegment_t * pSeg = new RtSegment_t ();
		m_pSegments[iSeg] = pSeg;

		pSeg->m_iTag = rdChunk.GetDword ();
		LoadVector ( rdChunk, pSeg->m_dWords );
		if ( uVersion>=5 && m_bKeywordDict )
		{
			LoadVector ( rdChunk, pSeg->m_dKeywordCheckpoints );
		}

		const char * pCheckpoints = (const char *)pSeg->m_dKeywordCheckpoints.Begin();
		pSeg->m_dWordCheckpoints.Resize ( rdChunk.GetDword() );
		ARRAY_FOREACH ( i, pSeg->m_dWordCheckpoints )
		{
			pSeg->m_dWordCheckpoints[i].m_iOffset = (int)rdChunk.GetOffset();
			SphOffset_t uOff = rdChunk.GetOffset();
			if ( m_bKeywordDict )
			{
				pSeg->m_dWordCheckpoints[i].m_sWord = pCheckpoints + uOff;
			} else
			{
				pSeg->m_dWordCheckpoints[i].m_iWordID = (SphWordID_t)uOff;
			}
		}
		LoadVector ( rdChunk, pSeg->m_dDocs );
		LoadVector ( rdChunk, pSeg->m_dHits );
		pSeg->m_iRows = rdChunk.GetDword();
		pSeg->m_iAliveRows = rdChunk.GetDword();
		LoadVector ( rdChunk, pSeg->m_dRows );
		LoadVector ( rdChunk, pSeg->m_dKlist );
		LoadVector ( rdChunk, pSeg->m_dStrings );
		if ( uVersion>=3 )
			LoadVector ( rdChunk, pSeg->m_dMvas );
	}

	RtSegment_t::m_iSegments = iSegmentSeq;
	if ( rdChunk.GetErrorFlag() )
		return false;
	return true;
}


void RtIndex_t::PostSetup()
{
	if ( m_bId32to64 )
	{
		SaveDiskChunk();
		// since the RAM chunk is just stored as id32, we are no more in compat mode
		m_bId32to64 = false;
	}
}


#define LOC_FAIL(_args) \
	if ( ++iFails<=FAILS_THRESH ) \
	{ \
		fprintf ( fp, "FAILED, " ); \
		fprintf _args; \
		fprintf ( fp, "\n" ); \
		iFailsPrinted++; \
		\
		if ( iFails==FAILS_THRESH ) \
			fprintf ( fp, "(threshold reached; suppressing further output)\n" ); \
	}

int RtIndex_t::DebugCheck ( FILE * fp )
{
	const int FAILS_THRESH = 100;
	int iFails = 0;
	int iFailsPrinted = 0;
	int iFailsPlain = 0;

	int64_t tmCheck = sphMicroTimer();

	ARRAY_FOREACH ( i, m_pSegments )
	{
		SphWordID_t uPrevWordID = 0;
		RtWordReader_t tSeg ( m_pSegments[i], false, m_iWordsCheckpoint );
		const RtWord_t * pWord = NULL;
		int iWord = 0;

		while ( ( pWord = tSeg.UnzipWord() )!=NULL )
		{
			if ( pWord->m_uWordID<=uPrevWordID )
			{
				LOC_FAIL(( fp, "wordid decreased (segment=%d, word=%d, wordid="UINT64_FMT", previd="UINT64_FMT")",
					i, iWord, (uint64_t)pWord->m_uWordID, (uint64_t)uPrevWordID ));
			}
			uPrevWordID = pWord->m_uWordID;
			iWord++;
		}
	}

	ARRAY_FOREACH ( i, m_pDiskChunks )
	{
		fprintf ( fp, "checking disk chunk %d(%d)...\n", i, m_pDiskChunks.GetLength() );
		iFailsPlain += m_pDiskChunks[i]->DebugCheck ( fp );
	}

	tmCheck = sphMicroTimer() - tmCheck;
	if ( ( iFails+iFailsPlain )==0 )
		fprintf ( fp, "check passed" );
	else if ( iFails!=iFailsPrinted )
		fprintf ( fp, "check FAILED, %d of %d failures reported", iFailsPrinted, iFails+iFailsPlain );
	else
		fprintf ( fp, "check FAILED, %d failures reported", iFails+iFailsPlain );
	fprintf ( fp, ", %d.%d sec elapsed\n", (int)(tmCheck/1000000), (int)((tmCheck/100000)%10) );

	return iFails + iFailsPlain;
}

void RtIndex_t::SetEnableStar ( bool bEnableStar )
{
	m_bEnableStar = bEnableStar;
	ARRAY_FOREACH ( i, m_pDiskChunks )
		m_pDiskChunks[i]->SetEnableStar ( bEnableStar );
}

//////////////////////////////////////////////////////////////////////////
// SEARCHING
//////////////////////////////////////////////////////////////////////////

struct RtQword_t : public ISphQword
{
	friend struct RtIndex_t;
	friend struct RtQwordSetup_t;

protected:
	RtDocReader_t *		m_pDocReader;
	CSphMatch			m_tMatch;

	DWORD				m_uNextHit;
	RtHitReader2_t		m_tHitReader;

	RtSegment_t *		m_pSeg;

public:
	RtQword_t ()
		: m_pDocReader ( NULL )
		, m_uNextHit ( 0 )
		, m_pSeg ( NULL )
	{
		m_tMatch.Reset ( 0 );
	}

	virtual ~RtQword_t ()
	{
		SafeDelete ( m_pDocReader );
	}
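	// Note on the doc/hit handoff below: GetNextDoc() packs the per-document
	// hit count into the upper 32 bits of m_iHitlistPos and either the single
	// embedded hit or the hitlist offset into the lower 32 bits; SeekHitlist()
	// then unpacks the same value. Roughly (sketch only, mirroring the code below):
#if 0
	SphOffset_t uOff = ( uint64_t(pDoc->m_uHits)<<32 ) + pDoc->m_uHit;
	int iHits = (int)( uOff>>32 );   // hit count for this document
	DWORD uData = DWORD(uOff);       // embedded hit (iHits==1) or hitlist offset
#endif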
virtual const CSphMatch & GetNextDoc ( DWORD * ) { for ( ;; ) { const RtDoc_t * pDoc = m_pDocReader->UnzipDoc(); if ( !pDoc ) { m_tMatch.m_iDocID = 0; return m_tMatch; } if ( m_pSeg->m_dKlist.BinarySearch ( pDoc->m_uDocID ) ) continue; m_tMatch.m_iDocID = pDoc->m_uDocID; m_dQwordFields.Assign32 ( pDoc->m_uDocFields ); m_uMatchHits = pDoc->m_uHits; m_iHitlistPos = (uint64_t(pDoc->m_uHits)<<32) + pDoc->m_uHit; m_bAllFieldsKnown = false; return m_tMatch; } } virtual void SeekHitlist ( SphOffset_t uOff ) { int iHits = (int)(uOff>>32); if ( iHits==1 ) { m_uNextHit = DWORD(uOff); } else { m_uNextHit = 0; m_tHitReader.Seek ( DWORD(uOff), iHits ); } } virtual Hitpos_t GetNextHit () { if ( m_uNextHit==0 ) { return Hitpos_t ( m_tHitReader.UnzipHit() ); } else if ( m_uNextHit==0xffffffffUL ) { return EMPTY_HIT; } else { DWORD uRes = m_uNextHit; m_uNextHit = 0xffffffffUL; return Hitpos_t ( uRes ); } } }; struct RtQwordSetup_t : ISphQwordSetup { RtSegment_t * m_pSeg; virtual ISphQword * QwordSpawn ( const XQKeyword_t & ) const; virtual bool QwordSetup ( ISphQword * pQword ) const; }; ISphQword * RtQwordSetup_t::QwordSpawn ( const XQKeyword_t & ) const { return new RtQword_t (); } bool RtQwordSetup_t::QwordSetup ( ISphQword * pQword ) const { RtQword_t * pMyWord = dynamic_cast ( pQword ); if ( !pMyWord ) return false; const RtIndex_t * pIndex = dynamic_cast< const RtIndex_t * > ( m_pIndex ); if ( !pIndex ) return false; return pIndex->RtQwordSetup ( pMyWord, m_pSeg ); } bool RtIndex_t::EarlyReject ( CSphQueryContext * pCtx, CSphMatch & tMatch ) const { // might be needed even when we do not have a filter! if ( pCtx->m_bLookupFilter || pCtx->m_bLookupSort ) CopyDocinfo ( tMatch, FindDocinfo ( (RtSegment_t*)pCtx->m_pIndexData, tMatch.m_iDocID ) ); pCtx->CalcFilter ( tMatch ); return pCtx->m_pFilter ? !pCtx->m_pFilter->Eval ( tMatch ) : false; } void RtIndex_t::CopyDocinfo ( CSphMatch & tMatch, const DWORD * pFound ) const { if ( !pFound ) return; // setup static pointer assert ( DOCINFO2ID(pFound)==tMatch.m_iDocID ); tMatch.m_pStatic = DOCINFO2ATTRS(pFound); // FIXME? implement overrides } const CSphRowitem * RtIndex_t::FindDocinfo ( const RtSegment_t * pSeg, SphDocID_t uDocID ) const { // FIXME! 
move to CSphIndex, and implement hashing if ( pSeg->m_dRows.GetLength()==0 ) return NULL; int iStride = m_iStride; int iStart = 0; int iEnd = pSeg->m_iRows-1; assert ( iStride==( DOCINFO_IDSIZE + m_tSchema.GetRowSize() ) ); const CSphRowitem * pStorage = pSeg->m_dRows.Begin(); const CSphRowitem * pFound = NULL; if ( uDocID==DOCINFO2ID ( &pStorage [ iStart*iStride ] ) ) { pFound = &pStorage [ iStart*iStride ]; } else if ( uDocID==DOCINFO2ID ( &pStorage [ iEnd*iStride ] ) ) { pFound = &pStorage [ iEnd*iStride ]; } else { while ( iEnd-iStart>1 ) { // check if nothing found if ( uDocID < DOCINFO2ID ( &pStorage [ iStart*iStride ] ) || uDocID > DOCINFO2ID ( &pStorage [ iEnd*iStride ] ) ) break; assert ( uDocID > DOCINFO2ID ( &pStorage [ iStart*iStride ] ) ); assert ( uDocID < DOCINFO2ID ( &pStorage [ iEnd*iStride ] ) ); int iMid = iStart + (iEnd-iStart)/2; if ( uDocID==DOCINFO2ID ( &pStorage [ iMid*iStride ] ) ) { pFound = &pStorage [ iMid*iStride ]; break; } if ( uDocIDm_iWordID; const char * sWord = pQword->m_sDictWord.cstr(); int iWordLen = pQword->m_sDictWord.Length(); if ( bWordDict && iWordLen && sWord[iWordLen-1]=='*' ) // crc star search emulation { iWordLen = iWordLen-1; } RtWordReader_t tReader ( pCurSeg, bWordDict, iWordsCheckpoint ); if ( pCurSeg->m_dWordCheckpoints.GetLength() ) { const RtWordCheckpoint_t * pCp = sphSearchCheckpoint ( sWord, iWordLen, uWordID, false, bWordDict , pCurSeg->m_dWordCheckpoints.Begin(), &pCurSeg->m_dWordCheckpoints.Last() ); const BYTE * pWords = pCurSeg->m_dWords.Begin(); if ( !pCp ) { tReader.m_pMax = pWords + pCurSeg->m_dWordCheckpoints.Begin()->m_iOffset; } else { tReader.m_pCur = pWords + pCp->m_iOffset; // if next checkpoint exists setup reader range if ( ( pCp+1 )<= ( &pCurSeg->m_dWordCheckpoints.Last() ) ) tReader.m_pMax = pWords + pCp[1].m_iOffset; } } // find the word between checkpoints const RtWord_t * pWord = NULL; while ( ( pWord = tReader.UnzipWord() )!=NULL ) { int iCmp = 0; if ( bWordDict ) { iCmp = sphDictCmpStrictly ( (const char *)pWord->m_sWord+1, pWord->m_sWord[0], sWord, iWordLen ); } else { if ( pWord->m_uWordIDm_uWordID>uWordID ) iCmp = 1; } if ( iCmp==0 ) { pQword->m_iDocs += pWord->m_uDocs; pQword->m_iHits += pWord->m_uHits; if ( bSetup ) { SafeDelete ( pQword->m_pDocReader ); pQword->m_pDocReader = new RtDocReader_t ( pCurSeg, *pWord ); pQword->m_tHitReader.m_pBase = NULL; if ( pCurSeg->m_dHits.GetLength() ) pQword->m_tHitReader.m_pBase = pCurSeg->m_dHits.Begin(); pQword->m_pSeg = pCurSeg; } return true; } else if ( iCmp>0 ) return false; } return false; } void RtIndex_t::GetPrefixedWords ( const char * sWord, int iWordLen, CSphVector & dPrefixedWords, BYTE * , int ) const { SmallStringHash_T hPrefixedWords; ARRAY_FOREACH ( i, m_pSegments ) { RtSegment_t * pCurSeg = m_pSegments[i]; RtWordReader_t tReader ( pCurSeg, true, m_iWordsCheckpoint ); // find initial checkpoint or check words prior to 1st checkpoint if ( pCurSeg->m_dWordCheckpoints.GetLength() ) { const RtWordCheckpoint_t * pCurCheckpoint = sphSearchCheckpoint ( sWord, iWordLen, 0, true, true , pCurSeg->m_dWordCheckpoints.Begin(), &pCurSeg->m_dWordCheckpoints.Last() ); if ( pCurCheckpoint ) { // there could be valid data prior 1st checkpoint that should be unpacked and checked int iCheckpointNameLen = strlen ( pCurCheckpoint->m_sWord ); if ( pCurCheckpoint!=pCurSeg->m_dWordCheckpoints.Begin() || ( sphDictCmp ( sWord, iWordLen, pCurCheckpoint->m_sWord, iCheckpointNameLen )==0 && iWordLen==iCheckpointNameLen ) ) { tReader.m_pCur = pCurSeg->m_dWords.Begin() + 
pCurCheckpoint->m_iOffset; } } } // find the word between checkpoints const RtWord_t * pWord = NULL; while ( ( pWord = tReader.UnzipWord() )!=NULL ) { int iCmp = sphDictCmp ( sWord, iWordLen, (const char *)pWord->m_sWord+1, pWord->m_sWord[0] ); if ( iCmp<0 ) { break; } else if ( iCmp==0 && iWordLen<=pWord->m_sWord[0] ) { CSphString sExpandedWord; sExpandedWord.SetBinary ( (const char *)pWord->m_sWord+1, pWord->m_sWord[0] ); int * pDocs = hPrefixedWords ( sExpandedWord ); if ( pDocs ) { *pDocs += pWord->m_uDocs; } else { hPrefixedWords.Add ( pWord->m_uDocs, sExpandedWord ); } } // FIXME!!! same case 'boxi*' matches 'box' document at plain index // but masked by a checkpoint search } } dPrefixedWords.Reserve ( dPrefixedWords.GetLength() + hPrefixedWords.GetLength() ); hPrefixedWords.IterateStart(); while ( hPrefixedWords.IterateNext() ) { CSphNamedInt & tExpanded = dPrefixedWords.Add (); tExpanded.m_sName = hPrefixedWords.IterateGetKey(); tExpanded.m_iValue = hPrefixedWords.IterateGet(); } } bool RtIndex_t::RtQwordSetup ( RtQword_t * pQword, RtSegment_t * pSeg ) const { // segment-specific setup pass if ( pSeg ) return RtQwordSetupSegment ( pQword, pSeg, true, m_bKeywordDict, m_iWordsCheckpoint ); // stat-only pass // loop all segments, gather stats, do not setup anything assert ( !pSeg ); pQword->m_iDocs = 0; pQword->m_iHits = 0; // we care about the results anyway though // because if all (!) segments miss this word, we must notify the caller, right? bool bRes = true; ARRAY_FOREACH ( i, m_pSegments ) bRes &= RtQwordSetupSegment ( pQword, m_pSegments[i], false, m_bKeywordDict, m_iWordsCheckpoint ); // sanity check assert ( !( m_pSegments.GetLength()!=0 && bRes==true && pQword->m_iDocs==0 ) ); return bRes; } static void AddKillListFilter ( CSphVector * pExtra, const SphAttr_t * pKillList, int nEntries ) { assert ( nEntries && pKillList && pExtra ); CSphFilterSettings & tFilter = pExtra->Add(); tFilter.m_bExclude = true; tFilter.m_eType = SPH_FILTER_VALUES; tFilter.m_uMinValue = pKillList[0]; tFilter.m_uMaxValue = pKillList[nEntries-1]; tFilter.m_sAttrName = "@id"; tFilter.SetExternalValues ( pKillList, nEntries ); } CSphDict * RtIndex_t::SetupExactDict ( CSphScopedPtr & tContainer, CSphDict * pPrevDict, ISphTokenizer * pTokenizer ) const { assert ( pTokenizer ); if ( !m_tSettings.m_bIndexExactWords ) return pPrevDict; tContainer = new CSphDictExact ( pPrevDict ); CSphRemapRange tStar ( '=', '=', '=' ); // FIXME? check and warn if star was already there pTokenizer->AddCaseFolding ( tStar ); return tContainer.Ptr(); } CSphDict * RtIndex_t::SetupStarDict ( CSphScopedPtr & tContainer, CSphDict * pPrevDict, ISphTokenizer * pTokenizer ) const { assert ( pTokenizer ); if ( !m_bEnableStar || !m_bKeywordDict ) return pPrevDict; tContainer = new CSphDictStarV8 ( pPrevDict, false, true ); CSphRemapRange tStar ( '*', '*', '*' ); // FIXME? check and warn if star was already there pTokenizer->AddCaseFolding ( tStar ); return tContainer.Ptr(); } // FIXME! missing MVA, index_exact_words support // FIXME? missing enable_star, legacy match modes support // FIXME? any chance to factor out common backend agnostic code? // FIXME? do we need to support pExtraFilters? 
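// --- editor's note --------------------------------------------------------
// AddKillListFilter() above turns a disk chunk's kill-list into an exclude
// filter on @id: a document must be suppressed in an older chunk whenever
// its ID appears in the kill-list of any *newer* chunk (or of the RAM chunk).
// The sketch below is NOT part of sphinxrt.cpp; it is a minimal, hypothetical
// illustration of that idea using only the standard library (the name
// IsAliveForChunk and its arguments are made up for the example).
#include <vector>
#include <algorithm>
#include <cstdint>
#include <cstddef>

// true if uDocID is not killed by any kill-list of a newer chunk;
// each kill-list is assumed to be sorted ascending, as in the real code
static bool IsAliveForChunk ( std::uint64_t uDocID,
	const std::vector< std::vector<std::uint64_t> > & dNewerKilllists )
{
	for ( std::size_t i=0; i<dNewerKilllists.size(); i++ )
		if ( std::binary_search ( dNewerKilllists[i].begin(), dNewerKilllists[i].end(), uDocID ) )
			return false; // killed: a newer chunk deleted or re-inserted this doc
	return true;
}
// usage sketch: when searching chunk i, gather the kill-lists of chunks
// i+1..N-1 plus the RAM chunk's kill-list and drop every match for which
// IsAliveForChunk() returns false -- which is exactly what the exclude
// VALUES filter on "@id" built by AddKillListFilter() achieves.
// --- end editor's note ------------------------------------------------------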
#ifndef NDEBUG bool RtIndex_t::MultiQuery ( const CSphQuery * pQuery, CSphQueryResult * pResult, int iSorters, ISphMatchSorter ** ppSorters, const CSphVector *, int iTag ) const #else bool RtIndex_t::MultiQuery ( const CSphQuery * pQuery, CSphQueryResult * pResult, int iSorters, ISphMatchSorter ** ppSorters, const CSphVector *, int ) const #endif { assert ( ppSorters ); // to avoid the checking of a ppSorters's element for NULL on every next step, just filter out all nulls right here CSphVector dSorters; dSorters.Reserve ( iSorters ); for ( int i=0; im_iQueryTime = 0; return false; } // FIXME! too early (how low can you go?) m_tRwlock.ReadLock (); assert ( pQuery ); assert ( pResult ); assert ( iTag==0 ); MEMORY ( SPH_MEM_IDX_RT_MULTY_QUERY ); // start counting pResult->m_iQueryTime = 0; int64_t tmQueryStart = sphMicroTimer(); // force ext2 mode for them // FIXME! eliminate this const breakage const_cast ( pQuery )->m_eMode = SPH_MATCH_EXTENDED2; // wrappers CSphScopedPtr pTokenizer ( m_pTokenizer->Clone ( false ) ); CSphScopedPtr tDictCloned ( NULL ); CSphDict * pDict = m_pDict; if ( pDict->HasState() ) { tDictCloned = pDict = pDict->Clone(); } CSphScopedPtr tDictStar ( NULL ); pDict = SetupStarDict ( tDictStar, pDict, pTokenizer.Ptr() ); CSphScopedPtr tDictExact ( NULL ); pDict = SetupExactDict ( tDictExact, pDict, pTokenizer.Ptr() ); // FIXME! slow disk searches could lock out concurrent writes for too long // FIXME! each result will point to its own MVA and string pools // !COMMIT need to setup disk K-list here ////////////////////// // search disk chunks ////////////////////// bool m_bKlistLocked = false; CSphVector dExtra; // first, collect all the killlists into a vector for ( int iChunk = m_pDiskChunks.GetLength()-1; iChunk>=0; iChunk-- ) { const int iOldLength = dExtra.GetLength(); if ( iChunk==m_pDiskChunks.GetLength()-1 ) { // For the topmost chunk we add the killlist from the ram-index m_tKlist.Flush(); m_tKlist.KillListLock(); if ( m_tKlist.GetKillListSize() ) { // we don't lock in vain... m_bKlistLocked = true; AddKillListFilter ( &dExtra, m_tKlist.GetKillList(), m_tKlist.GetKillListSize() ); } else m_tKlist.KillListUnlock(); } else { const CSphIndex * pDiskChunk = m_pDiskChunks[iChunk+1]; if ( pDiskChunk->GetKillListSize () ) AddKillListFilter ( &dExtra, pDiskChunk->GetKillList(), pDiskChunk->GetKillListSize() ); } if ( dExtra.GetLength()==iOldLength ) dExtra.Add(); } CSphVector dWrongWords; SmallStringHash_T hDiskStats; int64_t tmMaxTimer = 0; if ( pQuery->m_uMaxQueryMsec>0 ) tmMaxTimer = sphMicroTimer() + pQuery->m_uMaxQueryMsec*1000; // max_query_time assert ( dExtra.GetLength()==m_pDiskChunks.GetLength() ); CSphVector dDiskStrings ( m_pDiskChunks.GetLength() ); CSphVector dDiskMva ( m_pDiskChunks.GetLength() ); ARRAY_FOREACH ( iChunk, m_pDiskChunks ) { CSphQueryResult tChunkResult; // storing index in matches tag for finding strings attrs offset later, biased against default zero and segments const int iTag = m_pSegments.GetLength()+iChunk+1; if ( !m_pDiskChunks[iChunk]->MultiQuery ( pQuery, &tChunkResult, iSorters, ppSorters, &dExtra, iTag ) ) { // FIXME? maybe handle this more gracefully (convert to a warning)? 
pResult->m_sError = tChunkResult.m_sError; m_tRwlock.Unlock (); if ( m_bKlistLocked ) m_tKlist.KillListUnlock(); return false; } // check terms inconsistency amongs disk chunks const SmallStringHash_T & hSrcStats = tChunkResult.m_hWordStats; if ( pResult->m_hWordStats.GetLength() ) { hSrcStats.IterateStart(); while ( hSrcStats.IterateNext() ) { const CSphQueryResultMeta::WordStat_t * pDstStat = pResult->m_hWordStats ( hSrcStats.IterateGetKey() ); const CSphQueryResultMeta::WordStat_t & tSrcStat = hSrcStats.IterateGet(); // all indexes should produce same words from the query if ( !pDstStat && !tSrcStat.m_bExpanded ) { dWrongWords.Add ( hSrcStats.IterateGetKey() ); } pResult->AddStat ( hSrcStats.IterateGetKey(), tSrcStat.m_iDocs, tSrcStat.m_iHits, tSrcStat.m_bExpanded ); } } else { pResult->m_hWordStats = hSrcStats; } dDiskStrings[iChunk] = tChunkResult.m_pStrings; dDiskMva[iChunk] = tChunkResult.m_pMva; dExtra.Pop(); // keep last chunk statistics to check vs rt settings if ( iChunk==m_pDiskChunks.GetLength()-1 ) hDiskStats = hSrcStats; if ( (iChunk+1)!=m_pDiskChunks.GetLength() && tmMaxTimer>0 && sphMicroTimer()>=tmMaxTimer ) { pResult->m_sWarning = "query time exceeded max_query_time"; break; } } if ( m_bKlistLocked ) m_tKlist.KillListUnlock(); //////////////////// // search RAM chunk //////////////////// // select the sorter with max schema int iMaxSchemaSize = -1; int iMaxSchemaIndex = -1; ARRAY_FOREACH ( i, dSorters ) if ( dSorters[i]->GetSchema().GetRowSize() > iMaxSchemaSize ) { iMaxSchemaSize = dSorters[i]->GetSchema().GetRowSize(); iMaxSchemaIndex = i; } // setup calculations and result schema CSphQueryContext tCtx; if ( !tCtx.SetupCalc ( pResult, dSorters[iMaxSchemaIndex]->GetSchema(), m_tSchema, NULL ) ) { m_tRwlock.Unlock (); return false; } // setup search terms RtQwordSetup_t tTermSetup; tTermSetup.m_pDict = pDict; tTermSetup.m_pIndex = this; tTermSetup.m_eDocinfo = m_tSettings.m_eDocinfo; tTermSetup.m_iDynamicRowitems = pResult->m_tSchema.GetDynamicSize(); if ( pQuery->m_uMaxQueryMsec>0 ) tTermSetup.m_iMaxTimer = sphMicroTimer() + pQuery->m_uMaxQueryMsec*1000; // max_query_time tTermSetup.m_pWarning = &pResult->m_sWarning; tTermSetup.m_pSeg = NULL; tTermSetup.m_pCtx = &tCtx; int iIndexWeight = pQuery->GetIndexWeight ( m_sIndexName.cstr() ); // bind weights tCtx.BindWeights ( pQuery, m_tSchema, iIndexWeight ); // parse query XQQuery_t tParsed; if ( !sphParseExtendedQuery ( tParsed, pQuery->m_sQuery.cstr(), pTokenizer.Ptr(), &m_tSchema, pDict, m_tSettings.m_iStopwordStep ) ) { pResult->m_sError = tParsed.m_sParseError; m_tRwlock.Unlock (); return false; } // transform query if needed (quorum transform, keyword expansion, etc.) 
sphTransformExtendedQuery ( &tParsed.m_pRoot ); // expanding prefix in word dictionary case if ( m_bEnableStar && m_bKeywordDict ) { ExpansionContext_t tCtx; tCtx.m_pWordlist = this; tCtx.m_pBuf = NULL; tCtx.m_pResult = pResult; tCtx.m_iFD = -1; tCtx.m_iMinPrefixLen = m_tSettings.m_iMinPrefixLen; tCtx.m_iExpansionLimit = m_iExpansionLimit; tCtx.m_bStarEnabled = true; tCtx.m_bHasMorphology = m_pDict->HasMorphology(); tParsed.m_pRoot = sphExpandXQNode ( tParsed.m_pRoot, tCtx ); } if ( !sphCheckQueryHeight ( tParsed.m_pRoot, pResult->m_sError ) ) { m_tRwlock.Unlock (); return false; } // setup query // must happen before index-level reject, in order to build proper keyword stats CSphScopedPtr pRanker ( sphCreateRanker ( tParsed, pQuery, pResult, tTermSetup, tCtx ) ); if ( !pRanker.Ptr() ) { m_tRwlock.Unlock (); return false; } // check terms inconsistency disk chunks vs rt if ( pResult->m_hWordStats.GetLength() && hDiskStats.GetLength() ) { const SmallStringHash_T & hSrcStats = pResult->m_hWordStats; hSrcStats.IterateStart(); while ( hSrcStats.IterateNext() ) { const CSphQueryResultMeta::WordStat_t * pDstStat = hDiskStats ( hSrcStats.IterateGetKey() ); const CSphQueryResultMeta::WordStat_t & tSrcStat = hSrcStats.IterateGet(); // all indexes should produce same words from the query if ( !pDstStat && !tSrcStat.m_bExpanded ) { dWrongWords.Add ( hSrcStats.IterateGetKey() ); } } } // make warning on terms inconsistency if ( dWrongWords.GetLength() ) { dWrongWords.Uniq(); pResult->m_sWarning.SetSprintf ( "index '%s': query word(s) mismatch: %s", m_sIndexName.cstr(), dWrongWords.Begin()->cstr() ); for ( int i=1; im_sWarning.SetSprintf ( "%s, %s", pResult->m_sWarning.cstr(), dWrongWords[i].cstr() ); } // empty index, empty result if ( !m_pSegments.GetLength() && !m_pDiskChunks.GetLength() ) { pResult->m_iQueryTime = 0; m_tRwlock.Unlock (); return true; } // search segments no looking to max_query_time // FIXME!!! move searching at segments before disk chunks as result set is safe with kill-lists if ( m_pSegments.GetLength() ) { // setup filters // FIXME! setup filters MVA pool bool bFullscan = ( pQuery->m_eMode==SPH_MATCH_FULLSCAN || pQuery->m_sQuery.IsEmpty() ); if ( !tCtx.CreateFilters ( bFullscan, &pQuery->m_dFilters, pResult->m_tSchema, NULL, pResult->m_sError ) ) { m_tRwlock.Unlock (); return false; } // FIXME! OPTIMIZE! check if we can early reject the whole index // setup lookup // do pre-filter lookup as needed // do pre-sort lookup in all cases // post-sort lookup is complicated (because of many segments) // pre-sort lookup is cheap now anyway, and almost always anyway // (except maybe by stupid relevance-sorting-only benchmarks!!) tCtx.m_bLookupFilter = ( pQuery->m_dFilters.GetLength() || tCtx.m_dCalcFilter.GetLength() ); tCtx.m_bLookupSort = true; // FIXME! setup overrides // do searching bool bRandomize = dSorters[0]->m_bRandomize; int iCutoff = pQuery->m_iCutoff; if ( iCutoff<=0 ) iCutoff = -1; if ( bFullscan ) { // full scan // FIXME? OPTIMIZE? add shortcuts here too? 
CSphMatch tMatch; tMatch.Reset ( pResult->m_tSchema.GetDynamicSize() ); tMatch.m_iWeight = pQuery->GetIndexWeight ( m_sIndexName.cstr() ); int iCutoff = pQuery->m_iCutoff; if ( iCutoff<=0 ) iCutoff = -1; ARRAY_FOREACH ( iSeg, m_pSegments ) { // set string pool for string on_sort expression fix up tCtx.SetStringPool ( m_pSegments[iSeg]->m_dStrings.Begin() ); tCtx.SetMVAPool ( m_pSegments[iSeg]->m_dMvas.Begin() ); ARRAY_FOREACH ( i, dSorters ) { dSorters[i]->SetStringPool ( m_pSegments[iSeg]->m_dStrings.Begin() ); dSorters[i]->SetMVAPool ( m_pSegments[iSeg]->m_dMvas.Begin() ); } RtRowIterator_t tIt ( m_pSegments[iSeg], m_iStride, false, NULL ); for ( ;; ) { const CSphRowitem * pRow = tIt.GetNextAliveRow(); if ( !pRow ) break; tMatch.m_iDocID = DOCINFO2ID(pRow); tMatch.m_pStatic = DOCINFO2ATTRS(pRow); // FIXME! overrides tCtx.CalcFilter ( tMatch ); if ( tCtx.m_pFilter && !tCtx.m_pFilter->Eval ( tMatch ) ) continue; tCtx.CalcSort ( tMatch ); tCtx.CalcFinal ( tMatch ); // OPTIMIZE? could be possibly done later // storing segment in matches tag for finding strings attrs offset later, biased against default zero tMatch.m_iTag = iSeg+1; bool bNewMatch = false; ARRAY_FOREACH ( iSorter, dSorters ) bNewMatch |= dSorters[iSorter]->Push ( tMatch ); // handle cutoff if ( bNewMatch ) if ( --iCutoff==0 ) break; } if ( iCutoff==0 ) break; } } else { // query matching ARRAY_FOREACH ( iSeg, m_pSegments ) { tTermSetup.m_pSeg = m_pSegments[iSeg]; pRanker->Reset ( tTermSetup ); // for lookups to work tCtx.m_pIndexData = m_pSegments[iSeg]; // set string pool for string on_sort expression fix up tCtx.SetStringPool ( m_pSegments[iSeg]->m_dStrings.Begin() ); tCtx.SetMVAPool ( m_pSegments[iSeg]->m_dMvas.Begin() ); ARRAY_FOREACH ( i, dSorters ) { dSorters[i]->SetStringPool ( m_pSegments[iSeg]->m_dStrings.Begin() ); dSorters[i]->SetMVAPool ( m_pSegments[iSeg]->m_dMvas.Begin() ); } CSphMatch * pMatch = pRanker->GetMatchesBuffer(); for ( ;; ) { int iMatches = pRanker->GetMatches(); if ( iMatches<=0 ) break; for ( int i=0; iEval ( pMatch[i] ) ) continue; // storing segment in matches tag for finding strings attrs offset later, biased against default zero pMatch[i].m_iTag = iSeg+1; bool bNewMatch = false; ARRAY_FOREACH ( iSorter, dSorters ) bNewMatch |= dSorters[iSorter]->Push ( pMatch[i] ); if ( bNewMatch ) if ( --iCutoff==0 ) break; } if ( iCutoff==0 ) { iSeg = m_pSegments.GetLength(); break; } } } } } ////////////////////// // coping match's attributes to external storage in result set ////////////////////// CSphVector dStringGetLoc; CSphVector dStringSetLoc; CSphVector dMvaGetLoc; CSphVector dMvaSetLoc; for ( int i=0; im_tSchema.GetAttrsCount(); i++ ) { const CSphColumnInfo & tSetInfo = pResult->m_tSchema.GetAttr(i); if ( tSetInfo.m_eAttrType==SPH_ATTR_STRING ) { const int iInLocator = m_tSchema.GetAttrIndex ( tSetInfo.m_sName.cstr() ); assert ( iInLocator>=0 ); dStringGetLoc.Add ( m_tSchema.GetAttr ( iInLocator ).m_tLocator ); dStringSetLoc.Add ( tSetInfo.m_tLocator ); } else if ( tSetInfo.m_eAttrType==SPH_ATTR_UINT32SET || tSetInfo.m_eAttrType==SPH_ATTR_UINT64SET ) { const int iInLocator = m_tSchema.GetAttrIndex ( tSetInfo.m_sName.cstr() ); assert ( iInLocator>=0 ); dMvaGetLoc.Add ( m_tSchema.GetAttr ( iInLocator ).m_tLocator ); dMvaSetLoc.Add ( tSetInfo.m_tLocator ); } } bool bHasArenaAttrs = ( dStringSetLoc.GetLength()>0 || dMvaSetLoc.GetLength()>0 ); const int iSegmentsTotal = m_pSegments.GetLength(); bool bSegmentMatchesFixup = ( m_tSchema.GetStaticSize()>0 && iSegmentsTotal>0 ); if ( bSegmentMatchesFixup || 
bHasArenaAttrs ) { MEMORY ( SPH_MEM_IDX_RT_RES_MATCHES ); // we need to count matches for allocating arena // as we are going to fix match's m_pStatic pointers later // and copy real match's data to arena int iFixupCount = 0; ARRAY_FOREACH ( iSorter, dSorters ) { ISphMatchSorter * pSorter = dSorters[iSorter]; const CSphMatch * pMatches = pSorter->Finalize(); const int iMatchesCount = pSorter->GetLength(); if ( bHasArenaAttrs ) { iFixupCount += iMatchesCount; continue; } for ( int i=0; i=0 && iMatchSegment< iSegmentsTotal ) iFixupCount++; } } if ( iFixupCount>0 || bHasArenaAttrs ) { const int iStaticSize = m_tSchema.GetStaticSize() + DWSIZEOF ( SphDocID_t ); CSphRowitem * pAttr = new CSphRowitem [ iFixupCount * iStaticSize ]; pResult->m_dStorage2Free.Add ( (BYTE*)pAttr ); #ifndef NDEBUG CSphRowitem * pEnd = pAttr + iFixupCount * iStaticSize; #endif ARRAY_FOREACH ( iSorter, dSorters ) { ISphMatchSorter * pSorter = dSorters[iSorter]; CSphMatch * pMatches = pSorter->Finalize(); const int iMatchesCount = pSorter->GetLength(); for ( int i=0; i=0 && iMatchSegment< iSegmentsTotal ) || bHasArenaAttrs ) { assert ( pAttr+iStaticSize<=pEnd ); memcpy ( pAttr, STATIC2DOCINFO ( pMatches[i].m_pStatic ), sizeof(CSphRowitem)*iStaticSize ); pMatches[i].m_pStatic = DOCINFO2ATTRS ( pAttr ); DOCINFOSETID ( pAttr, (SphDocID_t)0 ); // the zero docid will show that the data was copied pAttr += iStaticSize; } } } } } ////////////////////// // fixing string offset and data in resulting matches ////////////////////// MEMORY ( SPH_MEM_IDX_RT_RES_STRINGS ); if ( bHasArenaAttrs ) { assert ( !pResult->m_pStrings && !pResult->m_pMva ); CSphTightVector dStorageString; CSphTightVector dStorageMva; dStorageString.Add ( 0 ); dStorageMva.Add ( 0 ); ARRAY_FOREACH ( iSorter, dSorters ) { ISphMatchSorter * pSorter = dSorters[iSorter]; CSphMatch * pMatches = pSorter->Finalize(); const int iMatchesCount = pSorter->GetLength(); for ( int i=0; i=1 && tMatch.m_iTagm_dStrings.Begin() : dDiskStrings[ iStorageSrc-iSegCount ]; const DWORD * pBaseMva = bSegmentMatch ? m_pSegments[iStorageSrc]->m_dMvas.Begin() : dDiskMva[ iStorageSrc-iSegCount ]; ARRAY_FOREACH ( i, dStringGetLoc ) { DWORD uAttr = 0; const SphAttr_t uOff = tMatch.GetAttr ( dStringGetLoc[i] ); if ( uOff>0 ) // have to fix up only existed attribute { assert ( uOff<( I64C(1)<<32 ) ); // should be 32 bit offset assert ( !bSegmentMatch || (int)uOffm_dStrings.GetLength() ); uAttr = CopyPackedString ( pBaseString + uOff, dStorageString ); } const CSphAttrLocator & tSet = dStringSetLoc[i]; assert ( !tSet.m_bDynamic || tSet.GetMaxRowitem() < (int)tMatch.m_pDynamic[-1] ); sphSetRowAttr ( tSet.m_bDynamic ? tMatch.m_pDynamic : const_cast( tMatch.m_pStatic ), tSet, uAttr ); } ARRAY_FOREACH ( i, dMvaGetLoc ) { DWORD uAttr = 0; const SphAttr_t uOff = tMatch.GetAttr ( dMvaGetLoc[i] ); if ( uOff>0 ) // have to fix up only existed attribute { assert ( uOff<( I64C(1)<<32 ) ); // should be 32 bit offset assert ( !bSegmentMatch || (int)uOffm_dMvas.GetLength() ); uAttr = CopyMva ( pBaseMva + uOff, dStorageMva ); } const CSphAttrLocator & tSet = dMvaSetLoc[i]; assert ( !tSet.m_bDynamic || tSet.GetMaxRowitem() < (int)tMatch.m_pDynamic[-1] ); sphSetRowAttr ( tSet.m_bDynamic ? 
tMatch.m_pDynamic : const_cast( tMatch.m_pStatic ), tSet, uAttr ); } } } if ( dStorageString.GetLength()>1 ) { BYTE * pStrings = dStorageString.LeakData (); pResult->m_dStorage2Free.Add ( pStrings ); pResult->m_pStrings = pStrings; } if ( dStorageMva.GetLength()>1 ) { DWORD * pMva = dStorageMva.LeakData(); pResult->m_dStorage2Free.Add ( (BYTE*)pMva ); pResult->m_pMva = pMva; } } // query timer pResult->m_iQueryTime = int ( ( sphMicroTimer()-tmQueryStart )/1000 ); m_tRwlock.Unlock (); return true; } bool RtIndex_t::MultiQueryEx ( int iQueries, const CSphQuery * ppQueries, CSphQueryResult ** ppResults, ISphMatchSorter ** ppSorters, const CSphVector * pExtraFilters, int iTag ) const { // FIXME! OPTIMIZE! implement common subtree cache here bool bResult = false; for ( int i=0; im_iMultiplier = -1; return bResult; } bool RtIndex_t::GetKeywords ( CSphVector & dKeywords, const char * sQuery, bool bGetStats, CSphString & sError ) const { m_tRwlock.ReadLock(); // this is actually needed only if they want stats RtQword_t tQword; CSphString sBuffer ( sQuery ); CSphScopedPtr pTokenizer ( m_pTokenizer->Clone ( false ) ); // avoid race pTokenizer->SetBuffer ( (BYTE *)sBuffer.cstr(), sBuffer.Length() ); CSphScopedPtr tDictCloned ( NULL ); CSphDict * pDictBase = m_pDict; if ( pDictBase->HasState() ) { tDictCloned = pDictBase = pDictBase->Clone(); } while ( BYTE * pToken = pTokenizer->GetToken() ) { const char * sToken = (const char *)pToken; CSphString sWord ( sToken ); SphWordID_t iWord = pDictBase->GetWordID ( pToken ); if ( iWord ) { CSphKeywordInfo & tInfo = dKeywords.Add(); tInfo.m_sTokenized = sWord; tInfo.m_sNormalized = sToken; tInfo.m_iDocs = 0; tInfo.m_iHits = 0; if ( !bGetStats ) continue; tQword.m_iWordID = iWord; tQword.m_iDocs = 0; tQword.m_iHits = 0; ARRAY_FOREACH ( iSeg, m_pSegments ) RtQwordSetupSegment ( &tQword, m_pSegments[iSeg], false, m_bKeywordDict, m_iWordsCheckpoint ); tInfo.m_iDocs = tQword.m_iDocs; tInfo.m_iHits = tQword.m_iHits; } } // get stats from disk chunks too if ( bGetStats ) ARRAY_FOREACH ( iChunk, m_pDiskChunks ) { CSphVector dKeywords2; if ( !m_pDiskChunks[iChunk]->GetKeywords ( dKeywords2, sQuery, bGetStats, sError ) ) { m_tRwlock.Unlock(); return false; } if ( dKeywords.GetLength()!=dKeywords2.GetLength() ) { sError.SetSprintf ( "INTERNAL ERROR: keyword count mismatch (ram=%d, disk[%d]=%d)", dKeywords.GetLength(), iChunk, dKeywords2.GetLength() ); m_tRwlock.Unlock (); break; } ARRAY_FOREACH ( i, dKeywords ) { if ( dKeywords[i].m_sTokenized!=dKeywords2[i].m_sTokenized ) { sError.SetSprintf ( "INTERNAL ERROR: tokenized keyword mismatch (n=%d, ram=%s, disk[%d]=%s)", i, dKeywords[i].m_sTokenized.cstr(), iChunk, dKeywords2[i].m_sTokenized.cstr() ); m_tRwlock.Unlock (); break; } if ( dKeywords[i].m_sNormalized!=dKeywords2[i].m_sNormalized ) { sError.SetSprintf ( "INTERNAL ERROR: normalized keyword mismatch (n=%d, ram=%s, disk[%d]=%s)", i, dKeywords[i].m_sTokenized.cstr(), iChunk, dKeywords2[i].m_sTokenized.cstr() ); m_tRwlock.Unlock (); break; } dKeywords[i].m_iDocs += dKeywords2[i].m_iDocs; dKeywords[i].m_iHits += dKeywords2[i].m_iHits; } } m_tRwlock.Unlock(); return true; } // FIXME! 
might be inconsistent in case disk chunk update fails int RtIndex_t::UpdateAttributes ( const CSphAttrUpdate & tUpd, int iIndex, CSphString & sError ) { // check if we have to assert ( tUpd.m_dDocids.GetLength()==0 || tUpd.m_dRows.GetLength()==0 ); int iRows = Max ( tUpd.m_dDocids.GetLength(), tUpd.m_dRows.GetLength() ); bool bRaw = tUpd.m_dDocids.GetLength()==0; bool bHasMva = false; assert ( iRows==(int)tUpd.m_dRowOffset.GetLength() ); if ( !iRows ) return 0; // remap update schema to index schema CSphVector dLocators; CSphVector dIndexes; CSphVector dFloats; CSphVector dBigints; dLocators.Reserve ( tUpd.m_dAttrs.GetLength() ); dIndexes.Reserve ( tUpd.m_dAttrs.GetLength() ); dFloats.Reserve ( tUpd.m_dAttrs.GetLength() ); dBigints.Reserve ( tUpd.m_dAttrs.GetLength() ); // bigint flags for *source* schema. uint64_t uDst64 = 0; ARRAY_FOREACH ( i, tUpd.m_dAttrs ) { int iIndex = m_tSchema.GetAttrIndex ( tUpd.m_dAttrs[i].m_sName.cstr() ); if ( iIndex<0 ) { sError.SetSprintf ( "attribute '%s' not found", tUpd.m_dAttrs[i].m_sName.cstr() ); return -1; } dBigints.Add ( tUpd.m_dAttrs[i].m_eAttrType==SPH_ATTR_BIGINT ); // forbid updates on non-int columns const CSphColumnInfo & tCol = m_tSchema.GetAttr(iIndex); if ( !( tCol.m_eAttrType==SPH_ATTR_BOOL || tCol.m_eAttrType==SPH_ATTR_INTEGER || tCol.m_eAttrType==SPH_ATTR_TIMESTAMP || tCol.m_eAttrType==SPH_ATTR_UINT32SET || tCol.m_eAttrType==SPH_ATTR_UINT64SET || tCol.m_eAttrType==SPH_ATTR_BIGINT || tCol.m_eAttrType==SPH_ATTR_FLOAT )) { sError.SetSprintf ( "attribute '%s' can not be updated (must be boolean, integer, bigint, float or timestamp or MVA)", tUpd.m_dAttrs[i].m_sName.cstr() ); return -1; } bool bSrcMva = ( tCol.m_eAttrType==SPH_ATTR_UINT32SET || tCol.m_eAttrType==SPH_ATTR_UINT64SET ); bool bDstMva = ( tUpd.m_dAttrs[i].m_eAttrType==SPH_ATTR_UINT32SET || tUpd.m_dAttrs[i].m_eAttrType==SPH_ATTR_UINT64SET ); if ( bSrcMva!=bDstMva ) { sError.SetSprintf ( "attribute '%s' MVA flag mismatch", tUpd.m_dAttrs[i].m_sName.cstr() ); return -1; } if ( tCol.m_eAttrType==SPH_ATTR_UINT32SET && tUpd.m_dAttrs[i].m_eAttrType==SPH_ATTR_UINT64SET ) { sError.SetSprintf ( "attribute '%s' MVA bits (dst=%d, src=%d) mismatch", tUpd.m_dAttrs[i].m_sName.cstr(), tCol.m_eAttrType, tUpd.m_dAttrs[i].m_eAttrType ); return -1; } if ( tCol.m_eAttrType==SPH_ATTR_UINT64SET ) uDst64 |= ( U64C(1)< ( bRaw? tUpd.m_dRows[iUpd] : m_pSegments[iSeg]->FindAliveRow ( tUpd.m_dDocids[iUpd] ) ); if ( !pRow ) continue; assert ( bRaw || ( DOCINFO2ID(pRow)==tUpd.m_dDocids[iUpd] ) ); pRow = DOCINFO2ATTRS(pRow); CSphTightVector * pStorageMVA = NULL; if ( bHasMva ) { if ( !bRaw ) { pStorageMVA = &m_pSegments[iSeg]->m_dMvas; } else { ARRAY_FOREACH ( iMva, m_pSegments ) { const CSphVector & dSegRows = m_pSegments[iMva]->m_dRows; if ( dSegRows.Begin()<=pRow && pRowm_dMvas; break; } } } } assert ( !bHasMva || pStorageMVA ); int iPos = tUpd.m_dRowOffset[iUpd]; ARRAY_FOREACH ( iCol, tUpd.m_dAttrs ) { if ( !( tUpd.m_dAttrs[iCol].m_eAttrType==SPH_ATTR_UINT32SET || tUpd.m_dAttrs[iCol].m_eAttrType==SPH_ATTR_UINT64SET ) ) { if ( dIndexes[iCol]>=0 ) { // plain update uUpdateMask |= ATTRS_UPDATED; SphAttr_t uValue = dBigints[iCol] ? 
MVA_UPSIZE ( &tUpd.m_dPool[iPos] ) : tUpd.m_dPool[iPos]; sphSetRowAttr ( pRow, dLocators[iCol], uValue ); iPos += dBigints[iCol]?2:1; } } else { const DWORD * pSrc = tUpd.m_dPool.Begin()+iPos; DWORD iLen = *pSrc; iPos += iLen+1; if ( dIndexes[iCol]>=0 ) { // MVA update uUpdateMask |= ATTRS_MVA_UPDATED; if ( !iLen ) { sphSetRowAttr ( pRow, dLocators[iCol], 0 ); continue; } bool bDst64 = ( ( uDst64 & ( U64C(1) << iCol ) )!=0 ); assert ( ( iLen%2 )==0 ); DWORD uCount = ( bDst64 ? iLen : iLen/2 ); DWORD uMvaOff = MVA_DOWNSIZE ( sphGetRowAttr ( pRow, dLocators[iCol] ) ); assert ( uMvaOff<(DWORD)pStorageMVA->GetLength() ); DWORD * pDst = pStorageMVA->Begin() + uMvaOff; if ( uCount>(*pDst) ) { uMvaOff = pStorageMVA->GetLength(); pStorageMVA->Resize ( uMvaOff+uCount+1 ); pDst = pStorageMVA->Begin()+uMvaOff; sphSetRowAttr ( pRow, dLocators[iCol], uMvaOff ); } if ( bDst64 ) { memcpy ( pDst, pSrc, sizeof(DWORD)*(uCount+1) ); } else { *pDst++ = uCount; // MVA values counter first pSrc++; while ( uCount-- ) { *pDst = *pSrc; pDst++; pSrc+=2; } } } } } bUpdated = true; iUpdated++; } if ( bUpdated ) continue; // check disk K-list now if ( iUpdated==1 ) { m_tKlist.Flush(); // no need to lock here as it got protected here by writer locks } const SphAttr_t uRef = bRaw ? DOCINFO2ID ( tUpd.m_dRows[iUpd] ) : tUpd.m_dDocids[iUpd]; bUpdated = ( sphBinarySearch ( m_tKlist.GetKillList(), m_tKlist.GetKillList() + m_tKlist.GetKillListSize() - 1, uRef )!=NULL ); if ( bUpdated ) continue; // finally, try disk chunks for ( int iChunk = m_pDiskChunks.GetLength()-1; iChunk>=0; iChunk-- ) { // run just this update // FIXME! might be inefficient in case of big batches (redundant allocs in disk update) int iRes = m_pDiskChunks[iChunk]->UpdateAttributes ( tUpd, iUpd, sError ); // errors are highly unlikely at this point // FIXME! maybe emit a warning to client as well? if ( iRes<0 ) { sphWarn ( "INTERNAL ERROR: index %s chunk %d update failure: %s", m_sIndexName.cstr(), iChunk, sError.cstr() ); continue; } // update stats iUpdated += iRes; // we only need to update the most fresh chunk if ( iRes>0 ) break; } } // bump the counter, binlog the update! 
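// editor's note: BinlogUpdateAttributes() increments m_iTID and writes the
// update into the binlog, so that after a crash ReplayUpdateAttributes()
// (further below) can re-apply any updates newer than the last flushed TID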
assert ( iIndex<0 ); g_pBinlog->BinlogUpdateAttributes ( &m_iTID, m_sIndexName.cstr(), tUpd ); m_tRwlock.Unlock(); // all done return iUpdated; } ////////////////////////////////////////////////////////////////////////// // MAGIC CONVERSIONS ////////////////////////////////////////////////////////////////////////// bool RtIndex_t::AttachDiskIndex ( CSphIndex * pIndex, CSphString & sError ) { // safeguards // we do not support some of the disk index features in RT just yet #define LOC_ERROR(_arg) { sError = _arg; return false; } const CSphIndexSettings & tSettings = pIndex->GetSettings(); if ( tSettings.m_bIndexSP!=false ) LOC_ERROR ( "ATTACH currently requires index_sp=0 in disk index (RT-side support not implemented yet)" ); if ( !tSettings.m_sZones.IsEmpty() ) LOC_ERROR ( "ATTACH currently requires no zones in disk index (RT-side support not implemented yet)" ); if ( tSettings.m_iBoundaryStep!=0 ) LOC_ERROR ( "ATTACH currently requires boundary_step=0 in disk index (RT-side support not implemented yet)" ); if ( tSettings.m_iStopwordStep!=1 ) LOC_ERROR ( "ATTACH currently requires stopword_step=1 in disk index (RT-side support not implemented yet)" ); #undef LOC_ERROR // ATTACH needs an exclusive global lock on both indexes // source disk index must come in locked internally // target RT index lock is acquired here m_tWriterMutex.Lock(); m_tRwlock.WriteLock(); // for now, let's do the simplest possible thing // and attach new data to empty RT indexes only bool bHasData = ( m_pDiskChunks.GetLength()!=0 ); ARRAY_FOREACH_COND ( i, m_pSegments, !bHasData ) bHasData = ( m_pSegments[i]->m_iAliveRows!=0 ); if ( bHasData ) { m_tRwlock.Unlock(); m_tWriterMutex.Unlock(); sError.SetSprintf ( "ATTACH currently supports empty target RT indexes only" ); return false; } // rename that source index to our chunk0 CSphString sChunk; sChunk.SetSprintf ( "%s.0", m_sPath.cstr() ); if ( !pIndex->Rename ( sChunk.cstr() ) ) { m_tRwlock.Unlock(); m_tWriterMutex.Unlock(); sError.SetSprintf ( "ATTACH failed: %s", pIndex->GetLastError().cstr() ); return false; } // copy schema from chunk0 schema m_tSchema = pIndex->GetMatchSchema(); m_tStats = pIndex->GetStats(); m_iStride = DOCINFO_IDSIZE + m_tSchema.GetRowSize(); // copy tokenizer, dict etc settings from chunk0 SafeDelete ( m_pTokenizer ); SafeDelete ( m_pDict ); m_pTokenizer = pIndex->GetTokenizer()->Clone ( false ); m_pDict = pIndex->GetDictionary()->Clone (); // FIXME? what about copying m_TID etc? // recreate disk chunk list, resave header file m_pDiskChunks.Add ( pIndex ); SaveMeta ( m_pDiskChunks.GetLength() ); // FIXME? do something about binlog too? 
// g_pBinlog->NotifyIndexFlush ( m_sIndexName.cstr(), m_iTID, false ); // all done Verify ( m_tRwlock.Unlock() ); Verify ( m_tWriterMutex.Unlock() ); return true; } ////////////////////////////////////////////////////////////////////////// // BINLOG ////////////////////////////////////////////////////////////////////////// extern DWORD g_dSphinxCRC32 [ 256 ]; static CSphString MakeBinlogName ( const char * sPath, int iExt ) { CSphString sName; sName.SetSprintf ( "%s/binlog.%03d", sPath, iExt ); return sName; } BinlogWriter_c::BinlogWriter_c () { m_iLastWritePos = 0; m_iLastFsyncPos = 0; ResetCrc(); } void BinlogWriter_c::ResetCrc () { m_uCRC = ~((DWORD)0); } void BinlogWriter_c::PutBytes ( const void * pData, int iSize ) { BYTE * b = (BYTE*) pData; for ( int i=0; i> 8) ^ g_dSphinxCRC32 [ (m_uCRC ^ *b++) & 0xff ]; CSphWriter::PutBytes ( pData, iSize ); } void BinlogWriter_c::PutString ( const char * szString ) { int iLen = szString ? strlen ( szString ) : 0; ZipValue ( iLen ); if ( iLen ) PutBytes ( szString, iLen ); } void BinlogWriter_c::ZipValue ( uint64_t uValue ) { BYTE uBuf[16]; int iLen = 0; while ( uValue>=0x80 ) { uBuf[iLen++] = (BYTE)( 0x80 | ( uValue & 0x7f ) ); uValue >>= 7; } uBuf[iLen++] = (BYTE)uValue; PutBytes ( uBuf, iLen ); } void BinlogWriter_c::WriteCrc () { m_uCRC = ~m_uCRC; CSphWriter::PutDword ( m_uCRC ); m_uCRC = ~((DWORD)0); } void BinlogWriter_c::Flush () { Write(); Fsync(); } void BinlogWriter_c::Write () { if ( m_iPoolUsed<=0 ) return; CSphWriter::Flush(); m_iLastWritePos = GetPos(); } #if USE_WINDOWS int fsync ( int iFD ) { // map fd to handle HANDLE h = (HANDLE) _get_osfhandle ( iFD ); if ( h==INVALID_HANDLE_VALUE ) { errno = EBADF; return -1; } // do flush if ( FlushFileBuffers(h) ) return 0; // error handling errno = EIO; if ( GetLastError()==ERROR_INVALID_HANDLE ) errno = EINVAL; return -1; } #endif void BinlogWriter_c::Fsync () { if ( !HasUnsyncedData() ) return; m_bError = ( fsync ( m_iFD )!=0 ); if ( m_bError && m_pError ) m_pError->SetSprintf ( "failed to sync %s: %s" , m_sName.cstr(), strerror(errno) ); m_iLastFsyncPos = GetPos(); } ////////////////////////////////////////////////////////////////////////// void BinlogReader_c::ResetCrc () { m_uCRC = ~(DWORD(0)); } void BinlogReader_c::GetBytes ( void * pData, int iSize ) { CSphReader::GetBytes ( pData, iSize ); BYTE * b = (BYTE*) pData; for ( int i=0; i> 8) ^ g_dSphinxCRC32 [ (m_uCRC ^ *b++) & 0xff ]; } DWORD BinlogReader_c::GetDword () { DWORD uRes; GetBytes ( &uRes, sizeof(DWORD) ); return uRes; } CSphString BinlogReader_c::GetString () { CSphString sRes; int iLen = (int) UnzipValue(); if ( iLen ) { sRes.Reserve ( iLen ); GetBytes ( (BYTE*)sRes.cstr(), iLen ); } return sRes; } uint64_t BinlogReader_c::UnzipValue () { uint64_t uRes = 0; int iOff = 0, iByte; do { iByte = CSphReader::GetByte(); uRes += ( (uint64_t)( iByte & 0x7f ) << iOff ); iOff += 7; m_uCRC = (m_uCRC >> 8) ^ g_dSphinxCRC32 [ (m_uCRC ^ (BYTE)iByte) & 0xff ]; } while ( iByte>=128 ); return uRes; } bool BinlogReader_c::CheckCrc ( const char * sOp, const char * sIndexName, int64_t iTid, int64_t iTxnPos ) { DWORD uRef = CSphAutoreader::GetDword(); m_uCRC = ~m_uCRC; if ( uRef!=m_uCRC ) sphWarning ( "binlog: %s: CRC mismatch (index=%s, tid="INT64_FMT", pos="INT64_FMT")", sOp, sIndexName ? 
sIndexName : "", iTid, iTxnPos ); return uRef==m_uCRC; } ////////////////////////////////////////////////////////////////////////// RtBinlog_c::RtBinlog_c () : m_iFlushTimeLeft ( 0 ) , m_iFlushPeriod ( BINLOG_AUTO_FLUSH ) , m_eOnCommit ( ACTION_NONE ) , m_iLockFD ( -1 ) , m_bReplayMode ( false ) , m_bDisabled ( true ) , m_iRestartSize ( 0 ) { MEMORY ( SPH_MEM_BINLOG ); Verify ( m_tWriteLock.Init() ); m_tWriter.SetBufferSize ( BINLOG_WRITE_BUFFER ); } RtBinlog_c::~RtBinlog_c () { if ( !m_bDisabled ) { m_iFlushPeriod = 0; if ( m_eOnCommit!=ACTION_FSYNC ) sphThreadJoin ( &m_tUpdateTread ); DoCacheWrite(); m_tWriter.CloseFile(); LockFile ( false ); } Verify ( m_tWriteLock.Done() ); } void RtBinlog_c::BinlogCommit ( int64_t * pTID, const char * sIndexName, const RtSegment_t * pSeg, const CSphVector & dKlist, bool bKeywordDict ) { if ( m_bReplayMode || m_bDisabled ) return; MEMORY ( SPH_MEM_BINLOG ); Verify ( m_tWriteLock.Lock() ); int64_t iTID = ++(*pTID); const int64_t tmNow = sphMicroTimer(); const int uIndex = GetWriteIndexID ( sIndexName, iTID, tmNow ); // header m_tWriter.PutDword ( BLOP_MAGIC ); m_tWriter.ResetCrc (); m_tWriter.ZipValue ( BLOP_COMMIT ); m_tWriter.ZipValue ( uIndex ); m_tWriter.ZipValue ( iTID ); m_tWriter.ZipValue ( tmNow ); // save txn data if ( !pSeg || !pSeg->m_iRows ) { m_tWriter.ZipValue ( 0 ); } else { m_tWriter.ZipValue ( pSeg->m_iRows ); SaveVector ( m_tWriter, pSeg->m_dWords ); m_tWriter.ZipValue ( pSeg->m_dWordCheckpoints.GetLength() ); if ( !bKeywordDict ) { ARRAY_FOREACH ( i, pSeg->m_dWordCheckpoints ) { m_tWriter.ZipValue ( pSeg->m_dWordCheckpoints[i].m_iOffset ); m_tWriter.ZipValue ( pSeg->m_dWordCheckpoints[i].m_iWordID ); } } else { const char * pBase = (const char *)pSeg->m_dKeywordCheckpoints.Begin(); ARRAY_FOREACH ( i, pSeg->m_dWordCheckpoints ) { m_tWriter.ZipValue ( pSeg->m_dWordCheckpoints[i].m_iOffset ); m_tWriter.ZipValue ( pSeg->m_dWordCheckpoints[i].m_sWord - pBase ); } } SaveVector ( m_tWriter, pSeg->m_dDocs ); SaveVector ( m_tWriter, pSeg->m_dHits ); SaveVector ( m_tWriter, pSeg->m_dRows ); SaveVector ( m_tWriter, pSeg->m_dStrings ); SaveVector ( m_tWriter, pSeg->m_dMvas ); SaveVector ( m_tWriter, pSeg->m_dKeywordCheckpoints ); } SaveVector ( m_tWriter, dKlist ); // checksum m_tWriter.WriteCrc (); // finalize CheckDoFlush(); CheckDoRestart(); Verify ( m_tWriteLock.Unlock() ); } void RtBinlog_c::BinlogUpdateAttributes ( int64_t * pTID, const char * sIndexName, const CSphAttrUpdate & tUpd ) { if ( m_bReplayMode || m_bDisabled ) return; MEMORY ( SPH_MEM_BINLOG ); Verify ( m_tWriteLock.Lock() ); int64_t iTID = ++(*pTID); const int64_t tmNow = sphMicroTimer(); const int uIndex = GetWriteIndexID ( sIndexName, iTID, tmNow ); // header m_tWriter.PutDword ( BLOP_MAGIC ); m_tWriter.ResetCrc (); m_tWriter.ZipValue ( BLOP_UPDATE_ATTRS ); m_tWriter.ZipValue ( uIndex ); m_tWriter.ZipValue ( iTID ); m_tWriter.ZipValue ( tmNow ); // update data m_tWriter.ZipValue ( tUpd.m_dAttrs.GetLength() ); ARRAY_FOREACH ( i, tUpd.m_dAttrs ) { m_tWriter.PutString ( tUpd.m_dAttrs[i].m_sName.cstr() ); m_tWriter.ZipValue ( tUpd.m_dAttrs[i].m_eAttrType ); } CSphVector dActiveDocids; bool bUseRaw = false; if ( tUpd.m_dDocids.GetLength()==0 && tUpd.m_dRows.GetLength()!=0 ) { bUseRaw = true; dActiveDocids.Resize ( tUpd.m_dRows.GetLength() ); ARRAY_FOREACH ( i, tUpd.m_dRows ) dActiveDocids[i] = DOCINFO2ID ( tUpd.m_dRows[i] ); } const CSphVector & dBinlogDocids = bUseRaw ? 
dActiveDocids : tUpd.m_dDocids; // POD vectors SaveVector ( m_tWriter, tUpd.m_dPool ); SaveVector ( m_tWriter, dBinlogDocids ); dActiveDocids.Reset(); SaveVector ( m_tWriter, tUpd.m_dRowOffset ); // checksum m_tWriter.WriteCrc (); // finalize CheckDoFlush(); CheckDoRestart(); Verify ( m_tWriteLock.Unlock() ); } // here's been going binlogs with ALL closed indices removing void RtBinlog_c::NotifyIndexFlush ( const char * sIndexName, int64_t iTID, bool bShutdown ) { if ( m_bReplayMode ) sphInfo ( "index '%s': ramchunk saved. TID="INT64_FMT"", sIndexName, iTID ); if ( m_bReplayMode || m_bDisabled ) return; MEMORY ( SPH_MEM_BINLOG ); assert ( bShutdown || m_dLogFiles.GetLength() ); Verify ( m_tWriteLock.Lock() ); bool bCurrentLogShut = false; const int iPreflushFiles = m_dLogFiles.GetLength(); // loop through all log files, and check if we can unlink any ARRAY_FOREACH ( iLog, m_dLogFiles ) { BinlogFileDesc_t & tLog = m_dLogFiles[iLog]; bool bUsed = false; // update index info for this log file ARRAY_FOREACH ( i, tLog.m_dIndexInfos ) { BinlogIndexInfo_t & tIndex = tLog.m_dIndexInfos[i]; // this index was just flushed, update flushed TID if ( tIndex.m_sName==sIndexName ) { assert ( iTID>=tIndex.m_iFlushedTID ); tIndex.m_iFlushedTID = Max ( tIndex.m_iFlushedTID, iTID ); } // if max logged TID is greater than last flushed TID, log file still has needed recovery data if ( tIndex.m_iFlushedTID < tIndex.m_iMaxTID ) bUsed = true; } // it's needed, keep looking if ( bUsed ) continue; // hooray, we can remove this log! // if this is our current log, we have to close it first if ( iLog==m_dLogFiles.GetLength()-1 ) { m_tWriter.CloseFile (); bCurrentLogShut = true; } // do unlink CSphString sLog = MakeBinlogName ( m_sLogPath.cstr(), tLog.m_iExt ); if ( ::unlink ( sLog.cstr() ) ) sphWarning ( "binlog: failed to unlink %s: %s (remove it manually)", sLog.cstr(), strerror(errno) ); // we need to reset it, otherwise there might be leftover data after last Remove() m_dLogFiles[iLog] = BinlogFileDesc_t(); // quit tracking it m_dLogFiles.Remove ( iLog-- ); } if ( bCurrentLogShut && !bShutdown ) { // if current log was closed, we need a new one (it will automatically save meta, too) OpenNewLog (); } else if ( iPreflushFiles!=m_dLogFiles.GetLength() ) { // if we unlinked any logs, we need to save meta, too SaveMeta (); } Verify ( m_tWriteLock.Unlock() ); } void RtBinlog_c::Configure ( const CSphConfigSection & hSearchd, bool bTestMode ) { MEMORY ( SPH_MEM_BINLOG ); const int iMode = hSearchd.GetInt ( "binlog_flush", 2 ); switch ( iMode ) { case 0: m_eOnCommit = ACTION_NONE; break; case 1: m_eOnCommit = ACTION_FSYNC; break; case 2: m_eOnCommit = ACTION_WRITE; break; default: sphDie ( "unknown binlog flush mode %d (must be 0, 1, or 2)\n", iMode ); } #ifndef DATADIR #define DATADIR "." #endif m_sLogPath = hSearchd.GetStr ( "binlog_path", bTestMode ? 
"" : DATADIR ); m_bDisabled = m_sLogPath.IsEmpty(); m_iRestartSize = hSearchd.GetSize ( "binlog_max_log_size", m_iRestartSize ); if ( !m_bDisabled ) { LockFile ( true ); LoadMeta(); } } void RtBinlog_c::Replay ( const SmallStringHash_T & hIndexes, DWORD uReplayFlags, ProgressCallbackSimple_t * pfnProgressCallback ) { if ( m_bDisabled || !hIndexes.GetLength() ) return; // on replay started if ( pfnProgressCallback ) pfnProgressCallback(); int64_t tmReplay = sphMicroTimer(); // do replay m_bReplayMode = true; int iLastLogState = 0; ARRAY_FOREACH ( i, m_dLogFiles ) { iLastLogState = ReplayBinlog ( hIndexes, uReplayFlags, i ); if ( pfnProgressCallback ) // on each replayed binlog pfnProgressCallback(); } if ( m_dLogFiles.GetLength()>0 ) { tmReplay = sphMicroTimer() - tmReplay; sphInfo ( "binlog: finished replaying total %d in %d.%03d sec", m_dLogFiles.GetLength(), (int)(tmReplay/1000000), (int)((tmReplay/1000)%1000) ); } // FIXME? // in some cases, indexes might had been flushed during replay // and we might therefore want to update m_iFlushedTID everywhere // but for now, let's just wait until next flush for simplicity // resume normal operation m_bReplayMode = false; OpenNewLog ( iLastLogState ); } void RtBinlog_c::CreateTimerThread () { if ( !m_bDisabled && m_eOnCommit!=ACTION_FSYNC ) { m_iFlushTimeLeft = sphMicroTimer() + m_iFlushPeriod; sphThreadCreate ( &m_tUpdateTread, RtBinlog_c::DoAutoFlush, this ); } } void RtBinlog_c::DoAutoFlush ( void * pBinlog ) { assert ( pBinlog ); RtBinlog_c * pLog = (RtBinlog_c *)pBinlog; assert ( !pLog->m_bDisabled ); while ( pLog->m_iFlushPeriod>0 ) { if ( pLog->m_iFlushTimeLeft < sphMicroTimer() ) { MEMORY ( SPH_MEM_BINLOG ); pLog->m_iFlushTimeLeft = sphMicroTimer() + pLog->m_iFlushPeriod; if ( pLog->m_eOnCommit==ACTION_NONE || pLog->m_tWriter.HasUnwrittenData() ) { Verify ( pLog->m_tWriteLock.Lock() ); pLog->m_tWriter.Flush(); Verify ( pLog->m_tWriteLock.Unlock() ); } if ( pLog->m_tWriter.HasUnsyncedData() ) pLog->m_tWriter.Fsync(); } // sleep N msec before next iter or terminate because of shutdown sphSleepMsec ( 100 ); } } int RtBinlog_c::GetWriteIndexID ( const char * sName, int64_t iTID, int64_t tmNow ) { MEMORY ( SPH_MEM_BINLOG ); assert ( m_dLogFiles.GetLength() ); // OPTIMIZE? maybe hash them? 
BinlogFileDesc_t & tLog = m_dLogFiles.Last(); ARRAY_FOREACH ( i, tLog.m_dIndexInfos ) { BinlogIndexInfo_t & tIndex = tLog.m_dIndexInfos[i]; if ( tIndex.m_sName==sName ) { tIndex.m_iMaxTID = Max ( tIndex.m_iMaxTID, iTID ); tIndex.m_tmMax = Max ( tIndex.m_tmMax, tmNow ); return i; } } // create a new entry int iID = tLog.m_dIndexInfos.GetLength(); BinlogIndexInfo_t & tIndex = tLog.m_dIndexInfos.Add(); // caller must hold a wlock tIndex.m_sName = sName; tIndex.m_iMinTID = iTID; tIndex.m_iMaxTID = iTID; tIndex.m_iFlushedTID = 0; tIndex.m_tmMin = tmNow; tIndex.m_tmMax = tmNow; // log this new entry m_tWriter.PutDword ( BLOP_MAGIC ); m_tWriter.ResetCrc (); m_tWriter.ZipValue ( BLOP_ADD_INDEX ); m_tWriter.ZipValue ( iID ); m_tWriter.PutString ( sName ); m_tWriter.ZipValue ( iTID ); m_tWriter.ZipValue ( tmNow ); m_tWriter.WriteCrc (); // return the index return iID; } void RtBinlog_c::LoadMeta () { MEMORY ( SPH_MEM_BINLOG ); CSphString sMeta; sMeta.SetSprintf ( "%s/binlog.meta", m_sLogPath.cstr() ); if ( !sphIsReadable ( sMeta.cstr() ) ) return; CSphString sError; // opened and locked, lets read CSphAutoreader rdMeta; if ( !rdMeta.Open ( sMeta, sError ) ) sphDie ( "%s error: %s", sMeta.cstr(), sError.cstr() ); if ( rdMeta.GetDword()!=BINLOG_META_MAGIC ) sphDie ( "invalid meta file %s", sMeta.cstr() ); // binlog meta v1 was dev only, crippled, and we don't like it anymore // binlog metas v2 upto current v4 (and likely up) share the same simplistic format // so let's support empty (!) binlogs w/ known versions and compatible metas DWORD uVersion = rdMeta.GetDword(); if ( uVersion==1 || uVersion>BINLOG_VERSION ) sphDie ( "binlog meta file %s is v.%d, binary is v.%d; recovery requires previous binary version", sMeta.cstr(), uVersion, BINLOG_VERSION ); const bool bLoaded64bit = ( rdMeta.GetByte()==1 ); m_dLogFiles.Resize ( rdMeta.UnzipInt() ); // FIXME! sanity check if ( !m_dLogFiles.GetLength() ) return; // ok, so there is actual recovery data // let's require that exact version and bitness, then if ( uVersion!=BINLOG_VERSION ) sphDie ( "binlog meta file %s is v.%d, binary is v.%d; recovery requires previous binary version", sMeta.cstr(), uVersion, BINLOG_VERSION ); if ( bLoaded64bit!=USE_64BIT ) sphDie ( "USE_64BIT inconsistency (binary=%d, binlog=%d); recovery requires previous binary version", USE_64BIT, bLoaded64bit ); // load list of active log files ARRAY_FOREACH ( i, m_dLogFiles ) m_dLogFiles[i].m_iExt = rdMeta.UnzipInt(); // everything else is saved in logs themselves } void RtBinlog_c::SaveMeta () { MEMORY ( SPH_MEM_BINLOG ); CSphString sMeta, sMetaOld; sMeta.SetSprintf ( "%s/binlog.meta.new", m_sLogPath.cstr() ); sMetaOld.SetSprintf ( "%s/binlog.meta", m_sLogPath.cstr() ); CSphString sError; // opened and locked, lets write CSphWriter wrMeta; if ( !wrMeta.OpenFile ( sMeta, sError ) ) sphDie ( "failed to open '%s': '%s'", sMeta.cstr(), sError.cstr() ); wrMeta.PutDword ( BINLOG_META_MAGIC ); wrMeta.PutDword ( BINLOG_VERSION ); wrMeta.PutByte ( USE_64BIT ); // save list of active log files wrMeta.ZipInt ( m_dLogFiles.GetLength() ); ARRAY_FOREACH ( i, m_dLogFiles ) wrMeta.ZipInt ( m_dLogFiles[i].m_iExt ); // everything else is saved in logs themselves wrMeta.CloseFile(); if ( ::rename ( sMeta.cstr(), sMetaOld.cstr() ) ) sphDie ( "failed to rename meta (src=%s, dst=%s, errno=%d, error=%s)", sMeta.cstr(), sMetaOld.cstr(), errno, strerror(errno) ); // !COMMIT handle this gracefully sphLogDebug ( "SaveMeta: Done." 
); } void RtBinlog_c::LockFile ( bool bLock ) { CSphString sName; sName.SetSprintf ( "%s/binlog.lock", m_sLogPath.cstr() ); if ( bLock ) { assert ( m_iLockFD==-1 ); const int iLockFD = ::open ( sName.cstr(), SPH_O_NEW, 0644 ); if ( iLockFD<0 ) sphDie ( "failed to open '%s': %u '%s'", sName.cstr(), errno, strerror(errno) ); if ( !sphLockEx ( iLockFD, false ) ) sphDie ( "failed to lock '%s': %u '%s'", sName.cstr(), errno, strerror(errno) ); m_iLockFD = iLockFD; } else { SafeClose ( m_iLockFD ); ::unlink ( sName.cstr() ); } } void RtBinlog_c::OpenNewLog ( int iLastState ) { MEMORY ( SPH_MEM_BINLOG ); // calc new ext int iExt = 1; if ( m_dLogFiles.GetLength() ) { iExt = m_dLogFiles.Last().m_iExt; if ( !iLastState ) iExt++; } // create entry // we need to reset it, otherwise there might be leftover data after last Remove() BinlogFileDesc_t tLog; tLog.m_iExt = iExt; m_dLogFiles.Add ( tLog ); // create file CSphString sLog = MakeBinlogName ( m_sLogPath.cstr(), tLog.m_iExt ); if ( !iLastState ) // reuse the last binlog since it is empty or useless. ::unlink ( sLog.cstr() ); if ( !m_tWriter.OpenFile ( sLog.cstr(), m_sWriterError ) ) sphDie ( "failed to create %s: errno=%d, error=%s", sLog.cstr(), errno, strerror(errno) ); // emit header m_tWriter.PutDword ( BINLOG_HEADER_MAGIC ); m_tWriter.PutDword ( BINLOG_VERSION ); // update meta SaveMeta(); } void RtBinlog_c::DoCacheWrite () { if ( !m_dLogFiles.GetLength() ) return; const CSphVector & dIndexes = m_dLogFiles.Last().m_dIndexInfos; m_tWriter.PutDword ( BLOP_MAGIC ); m_tWriter.ResetCrc (); m_tWriter.ZipValue ( BLOP_ADD_CACHE ); m_tWriter.ZipValue ( dIndexes.GetLength() ); ARRAY_FOREACH ( i, dIndexes ) { m_tWriter.PutString ( dIndexes[i].m_sName.cstr() ); m_tWriter.ZipValue ( dIndexes[i].m_iMinTID ); m_tWriter.ZipValue ( dIndexes[i].m_iMaxTID ); m_tWriter.ZipValue ( dIndexes[i].m_iFlushedTID ); m_tWriter.ZipValue ( dIndexes[i].m_tmMin ); m_tWriter.ZipValue ( dIndexes[i].m_tmMax ); } m_tWriter.WriteCrc (); } void RtBinlog_c::CheckDoRestart () { // restart on exceed file size limit if ( m_iRestartSize>0 && m_tWriter.GetPos()>m_iRestartSize ) { MEMORY ( SPH_MEM_BINLOG ); assert ( m_dLogFiles.GetLength() ); DoCacheWrite(); m_tWriter.CloseFile(); OpenNewLog(); } } void RtBinlog_c::CheckDoFlush () { if ( m_eOnCommit==ACTION_NONE ) return; if ( m_eOnCommit==ACTION_WRITE && m_tWriter.HasUnwrittenData() ) m_tWriter.Write(); if ( m_eOnCommit==ACTION_FSYNC && m_tWriter.HasUnsyncedData() ) { if ( m_tWriter.HasUnwrittenData() ) m_tWriter.Write(); m_tWriter.Fsync(); } } int RtBinlog_c::ReplayBinlog ( const SmallStringHash_T & hIndexes, DWORD uReplayFlags, int iBinlog ) { assert ( iBinlog>=0 && iBinlog=BLOP_TOTAL ) sphDie ( "binlog: unexpected entry (blop="UINT64_FMT", pos="INT64_FMT")", uOp, iPos ); // FIXME! blop might be OK but skipped (eg. 
index that is no longer) switch ( uOp ) { case BLOP_COMMIT: bReplayOK = ReplayCommit ( iBinlog, uReplayFlags, tReader ); break; case BLOP_UPDATE_ATTRS: bReplayOK = ReplayUpdateAttributes ( iBinlog, tReader ); break; case BLOP_ADD_INDEX: bReplayOK = ReplayIndexAdd ( iBinlog, hIndexes, tReader ); break; case BLOP_ADD_CACHE: if ( bHaveCacheOp ) sphDie ( "binlog: internal error, second BLOP_ADD_CACHE detected (corruption?)" ); bHaveCacheOp = true; bReplayOK = ReplayCacheAdd ( iBinlog, tReader ); break; default: sphDie ( "binlog: internal error, unhandled entry (blop=%d)", (int)uOp ); } dTotal [ uOp ] += bReplayOK?1:0; dTotal [ BLOP_TOTAL ]++; } tmReplay = sphMicroTimer() - tmReplay; if ( tReader.GetErrorFlag() ) sphWarning ( "binlog: log io error at pos="INT64_FMT": %s", iPos, sError.cstr() ); if ( !bReplayOK ) sphWarning ( "binlog: replay error at pos="INT64_FMT")", iPos ); // show additional replay statistics ARRAY_FOREACH ( i, tLog.m_dIndexInfos ) { const BinlogIndexInfo_t & tIndex = tLog.m_dIndexInfos[i]; if ( !hIndexes ( tIndex.m_sName.cstr() ) ) { sphWarning ( "binlog: index %s: missing; tids "INT64_FMT" to "INT64_FMT" skipped!", tIndex.m_sName.cstr(), tIndex.m_iMinTID, tIndex.m_iMaxTID ); } else if ( tIndex.m_iPreReplayTID < tIndex.m_iMaxTID ) { sphInfo ( "binlog: index %s: recovered from tid "INT64_FMT" to tid "INT64_FMT, tIndex.m_sName.cstr(), tIndex.m_iPreReplayTID, tIndex.m_iMaxTID ); } else { sphInfo ( "binlog: index %s: skipped at tid "INT64_FMT" and max binlog tid "INT64_FMT, tIndex.m_sName.cstr(), tIndex.m_iPreReplayTID, tIndex.m_iMaxTID ); } } sphInfo ( "binlog: replay stats: %d rows in %d commits; %d updates; %d indexes", m_iReplayedRows, dTotal[BLOP_COMMIT], dTotal[BLOP_UPDATE_ATTRS], dTotal[BLOP_ADD_INDEX] ); sphInfo ( "binlog: finished replaying %s; %d.%d MB in %d.%03d sec", sLog.cstr(), (int)(iFileSize/1048576), (int)((iFileSize*10/1048576)%10), (int)(tmReplay/1000000), (int)((tmReplay/1000)%1000) ); if ( bHaveCacheOp && dTotal[BLOP_TOTAL]==1 ) // only one operation, that is Add Cache - by the fact, empty binlog return 1; return 0; } static BinlogIndexInfo_t & ReplayIndexID ( BinlogReader_c & tReader, BinlogFileDesc_t & tLog, const char * sPlace ) { const int64_t iTxnPos = tReader.GetPos(); const int iVal = (int)tReader.UnzipValue(); if ( iVal<0 || iVal>=tLog.m_dIndexInfos.GetLength() ) sphDie ( "binlog: %s: unexpected index id (id=%d, max=%d, pos="INT64_FMT")", sPlace, iVal, tLog.m_dIndexInfos.GetLength(), iTxnPos ); return tLog.m_dIndexInfos[iVal]; } bool RtBinlog_c::ReplayCommit ( int iBinlog, DWORD uReplayFlags, BinlogReader_c & tReader ) const { // load and lookup index const int64_t iTxnPos = tReader.GetPos(); BinlogFileDesc_t & tLog = m_dLogFiles[iBinlog]; BinlogIndexInfo_t & tIndex = ReplayIndexID ( tReader, tLog, "commit" ); // load transaction data const int64_t iTID = (int64_t) tReader.UnzipValue(); const int64_t tmStamp = (int64_t) tReader.UnzipValue(); CSphScopedPtr pSeg ( NULL ); CSphVector dKlist; int iRows = (int)tReader.UnzipValue(); if ( iRows ) { pSeg = new RtSegment_t(); pSeg->m_iRows = pSeg->m_iAliveRows = iRows; m_iReplayedRows += iRows; LoadVector ( tReader, pSeg->m_dWords ); pSeg->m_dWordCheckpoints.Resize ( (int) tReader.UnzipValue() ); // FIXME! 
sanity check ARRAY_FOREACH ( i, pSeg->m_dWordCheckpoints ) { pSeg->m_dWordCheckpoints[i].m_iOffset = (int) tReader.UnzipValue(); pSeg->m_dWordCheckpoints[i].m_iWordID = (SphWordID_t )tReader.UnzipValue(); } LoadVector ( tReader, pSeg->m_dDocs ); LoadVector ( tReader, pSeg->m_dHits ); LoadVector ( tReader, pSeg->m_dRows ); LoadVector ( tReader, pSeg->m_dStrings ); LoadVector ( tReader, pSeg->m_dMvas ); LoadVector ( tReader, pSeg->m_dKeywordCheckpoints ); } LoadVector ( tReader, dKlist ); // checksum if ( tReader.GetErrorFlag() || !tReader.CheckCrc ( "commit", tIndex.m_sName.cstr(), iTID, iTxnPos ) ) return false; // check TID if ( iTID tIndex.m_pRT->m_iTID ) { // we normally expect per-index TIDs to be sequential // but let's be graceful about that if ( iTID!=tIndex.m_pRT->m_iTID+1 ) sphWarning ( "binlog: commit: unexpected tid (index=%s, indextid="INT64_FMT", logtid="INT64_FMT", pos="INT64_FMT")", tIndex.m_sName.cstr(), tIndex.m_pRT->m_iTID, iTID, iTxnPos ); // cook checkpoint in case dict=keywords if ( tIndex.m_pRT->IsWordDict() && pSeg.Ptr() ) FixupSegmentCheckpoints ( pSeg.Ptr() ); // actually replay tIndex.m_pRT->CommitReplayable ( pSeg.LeakPtr(), dKlist ); // update committed tid on replay in case of unexpected / mismatched tid tIndex.m_pRT->m_iTID = iTID; } // update info tIndex.m_iMinTID = Min ( tIndex.m_iMinTID, iTID ); tIndex.m_iMaxTID = Max ( tIndex.m_iMaxTID, iTID ); tIndex.m_tmMin = Min ( tIndex.m_tmMin, tmStamp ); tIndex.m_tmMax = Max ( tIndex.m_tmMax, tmStamp ); return true; } bool RtBinlog_c::ReplayIndexAdd ( int iBinlog, const SmallStringHash_T & hIndexes, BinlogReader_c & tReader ) const { // load and check index const int64_t iTxnPos = tReader.GetPos(); BinlogFileDesc_t & tLog = m_dLogFiles[iBinlog]; uint64_t uVal = tReader.UnzipValue(); if ( (int)uVal!=tLog.m_dIndexInfos.GetLength() ) sphDie ( "binlog: indexadd: unexpected index id (id="UINT64_FMT", expected=%d, pos="INT64_FMT")", uVal, tLog.m_dIndexInfos.GetLength(), iTxnPos ); // load data CSphString sName = tReader.GetString(); // FIXME? use this for double checking? tReader.UnzipValue (); // TID tReader.UnzipValue (); // time if ( !tReader.CheckCrc ( "indexadd", sName.cstr(), 0, iTxnPos ) ) return false; // check for index name dupes ARRAY_FOREACH ( i, tLog.m_dIndexInfos ) if ( tLog.m_dIndexInfos[i].m_sName==sName ) sphDie ( "binlog: duplicate index name (name=%s, dupeid=%d, pos="INT64_FMT")", sName.cstr(), i, iTxnPos ); // not a dupe, lets add BinlogIndexInfo_t & tIndex = tLog.m_dIndexInfos.Add(); tIndex.m_sName = sName; // lookup index in the list of currently served ones CSphIndex ** ppIndex = hIndexes ( sName.cstr() ); CSphIndex * pIndex = ppIndex ? (*ppIndex) : NULL; if ( pIndex ) { tIndex.m_pIndex = pIndex; if ( pIndex->IsRT() ) tIndex.m_pRT = (RtIndex_t*)pIndex; tIndex.m_iPreReplayTID = pIndex->m_iTID; tIndex.m_iFlushedTID = pIndex->m_iTID; } // all ok // TID ranges will be now recomputed as we replay return true; } bool RtBinlog_c::ReplayUpdateAttributes ( int iBinlog, BinlogReader_c & tReader ) const { // load and lookup index const int64_t iTxnPos = tReader.GetPos(); BinlogFileDesc_t & tLog = m_dLogFiles[iBinlog]; BinlogIndexInfo_t & tIndex = ReplayIndexID ( tReader, tLog, "update" ); // load transaction data CSphAttrUpdate tUpd; int64_t iTID = (int64_t) tReader.UnzipValue(); int64_t tmStamp = (int64_t) tReader.UnzipValue(); tUpd.m_dAttrs.Resize ( (DWORD) tReader.UnzipValue() ); // FIXME! 
sanity check ARRAY_FOREACH ( i, tUpd.m_dAttrs ) { tUpd.m_dAttrs[i].m_sName = tReader.GetString(); tUpd.m_dAttrs[i].m_eAttrType = (ESphAttr) tReader.UnzipValue(); // safe, we'll crc check later } if ( tReader.GetErrorFlag() || !LoadVector ( tReader, tUpd.m_dPool ) || !LoadVector ( tReader, tUpd.m_dDocids ) || !LoadVector ( tReader, tUpd.m_dRowOffset ) || !tReader.CheckCrc ( "update", tIndex.m_sName.cstr(), iTID, iTxnPos ) ) { return false; } // check TID, time order in log if ( iTID tIndex.m_pIndex->m_iTID ) { // we normally expect per-index TIDs to be sequential // but let's be graceful about that if ( iTID!=tIndex.m_pIndex->m_iTID+1 ) sphWarning ( "binlog: update: unexpected tid (index=%s, indextid="INT64_FMT", logtid="INT64_FMT", pos="INT64_FMT")", tIndex.m_sName.cstr(), tIndex.m_pIndex->m_iTID, iTID, iTxnPos ); CSphString sError; tIndex.m_pIndex->UpdateAttributes ( tUpd, -1, sError ); // FIXME! check for errors // update committed tid on replay in case of unexpected / mismatched tid tIndex.m_pIndex->m_iTID = iTID; } // update info tIndex.m_iMinTID = Min ( tIndex.m_iMinTID, iTID ); tIndex.m_iMaxTID = Max ( tIndex.m_iMaxTID, iTID ); tIndex.m_tmMin = Min ( tIndex.m_tmMin, tmStamp ); tIndex.m_tmMax = Max ( tIndex.m_tmMax, tmStamp ); return true; } bool RtBinlog_c::ReplayCacheAdd ( int iBinlog, BinlogReader_c & tReader ) const { const int64_t iTxnPos = tReader.GetPos(); BinlogFileDesc_t & tLog = m_dLogFiles[iBinlog]; // load data CSphVector dCache; dCache.Resize ( (int) tReader.UnzipValue() ); // FIXME! sanity check ARRAY_FOREACH ( i, dCache ) { dCache[i].m_sName = tReader.GetString(); dCache[i].m_iMinTID = tReader.UnzipValue(); dCache[i].m_iMaxTID = tReader.UnzipValue(); dCache[i].m_iFlushedTID = tReader.UnzipValue(); dCache[i].m_tmMin = tReader.UnzipValue(); dCache[i].m_tmMax = tReader.UnzipValue(); } if ( !tReader.CheckCrc ( "cache", "", 0, iTxnPos ) ) return false; // if we arrived here by replay, let's verify everything // note that cached infos just passed checksumming, so the file is supposed to be clean! 
// in any case, broken log or not, we probably managed to replay something // so let's just report differences as warnings if ( dCache.GetLength()!=tLog.m_dIndexInfos.GetLength() ) { sphWarning ( "binlog: cache mismatch: %d indexes cached, %d replayed", dCache.GetLength(), tLog.m_dIndexInfos.GetLength() ); return true; } ARRAY_FOREACH ( i, dCache ) { BinlogIndexInfo_t & tCache = dCache[i]; BinlogIndexInfo_t & tIndex = tLog.m_dIndexInfos[i]; if ( tCache.m_sName!=tIndex.m_sName ) { sphWarning ( "binlog: cache mismatch: index %d name mismatch (%s cached, %s replayed)", i, tCache.m_sName.cstr(), tIndex.m_sName.cstr() ); continue; } if ( tCache.m_iMinTID!=tIndex.m_iMinTID || tCache.m_iMaxTID!=tIndex.m_iMaxTID ) { sphWarning ( "binlog: cache mismatch: index %s tid ranges mismatch (cached "INT64_FMT" to "INT64_FMT", replayed "INT64_FMT" to "INT64_FMT")", tCache.m_sName.cstr(), tCache.m_iMinTID, tCache.m_iMaxTID, tIndex.m_iMinTID, tIndex.m_iMaxTID ); } } return true; } ////////////////////////////////////////////////////////////////////////// ISphRtIndex * sphGetCurrentIndexRT() { RtAccum_t * pAcc = (RtAccum_t*) sphThreadGet ( g_tTlsAccumKey ); if ( pAcc ) return pAcc->m_pIndex; return NULL; } ISphRtIndex * sphCreateIndexRT ( const CSphSchema & tSchema, const char * sIndexName, DWORD uRamSize, const char * sPath, bool bKeywordDict ) { MEMORY ( SPH_MEM_IDX_RT ); return new RtIndex_t ( tSchema, sIndexName, uRamSize, sPath, bKeywordDict ); } void sphRTInit () { MEMORY ( SPH_MEM_BINLOG ); g_bRTChangesAllowed = false; Verify ( RtSegment_t::m_tSegmentSeq.Init() ); Verify ( sphThreadKeyCreate ( &g_tTlsAccumKey ) ); g_pRtBinlog = new RtBinlog_c(); if ( !g_pRtBinlog ) sphDie ( "binlog: failed to create binlog" ); g_pBinlog = g_pRtBinlog; } void sphRTConfigure ( const CSphConfigSection & hSearchd, bool bTestMode ) { assert ( g_pBinlog ); g_pRtBinlog->Configure ( hSearchd, bTestMode ); g_iRtFlushPeriod = hSearchd.GetInt ( "rt_flush_period", (int)g_iRtFlushPeriod ); // clip period to range ( 10 sec, million years ) g_iRtFlushPeriod = Max ( g_iRtFlushPeriod, 10 ); g_iRtFlushPeriod = Min ( g_iRtFlushPeriod, INT64_MAX ); } void sphRTDone () { sphThreadKeyDelete ( g_tTlsAccumKey ); Verify ( RtSegment_t::m_tSegmentSeq.Done() ); // its valid for "searchd --stop" case SafeDelete ( g_pBinlog ); } void sphReplayBinlog ( const SmallStringHash_T & hIndexes, DWORD uReplayFlags, ProgressCallbackSimple_t * pfnProgressCallback ) { MEMORY ( SPH_MEM_BINLOG ); g_pRtBinlog->Replay ( hIndexes, uReplayFlags, pfnProgressCallback ); g_pRtBinlog->CreateTimerThread(); g_bRTChangesAllowed = true; } bool sphRTSchemaConfigure ( const CSphConfigSection & hIndex, CSphSchema * pSchema, CSphString * pError ) { assert ( pSchema && pError ); CSphColumnInfo tCol; // fields for ( CSphVariant * v=hIndex("rt_field"); v; v=v->m_pNext ) { tCol.m_sName = v->cstr(); tCol.m_sName.ToLower(); pSchema->m_dFields.Add ( tCol ); } if ( !pSchema->m_dFields.GetLength() ) { pError->SetSprintf ( "no fields configured (use rt_field directive)" ); return false; } if ( pSchema->m_dFields.GetLength()>SPH_MAX_FIELDS ) { pError->SetSprintf ( "too many fields (fields=%d, max=%d)", pSchema->m_dFields.GetLength(), SPH_MAX_FIELDS ); return false; } // attrs const int iNumTypes = 7; const char * sTypes[iNumTypes] = { "rt_attr_uint", "rt_attr_bigint", "rt_attr_float", "rt_attr_timestamp", "rt_attr_string", "rt_attr_multi", "rt_attr_multi_64" }; const ESphAttr iTypes[iNumTypes] = { SPH_ATTR_INTEGER, SPH_ATTR_BIGINT, SPH_ATTR_FLOAT, SPH_ATTR_TIMESTAMP, SPH_ATTR_STRING, 
SPH_ATTR_UINT32SET, SPH_ATTR_UINT64SET }; for ( int iType=0; iTypem_pNext ) { tCol.m_sName = v->cstr(); tCol.m_sName.ToLower(); tCol.m_eAttrType = iTypes[iType]; pSchema->AddAttr ( tCol, false ); } } return true; } // // $Id: sphinxrt.cpp 3128 2012-03-01 01:44:34Z shodan $ // sphinx-2.0.4-release/src/yysphinxselect.c0000644000176700017710000012504711714101214017772 0ustar deogardeogar/* A Bison parser, made by GNU Bison 1.875. */ /* Skeleton parser for Yacc-like parsing with Bison, Copyright (C) 1984, 1989, 1990, 2000, 2001, 2002 Free Software Foundation, Inc. This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */ /* As a special exception, when this file is copied by Bison into a Bison output file, you may use that output file without restriction. This special exception was added by the Free Software Foundation in version 1.24 of Bison. */ /* Written by Richard Stallman by simplifying the original so called ``semantic'' parser. */ /* All symbols defined below should begin with yy or YY, to avoid infringing on user name space. This should be done even for local variables, as they might otherwise be expanded by user macros. There are some unavoidable exceptions within include files to define necessary library symbols; they are noted "INFRINGES ON USER NAME SPACE" below. */ /* Identify Bison output. */ #define YYBISON 1 /* Skeleton name. */ #define YYSKELETON_NAME "yacc.c" /* Pure parsers. */ #define YYPURE 1 /* Using locations. */ #define YYLSP_NEEDED 0 /* Tokens. */ #ifndef YYTOKENTYPE # define YYTOKENTYPE /* Put the tokens into the symbol table, so that GDB and other debuggers know about them. */ enum yytokentype { SEL_TOKEN = 258, SEL_ID = 259, SEL_AS = 260, SEL_AVG = 261, SEL_MAX = 262, SEL_MIN = 263, SEL_SUM = 264, SEL_COUNT = 265, SEL_WEIGHT = 266, SEL_DISTINCT = 267, TOK_NEG = 268, TOK_LTE = 269, TOK_GTE = 270, TOK_EQ = 271, TOK_NE = 272, TOK_CONST_STRING = 273, TOK_OR = 274, TOK_AND = 275, TOK_NOT = 276 }; #endif #define SEL_TOKEN 258 #define SEL_ID 259 #define SEL_AS 260 #define SEL_AVG 261 #define SEL_MAX 262 #define SEL_MIN 263 #define SEL_SUM 264 #define SEL_COUNT 265 #define SEL_WEIGHT 266 #define SEL_DISTINCT 267 #define TOK_NEG 268 #define TOK_LTE 269 #define TOK_GTE 270 #define TOK_EQ 271 #define TOK_NE 272 #define TOK_CONST_STRING 273 #define TOK_OR 274 #define TOK_AND 275 #define TOK_NOT 276 /* Copy the first part of user declarations. */ #if USE_WINDOWS #pragma warning(push,1) #pragma warning(disable:4702) // unreachable code #endif /* Enabling traces. */ #ifndef YYDEBUG # define YYDEBUG 0 #endif /* Enabling verbose error messages. */ #ifdef YYERROR_VERBOSE # undef YYERROR_VERBOSE # define YYERROR_VERBOSE 1 #else # define YYERROR_VERBOSE 1 #endif #if ! defined (YYSTYPE) && ! 
defined (YYSTYPE_IS_DECLARED) typedef int YYSTYPE; # define yystype YYSTYPE /* obsolescent; will be withdrawn */ # define YYSTYPE_IS_DECLARED 1 # define YYSTYPE_IS_TRIVIAL 1 #endif /* Copy the second part of user declarations. */ /* Line 214 of yacc.c. */ #if ! defined (yyoverflow) || YYERROR_VERBOSE /* The parser invokes alloca or malloc; define the necessary symbols. */ # if YYSTACK_USE_ALLOCA # define YYSTACK_ALLOC alloca # else # ifndef YYSTACK_USE_ALLOCA # if defined (alloca) || defined (_ALLOCA_H) # define YYSTACK_ALLOC alloca # else # ifdef __GNUC__ # define YYSTACK_ALLOC __builtin_alloca # endif # endif # endif # endif # ifdef YYSTACK_ALLOC /* Pacify GCC's `empty if-body' warning. */ # define YYSTACK_FREE(Ptr) do { /* empty */; } while (0) # else # if defined (__STDC__) || defined (__cplusplus) # include /* INFRINGES ON USER NAME SPACE */ # define YYSIZE_T size_t # endif # define YYSTACK_ALLOC malloc # define YYSTACK_FREE free # endif #endif /* ! defined (yyoverflow) || YYERROR_VERBOSE */ #if (! defined (yyoverflow) \ && (! defined (__cplusplus) \ || (YYSTYPE_IS_TRIVIAL))) /* A type that is properly aligned for any stack member. */ union yyalloc { short yyss; YYSTYPE yyvs; }; /* The size of the maximum gap between one aligned stack and the next. */ # define YYSTACK_GAP_MAXIMUM (sizeof (union yyalloc) - 1) /* The size of an array large to enough to hold all stacks, each with N elements. */ # define YYSTACK_BYTES(N) \ ((N) * (sizeof (short) + sizeof (YYSTYPE)) \ + YYSTACK_GAP_MAXIMUM) /* Copy COUNT objects from FROM to TO. The source and destination do not overlap. */ # ifndef YYCOPY # if 1 < __GNUC__ # define YYCOPY(To, From, Count) \ __builtin_memcpy (To, From, (Count) * sizeof (*(From))) # else # define YYCOPY(To, From, Count) \ do \ { \ register YYSIZE_T yyi; \ for (yyi = 0; yyi < (Count); yyi++) \ (To)[yyi] = (From)[yyi]; \ } \ while (0) # endif # endif /* Relocate STACK from its old location to the new one. The local variables YYSIZE and YYSTACKSIZE give the old and new number of elements in the stack, and YYPTR gives the new location of the stack. Advance YYPTR to a properly aligned location for the next stack. */ # define YYSTACK_RELOCATE(Stack) \ do \ { \ YYSIZE_T yynewbytes; \ YYCOPY (&yyptr->Stack, Stack, yysize); \ Stack = &yyptr->Stack; \ yynewbytes = yystacksize * sizeof (*Stack) + YYSTACK_GAP_MAXIMUM; \ yyptr += yynewbytes / sizeof (*yyptr); \ } \ while (0) #endif #if defined (__STDC__) || defined (__cplusplus) typedef signed char yysigned_char; #else typedef short yysigned_char; #endif /* YYFINAL -- State number of the termination state. */ #define YYFINAL 31 /* YYLAST -- Last index in YYTABLE. */ #define YYLAST 360 /* YYNTOKENS -- Number of terminals. */ #define YYNTOKENS 33 /* YYNNTS -- Number of nonterminals. */ #define YYNNTS 10 /* YYNRULES -- Number of rules. */ #define YYNRULES 45 /* YYNRULES -- Number of states. */ #define YYNSTATES 99 /* YYTRANSLATE(YYLEX) -- Bison symbol number corresponding to YYLEX. */ #define YYUNDEFTOK 2 #define YYMAXUTOK 276 #define YYTRANSLATE(YYX) \ ((unsigned int) (YYX) <= YYMAXUTOK ? yytranslate[YYX] : YYUNDEFTOK) /* YYTRANSLATE[YYLEX] -- Bison symbol number corresponding to YYLEX. 
*/ static const unsigned char yytranslate[] = { 0, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 22, 2, 31, 32, 27, 25, 30, 26, 2, 28, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 23, 2, 24, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 21, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 29 }; #if YYDEBUG /* YYPRHS[YYN] -- Index of the first RHS symbol of rule number YYN in YYRHS. */ static const unsigned char yyprhs[] = { 0, 0, 3, 5, 9, 11, 14, 15, 17, 20, 22, 27, 32, 37, 42, 47, 51, 57, 59, 62, 65, 69, 73, 77, 81, 85, 89, 93, 97, 101, 105, 109, 113, 117, 121, 125, 127, 129, 131, 136, 140, 147, 154, 156, 160, 162 }; /* YYRHS -- A `-1'-separated list of the rules' RHS. */ static const yysigned_char yyrhs[] = { 34, 0, -1, 35, -1, 34, 30, 35, -1, 27, -1, 37, 36, -1, -1, 3, -1, 5, 3, -1, 38, -1, 6, 31, 38, 32, -1, 7, 31, 38, 32, -1, 8, 31, 38, 32, -1, 9, 31, 38, 32, -1, 10, 31, 27, 32, -1, 11, 31, 32, -1, 10, 31, 12, 3, 32, -1, 39, -1, 26, 38, -1, 29, 38, -1, 38, 25, 38, -1, 38, 26, 38, -1, 38, 27, 38, -1, 38, 28, 38, -1, 38, 23, 38, -1, 38, 24, 38, -1, 38, 21, 38, -1, 38, 22, 38, -1, 38, 14, 38, -1, 38, 15, 38, -1, 38, 16, 38, -1, 38, 17, 38, -1, 38, 20, 38, -1, 38, 19, 38, -1, 31, 38, 32, -1, 40, -1, 4, -1, 3, -1, 3, 31, 41, 32, -1, 3, 31, 32, -1, 8, 31, 38, 30, 38, 32, -1, 7, 31, 38, 30, 38, 32, -1, 42, -1, 41, 30, 42, -1, 38, -1, 18, -1 }; /* YYRLINE[YYN] -- source line where rule number YYN was defined. */ static const unsigned char yyrline[] = { 0, 45, 45, 46, 50, 51, 53, 55, 56, 60, 61, 62, 63, 64, 65, 66, 67, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 95, 96, 99, 100, 101, 102, 106, 107, 111, 112 }; #endif #if YYDEBUG || YYERROR_VERBOSE /* YYTNME[SYMBOL-NUM] -- String name of the symbol SYMBOL-NUM. First, the terminals, then, starting at YYNTOKENS, nonterminals. */ static const char *const yytname[] = { "$end", "error", "$undefined", "SEL_TOKEN", "SEL_ID", "SEL_AS", "SEL_AVG", "SEL_MAX", "SEL_MIN", "SEL_SUM", "SEL_COUNT", "SEL_WEIGHT", "SEL_DISTINCT", "TOK_NEG", "TOK_LTE", "TOK_GTE", "TOK_EQ", "TOK_NE", "TOK_CONST_STRING", "TOK_OR", "TOK_AND", "'|'", "'&'", "'<'", "'>'", "'+'", "'-'", "'*'", "'/'", "TOK_NOT", "','", "'('", "')'", "$accept", "select_list", "select_item", "opt_alias", "select_expr", "expr", "select_atom", "function", "arglist", "arg", 0 }; #endif # ifdef YYPRINT /* YYTOKNUM[YYLEX-NUM] -- Internal token number corresponding to token YYLEX-NUM. */ static const unsigned short yytoknum[] = { 0, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 124, 38, 60, 62, 43, 45, 42, 47, 276, 44, 40, 41 }; # endif /* YYR1[YYN] -- Symbol number of symbol that rule YYN derives. 
*/ static const unsigned char yyr1[] = { 0, 33, 34, 34, 35, 35, 36, 36, 36, 37, 37, 37, 37, 37, 37, 37, 37, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 39, 39, 40, 40, 40, 40, 41, 41, 42, 42 }; /* YYR2[YYN] -- Number of symbols composing right hand side of rule YYN. */ static const unsigned char yyr2[] = { 0, 2, 1, 3, 1, 2, 0, 1, 2, 1, 4, 4, 4, 4, 4, 3, 5, 1, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 1, 1, 1, 4, 3, 6, 6, 1, 3, 1, 1 }; /* YYDEFACT[STATE-NAME] -- Default rule to reduce with in state STATE-NUM when YYTABLE doesn't specify something else to do. Zero means the default is an error. */ static const unsigned char yydefact[] = { 0, 37, 36, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 2, 6, 9, 17, 35, 0, 0, 0, 0, 0, 0, 0, 0, 0, 18, 19, 0, 1, 0, 7, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 45, 39, 44, 0, 42, 0, 0, 0, 0, 0, 0, 15, 0, 0, 34, 3, 8, 28, 29, 30, 31, 33, 32, 26, 27, 24, 25, 20, 21, 22, 23, 0, 38, 10, 0, 11, 0, 12, 13, 0, 14, 0, 0, 43, 0, 0, 16, 41, 40 }; /* YYDEFGOTO[NTERM-NUM]. */ static const yysigned_char yydefgoto[] = { -1, 13, 14, 35, 15, 16, 17, 18, 53, 54 }; /* YYPACT[STATE-NUM] -- Index in YYTABLE of the portion describing STATE-NUM. */ #define YYPACT_NINF -22 static const short yypact[] = { 52, -7, -22, -5, 10, 13, 33, 35, 36, 87, -22, 87, 87, 15, -22, 20, 272, -22, -22, 39, 87, 87, 87, 87, -11, 17, 38, 42, -22, -22, 143, -22, 52, -22, 71, -22, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, -22, -22, 272, 18, -22, 162, 105, 124, 181, 73, 48, -22, 87, 87, -22, -22, -22, -21, -21, -6, -6, 287, 302, 317, 332, -21, -21, 24, 24, -22, -22, 81, -22, -22, 87, -22, 87, -22, -22, 50, -22, 238, 255, -22, 200, 219, -22, -22, -22 }; /* YYPGOTO[NTERM-NUM]. */ static const yysigned_char yypgoto[] = { -22, -22, 54, -22, -22, -9, -22, -22, -22, 6 }; /* YYTABLE[YYPACT[STATE-NUM]]. What to do in state STATE-NUM. If positive, shift that token. If negative, reduce the rule which number is the opposite. If zero, do what YYDEFACT says. If YYTABLE_NINF, syntax error. 
*/ #define YYTABLE_NINF -1 static const unsigned char yytable[] = { 28, 59, 29, 30, 46, 47, 48, 49, 36, 37, 52, 55, 56, 57, 58, 31, 60, 44, 45, 46, 47, 48, 49, 33, 19, 34, 20, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 21, 1, 2, 22, 32, 26, 27, 81, 61, 82, 48, 49, 91, 92, 1, 2, 50, 3, 4, 5, 6, 7, 8, 23, 9, 24, 25, 11, 62, 12, 51, 52, 63, 66, 94, 89, 95, 9, 10, 90, 11, 96, 12, 1, 2, 65, 93, 26, 27, 1, 2, 0, 0, 26, 27, 0, 0, 0, 50, 0, 0, 0, 0, 0, 0, 0, 9, 0, 0, 11, 0, 12, 9, 0, 0, 11, 0, 12, 36, 37, 38, 39, 0, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 0, 84, 0, 85, 36, 37, 38, 39, 0, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 0, 86, 0, 87, 36, 37, 38, 39, 0, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 0, 0, 0, 64, 36, 37, 38, 39, 0, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 0, 0, 0, 83, 36, 37, 38, 39, 0, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 0, 0, 0, 88, 36, 37, 38, 39, 0, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 0, 0, 0, 97, 36, 37, 38, 39, 0, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 0, 0, 0, 98, 36, 37, 38, 39, 0, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 0, 84, 36, 37, 38, 39, 0, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 0, 86, 36, 37, 38, 39, 0, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 36, 37, 38, 39, 0, 0, 41, 42, 43, 44, 45, 46, 47, 48, 49, 36, 37, 38, 39, 0, 0, 0, 42, 43, 44, 45, 46, 47, 48, 49, 36, 37, 38, 39, 0, 0, 0, 0, 43, 44, 45, 46, 47, 48, 49, 36, 37, 38, 39, 0, 0, 0, 0, 0, 44, 45, 46, 47, 48, 49 }; static const yysigned_char yycheck[] = { 9, 12, 11, 12, 25, 26, 27, 28, 14, 15, 19, 20, 21, 22, 23, 0, 27, 23, 24, 25, 26, 27, 28, 3, 31, 5, 31, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 31, 3, 4, 31, 30, 7, 8, 30, 32, 32, 27, 28, 62, 63, 3, 4, 18, 6, 7, 8, 9, 10, 11, 31, 26, 31, 31, 29, 31, 31, 32, 81, 31, 3, 84, 3, 86, 26, 27, 32, 29, 32, 31, 3, 4, 32, 81, 7, 8, 3, 4, -1, -1, 7, 8, -1, -1, -1, 18, -1, -1, -1, -1, -1, -1, -1, 26, -1, -1, 29, -1, 31, 26, -1, -1, 29, -1, 31, 14, 15, 16, 17, -1, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, -1, 30, -1, 32, 14, 15, 16, 17, -1, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, -1, 30, -1, 32, 14, 15, 16, 17, -1, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, -1, -1, -1, 32, 14, 15, 16, 17, -1, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, -1, -1, -1, 32, 14, 15, 16, 17, -1, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, -1, -1, -1, 32, 14, 15, 16, 17, -1, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, -1, -1, -1, 32, 14, 15, 16, 17, -1, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, -1, -1, -1, 32, 14, 15, 16, 17, -1, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, -1, 30, 14, 15, 16, 17, -1, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, -1, 30, 14, 15, 16, 17, -1, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 14, 15, 16, 17, -1, -1, 20, 21, 22, 23, 24, 25, 26, 27, 28, 14, 15, 16, 17, -1, -1, -1, 21, 22, 23, 24, 25, 26, 27, 28, 14, 15, 16, 17, -1, -1, -1, -1, 22, 23, 24, 25, 26, 27, 28, 14, 15, 16, 17, -1, -1, -1, -1, -1, 23, 24, 25, 26, 27, 28 }; /* YYSTOS[STATE-NUM] -- The (internal number of the) accessing symbol of state STATE-NUM. */ static const unsigned char yystos[] = { 0, 3, 4, 6, 7, 8, 9, 10, 11, 26, 27, 29, 31, 34, 35, 37, 38, 39, 40, 31, 31, 31, 31, 31, 31, 31, 7, 8, 38, 38, 38, 0, 30, 3, 5, 36, 14, 15, 16, 17, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 18, 32, 38, 41, 42, 38, 38, 38, 38, 12, 27, 32, 31, 31, 32, 35, 3, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 30, 32, 32, 30, 32, 30, 32, 32, 3, 32, 38, 38, 42, 38, 38, 32, 32, 32 }; #if ! defined (YYSIZE_T) && defined (__SIZE_TYPE__) # define YYSIZE_T __SIZE_TYPE__ #endif #if ! 
defined (YYSIZE_T) && defined (size_t) # define YYSIZE_T size_t #endif #if ! defined (YYSIZE_T) # if defined (__STDC__) || defined (__cplusplus) # include /* INFRINGES ON USER NAME SPACE */ # define YYSIZE_T size_t # endif #endif #if ! defined (YYSIZE_T) # define YYSIZE_T unsigned int #endif #define yyerrok (yyerrstatus = 0) #define yyclearin (yychar = YYEMPTY) #define YYEMPTY (-2) #define YYEOF 0 #define YYACCEPT goto yyacceptlab #define YYABORT goto yyabortlab #define YYERROR goto yyerrlab1 /* Like YYERROR except do call yyerror. This remains here temporarily to ease the transition to the new meaning of YYERROR, for GCC. Once GCC version 2 has supplanted version 1, this can go. */ #define YYFAIL goto yyerrlab #define YYRECOVERING() (!!yyerrstatus) #define YYBACKUP(Token, Value) \ do \ if (yychar == YYEMPTY && yylen == 1) \ { \ yychar = (Token); \ yylval = (Value); \ yytoken = YYTRANSLATE (yychar); \ YYPOPSTACK; \ goto yybackup; \ } \ else \ { \ yyerror (pParser, "syntax error: cannot back up");\ YYERROR; \ } \ while (0) #define YYTERROR 1 #define YYERRCODE 256 /* YYLLOC_DEFAULT -- Compute the default location (before the actions are run). */ #ifndef YYLLOC_DEFAULT # define YYLLOC_DEFAULT(Current, Rhs, N) \ Current.first_line = Rhs[1].first_line; \ Current.first_column = Rhs[1].first_column; \ Current.last_line = Rhs[N].last_line; \ Current.last_column = Rhs[N].last_column; #endif /* YYLEX -- calling `yylex' with the right arguments. */ #ifdef YYLEX_PARAM # define YYLEX yylex (&yylval, YYLEX_PARAM) #else # define YYLEX yylex (&yylval, pParser) #endif /* Enable debugging if requested. */ #if YYDEBUG # ifndef YYFPRINTF # include /* INFRINGES ON USER NAME SPACE */ # define YYFPRINTF fprintf # endif # define YYDPRINTF(Args) \ do { \ if (yydebug) \ YYFPRINTF Args; \ } while (0) # define YYDSYMPRINT(Args) \ do { \ if (yydebug) \ yysymprint Args; \ } while (0) # define YYDSYMPRINTF(Title, Token, Value, Location) \ do { \ if (yydebug) \ { \ YYFPRINTF (stderr, "%s ", Title); \ yysymprint (stderr, \ Token, Value); \ YYFPRINTF (stderr, "\n"); \ } \ } while (0) /*------------------------------------------------------------------. | yy_stack_print -- Print the state stack from its BOTTOM up to its | | TOP (cinluded). | `------------------------------------------------------------------*/ #if defined (__STDC__) || defined (__cplusplus) static void yy_stack_print (short *bottom, short *top) #else static void yy_stack_print (bottom, top) short *bottom; short *top; #endif { YYFPRINTF (stderr, "Stack now"); for (/* Nothing. */; bottom <= top; ++bottom) YYFPRINTF (stderr, " %d", *bottom); YYFPRINTF (stderr, "\n"); } # define YY_STACK_PRINT(Bottom, Top) \ do { \ if (yydebug) \ yy_stack_print ((Bottom), (Top)); \ } while (0) /*------------------------------------------------. | Report that the YYRULE is going to be reduced. | `------------------------------------------------*/ #if defined (__STDC__) || defined (__cplusplus) static void yy_reduce_print (int yyrule) #else static void yy_reduce_print (yyrule) int yyrule; #endif { int yyi; unsigned int yylineno = yyrline[yyrule]; YYFPRINTF (stderr, "Reducing stack by rule %d (line %u), ", yyrule - 1, yylineno); /* Print the symbols being reduced, and their result. */ for (yyi = yyprhs[yyrule]; 0 <= yyrhs[yyi]; yyi++) YYFPRINTF (stderr, "%s ", yytname [yyrhs[yyi]]); YYFPRINTF (stderr, "-> %s\n", yytname [yyr1[yyrule]]); } # define YY_REDUCE_PRINT(Rule) \ do { \ if (yydebug) \ yy_reduce_print (Rule); \ } while (0) /* Nonzero means print parse trace. 
It is left uninitialized so that multiple parsers can coexist. */ int yydebug; #else /* !YYDEBUG */ # define YYDPRINTF(Args) # define YYDSYMPRINT(Args) # define YYDSYMPRINTF(Title, Token, Value, Location) # define YY_STACK_PRINT(Bottom, Top) # define YY_REDUCE_PRINT(Rule) #endif /* !YYDEBUG */ /* YYINITDEPTH -- initial size of the parser's stacks. */ #ifndef YYINITDEPTH # define YYINITDEPTH 200 #endif /* YYMAXDEPTH -- maximum size the stacks can grow to (effective only if the built-in stack extension method is used). Do not make this value too large; the results are undefined if SIZE_MAX < YYSTACK_BYTES (YYMAXDEPTH) evaluated with infinite-precision integer arithmetic. */ #if YYMAXDEPTH == 0 # undef YYMAXDEPTH #endif #ifndef YYMAXDEPTH # define YYMAXDEPTH 10000 #endif #if YYERROR_VERBOSE # ifndef yystrlen # if defined (__GLIBC__) && defined (_STRING_H) # define yystrlen strlen # else /* Return the length of YYSTR. */ static YYSIZE_T # if defined (__STDC__) || defined (__cplusplus) yystrlen (const char *yystr) # else yystrlen (yystr) const char *yystr; # endif { register const char *yys = yystr; while (*yys++ != '\0') continue; return yys - yystr - 1; } # endif # endif # ifndef yystpcpy # if defined (__GLIBC__) && defined (_STRING_H) && defined (_GNU_SOURCE) # define yystpcpy stpcpy # else /* Copy YYSRC to YYDEST, returning the address of the terminating '\0' in YYDEST. */ static char * # if defined (__STDC__) || defined (__cplusplus) yystpcpy (char *yydest, const char *yysrc) # else yystpcpy (yydest, yysrc) char *yydest; const char *yysrc; # endif { register char *yyd = yydest; register const char *yys = yysrc; while ((*yyd++ = *yys++) != '\0') continue; return yyd - 1; } # endif # endif #endif /* !YYERROR_VERBOSE */ #if YYDEBUG /*--------------------------------. | Print this symbol on YYOUTPUT. | `--------------------------------*/ #if defined (__STDC__) || defined (__cplusplus) static void yysymprint (FILE *yyoutput, int yytype, YYSTYPE *yyvaluep) #else static void yysymprint (yyoutput, yytype, yyvaluep) FILE *yyoutput; int yytype; YYSTYPE *yyvaluep; #endif { /* Pacify ``unused variable'' warnings. */ (void) yyvaluep; if (yytype < YYNTOKENS) { YYFPRINTF (yyoutput, "token %s (", yytname[yytype]); # ifdef YYPRINT YYPRINT (yyoutput, yytoknum[yytype], *yyvaluep); # endif } else YYFPRINTF (yyoutput, "nterm %s (", yytname[yytype]); switch (yytype) { default: break; } YYFPRINTF (yyoutput, ")"); } #endif /* ! YYDEBUG */ /*-----------------------------------------------. | Release the memory associated to this symbol. | `-----------------------------------------------*/ #if defined (__STDC__) || defined (__cplusplus) static void yydestruct (int yytype, YYSTYPE *yyvaluep) #else static void yydestruct (yytype, yyvaluep) int yytype; YYSTYPE *yyvaluep; #endif { /* Pacify ``unused variable'' warnings. */ (void) yyvaluep; switch (yytype) { default: break; } } /* Prevent warnings from -Wmissing-prototypes. */ #ifdef YYPARSE_PARAM # if defined (__STDC__) || defined (__cplusplus) int yyparse (void *YYPARSE_PARAM); # else int yyparse (); # endif #else /* ! YYPARSE_PARAM */ #if defined (__STDC__) || defined (__cplusplus) int yyparse ( SelectParser_t * pParser ); #else int yyparse (); #endif #endif /* ! YYPARSE_PARAM */ /*----------. | yyparse. | `----------*/ #ifdef YYPARSE_PARAM # if defined (__STDC__) || defined (__cplusplus) int yyparse (void *YYPARSE_PARAM) # else int yyparse (YYPARSE_PARAM) void *YYPARSE_PARAM; # endif #else /* ! 
YYPARSE_PARAM */ #if defined (__STDC__) || defined (__cplusplus) int yyparse ( SelectParser_t * pParser ) #else int yyparse (pParser) SelectParser_t * pParser ; #endif #endif { /* The lookahead symbol. */ int yychar; /* The semantic value of the lookahead symbol. */ YYSTYPE yylval; /* Number of syntax errors so far. */ int yynerrs; register int yystate; register int yyn; int yyresult; /* Number of tokens to shift before error messages enabled. */ int yyerrstatus; /* Lookahead token as an internal (translated) token number. */ int yytoken = 0; /* Three stacks and their tools: `yyss': related to states, `yyvs': related to semantic values, `yyls': related to locations. Refer to the stacks thru separate pointers, to allow yyoverflow to reallocate them elsewhere. */ /* The state stack. */ short yyssa[YYINITDEPTH]; short *yyss = yyssa; register short *yyssp; /* The semantic value stack. */ YYSTYPE yyvsa[YYINITDEPTH]; YYSTYPE *yyvs = yyvsa; register YYSTYPE *yyvsp; #define YYPOPSTACK (yyvsp--, yyssp--) YYSIZE_T yystacksize = YYINITDEPTH; /* The variables used to return semantic value and location from the action routines. */ YYSTYPE yyval; /* When reducing, the number of symbols on the RHS of the reduced rule. */ int yylen; YYDPRINTF ((stderr, "Starting parse\n")); yystate = 0; yyerrstatus = 0; yynerrs = 0; yychar = YYEMPTY; /* Cause a token to be read. */ /* Initialize stack pointers. Waste one element of value and location stack so that they stay on the same level as the state stack. The wasted elements are never initialized. */ yyssp = yyss; yyvsp = yyvs; goto yysetstate; /*------------------------------------------------------------. | yynewstate -- Push a new state, which is found in yystate. | `------------------------------------------------------------*/ yynewstate: /* In all cases, when you get here, the value and location stacks have just been pushed. so pushing a state here evens the stacks. */ yyssp++; yysetstate: *yyssp = yystate; if (yyss + yystacksize - 1 <= yyssp) { /* Get the current used size of the three stacks, in elements. */ YYSIZE_T yysize = yyssp - yyss + 1; #ifdef yyoverflow { /* Give user a chance to reallocate the stack. Use copies of these so that the &'s don't force the real ones into memory. */ YYSTYPE *yyvs1 = yyvs; short *yyss1 = yyss; /* Each stack pointer address is followed by the size of the data in use in that stack, in bytes. This used to be a conditional around just the two extra args, but that might be undefined if yyoverflow is a macro. */ yyoverflow ("parser stack overflow", &yyss1, yysize * sizeof (*yyssp), &yyvs1, yysize * sizeof (*yyvsp), &yystacksize); yyss = yyss1; yyvs = yyvs1; } #else /* no yyoverflow */ # ifndef YYSTACK_RELOCATE goto yyoverflowlab; # else /* Extend the stack our own way. */ if (YYMAXDEPTH <= yystacksize) goto yyoverflowlab; yystacksize *= 2; if (YYMAXDEPTH < yystacksize) yystacksize = YYMAXDEPTH; { short *yyss1 = yyss; union yyalloc *yyptr = (union yyalloc *) YYSTACK_ALLOC (YYSTACK_BYTES (yystacksize)); if (! yyptr) goto yyoverflowlab; YYSTACK_RELOCATE (yyss); YYSTACK_RELOCATE (yyvs); # undef YYSTACK_RELOCATE if (yyss1 != yyssa) YYSTACK_FREE (yyss1); } # endif #endif /* no yyoverflow */ yyssp = yyss + yysize - 1; yyvsp = yyvs + yysize - 1; YYDPRINTF ((stderr, "Stack size increased to %lu\n", (unsigned long int) yystacksize)); if (yyss + yystacksize - 1 <= yyssp) YYABORT; } YYDPRINTF ((stderr, "Entering state %d\n", yystate)); goto yybackup; /*-----------. | yybackup. 
| `-----------*/ yybackup: /* Do appropriate processing given the current state. */ /* Read a lookahead token if we need one and don't already have one. */ /* yyresume: */ /* First try to decide what to do without reference to lookahead token. */ yyn = yypact[yystate]; if (yyn == YYPACT_NINF) goto yydefault; /* Not known => get a lookahead token if don't already have one. */ /* YYCHAR is either YYEMPTY or YYEOF or a valid lookahead symbol. */ if (yychar == YYEMPTY) { YYDPRINTF ((stderr, "Reading a token: ")); yychar = YYLEX; } if (yychar <= YYEOF) { yychar = yytoken = YYEOF; YYDPRINTF ((stderr, "Now at end of input.\n")); } else { yytoken = YYTRANSLATE (yychar); YYDSYMPRINTF ("Next token is", yytoken, &yylval, &yylloc); } /* If the proper action on seeing token YYTOKEN is to reduce or to detect an error, take that action. */ yyn += yytoken; if (yyn < 0 || YYLAST < yyn || yycheck[yyn] != yytoken) goto yydefault; yyn = yytable[yyn]; if (yyn <= 0) { if (yyn == 0 || yyn == YYTABLE_NINF) goto yyerrlab; yyn = -yyn; goto yyreduce; } if (yyn == YYFINAL) YYACCEPT; /* Shift the lookahead token. */ YYDPRINTF ((stderr, "Shifting token %s, ", yytname[yytoken])); /* Discard the token being shifted unless it is eof. */ if (yychar != YYEOF) yychar = YYEMPTY; *++yyvsp = yylval; /* Count tokens shifted since error; after three, turn off error status. */ if (yyerrstatus) yyerrstatus--; yystate = yyn; goto yynewstate; /*-----------------------------------------------------------. | yydefault -- do the default action for the current state. | `-----------------------------------------------------------*/ yydefault: yyn = yydefact[yystate]; if (yyn == 0) goto yyerrlab; goto yyreduce; /*-----------------------------. | yyreduce -- Do a reduction. | `-----------------------------*/ yyreduce: /* yyn is the number of a rule to reduce with. */ yylen = yyr2[yyn]; /* If YYLEN is nonzero, implement the default value of the action: `$$ = $1'. Otherwise, the following line sets YYVAL to garbage. This behavior is undocumented and Bison users should not rely upon it. Assigning to YYVAL unconditionally makes the parser a bit smaller, and it avoids a GCC warning that YYVAL may be used uninitialized. 
*/ yyval = yyvsp[1-yylen]; YY_REDUCE_PRINT (yyn); switch (yyn) { case 4: { pParser->AddItem ( &yyvsp[0] ); ;} break; case 7: { pParser->AliasLastItem ( &yyvsp[0] ); ;} break; case 8: { pParser->AliasLastItem ( &yyvsp[0] ); ;} break; case 9: { pParser->AddItem ( &yyvsp[0] ); ;} break; case 10: { pParser->AddItem ( &yyvsp[-1], SPH_AGGR_AVG, &yyvsp[-3], &yyvsp[0] ); ;} break; case 11: { pParser->AddItem ( &yyvsp[-1], SPH_AGGR_MAX, &yyvsp[-3], &yyvsp[0] ); ;} break; case 12: { pParser->AddItem ( &yyvsp[-1], SPH_AGGR_MIN, &yyvsp[-3], &yyvsp[0] ); ;} break; case 13: { pParser->AddItem ( &yyvsp[-1], SPH_AGGR_SUM, &yyvsp[-3], &yyvsp[0] ); ;} break; case 14: { pParser->AddItem ( "count(*)", &yyvsp[-3], &yyvsp[0] ); ;} break; case 15: { pParser->AddItem ( "weight()", &yyvsp[-2], &yyvsp[0] ); ;} break; case 16: { pParser->AddItem ( "@distinct", &yyvsp[-4], &yyvsp[0] ); ;} break; case 18: { yyval = yyvsp[-1]; yyval.m_iEnd = yyvsp[0].m_iEnd; ;} break; case 19: { yyval = yyvsp[-1]; yyval.m_iEnd = yyvsp[0].m_iEnd; ;} break; case 20: { yyval = yyvsp[-2]; yyval.m_iEnd = yyvsp[0].m_iEnd; ;} break; case 21: { yyval = yyvsp[-2]; yyval.m_iEnd = yyvsp[0].m_iEnd; ;} break; case 22: { yyval = yyvsp[-2]; yyval.m_iEnd = yyvsp[0].m_iEnd; ;} break; case 23: { yyval = yyvsp[-2]; yyval.m_iEnd = yyvsp[0].m_iEnd; ;} break; case 24: { yyval = yyvsp[-2]; yyval.m_iEnd = yyvsp[0].m_iEnd; ;} break; case 25: { yyval = yyvsp[-2]; yyval.m_iEnd = yyvsp[0].m_iEnd; ;} break; case 26: { yyval = yyvsp[-2]; yyval.m_iEnd = yyvsp[0].m_iEnd; ;} break; case 27: { yyval = yyvsp[-2]; yyval.m_iEnd = yyvsp[0].m_iEnd; ;} break; case 28: { yyval = yyvsp[-2]; yyval.m_iEnd = yyvsp[0].m_iEnd; ;} break; case 29: { yyval = yyvsp[-2]; yyval.m_iEnd = yyvsp[0].m_iEnd; ;} break; case 30: { yyval = yyvsp[-2]; yyval.m_iEnd = yyvsp[0].m_iEnd; ;} break; case 31: { yyval = yyvsp[-2]; yyval.m_iEnd = yyvsp[0].m_iEnd; ;} break; case 32: { yyval = yyvsp[-2]; yyval.m_iEnd = yyvsp[0].m_iEnd; ;} break; case 33: { yyval = yyvsp[-2]; yyval.m_iEnd = yyvsp[0].m_iEnd; ;} break; case 34: { yyval = yyvsp[-2]; yyval.m_iEnd = yyvsp[0].m_iEnd; ;} break; case 38: { yyval = yyvsp[-3]; yyval.m_iEnd = yyvsp[0].m_iEnd; ;} break; case 39: { yyval = yyvsp[-2]; yyval.m_iEnd = yyvsp[0].m_iEnd; ;} break; case 40: { yyval = yyvsp[-5]; yyval.m_iEnd = yyvsp[0].m_iEnd; ;} break; case 41: { yyval = yyvsp[-5]; yyval.m_iEnd = yyvsp[0].m_iEnd; ;} break; } /* Line 991 of yacc.c. */ yyvsp -= yylen; yyssp -= yylen; YY_STACK_PRINT (yyss, yyssp); *++yyvsp = yyval; /* Now `shift' the result of the reduction. Determine what state that goes to, based on the state we popped back to and the rule number reduced by. */ yyn = yyr1[yyn]; yystate = yypgoto[yyn - YYNTOKENS] + *yyssp; if (0 <= yystate && yystate <= YYLAST && yycheck[yystate] == *yyssp) yystate = yytable[yystate]; else yystate = yydefgoto[yyn - YYNTOKENS]; goto yynewstate; /*------------------------------------. | yyerrlab -- here on detecting error | `------------------------------------*/ yyerrlab: /* If not already recovering from an error, report this error. */ if (!yyerrstatus) { ++yynerrs; #if YYERROR_VERBOSE yyn = yypact[yystate]; if (YYPACT_NINF < yyn && yyn < YYLAST) { YYSIZE_T yysize = 0; int yytype = YYTRANSLATE (yychar); char *yymsg; int yyx, yycount; yycount = 0; /* Start YYX at -YYN if negative to avoid negative indexes in YYCHECK. */ for (yyx = yyn < 0 ? 
-yyn : 0; yyx < (int) (sizeof (yytname) / sizeof (char *)); yyx++) if (yycheck[yyx + yyn] == yyx && yyx != YYTERROR) yysize += yystrlen (yytname[yyx]) + 15, yycount++; yysize += yystrlen ("syntax error, unexpected ") + 1; yysize += yystrlen (yytname[yytype]); yymsg = (char *) YYSTACK_ALLOC (yysize); if (yymsg != 0) { char *yyp = yystpcpy (yymsg, "syntax error, unexpected "); yyp = yystpcpy (yyp, yytname[yytype]); if (yycount < 5) { yycount = 0; for (yyx = yyn < 0 ? -yyn : 0; yyx < (int) (sizeof (yytname) / sizeof (char *)); yyx++) if (yycheck[yyx + yyn] == yyx && yyx != YYTERROR) { const char *yyq = ! yycount ? ", expecting " : " or "; yyp = yystpcpy (yyp, yyq); yyp = yystpcpy (yyp, yytname[yyx]); yycount++; } } yyerror (pParser, yymsg); YYSTACK_FREE (yymsg); } else yyerror (pParser, "syntax error; also virtual memory exhausted"); } else #endif /* YYERROR_VERBOSE */ yyerror (pParser, "syntax error"); } if (yyerrstatus == 3) { /* If just tried and failed to reuse lookahead token after an error, discard it. */ /* Return failure if at end of input. */ if (yychar == YYEOF) { /* Pop the error token. */ YYPOPSTACK; /* Pop the rest of the stack. */ while (yyss < yyssp) { YYDSYMPRINTF ("Error: popping", yystos[*yyssp], yyvsp, yylsp); yydestruct (yystos[*yyssp], yyvsp); YYPOPSTACK; } YYABORT; } YYDSYMPRINTF ("Error: discarding", yytoken, &yylval, &yylloc); yydestruct (yytoken, &yylval); yychar = YYEMPTY; } /* Else will try to reuse lookahead token after shifting the error token. */ goto yyerrlab2; /*----------------------------------------------------. | yyerrlab1 -- error raised explicitly by an action. | `----------------------------------------------------*/ //yyerrlab1: /* Suppress GCC warning that yyerrlab1 is unused when no action invokes YYERROR. */ #if defined (__GNUC_MINOR__) && 2093 <= (__GNUC__ * 1000 + __GNUC_MINOR__) // __attribute__ ((__unused__)) #endif goto yyerrlab2; /*---------------------------------------------------------------. | yyerrlab2 -- pop states until the error token can be shifted. | `---------------------------------------------------------------*/ yyerrlab2: yyerrstatus = 3; /* Each real token shifted decrements this. */ for (;;) { yyn = yypact[yystate]; if (yyn != YYPACT_NINF) { yyn += YYTERROR; if (0 <= yyn && yyn <= YYLAST && yycheck[yyn] == YYTERROR) { yyn = yytable[yyn]; if (0 < yyn) break; } } /* Pop the current state because it cannot handle the error token. */ if (yyssp == yyss) YYABORT; YYDSYMPRINTF ("Error: popping", yystos[*yyssp], yyvsp, yylsp); yydestruct (yystos[yystate], yyvsp); yyvsp--; yystate = *--yyssp; YY_STACK_PRINT (yyss, yyssp); } if (yyn == YYFINAL) YYACCEPT; YYDPRINTF ((stderr, "Shifting error token, ")); *++yyvsp = yylval; yystate = yyn; goto yynewstate; /*-------------------------------------. | yyacceptlab -- YYACCEPT comes here. | `-------------------------------------*/ yyacceptlab: yyresult = 0; goto yyreturn; /*-----------------------------------. | yyabortlab -- YYABORT comes here. | `-----------------------------------*/ yyabortlab: yyresult = 1; goto yyreturn; #ifndef yyoverflow /*----------------------------------------------. | yyoverflowlab -- parser overflow comes here. | `----------------------------------------------*/ yyoverflowlab: yyerror (pParser, "parser stack overflow"); yyresult = 2; /* Fall through. 
*/ #endif yyreturn: #ifndef yyoverflow if (yyss != yyssa) YYSTACK_FREE (yyss); #endif return yyresult; } #if USE_WINDOWS #pragma warning(pop) #endif sphinx-2.0.4-release/src/sphinxutils.cpp0000644000176700017710000012160011720201600017615 0ustar deogardeogar// // $Id: sphinxutils.cpp 3109 2012-02-19 14:13:20Z shodan $ // // // Copyright (c) 2001-2012, Andrew Aksyonoff // Copyright (c) 2008-2012, Sphinx Technologies Inc // All rights reserved // // This program is free software; you can redistribute it and/or modify // it under the terms of the GNU General Public License. You should have // received a copy of the GPL license along with this program; if you // did not, you can find it at http://www.gnu.org/ // /// @file sphinxutils.cpp /// Implementations for Sphinx utilities shared classes. #include "sphinx.h" #include "sphinxutils.h" #include #include #include #if HAVE_EXECINFO_H #include #endif #if USE_WINDOWS #include // for ::open on windows #include #pragma comment(linker, "/defaultlib:dbghelp.lib") #pragma message("Automatically linking with dbghelp.lib") #else #include #include #endif ///////////////////////////////////////////////////////////////////////////// static char * ltrim ( char * sLine ) { while ( *sLine && isspace(*sLine) ) sLine++; return sLine; } static char * rtrim ( char * sLine ) { char * p = sLine + strlen(sLine) - 1; while ( p>=sLine && isspace(*p) ) p--; p[1] = '\0'; return sLine; } static char * trim ( char * sLine ) { return ltrim ( rtrim ( sLine ) ); } ////////////////////////////////////////////////////////////////////////// int CSphConfigSection::GetSize ( const char * sKey, int iDefault ) const { CSphVariant * pEntry = (*this)( sKey ); if ( !pEntry ) return iDefault; char sMemLimit[256]; strncpy ( sMemLimit, pEntry->cstr(), sizeof(sMemLimit) ); sMemLimit [ sizeof(sMemLimit)-1 ] = '\0'; int iLen = strlen ( sMemLimit ); if ( !iLen ) return iDefault; iLen--; int iScale = 1; if ( toupper ( sMemLimit[iLen] )=='K' ) { iScale = 1024; sMemLimit[iLen] = '\0'; } else if ( toupper ( sMemLimit[iLen] )=='M' ) { iScale = 1048576; sMemLimit[iLen] = '\0'; } char * sErr; int64_t iRes = strtoll ( sMemLimit, &sErr, 10 ); if ( !*sErr ) { iRes *= iScale; if ( iRes>INT_MAX ) { sphWarning ( "'%s = %s' clamped to INT_MAX", sKey, pEntry->cstr() ); iRes = INT_MAX; } } else { sphWarning ( "'%s = %s' parse error '%s'", sKey, pEntry->cstr(), sErr ); iRes = iDefault; } return (int)iRes; } ////////////////////////////////////////////////////////////////////////// // CONFIG PARSER ////////////////////////////////////////////////////////////////////////// /// key flags enum { KEY_DEPRECATED = 1UL<<0, KEY_LIST = 1UL<<1 }; /// key descriptor for validation purposes struct KeyDesc_t { const char * m_sKey; ///< key name int m_iFlags; ///< flags const char * m_sExtra; ///< extra stuff (deprecated name, for now) }; /// allowed keys for source section static KeyDesc_t g_dKeysSource[] = { { "type", 0, NULL }, { "strip_html", KEY_DEPRECATED, "html_strip (per-index)" }, { "index_html_attrs", KEY_DEPRECATED, "html_index_attrs (per-index)" }, { "sql_host", 0, NULL }, { "sql_user", 0, NULL }, { "sql_pass", 0, NULL }, { "sql_db", 0, NULL }, { "sql_port", 0, NULL }, { "sql_sock", 0, NULL }, { "mysql_connect_flags", 0, NULL }, { "mysql_ssl_key", 0, NULL }, { "mysql_ssl_cert", 0, NULL }, { "mysql_ssl_ca", 0, NULL }, { "mssql_winauth", 0, NULL }, { "mssql_unicode", 0, NULL }, { "sql_query_pre", KEY_LIST, NULL }, { "sql_query", 0, NULL }, { "sql_query_range", 0, NULL }, { "sql_range_step", 0, NULL }, { 
"sql_query_killlist", 0, NULL }, { "sql_attr_uint", KEY_LIST, NULL }, { "sql_attr_bool", KEY_LIST, NULL }, { "sql_attr_timestamp", KEY_LIST, NULL }, { "sql_attr_str2ordinal", KEY_LIST, NULL }, { "sql_attr_float", KEY_LIST, NULL }, { "sql_attr_bigint", KEY_LIST, NULL }, { "sql_attr_multi", KEY_LIST, NULL }, { "sql_query_post", KEY_LIST, NULL }, { "sql_query_post_index", KEY_LIST, NULL }, { "sql_ranged_throttle", 0, NULL }, { "sql_query_info", 0, NULL }, { "xmlpipe_command", 0, NULL }, { "xmlpipe_field", KEY_LIST, NULL }, { "xmlpipe_attr_uint", KEY_LIST, NULL }, { "xmlpipe_attr_timestamp", KEY_LIST, NULL }, { "xmlpipe_attr_str2ordinal", KEY_LIST, NULL }, { "xmlpipe_attr_bool", KEY_LIST, NULL }, { "xmlpipe_attr_float", KEY_LIST, NULL }, { "xmlpipe_attr_bigint", KEY_LIST, NULL }, { "xmlpipe_attr_multi", KEY_LIST, NULL }, { "xmlpipe_attr_multi_64", KEY_LIST, NULL }, { "xmlpipe_attr_string", KEY_LIST, NULL }, { "xmlpipe_attr_wordcount", KEY_LIST, NULL }, { "xmlpipe_field_string", KEY_LIST, NULL }, { "xmlpipe_field_wordcount", KEY_LIST, NULL }, { "xmlpipe_fixup_utf8", 0, NULL }, { "sql_group_column", KEY_LIST | KEY_DEPRECATED, "sql_attr_uint" }, { "sql_date_column", KEY_LIST | KEY_DEPRECATED, "sql_attr_timestamp" }, { "sql_str2ordinal_column", KEY_LIST | KEY_DEPRECATED, "sql_attr_str2ordinal" }, { "unpack_zlib", KEY_LIST, NULL }, { "unpack_mysqlcompress", KEY_LIST, NULL }, { "unpack_mysqlcompress_maxsize", 0, NULL }, { "odbc_dsn", 0, NULL }, { "sql_joined_field", KEY_LIST, NULL }, { "sql_attr_string", KEY_LIST, NULL }, { "sql_attr_str2wordcount", KEY_LIST, NULL }, { "sql_field_string", KEY_LIST, NULL }, { "sql_field_str2wordcount", KEY_LIST, NULL }, { "sql_file_field", KEY_LIST, NULL }, { "sql_column_buffers", 0, NULL }, { NULL, 0, NULL } }; /// allowed keys for index section static KeyDesc_t g_dKeysIndex[] = { { "source", KEY_LIST, NULL }, { "path", 0, NULL }, { "docinfo", 0, NULL }, { "mlock", 0, NULL }, { "morphology", 0, NULL }, { "stopwords", 0, NULL }, { "synonyms", KEY_DEPRECATED, "exceptions" }, { "exceptions", 0, NULL }, { "wordforms", 0, NULL }, { "min_word_len", 0, NULL }, { "charset_type", 0, NULL }, { "charset_table", 0, NULL }, { "ignore_chars", 0, NULL }, { "min_prefix_len", 0, NULL }, { "min_infix_len", 0, NULL }, { "prefix_fields", 0, NULL }, { "infix_fields", 0, NULL }, { "enable_star", 0, NULL }, { "ngram_len", 0, NULL }, { "ngram_chars", 0, NULL }, { "phrase_boundary", 0, NULL }, { "phrase_boundary_step", 0, NULL }, { "ondisk_dict", 0, NULL }, { "type", 0, NULL }, { "local", KEY_LIST, NULL }, { "agent", KEY_LIST, NULL }, { "agent_blackhole", KEY_LIST, NULL }, { "agent_connect_timeout", 0, NULL }, { "agent_query_timeout", 0, NULL }, { "html_strip", 0, NULL }, { "html_index_attrs", 0, NULL }, { "html_remove_elements", 0, NULL }, { "preopen", 0, NULL }, { "inplace_enable", 0, NULL }, { "inplace_hit_gap", 0, NULL }, { "inplace_docinfo_gap", 0, NULL }, { "inplace_reloc_factor", 0, NULL }, { "inplace_write_factor", 0, NULL }, { "index_exact_words", 0, NULL }, { "min_stemming_len", 0, NULL }, { "overshort_step", 0, NULL }, { "stopword_step", 0, NULL }, { "blend_chars", 0, NULL }, { "expand_keywords", 0, NULL }, { "hitless_words", KEY_LIST, NULL }, { "hit_format", 0, NULL }, { "rt_field", KEY_LIST, NULL }, { "rt_attr_uint", KEY_LIST, NULL }, { "rt_attr_bigint", KEY_LIST, NULL }, { "rt_attr_float", KEY_LIST, NULL }, { "rt_attr_timestamp", KEY_LIST, NULL }, { "rt_attr_string", KEY_LIST, NULL }, { "rt_attr_multi", KEY_LIST, NULL }, { "rt_attr_multi_64", KEY_LIST, NULL }, { 
"rt_mem_limit", 0, NULL }, { "dict", 0, NULL }, { "index_sp", 0, NULL }, { "index_zones", 0, NULL }, { "blend_mode", 0, NULL }, { NULL, 0, NULL } }; /// allowed keys for indexer section static KeyDesc_t g_dKeysIndexer[] = { { "mem_limit", 0, NULL }, { "max_iops", 0, NULL }, { "max_iosize", 0, NULL }, { "max_xmlpipe2_field", 0, NULL }, { "max_file_field_buffer", 0, NULL }, { "write_buffer", 0, NULL }, { "on_file_field_error", 0, NULL }, { NULL, 0, NULL } }; /// allowed keys for searchd section static KeyDesc_t g_dKeysSearchd[] = { { "address", KEY_DEPRECATED, "listen" }, { "port", 0, NULL }, { "listen", KEY_LIST, NULL }, { "log", 0, NULL }, { "query_log", 0, NULL }, { "read_timeout", 0, NULL }, { "client_timeout", 0, NULL }, { "max_children", 0, NULL }, { "pid_file", 0, NULL }, { "max_matches", 0, NULL }, { "seamless_rotate", 0, NULL }, { "preopen_indexes", 0, NULL }, { "unlink_old", 0, NULL }, { "ondisk_dict_default", 0, NULL }, { "attr_flush_period", 0, NULL }, { "max_packet_size", 0, NULL }, { "mva_updates_pool", 0, NULL }, { "crash_log_path", KEY_DEPRECATED, NULL }, { "max_filters", 0, NULL }, { "max_filter_values", 0, NULL }, { "listen_backlog", 0, NULL }, { "read_buffer", 0, NULL }, { "read_unhinted", 0, NULL }, { "max_batch_queries", 0, NULL }, { "subtree_docs_cache", 0, NULL }, { "subtree_hits_cache", 0, NULL }, { "workers", 0, NULL }, { "prefork", 0, NULL }, { "dist_threads", 0, NULL }, { "binlog_flush", 0, NULL }, { "binlog_path", 0, NULL }, { "binlog_max_log_size", 0, NULL }, { "thread_stack", 0, NULL }, { "expansion_limit", 0, NULL }, { "compat_sphinxql_magics", 0, NULL }, { "rt_flush_period", 0, NULL }, { "query_log_format", 0, NULL }, { "mysql_version_string", 0, NULL }, { "plugin_dir", 0, NULL }, { "collation_server", 0, NULL }, { "collation_libc_locale", 0, NULL }, { "watchdog", 0, NULL }, { "prefork_rotation_throttle", 0, NULL }, { NULL, 0, NULL } }; ////////////////////////////////////////////////////////////////////////// CSphConfigParser::CSphConfigParser () : m_sFileName ( "" ) , m_iLine ( -1 ) { } bool CSphConfigParser::IsPlainSection ( const char * sKey ) { if ( !strcasecmp ( sKey, "indexer" ) ) return true; if ( !strcasecmp ( sKey, "searchd" ) ) return true; if ( !strcasecmp ( sKey, "search" ) ) return true; return false; } bool CSphConfigParser::IsNamedSection ( const char * sKey ) { if ( !strcasecmp ( sKey, "source" ) ) return true; if ( !strcasecmp ( sKey, "index" ) ) return true; return false; } bool CSphConfigParser::AddSection ( const char * sType, const char * sName ) { m_sSectionType = sType; m_sSectionName = sName; if ( !m_tConf.Exists ( m_sSectionType ) ) m_tConf.Add ( CSphConfigType(), m_sSectionType ); // FIXME! be paranoid, verify that it returned true if ( m_tConf[m_sSectionType].Exists ( m_sSectionName ) ) { snprintf ( m_sError, sizeof(m_sError), "section '%s' (type='%s') already exists", sName, sType ); return false; } m_tConf[m_sSectionType].Add ( CSphConfigSection(), m_sSectionName ); // FIXME! 
be paranoid, verify that it returned true return true; } void CSphConfigParser::AddKey ( const char * sKey, char * sValue ) { assert ( m_tConf.Exists ( m_sSectionType ) ); assert ( m_tConf[m_sSectionType].Exists ( m_sSectionName ) ); sValue = trim ( sValue ); CSphConfigSection & tSec = m_tConf[m_sSectionType][m_sSectionName]; if ( tSec(sKey) ) { if ( tSec[sKey].m_bTag ) { // override value or list with a new value SafeDelete ( tSec[sKey].m_pNext ); // only leave the first array element tSec[sKey] = sValue; // update its value tSec[sKey].m_bTag = false; // mark it as overridden } else { // chain to tail, to keep the order CSphVariant * pTail = &tSec[sKey]; while ( pTail->m_pNext ) pTail = pTail->m_pNext; pTail->m_pNext = new CSphVariant ( sValue ); } } else { // just add tSec.Add ( sValue, sKey ); // FIXME! be paranoid, verify that it returned true } } bool CSphConfigParser::ValidateKey ( const char * sKey ) { // get proper descriptor table // OPTIMIZE! move lookup to AddSection const KeyDesc_t * pDesc = NULL; if ( m_sSectionType=="source" ) pDesc = g_dKeysSource; else if ( m_sSectionType=="index" ) pDesc = g_dKeysIndex; else if ( m_sSectionType=="indexer" ) pDesc = g_dKeysIndexer; else if ( m_sSectionType=="searchd" ) pDesc = g_dKeysSearchd; if ( !pDesc ) { snprintf ( m_sError, sizeof(m_sError), "unknown section type '%s'", m_sSectionType.cstr() ); return false; } // check if the key is known while ( pDesc->m_sKey && strcasecmp ( pDesc->m_sKey, sKey ) ) pDesc++; if ( !pDesc->m_sKey ) { snprintf ( m_sError, sizeof(m_sError), "unknown key name '%s'", sKey ); return false; } // warn about deprecate keys if ( pDesc->m_iFlags & KEY_DEPRECATED ) if ( ++m_iWarnings<=WARNS_THRESH ) fprintf ( stdout, "WARNING: key '%s' is deprecated in %s line %d; use '%s' instead.\n", sKey, m_sFileName.cstr(), m_iLine, pDesc->m_sExtra ); // warn about list/non-list keys if (!( pDesc->m_iFlags & KEY_LIST )) { CSphConfigSection & tSec = m_tConf[m_sSectionType][m_sSectionName]; if ( tSec(sKey) && !tSec[sKey].m_bTag ) if ( ++m_iWarnings<=WARNS_THRESH ) fprintf ( stdout, "WARNING: key '%s' is not multi-value; value in %s line %d will be ignored.\n", sKey, m_sFileName.cstr(), m_iLine ); } return true; } #if !USE_WINDOWS bool CSphConfigParser::TryToExec ( char * pBuffer, char * pEnd, const char * szFilename, CSphVector & dResult ) { int dPipe[2] = { -1, -1 }; if ( pipe ( dPipe ) ) { snprintf ( m_sError, sizeof ( m_sError ), "pipe() failed (error=%s)", strerror(errno) ); return false; } pBuffer = trim ( pBuffer ); int iRead = dPipe[0]; int iWrite = dPipe[1]; int iChild = fork(); if ( iChild==0 ) { close ( iRead ); close ( STDOUT_FILENO ); dup2 ( iWrite, STDOUT_FILENO ); char * pPtr = pBuffer; char * pArgs = NULL; while ( *pPtr ) { if ( sphIsSpace ( *pPtr ) ) { *pPtr = '\0'; pArgs = trim ( pPtr+1 ); break; } pPtr++; } if ( pArgs ) execl ( pBuffer, pBuffer, pArgs, szFilename, (char*)NULL ); else execl ( pBuffer, pBuffer, szFilename, (char*)NULL ); exit ( 1 ); } else if ( iChild==-1 ) { snprintf ( m_sError, sizeof ( m_sError ), "fork failed: [%d] %s", errno, strerror(errno) ); return false; } close ( iWrite ); int iBytesRead, iTotalRead = 0; const int BUFFER_SIZE = 65536; dResult.Reset (); do { dResult.Resize ( iTotalRead + BUFFER_SIZE ); for ( ;; ) { iBytesRead = read ( iRead, (void*)&(dResult [iTotalRead]), BUFFER_SIZE ); if ( iBytesRead==-1 && errno==EINTR ) // we can get SIGCHLD just before eof continue; break; } iTotalRead += iBytesRead; } while ( iBytesRead > 0 ); int iStatus, iResult; do { // can be interrupted by 
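// (Illustrative aside.) Section keys are validated by ValidateKey() above against the NULL-terminated
// KeyDesc_t tables (g_dKeysSource, g_dKeysIndex, g_dKeysIndexer, g_dKeysSearchd): a linear,
// case-insensitive scan rejects unknown keys and warns about deprecated ones. The sketch below shows
// that lookup in isolation, with a tiny made-up table rather than the full lists above; names suffixed
// with "Sketch" are hypothetical, not part of the Sphinx sources.

// #include <cstdio>
// #include <strings.h> // strcasecmp (POSIX; use _stricmp on Windows)
//
// struct KeyDescSketch_t
// {
// 	const char *	m_sKey;		// key name
// 	int				m_iFlags;	// deprecated/list flags
// 	const char *	m_sExtra;	// replacement name for deprecated keys
// };
//
// static const int SKETCH_KEY_DEPRECATED = 1;
//
// static const KeyDescSketch_t g_dKeysSketch[] =
// {
// 	{ "listen",		0,						NULL },
// 	{ "address",	SKETCH_KEY_DEPRECATED,	"listen" },
// 	{ "log",		0,						NULL },
// 	{ NULL,			0,						NULL } // NULL-terminated, like the tables above
// };
//
// // Returns false for unknown keys; warns about (but accepts) deprecated ones.
// static bool ValidateKeySketch ( const char * sKey )
// {
// 	const KeyDescSketch_t * pDesc = g_dKeysSketch;
// 	while ( pDesc->m_sKey && strcasecmp ( pDesc->m_sKey, sKey ) )
// 		pDesc++;
//
// 	if ( !pDesc->m_sKey )
// 	{
// 		fprintf ( stdout, "ERROR: unknown key name '%s'\n", sKey );
// 		return false;
// 	}
// 	if ( pDesc->m_iFlags & SKETCH_KEY_DEPRECATED )
// 		fprintf ( stdout, "WARNING: key '%s' is deprecated; use '%s' instead\n", sKey, pDesc->m_sExtra );
// 	return true;
// }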
pretty much anything (e.g. SIGCHLD from other searchd children) iResult = waitpid ( iChild, &iStatus, 0 ); // they say this can happen if child exited and SIGCHLD was ignored // a cleaner one would be to temporary handle it here, but can we be bothered if ( iResult==-1 && errno==ECHILD ) { iResult = iChild; iStatus = 0; } if ( iResult==-1 && errno!=EINTR ) { snprintf ( m_sError, sizeof ( m_sError ), "waitpid() failed: [%d] %s", errno, strerror(errno) ); return false; } } while ( iResult!=iChild ); if ( WIFEXITED ( iStatus ) && WEXITSTATUS ( iStatus ) ) { // FIXME? read stderr and log that too snprintf ( m_sError, sizeof ( m_sError ), "error executing '%s' status = %d", pBuffer, WEXITSTATUS ( iStatus ) ); return false; } if ( WIFSIGNALED ( iStatus ) ) { snprintf ( m_sError, sizeof ( m_sError ), "error executing '%s', killed by signal %d", pBuffer, WTERMSIG ( iStatus ) ); return false; } if ( iBytesRead < 0 ) { snprintf ( m_sError, sizeof ( m_sError ), "pipe read error: [%d] %s", errno, strerror(errno) ); return false; } dResult.Resize ( iTotalRead + 1 ); dResult [iTotalRead] = '\0'; return true; } #endif char * CSphConfigParser::GetBufferString ( char * szDest, int iMax, const char * & szSource ) { int nCopied = 0; while ( nCopied < iMax-1 && szSource[nCopied] && ( nCopied==0 || szSource[nCopied-1]!='\n' ) ) { szDest [nCopied] = szSource [nCopied]; nCopied++; } if ( !nCopied ) return NULL; szSource += nCopied; szDest [nCopied] = '\0'; return szDest; } bool CSphConfigParser::ReParse ( const char * sFileName, const char * pBuffer ) { CSphConfig tOldConfig = m_tConf; m_tConf.Reset(); if ( Parse ( sFileName, pBuffer ) ) return true; m_tConf = tOldConfig; return false; } bool CSphConfigParser::Parse ( const char * sFileName, const char * pBuffer ) { const int L_STEPBACK = 16; const int L_TOKEN = 64; const int L_BUFFER = 8192; FILE * fp = NULL; if ( !pBuffer ) { // open file fp = fopen ( sFileName, "rb" ); if ( !fp ) return false; } // init parser m_sFileName = sFileName; m_iLine = 0; m_iWarnings = 0; char * p = NULL; char * pEnd = NULL; char sBuf [ L_BUFFER ]; char sToken [ L_TOKEN ]; int iToken = 0; int iCh = -1; enum { S_TOP, S_SKIP2NL, S_TOK, S_TYPE, S_SEC, S_CHR, S_VALUE, S_SECNAME, S_SECBASE, S_KEY } eState = S_TOP, eStack[8]; int iStack = 0; int iValue = 0, iValueMax = 65535; char * sValue = new char [ iValueMax+1 ]; #define LOC_ERROR(_msg) { strncpy ( m_sError, _msg, sizeof(m_sError) ); break; } #define LOC_ERROR2(_msg,_a) { snprintf ( m_sError, sizeof(m_sError), _msg, _a ); break; } #define LOC_ERROR3(_msg,_a,_b) { snprintf ( m_sError, sizeof(m_sError), _msg, _a, _b ); break; } #define LOC_ERROR4(_msg,_a,_b,_c) { snprintf ( m_sError, sizeof(m_sError), _msg, _a, _b, _c ); break; } #define LOC_PUSH(_new) { assert ( iStack0 ); eState = eStack[--iStack]; } #define LOC_BACK() { p--; } m_sError[0] = '\0'; for ( ; ; p++ ) { // if this line is over, load next line if ( p>=pEnd ) { char * szResult = pBuffer ? GetBufferString ( sBuf, L_BUFFER, pBuffer ) : fgets ( sBuf, L_BUFFER, fp ); if ( !szResult ) break; // FIXME! check for read error m_iLine++; int iLen = strlen(sBuf); if ( iLen<=0 ) LOC_ERROR ( "internal error; fgets() returned empty string" ); p = sBuf; pEnd = sBuf + iLen; if ( pEnd[-1]!='\n' ) { if ( iLen==L_BUFFER-1 ) LOC_ERROR ( "line too long" ); } } // handle S_TOP state if ( eState==S_TOP ) { if ( isspace(*p) ) continue; if ( *p=='#' ) { #if !USE_WINDOWS if ( !pBuffer && m_iLine==1 && p==sBuf && p[1]=='!' 
) { CSphVector dResult; if ( TryToExec ( p+2, pEnd, sFileName, dResult ) ) Parse ( sFileName, &dResult[0] ); break; } else #endif { LOC_PUSH ( S_SKIP2NL ); continue; } } if ( !sphIsAlpha(*p) ) LOC_ERROR ( "invalid token" ); iToken = 0; LOC_PUSH ( S_TYPE ); LOC_PUSH ( S_TOK ); LOC_BACK(); continue; } // handle S_SKIP2NL state if ( eState==S_SKIP2NL ) { LOC_POP (); p = pEnd; continue; } // handle S_TOK state if ( eState==S_TOK ) { if ( !iToken && !sphIsAlpha(*p) )LOC_ERROR ( "internal error (non-alpha in S_TOK pos 0)" ); if ( iToken==sizeof(sToken) ) LOC_ERROR ( "token too long" ); if ( !sphIsAlpha(*p) ) { LOC_POP (); sToken [ iToken ] = '\0'; iToken = 0; LOC_BACK(); continue; } if ( !iToken ) { sToken[0] = '\0'; } sToken [ iToken++ ] = *p; continue; } // handle S_TYPE state if ( eState==S_TYPE ) { if ( isspace(*p) ) continue; if ( *p=='#' ) { LOC_PUSH ( S_SKIP2NL ); continue; } if ( !sToken[0] ) { LOC_ERROR ( "internal error (empty token in S_TYPE)" ); } if ( IsPlainSection(sToken) ) { if ( !AddSection ( sToken, sToken ) ) break; sToken[0] = '\0'; LOC_POP (); LOC_PUSH ( S_SEC ); LOC_PUSH ( S_CHR ); iCh = '{'; LOC_BACK(); continue; } if ( IsNamedSection(sToken) ) { m_sSectionType = sToken; sToken[0] = '\0'; LOC_POP (); LOC_PUSH ( S_SECNAME ); LOC_BACK(); continue; } LOC_ERROR2 ( "invalid section type '%s'", sToken ); } // handle S_CHR state if ( eState==S_CHR ) { if ( isspace(*p) ) continue; if ( *p=='#' ) { LOC_PUSH ( S_SKIP2NL ); continue; } if ( *p!=iCh ) LOC_ERROR3 ( "expected '%c', got '%c'", iCh, *p ); LOC_POP (); continue; } // handle S_SEC state if ( eState==S_SEC ) { if ( isspace(*p) ) continue; if ( *p=='#' ) { LOC_PUSH ( S_SKIP2NL ); continue; } if ( *p=='}' ) { LOC_POP (); continue; } if ( sphIsAlpha(*p) ) { LOC_PUSH ( S_KEY ); LOC_PUSH ( S_TOK ); LOC_BACK(); iValue = 0; sValue[0] = '\0'; continue; } LOC_ERROR2 ( "section contents: expected token, got '%c'", *p ); } // handle S_KEY state if ( eState==S_KEY ) { // validate the key if ( !ValidateKey ( sToken ) ) break; // an assignment operator and a value must follow LOC_POP (); LOC_PUSH ( S_VALUE ); LOC_PUSH ( S_CHR ); iCh = '='; LOC_BACK(); // because we did not work the char at all continue; } // handle S_VALUE state if ( eState==S_VALUE ) { if ( *p=='\n' ) { AddKey ( sToken, sValue ); iValue = 0; LOC_POP (); continue; } if ( *p=='#' ) { AddKey ( sToken, sValue ); iValue = 0; LOC_POP (); LOC_PUSH ( S_SKIP2NL ); continue; } if ( *p=='\\' ) { // backslash at the line end: continuation operator; let the newline be unhanlded if ( p[1]=='\r' || p[1]=='\n' ) { LOC_PUSH ( S_SKIP2NL ); continue; } // backslash before number sign: comment start char escaping; advance and pass it if ( p[1]=='#' ) { p++; } // otherwise: just a char, pass it } if ( iValueWARNS_THRESH ) fprintf ( stdout, "WARNING: %d more warnings skipped.\n", m_iWarnings-WARNS_THRESH ); if ( strlen(m_sError) ) { int iCol = (int)(p-sBuf+1); int iCtx = Min ( L_STEPBACK, iCol ); // error context is upto L_STEPBACK chars back, but never going to prev line const char * sCtx = p-iCtx+1; if ( sCtx pTokenizer ( NULL ); if ( !hIndex("charset_type") || hIndex["charset_type"]=="sbcs" ) { tSettings.m_iType = TOKENIZER_SBCS; } else if ( hIndex["charset_type"]=="utf-8" ) { tSettings.m_iType = hIndex("ngram_chars") ? 
TOKENIZER_NGRAM : TOKENIZER_UTF8; } else { sError.SetSprintf ( "unknown charset type '%s'", hIndex["charset_type"].cstr() ); return false; } tSettings.m_sCaseFolding = hIndex.GetStr ( "charset_table" ); tSettings.m_iMinWordLen = Max ( hIndex.GetInt ( "min_word_len" ), 0 ); tSettings.m_sNgramChars = hIndex.GetStr ( "ngram_chars" ); tSettings.m_iNgramLen = Max ( hIndex.GetInt ( "ngram_len" ), 0 ); tSettings.m_sSynonymsFile = hIndex.GetStr ( "exceptions" ); // new option name if ( tSettings.m_sSynonymsFile.IsEmpty() ) tSettings.m_sSynonymsFile = hIndex.GetStr ( "synonyms" ); // deprecated option name tSettings.m_sIgnoreChars = hIndex.GetStr ( "ignore_chars" ); tSettings.m_sBlendChars = hIndex.GetStr ( "blend_chars" ); tSettings.m_sBlendMode = hIndex.GetStr ( "blend_mode" ); // phrase boundaries int iBoundaryStep = Max ( hIndex.GetInt ( "phrase_boundary_step" ), -1 ); if ( iBoundaryStep!=0 ) tSettings.m_sBoundary = hIndex.GetStr ( "phrase_boundary" ); return true; } void sphConfDictionary ( const CSphConfigSection & hIndex, CSphDictSettings & tSettings ) { tSettings.m_sMorphology = hIndex.GetStr ( "morphology" ); tSettings.m_sStopwords = hIndex.GetStr ( "stopwords" ); tSettings.m_sWordforms = hIndex.GetStr ( "wordforms" ); tSettings.m_iMinStemmingLen = hIndex.GetInt ( "min_stemming_len", 1 ); if ( hIndex("dict") ) { tSettings.m_bWordDict = false; // default to crc if ( hIndex["dict"]=="keywords" ) tSettings.m_bWordDict = true; else if ( hIndex["dict"]!="crc" ) fprintf ( stdout, "WARNING: unknown dict=%s, defaulting to crc\n", hIndex["dict"].cstr() ); } } bool sphConfIndex ( const CSphConfigSection & hIndex, CSphIndexSettings & tSettings, CSphString & sError ) { // misc settings tSettings.m_iMinPrefixLen = Max ( hIndex.GetInt ( "min_prefix_len" ), 0 ); tSettings.m_iMinInfixLen = Max ( hIndex.GetInt ( "min_infix_len" ), 0 ); tSettings.m_iBoundaryStep = Max ( hIndex.GetInt ( "phrase_boundary_step" ), -1 ); tSettings.m_bIndexExactWords = hIndex.GetInt ( "index_exact_words" )!=0; tSettings.m_iOvershortStep = Min ( Max ( hIndex.GetInt ( "overshort_step", 1 ), 0 ), 1 ); tSettings.m_iStopwordStep = Min ( Max ( hIndex.GetInt ( "stopword_step", 1 ), 0 ), 1 ); // prefix/infix fields CSphString sFields; sFields = hIndex.GetStr ( "prefix_fields" ); sFields.ToLower(); sphSplit ( tSettings.m_dPrefixFields, sFields.cstr() ); sFields = hIndex.GetStr ( "infix_fields" ); sFields.ToLower(); sphSplit ( tSettings.m_dInfixFields, sFields.cstr() ); if ( tSettings.m_iMinPrefixLen==0 && tSettings.m_dPrefixFields.GetLength()!=0 ) { fprintf ( stdout, "WARNING: min_prefix_len=0, prefix_fields ignored\n" ); tSettings.m_dPrefixFields.Reset(); } if ( tSettings.m_iMinInfixLen==0 && tSettings.m_dInfixFields.GetLength()!=0 ) { fprintf ( stdout, "WARNING: min_infix_len=0, infix_fields ignored\n" ); tSettings.m_dInfixFields.Reset(); } // the only way we could have both prefixes and infixes enabled is when specific field subsets are configured if ( tSettings.m_iMinInfixLen>0 && tSettings.m_iMinPrefixLen>0 && ( !tSettings.m_dPrefixFields.GetLength() || !tSettings.m_dInfixFields.GetLength() ) ) { sError.SetSprintf ( "prefixes and infixes can not both be enabled on all fields" ); return false; } tSettings.m_dPrefixFields.Uniq(); tSettings.m_dInfixFields.Uniq(); ARRAY_FOREACH ( i, tSettings.m_dPrefixFields ) if ( tSettings.m_dInfixFields.Contains ( tSettings.m_dPrefixFields[i] ) ) { sError.SetSprintf ( "field '%s' marked both as prefix and infix", tSettings.m_dPrefixFields[i].cstr() ); return false; } // html stripping if ( hIndex ( 
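// Illustrative only: a sphinx.conf index section fragment using just the keys
// read by sphConfTokenizer(), sphConfDictionary() and sphConfIndex() above.
// The path and field names are invented for the example. Note that
// prefix_fields and infix_fields are disjoint, which is exactly what the check
// above requires when both min_prefix_len and min_infix_len are non-zero.
static const char * g_sExampleIndexSection =
	"index example\n"
	"{\n"
	"	charset_type   = utf-8\n"
	"	min_word_len   = 2\n"
	"	exceptions     = /path/to/exceptions.txt\n"
	"	dict           = keywords\n"
	"	min_prefix_len = 3\n"
	"	prefix_fields  = title\n"
	"	min_infix_len  = 3\n"
	"	infix_fields   = body\n"
	"}\n";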
"html_strip" ) ) { tSettings.m_bHtmlStrip = hIndex.GetInt ( "html_strip" )!=0; tSettings.m_sHtmlIndexAttrs = hIndex.GetStr ( "html_index_attrs" ); tSettings.m_sHtmlRemoveElements = hIndex.GetStr ( "html_remove_elements" ); } // docinfo tSettings.m_eDocinfo = SPH_DOCINFO_EXTERN; if ( hIndex("docinfo") ) { if ( hIndex["docinfo"]=="none" ) tSettings.m_eDocinfo = SPH_DOCINFO_NONE; else if ( hIndex["docinfo"]=="inline" ) tSettings.m_eDocinfo = SPH_DOCINFO_INLINE; else if ( hIndex["docinfo"]=="extern" ) tSettings.m_eDocinfo = SPH_DOCINFO_EXTERN; else fprintf ( stdout, "WARNING: unknown docinfo=%s, defaulting to extern\n", hIndex["docinfo"].cstr() ); } // hit format // TODO! add the description into documentation. tSettings.m_eHitFormat = SPH_HIT_FORMAT_INLINE; if ( hIndex("hit_format") ) { if ( hIndex["hit_format"]=="plain" ) tSettings.m_eHitFormat = SPH_HIT_FORMAT_PLAIN; else if ( hIndex["hit_format"]=="inline" ) tSettings.m_eHitFormat = SPH_HIT_FORMAT_INLINE; else fprintf ( stdout, "WARNING: unknown hit_format=%s, defaulting to inline\n", hIndex["hit_format"].cstr() ); } // hit-less indices if ( hIndex("hitless_words") ) { for ( const CSphVariant * pVariant = &hIndex["hitless_words"]; pVariant; pVariant = pVariant->m_pNext ) { const CSphString & sValue = *pVariant; if ( sValue=="all" ) { tSettings.m_eHitless = SPH_HITLESS_ALL; } else { tSettings.m_eHitless = SPH_HITLESS_SOME; tSettings.m_sHitlessFile = sValue; } } } // sentence and paragraph indexing tSettings.m_bIndexSP = ( hIndex.GetInt ( "index_sp" )!=0 ); tSettings.m_sZones = hIndex.GetStr ( "index_zones" ); // all good return true; } bool sphFixupIndexSettings ( CSphIndex * pIndex, const CSphConfigSection & hIndex, CSphString & sError ) { bool bTokenizerSpawned = false; if ( !pIndex->GetTokenizer () ) { CSphTokenizerSettings tSettings; if ( !sphConfTokenizer ( hIndex, tSettings, sError ) ) return false; ISphTokenizer * pTokenizer = ISphTokenizer::Create ( tSettings, sError ); if ( !pTokenizer ) return false; bTokenizerSpawned = true; pIndex->SetTokenizer ( pTokenizer ); } if ( !pIndex->GetDictionary () ) { CSphDictSettings tSettings; if ( pIndex->m_bId32to64 ) tSettings.m_bCrc32 = true; sphConfDictionary ( hIndex, tSettings ); CSphDict * pDict = sphCreateDictionaryCRC ( tSettings, pIndex->GetTokenizer (), sError, pIndex->GetName() ); if ( !pDict ) return false; pIndex->SetDictionary ( pDict ); } if ( bTokenizerSpawned ) { ISphTokenizer * pTokenizer = pIndex->LeakTokenizer (); ISphTokenizer * pTokenFilter = ISphTokenizer::CreateTokenFilter ( pTokenizer, pIndex->GetDictionary ()->GetMultiWordforms () ); pIndex->SetTokenizer ( pTokenFilter ? 
pTokenFilter : pTokenizer ); } if ( !pIndex->IsStripperInited () ) { CSphIndexSettings tSettings = pIndex->GetSettings (); if ( hIndex ( "html_strip" ) ) { tSettings.m_bHtmlStrip = hIndex.GetInt ( "html_strip" )!=0; tSettings.m_sHtmlIndexAttrs = hIndex.GetStr ( "html_index_attrs" ); tSettings.m_sHtmlRemoveElements = hIndex.GetStr ( "html_remove_elements" ); } tSettings.m_sZones = hIndex.GetStr ( "index_zones" ); pIndex->Setup ( tSettings ); } pIndex->PostSetup(); return true; } ////////////////////////////////////////////////////////////////////////// const char * sphLoadConfig ( const char * sOptConfig, bool bQuiet, CSphConfigParser & cp ) { // fallback to defaults if there was no explicit config specified while ( !sOptConfig ) { #ifdef SYSCONFDIR sOptConfig = SYSCONFDIR "/sphinx.conf"; if ( sphIsReadable ( sOptConfig ) ) break; #endif sOptConfig = "./sphinx.conf"; if ( sphIsReadable ( sOptConfig ) ) break; sOptConfig = NULL; break; } if ( !sOptConfig ) sphDie ( "no readable config file (looked in " #ifdef SYSCONFDIR SYSCONFDIR "/sphinx.conf, " #endif "./sphinx.conf)" ); if ( !bQuiet ) fprintf ( stdout, "using config file '%s'...\n", sOptConfig ); // load config if ( !cp.Parse ( sOptConfig ) ) sphDie ( "failed to parse config file '%s'", sOptConfig ); CSphConfig & hConf = cp.m_tConf; if ( !hConf ( "index" ) ) sphDie ( "no indexes found in config file '%s'", sOptConfig ); return sOptConfig; } ////////////////////////////////////////////////////////////////////////// static SphLogger_fn g_pLogger = NULL; inline void Log ( ESphLogLevel eLevel, const char * sFmt, va_list ap ) { if ( !g_pLogger ) return; ( *g_pLogger ) ( eLevel, sFmt, ap ); } void sphWarning ( const char * sFmt, ... ) { va_list ap; va_start ( ap, sFmt ); Log ( SPH_LOG_WARNING, sFmt, ap ); va_end ( ap ); } void sphInfo ( const char * sFmt, ... ) { va_list ap; va_start ( ap, sFmt ); Log ( SPH_LOG_INFO, sFmt, ap ); va_end ( ap ); } void sphLogFatal ( const char * sFmt, ... ) { va_list ap; va_start ( ap, sFmt ); Log ( SPH_LOG_FATAL, sFmt, ap ); va_end ( ap ); } void sphLogDebug ( const char * sFmt, ... ) { va_list ap; va_start ( ap, sFmt ); Log ( SPH_LOG_DEBUG, sFmt, ap ); va_end ( ap ); } void sphLogDebugv ( const char * sFmt, ... ) { va_list ap; va_start ( ap, sFmt ); Log ( SPH_LOG_VERBOSE_DEBUG, sFmt, ap ); va_end ( ap ); } void sphLogDebugvv ( const char * sFmt, ... ) { va_list ap; va_start ( ap, sFmt ); Log ( SPH_LOG_VERY_VERBOSE_DEBUG, sFmt, ap ); va_end ( ap ); } void sphSetLogger ( SphLogger_fn fnLog ) { g_pLogger = fnLog; } ////////////////////////////////////////////////////////////////////////// // CRASH REPORTING ////////////////////////////////////////////////////////////////////////// template static void UItoA ( char** ppOutput, Uint uVal, int iBase=10, int iWidth=0, int iPrec=0, const char cFill=' ' ) { assert ( ppOutput ); assert ( *ppOutput ); const char cDigits[] = "0123456789abcdef"; if ( iWidth && iPrec ) { iPrec = iWidth; iWidth = 0; } if ( !uVal ) { if ( !iPrec && !iWidth ) *(*ppOutput)++ = cDigits[0]; else { while ( iPrec-- ) *(*ppOutput)++ = cDigits[0]; if ( iWidth ) { while ( --iWidth ) *(*ppOutput)++ = cFill; *(*ppOutput)++ = cDigits[0]; } } return; } const BYTE uMaxIndex = 31; // 20 digits for MAX_INT64 in decimal; let it be 31 (32 digits max). 
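// A minimal sketch of a logger callback compatible with the ( eLevel, sFmt, ap )
// call made by Log() above. Purely illustrative: it assumes SphLogger_fn
// matches this signature, and that <cstdio>/<cstdarg> are available as in the
// rest of this file; it simply forwards everything to stderr.
static void ExampleStderrLogger ( ESphLogLevel eLevel, const char * sFmt, va_list ap )
{
	if ( eLevel==SPH_LOG_FATAL )
		fprintf ( stderr, "FATAL: " );
	vfprintf ( stderr, sFmt, ap );
	fprintf ( stderr, "\n" );
}

// usage sketch: sphSetLogger ( ExampleStderrLogger );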
char CBuf[uMaxIndex+1]; char *pRes = &CBuf[uMaxIndex]; char *& pOutput = *ppOutput; while ( uVal ) { *pRes-- = cDigits [ uVal % iBase ]; uVal /= iBase; } BYTE uLen = (BYTE)( uMaxIndex - (pRes-CBuf) ); if ( iWidth ) while ( uLen < iWidth ) { *pOutput++ = cFill; iWidth--; } if ( iPrec ) { while ( uLen < iPrec ) { *pOutput++=cDigits[0]; iPrec--; } iPrec = uLen-iPrec; } while ( pRes < CBuf+uMaxIndex-iPrec ) *pOutput++ = *++pRes; } static int sphVSprintf ( char * pOutput, const char * sFmt, va_list ap ) { enum eStates { SNORMAL, SPERCENT, SHAVEFILL, SINWIDTH, SINPREC }; eStates state = SNORMAL; int iPrec = 0; int iWidth = 0; char cFill = ' '; const char * pBegin = pOutput; bool bHeadingSpace = true; char c; while ( ( c = *sFmt++ )!=0 ) { // handle percent if ( c=='%' ) { if ( state==SNORMAL ) { state = SPERCENT; iPrec = 0; iWidth = 0; cFill = ' '; } else { state = SNORMAL; *pOutput++ = c; } continue; } // handle regular chars if ( state==SNORMAL ) { *pOutput++ = c; continue; } // handle modifiers switch ( c ) { case '0': if ( state==SPERCENT ) { cFill = '0'; state = SHAVEFILL; break; } case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': if ( state==SPERCENT || state==SHAVEFILL ) { state = SINWIDTH; iWidth = c - '0'; } else if ( state==SINWIDTH ) iWidth = iWidth * 10 + c - '0'; else if ( state==SINPREC ) iPrec = iPrec * 10 + c - '0'; break; case '-': if ( state==SPERCENT ) bHeadingSpace = false; else state = SNORMAL; // FIXME? means that bad/unhandled syntax with dash will be just ignored break; case '.': state = SINPREC; iPrec = 0; break; case 's': // string { const char * pValue = va_arg ( ap, const char * ); int iValue = strlen ( pValue ); if ( iWidth && bHeadingSpace ) while ( iValue < iWidth-- ) *pOutput++ = ' '; if ( iPrec && iPrec < iValue ) while ( iPrec-- ) *pOutput++ = *pValue++; else while ( *pValue ) *pOutput++ = *pValue++; if ( iWidth && !bHeadingSpace ) while ( iValue < iWidth-- ) *pOutput++ = ' '; state = SNORMAL; break; } case 'p': // pointer { void * pValue = va_arg ( ap, void * ); uint64_t uValue = uint64_t ( pValue ); UItoA ( &pOutput, uValue, 16, iWidth, iPrec, cFill ); state = SNORMAL; break; } case 'x': // hex integer case 'd': // decimal integer { DWORD uValue = va_arg ( ap, DWORD ); UItoA ( &pOutput, uValue, ( c=='x' ) ? 16 : 10, iWidth, iPrec, cFill ); state = SNORMAL; break; } case 'l': // decimal int64 { int64_t iValue = va_arg ( ap, int64_t ); UItoA ( &pOutput, iValue, 10, iWidth, iPrec, cFill ); state = SNORMAL; break; } default: state = SNORMAL; *pOutput++ = c; } } // final zero to EOL *pOutput++ = '\n'; return pOutput - pBegin; } bool sphWrite ( int iFD, const void * pBuf, size_t iSize ) { return ( iSize==(size_t)::write ( iFD, pBuf, iSize ) ); } static char g_sSafeInfoBuf [ 1024 ]; void sphSafeInfo ( int iFD, const char * sFmt, ... ) { if ( iFD<0 || !sFmt ) return; va_list ap; va_start ( ap, sFmt ); int iLen = sphVSprintf ( g_sSafeInfoBuf, sFmt, ap ); // FIXME! 
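// Why UItoA()/sphVSprintf() above avoid snprintf: the crash reporter may run
// inside a signal handler, where only async-signal-safe calls such as write()
// are safe, so numbers are converted by hand into a local buffer first and
// pushed out with a single write(). A stripped-down sketch of that idea
// (illustrative only, POSIX write() assumed):
#include <unistd.h>

static void SafeWriteUint ( int iFD, unsigned int uVal )
{
	char sBuf[16];
	char * p = sBuf + sizeof(sBuf);						// fill the buffer backwards
	do { *--p = (char)( '0' + uVal % 10 ); uVal /= 10; } while ( uVal );
	(void) ::write ( iFD, p, sBuf + sizeof(sBuf) - p );			// one write(), no stdio at all
}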
make this vsnprintf va_end ( ap ); sphWrite ( iFD, g_sSafeInfoBuf, iLen ); } #if !USE_WINDOWS #define SPH_BACKTRACE_ADDR_COUNT 128 static void * g_pBacktraceAddresses [SPH_BACKTRACE_ADDR_COUNT]; void sphBacktrace ( int iFD, bool bSafe ) { if ( iFD<0 ) return; sphSafeInfo ( iFD, "-------------- backtrace begins here ---------------" ); #ifdef COMPILER sphSafeInfo ( iFD, "Program compiled with " COMPILER ); #endif #ifdef OS_UNAME sphSafeInfo ( iFD, "Host OS is "OS_UNAME ); #endif bool bOk = true; void * pMyStack = NULL; int iStackSize = 0; if ( !bSafe ) { pMyStack = sphMyStack(); iStackSize = sphMyStackSize(); } sphSafeInfo ( iFD, "Stack bottom = 0x%p, thread stack size = 0x%x", pMyStack, iStackSize ); while ( pMyStack && !bSafe ) { sphSafeInfo ( iFD, "begin of manual backtrace:" ); BYTE ** pFramePointer = NULL; int iFrameCount = 0; int iReturnFrameCount = sphIsLtLib() ? 2 : 1; #ifdef __i386__ #define SIGRETURN_FRAME_OFFSET 17 __asm __volatile__ ( "movl %%ebp,%0":"=r"(pFramePointer):"r"(pFramePointer) ); #endif #ifdef __x86_64__ #define SIGRETURN_FRAME_OFFSET 23 __asm __volatile__ ( "movq %%rbp,%0":"=r"(pFramePointer):"r"(pFramePointer) ); #endif #ifndef SIGRETURN_FRAME_OFFSET #define SIGRETURN_FRAME_OFFSET 0 #endif if ( !pFramePointer ) { sphSafeInfo ( iFD, "Frame pointer is null, backtrace failed (did you build with -fomit-frame-pointer?)" ); break; } if ( !pMyStack || (BYTE*) pMyStack > (BYTE*) &pFramePointer ) { int iRound = Min ( 65536, iStackSize ); pMyStack = (void *) ( ( (size_t) &pFramePointer + iRound ) & ~(size_t)65535 ); sphSafeInfo ( iFD, "Something wrong with thread stack, backtrace may be incorrect (fp=%p)", pFramePointer ); if ( pFramePointer > (BYTE**) pMyStack || pFramePointer < (BYTE**) pMyStack - iStackSize ) { sphSafeInfo ( iFD, "Wrong stack limit or frame pointer, backtrace failed (fp=%p, stack=%p, stacksize=%d)", pFramePointer, pMyStack, iStackSize ); break; } } sphSafeInfo ( iFD, "Stack looks OK, attempting backtrace." ); BYTE** pNewFP; while ( pFramePointer < (BYTE**) pMyStack ) { pNewFP = (BYTE**) *pFramePointer; sphSafeInfo ( iFD, "%p", iFrameCount==iReturnFrameCount? *(pFramePointer + SIGRETURN_FRAME_OFFSET) : *(pFramePointer + 1) ); bOk = pNewFP > pFramePointer; if ( !bOk ) break; pFramePointer = pNewFP; iFrameCount++; } if ( !bOk ) sphSafeInfo ( iFD, "Something wrong in frame pointers, backtrace failed (fp=%p)", pNewFP ); break; } #if HAVE_BACKTRACE sphSafeInfo ( iFD, "begin of system backtrace:" ); int iDepth = backtrace ( g_pBacktraceAddresses, SPH_BACKTRACE_ADDR_COUNT ); #if HAVE_BACKTRACE_SYMBOLS sphSafeInfo ( iFD, "begin of system symbols:" ); backtrace_symbols_fd ( g_pBacktraceAddresses, iDepth, iFD ); #elif !HAVE_BACKTRACE_SYMBOLS sphSafeInfo ( iFD, "begin of manual symbols:" ); for ( int i=0; i indexer.sym\n" " 2. 
Attach the binary, generated .sym and the text of backtrace (see above) to the bug report.\n" "Also you can read the section about resolving backtraces in the documentation."); sphSafeInfo ( iFD, "-------------- backtrace ends here ---------------" ); } #else // USE_WINDOWS void sphBacktrace ( EXCEPTION_POINTERS * pExc, const char * sFile ) { if ( !pExc || !sFile || !(*sFile) ) { sphInfo ( "can't generate minidump" ); return; } HANDLE hFile = CreateFile ( sFile, GENERIC_WRITE, 0, 0, CREATE_ALWAYS, FILE_ATTRIBUTE_NORMAL, 0 ); if ( hFile==INVALID_HANDLE_VALUE ) { sphInfo ( "can't create minidump file '%s'", sFile ); return; } MINIDUMP_EXCEPTION_INFORMATION tExcInfo; tExcInfo.ExceptionPointers = pExc; tExcInfo.ClientPointers = FALSE; tExcInfo.ThreadId = GetCurrentThreadId(); bool bDumped = ( MiniDumpWriteDump ( GetCurrentProcess(), GetCurrentProcessId(), hFile, MiniDumpNormal, &tExcInfo, 0, 0 )==TRUE ); CloseHandle ( hFile ); if ( !bDumped ) sphInfo ( "can't dump minidump" ); } #endif // USE_WINDOWS // // $Id: sphinxutils.cpp 3109 2012-02-19 14:13:20Z shodan $ // sphinx-2.0.4-release/src/yysphinxexpr.h0000644000176700017710000000646211605620330017501 0ustar deogardeogar/* A Bison parser, made by GNU Bison 1.875. */ /* Skeleton parser for Yacc-like parsing with Bison, Copyright (C) 1984, 1989, 1990, 2000, 2001, 2002 Free Software Foundation, Inc. This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */ /* As a special exception, when this file is copied by Bison into a Bison output file, you may use that output file without restriction. This special exception was added by the Free Software Foundation in version 1.24 of Bison. */ /* Tokens. */ #ifndef YYTOKENTYPE # define YYTOKENTYPE /* Put the tokens into the symbol table, so that GDB and other debuggers know about them. 
*/ enum yytokentype { TOK_CONST_INT = 258, TOK_CONST_FLOAT = 259, TOK_CONST_STRING = 260, TOK_ATTR_INT = 261, TOK_ATTR_BITS = 262, TOK_ATTR_FLOAT = 263, TOK_ATTR_MVA32 = 264, TOK_ATTR_MVA64 = 265, TOK_ATTR_STRING = 266, TOK_FUNC = 267, TOK_FUNC_IN = 268, TOK_USERVAR = 269, TOK_UDF = 270, TOK_HOOK_IDENT = 271, TOK_HOOK_FUNC = 272, TOK_ATID = 273, TOK_ATWEIGHT = 274, TOK_ID = 275, TOK_WEIGHT = 276, TOK_COUNT = 277, TOK_DISTINCT = 278, TOK_CONST_LIST = 279, TOK_ATTR_SINT = 280, TOK_OR = 281, TOK_AND = 282, TOK_NE = 283, TOK_EQ = 284, TOK_GTE = 285, TOK_LTE = 286, TOK_MOD = 287, TOK_DIV = 288, TOK_NOT = 289, TOK_NEG = 290 }; #endif #define TOK_CONST_INT 258 #define TOK_CONST_FLOAT 259 #define TOK_CONST_STRING 260 #define TOK_ATTR_INT 261 #define TOK_ATTR_BITS 262 #define TOK_ATTR_FLOAT 263 #define TOK_ATTR_MVA32 264 #define TOK_ATTR_MVA64 265 #define TOK_ATTR_STRING 266 #define TOK_FUNC 267 #define TOK_FUNC_IN 268 #define TOK_USERVAR 269 #define TOK_UDF 270 #define TOK_HOOK_IDENT 271 #define TOK_HOOK_FUNC 272 #define TOK_ATID 273 #define TOK_ATWEIGHT 274 #define TOK_ID 275 #define TOK_WEIGHT 276 #define TOK_COUNT 277 #define TOK_DISTINCT 278 #define TOK_CONST_LIST 279 #define TOK_ATTR_SINT 280 #define TOK_OR 281 #define TOK_AND 282 #define TOK_NE 283 #define TOK_EQ 284 #define TOK_GTE 285 #define TOK_LTE 286 #define TOK_MOD 287 #define TOK_DIV 288 #define TOK_NOT 289 #define TOK_NEG 290 #if ! defined (YYSTYPE) && ! defined (YYSTYPE_IS_DECLARED) typedef union YYSTYPE { int64_t iConst; // constant value float fConst; // constant value uint64_t iAttrLocator; // attribute locator (rowitem for int/float; offset+size for bits) int iFunc; // function id int iNode; // node, or uservar, or udf index } YYSTYPE; /* Line 1204 of yacc.c. */ # define yystype YYSTYPE /* obsolescent; will be withdrawn */ # define YYSTYPE_IS_DECLARED 1 # define YYSTYPE_IS_TRIVIAL 1 #endif sphinx-2.0.4-release/src/yysphinxql.h0000644000176700017710000001262511640064330017136 0ustar deogardeogar/* A Bison parser, made by GNU Bison 1.875. */ /* Skeleton parser for Yacc-like parsing with Bison, Copyright (C) 1984, 1989, 1990, 2000, 2001, 2002 Free Software Foundation, Inc. This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */ /* As a special exception, when this file is copied by Bison into a Bison output file, you may use that output file without restriction. This special exception was added by the Free Software Foundation in version 1.24 of Bison. */ /* Tokens. */ #ifndef YYTOKENTYPE # define YYTOKENTYPE /* Put the tokens into the symbol table, so that GDB and other debuggers know about them. 
*/ enum yytokentype { TOK_IDENT = 258, TOK_ATIDENT = 259, TOK_CONST_INT = 260, TOK_CONST_FLOAT = 261, TOK_CONST_MVA = 262, TOK_QUOTED_STRING = 263, TOK_USERVAR = 264, TOK_SYSVAR = 265, TOK_CONST_STRINGS = 266, TOK_AS = 267, TOK_ASC = 268, TOK_ATTACH = 269, TOK_AVG = 270, TOK_BEGIN = 271, TOK_BETWEEN = 272, TOK_BY = 273, TOK_CALL = 274, TOK_COLLATION = 275, TOK_COMMIT = 276, TOK_COMMITTED = 277, TOK_COUNT = 278, TOK_CREATE = 279, TOK_DELETE = 280, TOK_DESC = 281, TOK_DESCRIBE = 282, TOK_DISTINCT = 283, TOK_DIV = 284, TOK_DROP = 285, TOK_FALSE = 286, TOK_FLOAT = 287, TOK_FLUSH = 288, TOK_FROM = 289, TOK_FUNCTION = 290, TOK_GLOBAL = 291, TOK_GROUP = 292, TOK_ID = 293, TOK_IN = 294, TOK_INDEX = 295, TOK_INSERT = 296, TOK_INT = 297, TOK_INTO = 298, TOK_ISOLATION = 299, TOK_LEVEL = 300, TOK_LIMIT = 301, TOK_MATCH = 302, TOK_MAX = 303, TOK_META = 304, TOK_MIN = 305, TOK_MOD = 306, TOK_NAMES = 307, TOK_NULL = 308, TOK_OPTION = 309, TOK_ORDER = 310, TOK_RAND = 311, TOK_READ = 312, TOK_REPEATABLE = 313, TOK_REPLACE = 314, TOK_RETURNS = 315, TOK_ROLLBACK = 316, TOK_RTINDEX = 317, TOK_SELECT = 318, TOK_SERIALIZABLE = 319, TOK_SET = 320, TOK_SESSION = 321, TOK_SHOW = 322, TOK_SONAME = 323, TOK_START = 324, TOK_STATUS = 325, TOK_SUM = 326, TOK_TABLES = 327, TOK_TO = 328, TOK_TRANSACTION = 329, TOK_TRUE = 330, TOK_UNCOMMITTED = 331, TOK_UPDATE = 332, TOK_VALUES = 333, TOK_VARIABLES = 334, TOK_WARNINGS = 335, TOK_WEIGHT = 336, TOK_WHERE = 337, TOK_WITHIN = 338, TOK_OR = 339, TOK_AND = 340, TOK_NE = 341, TOK_GTE = 342, TOK_LTE = 343, TOK_NOT = 344, TOK_NEG = 345 }; #endif #define TOK_IDENT 258 #define TOK_ATIDENT 259 #define TOK_CONST_INT 260 #define TOK_CONST_FLOAT 261 #define TOK_CONST_MVA 262 #define TOK_QUOTED_STRING 263 #define TOK_USERVAR 264 #define TOK_SYSVAR 265 #define TOK_CONST_STRINGS 266 #define TOK_AS 267 #define TOK_ASC 268 #define TOK_ATTACH 269 #define TOK_AVG 270 #define TOK_BEGIN 271 #define TOK_BETWEEN 272 #define TOK_BY 273 #define TOK_CALL 274 #define TOK_COLLATION 275 #define TOK_COMMIT 276 #define TOK_COMMITTED 277 #define TOK_COUNT 278 #define TOK_CREATE 279 #define TOK_DELETE 280 #define TOK_DESC 281 #define TOK_DESCRIBE 282 #define TOK_DISTINCT 283 #define TOK_DIV 284 #define TOK_DROP 285 #define TOK_FALSE 286 #define TOK_FLOAT 287 #define TOK_FLUSH 288 #define TOK_FROM 289 #define TOK_FUNCTION 290 #define TOK_GLOBAL 291 #define TOK_GROUP 292 #define TOK_ID 293 #define TOK_IN 294 #define TOK_INDEX 295 #define TOK_INSERT 296 #define TOK_INT 297 #define TOK_INTO 298 #define TOK_ISOLATION 299 #define TOK_LEVEL 300 #define TOK_LIMIT 301 #define TOK_MATCH 302 #define TOK_MAX 303 #define TOK_META 304 #define TOK_MIN 305 #define TOK_MOD 306 #define TOK_NAMES 307 #define TOK_NULL 308 #define TOK_OPTION 309 #define TOK_ORDER 310 #define TOK_RAND 311 #define TOK_READ 312 #define TOK_REPEATABLE 313 #define TOK_REPLACE 314 #define TOK_RETURNS 315 #define TOK_ROLLBACK 316 #define TOK_RTINDEX 317 #define TOK_SELECT 318 #define TOK_SERIALIZABLE 319 #define TOK_SET 320 #define TOK_SESSION 321 #define TOK_SHOW 322 #define TOK_SONAME 323 #define TOK_START 324 #define TOK_STATUS 325 #define TOK_SUM 326 #define TOK_TABLES 327 #define TOK_TO 328 #define TOK_TRANSACTION 329 #define TOK_TRUE 330 #define TOK_UNCOMMITTED 331 #define TOK_UPDATE 332 #define TOK_VALUES 333 #define TOK_VARIABLES 334 #define TOK_WARNINGS 335 #define TOK_WEIGHT 336 #define TOK_WHERE 337 #define TOK_WITHIN 338 #define TOK_OR 339 #define TOK_AND 340 #define TOK_NE 341 #define TOK_GTE 342 #define TOK_LTE 343 #define TOK_NOT 344 
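/* Illustrative only: a fragment showing how a hand-written SphinxQL lexer
   might map keywords onto the Bison token codes defined here (the real lexer
   lives elsewhere in the sources, not in this generated header). strcasecmp()
   is assumed to be available. */
static int SqlKeywordTokenSketch ( const char * sWord )
{
	if ( !strcasecmp ( sWord, "select" ) )	return TOK_SELECT;
	if ( !strcasecmp ( sWord, "from" ) )	return TOK_FROM;
	if ( !strcasecmp ( sWord, "where" ) )	return TOK_WHERE;
	if ( !strcasecmp ( sWord, "limit" ) )	return TOK_LIMIT;
	return TOK_IDENT;	/* anything else is treated as a plain identifier */
}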
#define TOK_NEG 345 #if ! defined (YYSTYPE) && ! defined (YYSTYPE_IS_DECLARED) typedef int YYSTYPE; # define yystype YYSTYPE /* obsolescent; will be withdrawn */ # define YYSTYPE_IS_DECLARED 1 # define YYSTYPE_IS_TRIVIAL 1 #endif sphinx-2.0.4-release/src/sphinx.h0000644000176700017710000025204111723635623016230 0ustar deogardeogar// // $Id: sphinx.h 3131 2012-03-01 09:04:19Z deogar $ // // // Copyright (c) 2001-2012, Andrew Aksyonoff // Copyright (c) 2008-2012, Sphinx Technologies Inc // All rights reserved // // This program is free software; you can redistribute it and/or modify // it under the terms of the GNU General Public License. You should have // received a copy of the GPL license along with this program; if you // did not, you can find it at http://www.gnu.org/ // #ifndef _sphinx_ #define _sphinx_ ///////////////////////////////////////////////////////////////////////////// #ifdef _WIN32 #define USE_MYSQL 1 /// whether to compile MySQL support #define USE_PGSQL 0 /// whether to compile PgSQL support #define USE_ODBC 1 /// whether to compile ODBC support #define USE_LIBEXPAT 1 /// whether to compile libexpat support #define USE_LIBICONV 1 /// whether to compile iconv support #define USE_LIBXML 0 /// whether to compile libxml support #define USE_LIBSTEMMER 0 /// whether to compile libstemmber support #define USE_WINDOWS 1 /// whether to compile for Windows #define USE_SYSLOG 0 /// whether to use syslog for logging #define UNALIGNED_RAM_ACCESS 1 #define USE_LITTLE_ENDIAN 1 #else #define USE_WINDOWS 0 /// whether to compile for Windows #endif ///////////////////////////////////////////////////////////////////////////// #include "sphinxstd.h" #include "sphinxexpr.h" // to remove? #include #include #include #include #ifdef HAVE_CONFIG_H #include "config.h" #endif #if USE_PGSQL #include #endif #if USE_WINDOWS #include #else #include #include #endif #if USE_MYSQL #include #endif #if USE_WINDOWS typedef __int64 SphOffset_t; #define STDOUT_FILENO fileno(stdout) #define STDERR_FILENO fileno(stderr) #else typedef off_t SphOffset_t; #endif #if USE_ODBC #include #endif ///////////////////////////////////////////////////////////////////////////// #ifndef USE_64BIT #define USE_64BIT 0 #endif #if USE_64BIT // use 64-bit unsigned integers to store document and word IDs #define SPHINX_BITS_TAG "-id64" typedef uint64_t SphWordID_t; typedef uint64_t SphDocID_t; #define DOCID_MAX U64C(0xffffffffffffffff) #define DOCID_FMT UINT64_FMT #define DOCINFO_IDSIZE 2 STATIC_SIZE_ASSERT ( SphWordID_t, 8 ); STATIC_SIZE_ASSERT ( SphDocID_t, 8 ); #else // use 32-bit unsigned integers to store document and word IDs #define SPHINX_BITS_TAG "" typedef DWORD SphWordID_t; typedef DWORD SphDocID_t; #define DOCID_MAX 0xffffffffUL #define DOCID_FMT "%u" #define DOCINFO_IDSIZE 1 STATIC_SIZE_ASSERT ( SphWordID_t, 4 ); STATIC_SIZE_ASSERT ( SphDocID_t, 4 ); #endif // USE_64BIT #define DWSIZEOF(a) ( sizeof(a) / sizeof(DWORD) ) ////////////////////////////////////////////////////////////////////////// /// row entry (storage only, does not necessarily map 1:1 to attributes) typedef DWORD CSphRowitem; /// widest integer type that can be be stored as an attribute (ideally, fully decoupled from rowitem size!) 
typedef int64_t SphAttr_t; const CSphRowitem ROWITEM_MAX = UINT_MAX; const int ROWITEM_BITS = 8*sizeof(CSphRowitem); const int ROWITEM_SHIFT = 5; STATIC_ASSERT ( ( 1 << ROWITEM_SHIFT )==ROWITEM_BITS, INVALID_ROWITEM_SHIFT ); #ifndef USE_LITTLE_ENDIAN #error Please define endianness #endif template < typename DOCID > inline DOCID DOCINFO2ID_T ( const DWORD * pDocinfo ); template<> inline DWORD DOCINFO2ID_T ( const DWORD * pDocinfo ) { return pDocinfo[0]; } template<> inline uint64_t DOCINFO2ID_T ( const DWORD * pDocinfo ) { #if USE_LITTLE_ENDIAN return uint64_t(pDocinfo[0]) + (uint64_t(pDocinfo[1])<<32); #else return uint64_t(pDocinfo[1]) + (uint64_t(pDocinfo[0])<<32); #endif } inline void DOCINFOSETID ( DWORD * pDocinfo, DWORD uValue ) { *pDocinfo = uValue; } inline void DOCINFOSETID ( DWORD * pDocinfo, uint64_t uValue ) { #if USE_LITTLE_ENDIAN pDocinfo[0] = (DWORD)uValue; pDocinfo[1] = (DWORD)(uValue>>32); #else pDocinfo[0] = (DWORD)(uValue>>32); pDocinfo[1] = (DWORD)uValue; #endif } inline SphDocID_t DOCINFO2ID ( const DWORD * pDocinfo ) { return DOCINFO2ID_T ( pDocinfo ); } #if PARANOID template < typename DOCID > inline DWORD * DOCINFO2ATTRS_T ( DWORD * pDocinfo ) { assert ( pDocinfo ); return pDocinfo+DWSIZEOF(DOCID); } template < typename DOCID > inline const DWORD * DOCINFO2ATTRS_T ( const DWORD * pDocinfo ) { assert ( pDocinfo ); return pDocinfo+DWSIZEOF(DOCID); } template < typename DOCID > inline DWORD * STATIC2DOCINFO_T ( DWORD * pAttrs ) { assert ( pDocinfo ); return pAttrs-DWSIZEOF(DOCID); } template < typename DOCID > inline const DWORD * STATIC2DOCINFO_T ( const DWORD * pAttrs ) { assert ( pDocinfo ); return pAttrs-DWSIZEOF(DOCID); } #else template < typename DOCID > inline DWORD * DOCINFO2ATTRS_T ( DWORD * pDocinfo ) { return pDocinfo + DWSIZEOF(DOCID); } template < typename DOCID > inline const DWORD * DOCINFO2ATTRS_T ( const DWORD * pDocinfo ) { return pDocinfo + DWSIZEOF(DOCID); } template < typename DOCID > inline DWORD * STATIC2DOCINFO_T ( DWORD * pAttrs ) { return pAttrs - DWSIZEOF(DOCID); } template < typename DOCID > inline const DWORD * STATIC2DOCINFO_T ( const DWORD * pAttrs ) { return pAttrs - DWSIZEOF(DOCID); } #endif inline DWORD * DOCINFO2ATTRS ( DWORD * pDocinfo ) { return DOCINFO2ATTRS_T(pDocinfo); } inline const DWORD * DOCINFO2ATTRS ( const DWORD * pDocinfo ) { return DOCINFO2ATTRS_T(pDocinfo); } inline DWORD * STATIC2DOCINFO ( DWORD * pAttrs ) { return STATIC2DOCINFO_T(pAttrs); } inline const DWORD * STATIC2DOCINFO ( const DWORD * pAttrs ) { return STATIC2DOCINFO_T(pAttrs); } ///////////////////////////////////////////////////////////////////////////// #include "sphinxversion.h" #ifndef SPHINX_TAG #define SPHINX_TAG "-dev" #endif #define SPHINX_VERSION "2.0.4" SPHINX_BITS_TAG SPHINX_TAG " (" SPH_SVN_TAGREV ")" #define SPHINX_BANNER "Sphinx " SPHINX_VERSION "\nCopyright (c) 2001-2012, Andrew Aksyonoff\nCopyright (c) 2008-2012, Sphinx Technologies Inc (http://sphinxsearch.com)\n\n" #define SPHINX_SEARCHD_PROTO 1 #define SPH_MAX_WORD_LEN 42 // so that any UTF-8 word fits 127 bytes #define SPH_MAX_FILENAME_LEN 512 #define SPH_MAX_FIELDS 256 ///////////////////////////////////////////////////////////////////////////// /// microsecond precision timestamp /// current UNIX timestamp in seconds multiplied by 1000000, plus microseconds since the beginning of current second int64_t sphMicroTimer (); /// Sphinx CRC32 implementation DWORD sphCRC32 ( const BYTE * pString ); DWORD sphCRC32 ( const BYTE * pString, int iLen ); DWORD sphCRC32 ( const BYTE * pString, int 
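// Round-trip sketch for the id accessors above: the document id occupies the
// first DOCINFO_IDSIZE DWORDs of a docinfo row, split in a byte-order aware
// way, and the attribute rowitems follow it. Illustrative only; assumes a
// USE_64BIT build where SphDocID_t is uint64_t.
static bool DocidRoundTripSketch ()
{
	DWORD dRow [ DOCINFO_IDSIZE+2 ];			// id part plus two attribute rowitems
	SphDocID_t uDocid = (SphDocID_t) U64C ( 0x0123456789abcdef );
	DOCINFOSETID ( dRow, uDocid );				// pack the id into the leading DWORDs
	DWORD * pAttrs = DOCINFO2ATTRS ( dRow );		// attributes start right after the id
	pAttrs[0] = 42;
	return DOCINFO2ID ( dRow )==uDocid;			// and the id reads back intact
}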
iLen, DWORD uPrevCRC ); /// Sphinx FNV64 implementation const uint64_t SPH_FNV64_SEED = 0xcbf29ce484222325ULL; uint64_t sphFNV64 ( const BYTE * pString ); uint64_t sphFNV64 ( const BYTE * s, int iLen, uint64_t uPrev = SPH_FNV64_SEED ); /// calculate file crc32 bool sphCalcFileCRC32 ( const char * szFilename, DWORD & uCRC32 ); /// replaces all occurences of sMacro in sTemplate with textual representation of uValue char * sphStrMacro ( const char * sTemplate, const char * sMacro, SphDocID_t uValue ); /// try to obtain an exclusive lock on specified file /// bWait specifies whether to wait bool sphLockEx ( int iFile, bool bWait ); /// remove existing locks void sphLockUn ( int iFile ); /// millisecond-precision sleep void sphSleepMsec ( int iMsec ); /// check if file exists and is a readable file bool sphIsReadable ( const char * sFilename, CSphString * pError=NULL ); /// set throttling options void sphSetThrottling ( int iMaxIOps, int iMaxIOSize ); /// immediately interrupt current query void sphInterruptNow(); #if !USE_WINDOWS /// set process info void sphSetProcessInfo ( bool bHead ); #endif struct CSphIOStats { int64_t m_iReadTime; DWORD m_iReadOps; int64_t m_iReadBytes; int64_t m_iWriteTime; DWORD m_iWriteOps; int64_t m_iWriteBytes; }; /// clear stats, starts collecting void sphStartIOStats (); /// stops collecting stats, returns results const CSphIOStats & sphStopIOStats (); ////////////////////////////////////////////////////////////////////////// #if UNALIGNED_RAM_ACCESS /// pass-through wrapper template < typename T > inline T sphUnalignedRead ( const T & tRef ) { return tRef; } /// pass-through wrapper template < typename T > void sphUnalignedWrite ( void * pPtr, const T & tVal ) { *(T*)pPtr = tVal; } #else /// unaligned read wrapper for some architectures (eg. SPARC) template < typename T > inline T sphUnalignedRead ( const T & tRef ) { T uTmp; BYTE * pSrc = (BYTE *) &tRef; BYTE * pDst = (BYTE *) &uTmp; for ( int i=0; i<(int)sizeof(T); i++ ) *pDst++ = *pSrc++; return uTmp; } /// unaligned write wrapper for some architectures (eg. 
SPARC) template < typename T > void sphUnalignedWrite ( void * pPtr, const T & tVal ) { BYTE * pDst = (BYTE *) pPtr; BYTE * pSrc = (BYTE *) &tVal; for ( int i=0; i<(int)sizeof(T); i++ ) *pDst++ = *pSrc++; } #endif int sphUTF8Len ( const char * pStr ); /// check for valid attribute name char inline int sphIsAttr ( int c ) { // different from sphIsAlpha() in that we don't allow minus return ( c>='0' && c<='9' ) || ( c>='a' && c<='z' ) || ( c>='A' && c<='Z' ) || c=='_'; } ///////////////////////////////////////////////////////////////////////////// // TOKENIZERS ///////////////////////////////////////////////////////////////////////////// extern const char * SPHINX_DEFAULT_SBCS_TABLE; extern const char * SPHINX_DEFAULT_UTF8_TABLE; ///////////////////////////////////////////////////////////////////////////// /// lowercaser remap range struct CSphRemapRange { int m_iStart; int m_iEnd; int m_iRemapStart; CSphRemapRange () : m_iStart ( -1 ) , m_iEnd ( -1 ) , m_iRemapStart ( -1 ) {} CSphRemapRange ( int iStart, int iEnd, int iRemapStart ) : m_iStart ( iStart ) , m_iEnd ( iEnd ) , m_iRemapStart ( iRemapStart ) {} }; inline bool operator < ( const CSphRemapRange & a, const CSphRemapRange & b ) { return a.m_iStart < b.m_iStart; } /// lowercaser class CSphLowercaser { public: CSphLowercaser (); ~CSphLowercaser (); void Reset (); void SetRemap ( const CSphLowercaser * pLC ); void AddRemaps ( const CSphVector & dRemaps, DWORD uFlags ); void AddSpecials ( const char * sSpecials ); uint64_t GetFNV () const; public: const CSphLowercaser & operator = ( const CSphLowercaser & rhs ); public: inline int ToLower ( int iCode ) const { if ( iCode<0 || iCode>=MAX_CODE ) return iCode; register int * pChunk = m_pChunk [ iCode >> CHUNK_BITS ]; if ( pChunk ) return pChunk [ iCode & CHUNK_MASK ]; return 0; } protected: static const int CHUNK_COUNT = 0x300; static const int CHUNK_BITS = 8; static const int CHUNK_SIZE = 1 << CHUNK_BITS; static const int CHUNK_MASK = CHUNK_SIZE - 1; static const int MAX_CODE = CHUNK_COUNT * CHUNK_SIZE; int m_iChunks; ///< how much chunks are actually allocated int * m_pData; ///< chunks themselves int * m_pChunk [ CHUNK_COUNT ]; ///< pointers to non-empty chunks }; ///////////////////////////////////////////////////////////////////////////// struct CSphSavedFile { CSphString m_sFilename; SphOffset_t m_uSize; SphOffset_t m_uCTime; SphOffset_t m_uMTime; DWORD m_uCRC32; CSphSavedFile (); }; struct CSphTokenizerSettings { int m_iType; CSphString m_sCaseFolding; int m_iMinWordLen; CSphString m_sSynonymsFile; CSphString m_sBoundary; CSphString m_sIgnoreChars; int m_iNgramLen; CSphString m_sNgramChars; CSphString m_sBlendChars; CSphString m_sBlendMode; CSphTokenizerSettings (); }; struct CSphMultiformContainer; /// generic tokenizer class ISphTokenizer { public: /// trivial ctor ISphTokenizer(); /// trivial dtor virtual ~ISphTokenizer () {} public: /// set new translation table /// returns true on success, false on failure virtual bool SetCaseFolding ( const char * sConfig, CSphString & sError ); /// add additional range to translation table virtual void AddCaseFolding ( CSphRemapRange & tRange ); /// add special chars to translation table (SBCS only, for now) /// updates lowercaser so that these remap to -1 virtual void AddSpecials ( const char * sSpecials ); /// set ignored characters virtual bool SetIgnoreChars ( const char * sIgnored, CSphString & sError ); /// set n-gram characters (for CJK n-gram indexing) virtual bool SetNgramChars ( const char *, CSphString & ) { return true; } /// set 
n-gram length (for CJK n-gram indexing) virtual void SetNgramLen ( int ) {} /// load synonyms list virtual bool LoadSynonyms ( const char * sFilename, CSphString & sError ) = 0; /// set phrase boundary chars virtual bool SetBoundary ( const char * sConfig, CSphString & sError ); /// set blended characters virtual bool SetBlendChars ( const char * sConfig, CSphString & sError ); /// set blended tokens processing mode virtual bool SetBlendMode ( const char * sMode, CSphString & sError ); /// setup tokenizer using given settings virtual void Setup ( const CSphTokenizerSettings & tSettings ); /// create a tokenizer using the given settings static ISphTokenizer * Create ( const CSphTokenizerSettings & tSettings, CSphString & sError ); /// create a token filter static ISphTokenizer * CreateTokenFilter ( ISphTokenizer * pTokenizer, const CSphMultiformContainer * pContainer ); /// save tokenizer settings to a stream virtual const CSphTokenizerSettings & GetSettings () const { return m_tSettings; } /// get synonym file info virtual const CSphSavedFile & GetSynFileInfo () const { return m_tSynFileInfo; } public: /// pass next buffer virtual void SetBuffer ( BYTE * sBuffer, int iLength ) = 0; /// get next token virtual BYTE * GetToken () = 0; /// calc codepoint length virtual int GetCodepointLength ( int iCode ) const = 0; /// handle tokens less than min_word_len if they match filter virtual void EnableQueryParserMode ( bool bEnable ) { m_bQueryMode = bEnable; m_bShortTokenFilter = bEnable; m_uBlendVariants = BLEND_TRIM_NONE; } /// enable indexing-time sentence boundary detection, and paragraph indexing virtual bool EnableSentenceIndexing ( CSphString & sError ); /// enable zone indexing virtual bool EnableZoneIndexing ( CSphString & sError ); /// enable tokenized multiform tracking virtual void EnableTokenizedMultiformTracking () {} /// get last token length, in codepoints virtual int GetLastTokenLen () const { return m_iLastTokenLen; } /// get last token boundary flag (true if there was a boundary before the token) virtual bool GetBoundary () { return m_bTokenBoundary; } /// get byte offset of the last boundary character virtual int GetBoundaryOffset () { return m_iBoundaryOffset; } /// was last token a special one? virtual bool WasTokenSpecial () { return m_bWasSpecial; } /// get amount of overshort keywords skipped before this token virtual int GetOvershortCount () { return m_iOvershortCount; } /// get original tokenized multiform (if any); NULL means there was none virtual BYTE * GetTokenizedMultiform () { return NULL; } virtual bool TokenIsBlended () const { return m_bBlended; } virtual bool TokenIsBlendedPart () const { return m_bBlendedPart; } virtual int SkipBlended () { return 0; } public: /// spawn a clone of my own virtual ISphTokenizer * Clone ( bool bEscaped ) const = 0; /// SBCS or UTF-8? virtual bool IsUtf8 () const = 0; /// start buffer point of last token virtual const char * GetTokenStart () const = 0; /// end buffer point of last token (exclusive, ie. *GetTokenEnd() is already NOT part of a token!) 
virtual const char * GetTokenEnd () const = 0; /// current buffer ptr virtual const char * GetBufferPtr () const = 0; /// buffer end virtual const char * GetBufferEnd () const = 0; /// set new buffer ptr (must be within current bounds) virtual void SetBufferPtr ( const char * sNewPtr ) = 0; // get settings hash uint64_t GetSettingsFNV () const { return m_tLC.GetFNV(); } protected: virtual bool RemapCharacters ( const char * sConfig, DWORD uFlags, const char * sSource, bool bCanRemap, CSphString & sError ); virtual bool AddSpecialsSPZ ( const char * sSpecials, const char * sDirective, CSphString & sError ); protected: static const int MAX_SYNONYM_LEN = 1024; ///< max synonyms map-from length, bytes static const BYTE BLEND_TRIM_NONE = 1; static const BYTE BLEND_TRIM_HEAD = 2; static const BYTE BLEND_TRIM_TAIL = 4; static const BYTE BLEND_TRIM_BOTH = 8; CSphLowercaser m_tLC; ///< my lowercaser int m_iLastTokenLen; ///< last token length, in codepoints bool m_bTokenBoundary; ///< last token boundary flag (true after boundary codepoint followed by separator) bool m_bBoundary; ///< boundary flag (true immediately after boundary codepoint) int m_iBoundaryOffset; ///< boundary character offset (in bytes) bool m_bWasSpecial; ///< special token flag bool m_bEscaped; ///< backslash handling flag int m_iOvershortCount; ///< skipped overshort tokens count bool m_bBlended; ///< whether last token (as in just returned from GetToken()) was blended bool m_bNonBlended; ///< internal, whether there were any normal chars in that blended token bool m_bBlendedPart; ///< whether last token is a normal subtoken of a blended token bool m_bBlendAdd; ///< whether we have more pending blended variants (of current accumulator) to return BYTE m_uBlendVariants; ///< mask of blended variants as requested by blend_mode (see BLEND_TRIM_xxx flags) BYTE m_uBlendVariantsPending; ///< mask of pending blended variants (we clear bits as we return variants) bool m_bBlendSkipPure; ///< skip purely blended tokens bool m_bShortTokenFilter; ///< short token filter flag bool m_bQueryMode; ///< is this indexing time or searching time? bool m_bDetectSentences; ///< should we detect sentence boundaries? 
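// Usage sketch for this interface: a tokenizer is created from settings via
// the factory, fed a buffer, then drained with GetToken() until it returns
// NULL. Illustrative only; error handling is reduced to a bool and stdio is
// assumed available.
static bool TokenizeSketch ( const CSphTokenizerSettings & tSettings, CSphString & sError )
{
	ISphTokenizer * pTok = ISphTokenizer::Create ( tSettings, sError );
	if ( !pTok )
		return false;

	char sText[] = "hello tokenizer world";
	pTok->SetBuffer ( (BYTE*)sText, (int)strlen(sText) );		// pass the raw text in

	while ( BYTE * sToken = pTok->GetToken() )			// NULL means the buffer is exhausted
		printf ( "token: %s (%d codepoints)\n", (char*)sToken, pTok->GetLastTokenLen() );

	delete pTok;							// ISphTokenizer has a virtual dtor
	return true;
}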
CSphTokenizerSettings m_tSettings; ///< tokenizer settings CSphSavedFile m_tSynFileInfo; ///< synonyms file info public: bool m_bPhrase; }; /// parse charset table bool sphParseCharset ( const char * sCharset, CSphVector & dRemaps ); /// create SBCS tokenizer ISphTokenizer * sphCreateSBCSTokenizer (); /// create UTF-8 tokenizer ISphTokenizer * sphCreateUTF8Tokenizer (); /// create UTF-8 tokenizer with n-grams support (for CJK n-gram indexing) ISphTokenizer * sphCreateUTF8NgramTokenizer (); ///////////////////////////////////////////////////////////////////////////// // DICTIONARIES ///////////////////////////////////////////////////////////////////////////// struct CSphDictSettings { CSphString m_sMorphology; CSphString m_sStopwords; CSphString m_sWordforms; int m_iMinStemmingLen; bool m_bWordDict; bool m_bCrc32; CSphDictSettings () : m_iMinStemmingLen ( 1 ) , m_bWordDict ( false ) , m_bCrc32 ( !USE_64BIT ) {} }; /// abstract word dictionary interface struct CSphWordHit; struct CSphDict { /// virtualizing dtor virtual ~CSphDict () {} /// get word ID by word, "text" version /// may apply stemming and modify word inplace /// returns 0 for stopwords virtual SphWordID_t GetWordID ( BYTE * pWord ) = 0; /// get word ID by word, "text" version /// may apply stemming and modify word inplace /// accepts words with already prepended MAGIC_WORD_HEAD /// appends MAGIC_WORD_TAIL /// returns 0 for stopwords virtual SphWordID_t GetWordIDWithMarkers ( BYTE * pWord ) { return GetWordID ( pWord ); } /// get word ID by word, "text" version /// does NOT apply stemming /// accepts words with already prepended MAGIC_WORD_HEAD_NONSTEMMED /// returns 0 for stopwords virtual SphWordID_t GetWordIDNonStemmed ( BYTE * pWord ) { return GetWordID ( pWord ); } /// get word ID by word, "binary" version /// only used with prefix/infix indexing /// must not apply stemming and modify anything /// filters stopwords on request virtual SphWordID_t GetWordID ( const BYTE * pWord, int iLen, bool bFilterStops ) = 0; /// apply stemmers to the given word virtual void ApplyStemmers ( BYTE * ) {} /// load stopwords from given files virtual void LoadStopwords ( const char * sFiles, ISphTokenizer * pTokenizer ) = 0; /// load wordforms from a given file virtual bool LoadWordforms ( const char * sFile, ISphTokenizer * pTokenizer, const char * sIndex ) = 0; /// set morphology virtual bool SetMorphology ( const char * szMorph, bool bUseUTF8, CSphString & sError ) = 0; virtual bool HasMorphology () const { return false; } /// setup dictionary using settings virtual void Setup ( const CSphDictSettings & tSettings ) = 0; /// get dictionary settings virtual const CSphDictSettings & GetSettings () const = 0; /// stopwords file infos virtual const CSphVector & GetStopwordsFileInfos () = 0; /// wordforms file infos virtual const CSphSavedFile & GetWordformsFileInfo () = 0; /// get multiwordforms virtual const CSphMultiformContainer * GetMultiWordforms () const = 0; /// check what given word is stopword virtual bool IsStopWord ( const BYTE * pWord ) const = 0; public: /// enable actually collecting keywords (needed for stopwords/wordforms loading) virtual void HitblockBegin () {} /// callback to let dictionary do hit block post-processing virtual void HitblockPatch ( CSphWordHit *, int ) {} /// resolve temporary hit block wide wordid (!) 
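// Usage sketch for the dictionary interface: create a CRC dictionary the same
// way sphFixupIndexSettings() does earlier in this dump, then hash one word.
// Illustrative only; default settings and the index name "example" are
// invented for the sketch.
static bool DictSketch ( ISphTokenizer * pTokenizer, CSphString & sError )
{
	CSphDictSettings tSettings;						// defaults: CRC mode, no morphology
	CSphDict * pDict = sphCreateDictionaryCRC ( tSettings, pTokenizer, sError, "example" );
	if ( !pDict )
		return false;

	BYTE sWord[] = "hello";							// GetWordID() may modify the word in place
	SphWordID_t uWordID = pDict->GetWordID ( sWord );
	delete pDict;								// CSphDict has a virtual dtor
	return uWordID!=0;							// 0 would mean a stopword
}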
back to keyword virtual const char * HitblockGetKeyword ( SphWordID_t ) { return NULL; } /// check current memory usage virtual int HitblockGetMemUse () { return 0; } /// hit block dismissed virtual void HitblockReset () {} public: /// begin creating dictionary file, setup any needed internal structures virtual void DictBegin ( int iTmpDictFD, int iDictFD, int iDictLimit ); /// add next keyword entry to final dict virtual void DictEntry ( SphWordID_t uWordID, BYTE * sKeyword, int iDocs, int iHits, SphOffset_t iDoclistOffset, SphOffset_t iDoclistLength ); /// flush last entry virtual void DictEndEntries ( SphOffset_t iDoclistOffset ); /// end indexing, store dictionary and checkpoints virtual bool DictEnd ( SphOffset_t * pCheckpointsPos, int * pCheckpointsCount, int iMemLimit, CSphString & sError ); /// check whether there were any errors during indexing virtual bool DictIsError () const; /// make clone virtual CSphDict * Clone () const { return NULL; } virtual bool HasState () const { return false; } }; /// CRC32/FNV64 dictionary factory CSphDict * sphCreateDictionaryCRC ( const CSphDictSettings & tSettings, ISphTokenizer * pTokenizer, CSphString & sError, const char * sIndex ); /// keyword-storing dictionary factory CSphDict * sphCreateDictionaryKeywords ( const CSphDictSettings & tSettings, ISphTokenizer * pTokenizer, CSphString & sError, const char * sIndex ); /// clear wordform cache void sphShutdownWordforms (); ///////////////////////////////////////////////////////////////////////////// // DATASOURCES ///////////////////////////////////////////////////////////////////////////// /// hit position storage type typedef DWORD Hitpos_t; /// empty hit value #define EMPTY_HIT 0 /// hit processing tools /// (because we now allow multiple actual formats within a single storage type!) template < int FIELD_BITS > class Hitman_c { protected: enum { POS_BITS = 31 - FIELD_BITS, FIELD_OFF = 32 - FIELD_BITS, FIELDEND_OFF = 31 - FIELD_BITS, FIELDEND_MASK = (1UL << POS_BITS), POS_MASK = (1UL << POS_BITS) - 1, }; public: static Hitpos_t Create ( int iField, int iPos ) { return ( iField << FIELD_OFF ) + ( iPos & POS_MASK ); } static Hitpos_t Create ( int iField, int iPos, bool bEnd ) { return ( iField << FIELD_OFF ) + ( ((int)bEnd) << FIELDEND_OFF ) + ( iPos & POS_MASK ); } static inline int GetField ( Hitpos_t uHitpos ) { return uHitpos >> FIELD_OFF; } static inline int GetPos ( Hitpos_t uHitpos ) { return uHitpos & POS_MASK; } static inline bool IsEnd ( Hitpos_t uHitpos ) { return ( uHitpos & FIELDEND_MASK )!=0; } static inline DWORD GetLCS ( Hitpos_t uHitpos ) { return uHitpos & ~FIELDEND_MASK; } static void AddPos ( Hitpos_t * pHitpos, int iAdd ) { // FIXME! add range checks (eg. so that 0:0-1 does not overflow) *pHitpos += iAdd; } static Hitpos_t CreateSum ( Hitpos_t uHitpos, int iAdd ) { // FIXME! add range checks (eg. so that 0:0-1 does not overflow) return ( uHitpos+iAdd ) & ~FIELDEND_MASK; } static void SetEndMarker ( Hitpos_t * pHitpos ) { *pHitpos |= FIELDEND_MASK; } }; /// hit info struct CSphWordHit { SphDocID_t m_iDocID; ///< document ID SphWordID_t m_iWordID; ///< word ID in current dictionary Hitpos_t m_iWordPos; ///< word position in current document }; /// attribute locator within the row struct CSphAttrLocator { // OPTIMIZE? 
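// Round-trip sketch for the hit position packing above: with FIELD_BITS=8 the
// top 8 bits carry the field id, the next bit flags "last token in field",
// and the low 23 bits carry the in-field position. Illustrative only; the
// Hitman_c<8> instantiation is chosen just for the example.
static bool HitposRoundTripSketch ()
{
	typedef Hitman_c<8> HITMAN;
	Hitpos_t uHit = HITMAN::Create ( 3, 117 );		// field 3, position 117
	HITMAN::SetEndMarker ( &uHit );				// flag it as the field's last token
	return HITMAN::GetField ( uHit )==3
		&& HITMAN::GetPos ( uHit )==117
		&& HITMAN::IsEnd ( uHit );
}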
try packing these int m_iBitOffset; int m_iBitCount; bool m_bDynamic; CSphAttrLocator () : m_iBitOffset ( -1 ) , m_iBitCount ( -1 ) , m_bDynamic ( false ) {} inline bool IsBitfield () const { return ( m_iBitCount> ROWITEM_SHIFT; if ( tLoc.m_iBitCount==ROWITEM_BITS ) return pRow[iItem]; if ( tLoc.m_iBitCount==2*ROWITEM_BITS ) // FIXME? write a generalized version, perhaps return SphAttr_t ( pRow[iItem] ) + ( SphAttr_t ( pRow[iItem+1] ) << ROWITEM_BITS ); int iShift = tLoc.m_iBitOffset & ( ( 1 << ROWITEM_SHIFT )-1 ); return ( pRow[iItem] >> iShift ) & ( ( 1UL << tLoc.m_iBitCount )-1 ); } /// setter inline void sphSetRowAttr ( CSphRowitem * pRow, const CSphAttrLocator & tLoc, SphAttr_t uValue ) { assert(pRow); int iItem = tLoc.m_iBitOffset >> ROWITEM_SHIFT; if ( tLoc.m_iBitCount==2*ROWITEM_BITS ) { // FIXME? write a generalized version, perhaps pRow[iItem] = CSphRowitem ( uValue & ( ( SphAttr_t(1) << ROWITEM_BITS )-1 ) ); pRow[iItem+1] = CSphRowitem ( uValue >> ROWITEM_BITS ); } else if ( tLoc.m_iBitCount==ROWITEM_BITS ) { pRow[iItem] = CSphRowitem ( uValue ); } else { int iShift = tLoc.m_iBitOffset & ( ( 1 << ROWITEM_SHIFT )-1); CSphRowitem uMask = ( ( 1UL << tLoc.m_iBitCount )-1 ) << iShift; pRow[iItem] &= ~uMask; pRow[iItem] |= ( uMask & ( uValue << iShift ) ); } } /// pack length into row storage (22 bits max) /// returns number of bytes used inline int sphPackStrlen ( BYTE * pRow, int iLen ) { assert ( iLen>=0 && iLen<0x400000 ); if ( iLen<0x80 ) { pRow[0] = BYTE(iLen); return 1; } else if ( iLen<0x4000 ) { pRow[0] = BYTE ( ( iLen>>8 ) | 0x80 ); pRow[1] = BYTE ( iLen ); return 2; } else { pRow[0] = BYTE ( ( iLen>>16 ) | 0xc0 ); pRow[1] = BYTE ( iLen>>8 ); pRow[2] = BYTE ( iLen ); return 3; } } /// unpack string attr from row storage (22 bits length max) /// returns unpacked length; stores pointer to string data if required inline int sphUnpackStr ( const BYTE * pRow, const BYTE ** ppStr ) { int v = *pRow++; if ( v & 0x80 ) { if ( v & 0x40 ) { v = ( int ( v & 0x3f )<<16 ) + ( int ( *pRow++ )<<8 ); v += ( *pRow++ ); // MUST be separate statement; cf. sequence point } else { v = ( int ( v & 0x3f )<<8 ) + ( *pRow++ ); } } if ( ppStr ) *ppStr = pRow; return v; } /// search query match (document info plus weight/tag) class CSphMatch { public: SphDocID_t m_iDocID; ///< document ID const CSphRowitem * m_pStatic; ///< static part (stored in and owned by the index) CSphRowitem * m_pDynamic; ///< dynamic part (computed per query; owned by the match) int m_iWeight; ///< my computed weight int m_iTag; ///< my index tag public: /// ctor. clears everything CSphMatch () : m_iDocID ( 0 ) , m_pStatic ( NULL ) , m_pDynamic ( NULL ) , m_iWeight ( 0 ) , m_iTag ( 0 ) { } /// copy ctor. just in case CSphMatch ( const CSphMatch & rhs ) : m_pStatic ( 0 ) , m_pDynamic ( NULL ) { *this = rhs; } /// dtor. 
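// Round-trip sketch for the packed string length helpers above: lengths under
// 0x80 take one byte, under 0x4000 two, anything up to 22 bits three, and the
// string bytes follow immediately after. Illustrative only (<cstring> assumed).
static bool PackedStrlenSketch ()
{
	BYTE dRow[16];
	const char * sPayload = "hello";
	int iLen = (int) strlen ( sPayload );

	int iPacked = sphPackStrlen ( dRow, iLen );		// one byte here, since iLen<0x80
	memcpy ( dRow+iPacked, sPayload, iLen );		// payload follows the packed length

	const BYTE * pStr = NULL;
	int iUnpacked = sphUnpackStr ( dRow, &pStr );		// pStr now points at the payload
	return iUnpacked==iLen && memcmp ( pStr, sPayload, iLen )==0;
}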
frees everything ~CSphMatch () { #ifndef NDEBUG if ( m_pDynamic ) m_pDynamic--; #endif SafeDeleteArray ( m_pDynamic ); } /// reset void Reset ( int iDynamic ) { // check that we're either initializing a new one, or NOT changing the current size assert ( iDynamic>=0 ); assert ( !m_pDynamic || iDynamic==(int)m_pDynamic[-1] ); m_iDocID = 0; if ( !m_pDynamic && iDynamic ) { #ifndef NDEBUG m_pDynamic = new CSphRowitem [ iDynamic+1 ]; *m_pDynamic++ = iDynamic; #else m_pDynamic = new CSphRowitem [ iDynamic ]; #endif } } public: /// assignment void Clone ( const CSphMatch & rhs, int iDynamic ) { // check that we're either initializing a new one, or NOT changing the current size assert ( iDynamic>=0 ); assert ( !m_pDynamic || iDynamic==(int)m_pDynamic[-1] ); m_iDocID = rhs.m_iDocID; m_iWeight = rhs.m_iWeight; m_pStatic = rhs.m_pStatic; m_iTag = rhs.m_iTag; if ( iDynamic ) { if ( !m_pDynamic ) { #ifndef NDEBUG m_pDynamic = new CSphRowitem [ iDynamic+1 ]; *m_pDynamic++ = iDynamic; #else m_pDynamic = new CSphRowitem [ iDynamic ]; #endif } assert ( rhs.m_pDynamic ); assert ( m_pDynamic[-1]==rhs.m_pDynamic[-1] ); // ensure we're not changing X to Y memcpy ( m_pDynamic, rhs.m_pDynamic, iDynamic*sizeof(CSphRowitem) ); } } public: /// integer getter SphAttr_t GetAttr ( const CSphAttrLocator & tLoc ) const { // m_pRowpart[tLoc.m_bDynamic] is 30% faster on MSVC 2005 // same time on gcc 4.x though, ~1 msec per 1M calls, so lets avoid the hassle for now if ( tLoc.m_iBitOffset>=0 ) return sphGetRowAttr ( tLoc.m_bDynamic ? m_pDynamic : m_pStatic, tLoc ); if ( tLoc.IsID() ) return m_iDocID; assert ( false && "Unknown negative-bitoffset locator" ); return 0; } /// float getter float GetAttrFloat ( const CSphAttrLocator & tLoc ) const { return sphDW2F ( (DWORD)sphGetRowAttr ( tLoc.m_bDynamic ? 
m_pDynamic : m_pStatic, tLoc ) ); }; /// integer setter void SetAttr ( const CSphAttrLocator & tLoc, SphAttr_t uValue ) { if ( tLoc.IsID() ) { // m_iDocID = uValue; return; } assert ( tLoc.m_bDynamic ); assert ( tLoc.GetMaxRowitem() < (int)m_pDynamic[-1] ); sphSetRowAttr ( m_pDynamic, tLoc, uValue ); } /// float setter void SetAttrFloat ( const CSphAttrLocator & tLoc, float fValue ) { assert ( tLoc.m_bDynamic ); assert ( tLoc.GetMaxRowitem() < (int)m_pDynamic[-1] ); sphSetRowAttr ( m_pDynamic, tLoc, sphF2DW ( fValue ) ); } /// MVA getter const DWORD * GetAttrMVA ( const CSphAttrLocator & tLoc, const DWORD * pPool ) const; private: /// "manually" prevent copying const CSphMatch & operator = ( const CSphMatch & ) { assert ( 0 && "internal error (CSphMatch::operator= called)" ); return *this; } }; /// specialized swapper inline void Swap ( CSphMatch & a, CSphMatch & b ) { Swap ( a.m_iDocID, b.m_iDocID ); Swap ( a.m_pStatic, b.m_pStatic ); Swap ( a.m_pDynamic, b.m_pDynamic ); Swap ( a.m_iWeight, b.m_iWeight ); Swap ( a.m_iTag, b.m_iTag ); } /// source statistics struct CSphSourceStats { int m_iTotalDocuments; ///< how much documents int64_t m_iTotalBytes; ///< how much bytes /// ctor CSphSourceStats () { Reset (); } /// reset void Reset () { m_iTotalDocuments = 0; m_iTotalBytes = 0; } }; /// known multi-valued attr sources enum ESphAttrSrc { SPH_ATTRSRC_NONE = 0, ///< not multi-valued SPH_ATTRSRC_FIELD = 1, ///< get attr values from text field SPH_ATTRSRC_QUERY = 2, ///< get attr values from SQL query SPH_ATTRSRC_RANGEDQUERY = 3 ///< get attr values from ranged SQL query }; /// wordpart processing type enum ESphWordpart { SPH_WORDPART_WHOLE = 0, ///< whole-word SPH_WORDPART_PREFIX = 1, ///< prefix SPH_WORDPART_INFIX = 2 ///< infix }; /// column unpack format enum ESphUnpackFormat { SPH_UNPACK_NONE = 0, SPH_UNPACK_ZLIB = 1, SPH_UNPACK_MYSQL_COMPRESS = 2 }; /// aggregate function to apply enum ESphAggrFunc { SPH_AGGR_NONE, SPH_AGGR_AVG, SPH_AGGR_MIN, SPH_AGGR_MAX, SPH_AGGR_SUM }; /// column evaluation stage enum ESphEvalStage { SPH_EVAL_STATIC = 0, ///< static data, no real evaluation needed SPH_EVAL_OVERRIDE, ///< static but possibly overridden SPH_EVAL_PREFILTER, ///< expression needed for full-text candidate matches filtering SPH_EVAL_PRESORT, ///< expression needed for final matches sorting SPH_EVAL_SORTER, ///< expression evaluated by sorter object SPH_EVAL_FINAL ///< expression not (!) 
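// Usage sketch for the match/locator machinery above: allocate one dynamic
// rowitem, describe a plain 32-bit attribute at bit offset 0, then store and
// read it back through the locator. Illustrative only.
static bool MatchAttrSketch ()
{
	CSphAttrLocator tLoc;
	tLoc.m_iBitOffset = 0;				// first rowitem of the row
	tLoc.m_iBitCount = ROWITEM_BITS;		// a full 32-bit item, no bitfield packing
	tLoc.m_bDynamic = true;				// lives in the per-match dynamic part

	CSphMatch tMatch;
	tMatch.Reset ( 1 );				// one dynamic rowitem
	tMatch.SetAttr ( tLoc, 12345 );
	return tMatch.GetAttr ( tLoc )==12345;
}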
used in filters/sorting; can be postponed until final result set cooking }; /// source column info struct CSphColumnInfo { CSphString m_sName; ///< column name ESphAttr m_eAttrType; ///< attribute type ESphWordpart m_eWordpart; ///< wordpart processing type bool m_bIndexed; ///< whether to index this column as fulltext field too int m_iIndex; ///< index into source result set (-1 for joined fields) CSphAttrLocator m_tLocator; ///< attribute locator in the row ESphAttrSrc m_eSrc; ///< attr source (for multi-valued attrs only) CSphString m_sQuery; ///< query to retrieve values (for multi-valued attrs only) CSphString m_sQueryRange; ///< query to retrieve range (for multi-valued attrs only) CSphRefcountedPtr m_pExpr; ///< evaluator for expression items ESphAggrFunc m_eAggrFunc; ///< aggregate function on top of expression (for GROUP BY) ESphEvalStage m_eStage; ///< column evaluation stage (who and how computes this column) bool m_bPayload; bool m_bFilename; ///< column is a file name /// handy ctor CSphColumnInfo ( const char * sName=NULL, ESphAttr eType=SPH_ATTR_NONE ) : m_sName ( sName ) , m_eAttrType ( eType ) , m_eWordpart ( SPH_WORDPART_WHOLE ) , m_bIndexed ( false ) , m_iIndex ( -1 ) , m_eSrc ( SPH_ATTRSRC_NONE ) , m_pExpr ( NULL ) , m_eAggrFunc ( SPH_AGGR_NONE ) , m_eStage ( SPH_EVAL_STATIC ) , m_bPayload ( false ) , m_bFilename ( false ) { m_sName.ToLower (); } /// equality comparison checks name, type, and locator bool operator == ( const CSphColumnInfo & rhs ) const { return m_sName==rhs.m_sName && m_eAttrType==rhs.m_eAttrType && m_tLocator.m_iBitCount==rhs.m_tLocator.m_iBitCount && m_tLocator.m_iBitOffset==rhs.m_tLocator.m_iBitOffset && m_tLocator.m_bDynamic==rhs.m_tLocator.m_bDynamic; } }; /// source schema struct CSphSchema { CSphString m_sName; ///< my human-readable name CSphVector m_dFields; ///< my fulltext-searchable fields int m_iBaseFields; ///< how much fields are base, how much are additional (only affects indexer) public: /// ctor explicit CSphSchema ( const char * sName="(nameless)" ) : m_sName ( sName ), m_iBaseFields ( 0 ), m_iStaticSize ( 0 ) {} /// get field index by name /// returns -1 if not found int GetFieldIndex ( const char * sName ) const; /// get attribute index by name /// returns -1 if not found int GetAttrIndex ( const char * sName ) const; /// checks if two schemas fully match (ie. fields names, attr names, types and locators are the same) /// describe mismatch (if any) to sError bool CompareTo ( const CSphSchema & rhs, CSphString & sError ) const; /// reset fields and attrs void Reset (); /// reset attrs only void ResetAttrs (); /// get row size (static+dynamic combined) int GetRowSize () const { return m_iStaticSize + m_dDynamicUsed.GetLength(); } /// get static row part size int GetStaticSize () const { return m_iStaticSize; } /// get dynamic row part size int GetDynamicSize () const { return m_dDynamicUsed.GetLength(); } /// get attrs count int GetAttrsCount () const { return m_dAttrs.GetLength(); } /// get attr by index const CSphColumnInfo & GetAttr ( int iIndex ) const { return m_dAttrs[iIndex]; } /// get attr by name const CSphColumnInfo * GetAttr ( const char * sName ) const; /// add attr void AddAttr ( const CSphColumnInfo & tAttr, bool bDynamic ); /// remove static attr (but do NOT recompute locations; for overrides) /// WARNING, THIS IS A HACK THAT WILL LIKELY BREAK THE SCHEMA, DO NOT USE THIS UNLESS ABSOLUTELY SURE! 
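// Usage sketch for the schema above: declare one attribute column and add it
// to the static (index-stored) row part. SPH_ATTR_INTEGER is assumed to be
// the plain integer member of ESphAttr, which is declared elsewhere in the
// headers; illustrative only.
static void SchemaSketch ( CSphSchema & tSchema )
{
	CSphColumnInfo tCol ( "group_id", SPH_ATTR_INTEGER );
	tSchema.AddAttr ( tCol, false );			// false = static row part
	// the column can later be looked up with tSchema.GetAttr ( "group_id" )
}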
void RemoveAttr ( int iIndex ); protected: CSphVector m_dAttrs; ///< all my attributes CSphVector m_dStaticUsed; ///< static row part map (amount of used bits in each rowitem) CSphVector m_dDynamicUsed; ///< dynamic row part map int m_iStaticSize; ///< static row size (can be different from m_dStaticUsed.GetLength() because of gaps) }; /// HTML stripper class CSphHTMLStripper { public: explicit CSphHTMLStripper ( bool bDefaultTags ); bool SetIndexedAttrs ( const char * sConfig, CSphString & sError ); bool SetRemovedElements ( const char * sConfig, CSphString & sError ); bool SetZones ( const char * sZones, CSphString & sError ); void EnableParagraphs (); void Strip ( BYTE * sData ) const; public: struct StripperTag_t { CSphString m_sTag; ///< tag name int m_iTagLen; ///< tag name length bool m_bInline; ///< whether this tag is inline bool m_bIndexAttrs; ///< whether to index attrs bool m_bRemove; ///< whether to remove contents bool m_bPara; ///< whether to mark a paragraph boundary bool m_bZone; ///< whether to mark a zone boundary bool m_bZonePrefix; ///< whether the zone name is a full name or a prefix CSphVector m_dAttrs; ///< attr names to index StripperTag_t () : m_iTagLen ( 0 ) , m_bInline ( false ) , m_bIndexAttrs ( false ) , m_bRemove ( false ) , m_bPara ( false ) , m_bZone ( false ) , m_bZonePrefix ( false ) {} inline bool operator < ( const StripperTag_t & rhs ) const { return strcmp ( m_sTag.cstr(), rhs.m_sTag.cstr() )<0; } }; /// finds appropriate tag and zone name ( tags zone name could be prefix only ) /// advances source to the end of the tag const BYTE * FindTag ( const BYTE * sSrc, const StripperTag_t ** ppTag, const BYTE ** ppZoneName, int * pZoneNameLen ) const; bool IsValidTagStart ( int iCh ) const; protected: static const int MAX_CHAR_INDEX = 28; ///< max valid char index (a-z, underscore, colon) CSphVector m_dTags; ///< known tags to index attrs and/or to remove contents int m_dStart[MAX_CHAR_INDEX]; ///< maps index of the first tag name char to start offset in m_dTags int m_dEnd[MAX_CHAR_INDEX]; ///< maps index of the first tag name char to end offset in m_dTags protected: int GetCharIndex ( int iCh ) const; ///< calcs index by raw char void UpdateTags (); ///< sorts tags, updates internal helpers }; /// indexing-related source settings /// NOTE, newly added fields should be synced with CSphSource::Setup() struct CSphSourceSettings { int m_iMinPrefixLen; ///< min indexable prefix (0 means don't index prefixes) int m_iMinInfixLen; ///< min indexable infix length (0 means don't index infixes) int m_iBoundaryStep; ///< additional boundary word position increment bool m_bIndexExactWords; ///< exact (non-stemmed) word indexing flag int m_iOvershortStep; ///< position step on overshort token (default is 1) int m_iStopwordStep; ///< position step on stopword token (default is 1) bool m_bIndexSP; ///< whether to index sentence and paragraph delimiters CSphVector m_dPrefixFields; ///< list of prefix fields CSphVector m_dInfixFields; ///< list of infix fields explicit CSphSourceSettings (); ESphWordpart GetWordpart ( const char * sField, bool bWordDict ); }; /// hit vector interface /// because specific position type might vary (dword, qword, etc) /// but we don't want to template and instantiate everything because of that class ISphHits { public: int Length () const { return m_dData.GetLength(); } const CSphWordHit * First () const { return m_dData.Begin(); } const CSphWordHit * Last () const { return &m_dData.Last(); } void AddHit ( SphDocID_t uDocid, SphWordID_t uWordid, 
Hitpos_t uPos ) { if ( uWordid ) { CSphWordHit & tHit = m_dData.Add(); tHit.m_iDocID = uDocid; tHit.m_iWordID = uWordid; tHit.m_iWordPos = uPos; } } public: CSphVector m_dData; }; struct SphRange_t { int m_iStart; int m_iLength; }; /// generic data source class CSphSource : public CSphSourceSettings { public: CSphMatch m_tDocInfo; ///< current document info CSphVector m_dStrAttrs; ///< current document string attrs CSphVector m_dMva; ///< MVA storage for mva64 public: /// ctor explicit CSphSource ( const char * sName ); /// dtor virtual ~CSphSource (); /// set dictionary void SetDict ( CSphDict * dict ); /// set HTML stripping mode /// /// sExtractAttrs defines what attributes to store. format is "img=alt; a=alt,title". /// empty string means to strip all tags; NULL means to disable stripping. /// /// sRemoveElements defines what elements to cleanup. format is "style, script" /// /// on failure, returns false and fills sError bool SetStripHTML ( const char * sExtractAttrs, const char * sRemoveElements, bool bDetectParagraphs, const char * sZones, CSphString & sError ); /// set tokenizer void SetTokenizer ( ISphTokenizer * pTokenizer ); /// set rows dump file virtual void SetDumpRows ( FILE * ) {} /// get stats virtual const CSphSourceStats & GetStats (); /// updates schema fields and attributes /// updates pInfo if it's empty; checks for match if it's not /// must be called after IterateStart(); will always fail otherwise virtual bool UpdateSchema ( CSphSchema * pInfo, CSphString & sError ); /// setup misc indexing settings (prefix/infix/exact-word indexing, position steps) void Setup ( const CSphSourceSettings & tSettings ); public: /// connect to the source (eg. to the database) /// connection settings are specific for each source type and as such /// are implemented in specific descendants virtual bool Connect ( CSphString & sError ) = 0; /// disconnect from the source virtual void Disconnect () = 0; /// check if there are any attributes configured /// note that there might be NO actual attributes in the case if configured /// ones do not match those actually returned by the source virtual bool HasAttrsConfigured () = 0; /// check if there are any joined fields virtual bool HasJoinedFields () { return false; } /// begin indexing this source /// to be implemented by descendants virtual bool IterateStart ( CSphString & sError ) = 0; /// get next document /// to be implemented by descendants /// returns false on error /// returns true and fills m_tDocInfo on success /// returns true and sets m_tDocInfo.m_iDocID to 0 on eof virtual bool IterateDocument ( CSphString & sError ) = 0; /// get next hits chunk for current document /// to be implemented by descendants /// returns NULL when there are no more hits /// returns pointer to hit vector (with at most MAX_SOURCE_HITS) on success /// fills out-string with error message on failure virtual ISphHits * IterateHits ( CSphString & sError ) = 0; /// get joined hits from joined fields (w/o attached docinfos) /// returns false and fills out-string with error message on failure /// returns true and sets m_tDocInfo.m_uDocID to 0 on eof /// returns true and sets m_tDocInfo.m_uDocID to non-0 on success virtual ISphHits * IterateJoinedHits ( CSphString & sError ); /// begin iterating values of out-of-document multi-valued attribute iAttr /// will fail if iAttr is out of range, or is not multi-valued /// can also fail if configured settings are invalid (eg. 
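// Sketch of the iteration protocol a consumer (e.g. the indexer) is expected to
// follow. Error handling is trimmed, and the loop shape below is an assumption
// based on the method comments, not a verbatim copy of indexer code; ProcessHits()
// is a hypothetical consumer.
//
//	CSphString sError;
//	if ( !pSource->Connect ( sError ) || !pSource->IterateStart ( sError ) )
//		{ /* report sError and bail */ }
//
//	while ( pSource->IterateDocument ( sError ) && pSource->m_tDocInfo.m_iDocID )
//	{
//		// pull hit chunks until the current document is exhausted
//		while ( ISphHits * pHits = pSource->IterateHits ( sError ) )
//			ProcessHits ( pHits );
//	}
//	pSource->Disconnect ();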
SQL query can not be executed) virtual bool IterateMultivaluedStart ( int iAttr, CSphString & sError ) = 0; /// get next multi-valued (id,attr-value) or (id, offset) for mva64 tuple to m_tDocInfo virtual bool IterateMultivaluedNext () = 0; /// begin iterating values of multi-valued attribute iAttr stored in a field /// will fail if iAttr is out of range, or is not multi-valued virtual SphRange_t IterateFieldMVAStart ( int iAttr ) = 0; /// begin iterating kill list virtual bool IterateKillListStart ( CSphString & sError ) = 0; /// get next kill list doc id virtual bool IterateKillListNext ( SphDocID_t & tDocId ) = 0; /// post-index callback /// gets called when the indexing is succesfully (!) over virtual void PostIndex () {} protected: ISphTokenizer * m_pTokenizer; ///< my tokenizer CSphDict * m_pDict; ///< my dict CSphSourceStats m_tStats; ///< my stats CSphSchema m_tSchema; ///< my schema bool m_bStripHTML; ///< whether to strip HTML CSphHTMLStripper * m_pStripper; ///< my HTML stripper int m_iNullIds; int m_iMaxIds; SphDocID_t VerifyID ( SphDocID_t uID ); }; /// how to handle IO errors in file fields enum ESphOnFileFieldError { FFE_IGNORE_FIELD, FFE_SKIP_DOCUMENT, FFE_FAIL_INDEX }; /// generic document source /// provides multi-field support and generic tokenizer class CSphSource_Document : public CSphSource { public: /// ctor explicit CSphSource_Document ( const char * sName ); /// dtor virtual ~CSphSource_Document () { SafeDeleteArray ( m_pReadFileBuffer ); } /// my generic tokenizer virtual bool IterateDocument ( CSphString & sError ); virtual ISphHits * IterateHits ( CSphString & sError ); void BuildHits ( CSphString & sError, bool bSkipEndMarker ); /// field data getter /// to be implemented by descendants virtual BYTE ** NextDocument ( CSphString & sError ) = 0; virtual void SetDumpRows ( FILE * fpDumpRows ) { m_fpDumpRows = fpDumpRows; } virtual SphRange_t IterateFieldMVAStart ( int iAttr ); protected: int ParseFieldMVA ( CSphVector < DWORD > & dMva, const char * szValue, bool bMva64 ); bool CheckFileField ( const BYTE * sField ); int LoadFileField ( BYTE ** ppField, CSphString & sError ); bool BuildZoneHits ( SphDocID_t uDocid, BYTE * sWord ); void BuildSubstringHits ( SphDocID_t uDocid, bool bPayload, ESphWordpart eWordpart, bool bSkipEndMarker ); void BuildRegularHits ( SphDocID_t uDocid, bool bPayload, bool bSkipEndMarker ); protected: ISphHits m_tHits; ///< my hitvector protected: char * m_pReadFileBuffer; int m_iReadFileBufferSize; ///< size of read buffer for the 'sql_file_field' fields int m_iMaxFileBufferSize; ///< max size of read buffer for the 'sql_file_field' fields ESphOnFileFieldError m_eOnFileFieldError; FILE * m_fpDumpRows; protected: struct CSphBuildHitsState_t { bool m_bProcessingHits; bool m_bDocumentDone; BYTE ** m_dFields; int m_iStartPos; Hitpos_t m_iHitPos; int m_iField; int m_iStartField; int m_iEndField; int m_iBuildLastStep; CSphBuildHitsState_t (); }; CSphBuildHitsState_t m_tState; int m_iMaxHits; }; struct CSphUnpackInfo { ESphUnpackFormat m_eFormat; CSphString m_sName; }; struct CSphJoinedField { CSphString m_sName; CSphString m_sQuery; CSphString m_sRanged; bool m_bPayload; }; /// generic SQL source params struct CSphSourceParams_SQL { // query params CSphString m_sQuery; CSphString m_sQueryRange; CSphString m_sQueryKilllist; int m_iRangeStep; int m_iRefRangeStep; bool m_bPrintQueries; CSphVector m_dQueryPre; CSphVector m_dQueryPost; CSphVector m_dQueryPostIndex; CSphVector m_dAttrs; CSphVector m_dFileFields; int m_iRangedThrottle; int 
m_iMaxFileBufferSize; ESphOnFileFieldError m_eOnFileFieldError; CSphVector m_dUnpack; DWORD m_uUnpackMemoryLimit; CSphVector m_dJoinedFields; // connection params CSphString m_sHost; CSphString m_sUser; CSphString m_sPass; CSphString m_sDB; int m_iPort; CSphSourceParams_SQL (); }; /// generic SQL source /// multi-field plain-text documents fetched from given query struct CSphSource_SQL : CSphSource_Document { explicit CSphSource_SQL ( const char * sName ); virtual ~CSphSource_SQL () {} bool Setup ( const CSphSourceParams_SQL & pParams ); virtual bool Connect ( CSphString & sError ); virtual void Disconnect (); virtual bool IterateStart ( CSphString & sError ); virtual BYTE ** NextDocument ( CSphString & sError ); virtual void PostIndex (); virtual bool HasAttrsConfigured () { return m_tParams.m_dAttrs.GetLength()!=0; } virtual bool HasJoinedFields () { return m_tSchema.m_iBaseFields!=m_tSchema.m_dFields.GetLength(); } virtual ISphHits * IterateJoinedHits ( CSphString & sError ); virtual bool IterateMultivaluedStart ( int iAttr, CSphString & sError ); virtual bool IterateMultivaluedNext (); virtual bool IterateKillListStart ( CSphString & sError ); virtual bool IterateKillListNext ( SphDocID_t & tDocId ); private: bool m_bSqlConnected; ///< am i connected? protected: CSphString m_sSqlDSN; BYTE * m_dFields [ SPH_MAX_FIELDS ]; ESphUnpackFormat m_dUnpack [ SPH_MAX_FIELDS ]; SphDocID_t m_uMinID; ///< grand min ID SphDocID_t m_uMaxID; ///< grand max ID SphDocID_t m_uCurrentID; ///< current min ID SphDocID_t m_uMaxFetchedID; ///< max actually fetched ID int m_iMultiAttr; ///< multi-valued attr being currently fetched int m_iSqlFields; ///< field count (for row dumper) CSphSourceParams_SQL m_tParams; bool m_bCanUnpack; bool m_bUnpackFailed; bool m_bUnpackOverflow; CSphVector m_dUnpackBuffers [ SPH_MAX_FIELDS ]; int m_iJoinedHitField; ///< currently pulling joined hits from this field (index into schema; -1 if not pulling) SphDocID_t m_iJoinedHitID; ///< last document id int m_iJoinedHitPos; ///< last hit position static const int MACRO_COUNT = 2; static const char * const MACRO_VALUES [ MACRO_COUNT ]; protected: bool SetupRanges ( const char * sRangeQuery, const char * sQuery, const char * sPrefix, CSphString & sError ); bool RunQueryStep ( const char * sQuery, CSphString & sError ); protected: virtual void SqlDismissResult () = 0; virtual bool SqlQuery ( const char * sQuery ) = 0; virtual bool SqlIsError () = 0; virtual const char * SqlError () = 0; virtual bool SqlConnect () = 0; virtual void SqlDisconnect () = 0; virtual int SqlNumFields() = 0; virtual bool SqlFetchRow() = 0; virtual DWORD SqlColumnLength ( int iIndex ) = 0; virtual const char * SqlColumn ( int iIndex ) = 0; virtual const char * SqlFieldName ( int iIndex ) = 0; const char * SqlUnpackColumn ( int iIndex, ESphUnpackFormat eFormat ); void ReportUnpackError ( int iIndex, int iError ); }; #if USE_MYSQL /// MySQL source params struct CSphSourceParams_MySQL : CSphSourceParams_SQL { CSphString m_sUsock; ///< UNIX socket int m_iFlags; ///< connection flags CSphString m_sSslKey; CSphString m_sSslCert; CSphString m_sSslCA; CSphSourceParams_MySQL (); ///< ctor. 
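// Sketch of what a concrete driver has to provide: CSphSource_SQL does the
// ranged-query, unpacking and joined-field work, and a backend only implements
// the thin SqlXXX() wrappers. This skeleton is illustrative only (a do-nothing
// fake backend), not one of the real drivers.
//
//	struct CSphSource_Fake : CSphSource_SQL
//	{
//		explicit CSphSource_Fake ( const char * sName ) : CSphSource_SQL ( sName ) {}
//
//		virtual bool			SqlConnect ()				{ return true; }
//		virtual void			SqlDisconnect ()			{}
//		virtual bool			SqlQuery ( const char * )	{ return true; }	// pretend every query succeeds
//		virtual bool			SqlIsError ()				{ return false; }
//		virtual const char *	SqlError ()					{ return ""; }
//		virtual void			SqlDismissResult ()			{}
//		virtual int				SqlNumFields ()				{ return 0; }
//		virtual bool			SqlFetchRow ()				{ return false; }	// no rows
//		virtual DWORD			SqlColumnLength ( int )		{ return 0; }
//		virtual const char *	SqlColumn ( int )			{ return NULL; }
//		virtual const char *	SqlFieldName ( int )		{ return NULL; }
//	};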
sets defaults }; /// MySQL source implementation /// multi-field plain-text documents fetched from given query struct CSphSource_MySQL : CSphSource_SQL { explicit CSphSource_MySQL ( const char * sName ); bool Setup ( const CSphSourceParams_MySQL & tParams ); protected: MYSQL_RES * m_pMysqlResult; MYSQL_FIELD * m_pMysqlFields; MYSQL_ROW m_tMysqlRow; MYSQL m_tMysqlDriver; unsigned long * m_pMysqlLengths; CSphString m_sMysqlUsock; int m_iMysqlConnectFlags; CSphString m_sSslKey; CSphString m_sSslCert; CSphString m_sSslCA; protected: virtual void SqlDismissResult (); virtual bool SqlQuery ( const char * sQuery ); virtual bool SqlIsError (); virtual const char * SqlError (); virtual bool SqlConnect (); virtual void SqlDisconnect (); virtual int SqlNumFields(); virtual bool SqlFetchRow(); virtual DWORD SqlColumnLength ( int iIndex ); virtual const char * SqlColumn ( int iIndex ); virtual const char * SqlFieldName ( int iIndex ); }; #endif // USE_MYSQL #if USE_PGSQL /// PgSQL specific source params struct CSphSourceParams_PgSQL : CSphSourceParams_SQL { CSphString m_sClientEncoding; CSphSourceParams_PgSQL (); }; /// PgSQL source implementation /// multi-field plain-text documents fetched from given query struct CSphSource_PgSQL : CSphSource_SQL { explicit CSphSource_PgSQL ( const char * sName ); bool Setup ( const CSphSourceParams_PgSQL & pParams ); virtual bool IterateStart ( CSphString & sError ); protected: PGresult * m_pPgResult; ///< postgresql execution restult context PGconn * m_tPgDriver; ///< postgresql connection context int m_iPgRows; ///< how much rows last step returned int m_iPgRow; ///< current row (0 based, as in PQgetvalue) CSphString m_sPgClientEncoding; CSphVector m_dIsColumnBool; protected: virtual void SqlDismissResult (); virtual bool SqlQuery ( const char * sQuery ); virtual bool SqlIsError (); virtual const char * SqlError (); virtual bool SqlConnect (); virtual void SqlDisconnect (); virtual int SqlNumFields(); virtual bool SqlFetchRow(); virtual DWORD SqlColumnLength ( int iIndex ); virtual const char * SqlColumn ( int iIndex ); virtual const char * SqlFieldName ( int iIndex ); }; #endif // USE_PGSQL #if USE_ODBC struct CSphSourceParams_ODBC: CSphSourceParams_SQL { CSphString m_sOdbcDSN; ///< ODBC DSN CSphString m_sColBuffers; ///< column buffer sizes (eg "col1=2M, col2=4M") bool m_bWinAuth; ///< auth type (MS SQL only) bool m_bUnicode; ///< whether to ask for Unicode or SBCS (C char) data (MS SQL only) CSphSourceParams_ODBC (); }; /// ODBC source implementation struct CSphSource_ODBC : CSphSource_SQL { explicit CSphSource_ODBC ( const char * sName ); bool Setup ( const CSphSourceParams_ODBC & tParams ); protected: virtual void SqlDismissResult (); virtual bool SqlQuery ( const char * sQuery ); virtual bool SqlIsError (); virtual const char * SqlError (); virtual bool SqlConnect (); virtual void SqlDisconnect (); virtual int SqlNumFields(); virtual bool SqlFetchRow(); virtual const char * SqlColumn ( int iIndex ); virtual const char * SqlFieldName ( int iIndex ); virtual DWORD SqlColumnLength ( int iIndex ); virtual void OdbcPostConnect () {} protected: CSphString m_sOdbcDSN; bool m_bWinAuth; bool m_bUnicode; SQLHENV m_hEnv; SQLHDBC m_hDBC; SQLHANDLE m_hStmt; int m_nResultCols; CSphString m_sError; struct QueryColumn_t { CSphVector m_dContents; CSphVector m_dRaw; CSphString m_sName; SQLLEN m_iInd; int m_iBufferSize; ///< size of m_dContents and m_dRaw buffers, in bytes bool m_bUnicode; ///< whether this column needs UCS-2 to UTF-8 translation bool m_bTruncated; ///< 
whether data was truncated when fetching rows }; static const int DEFAULT_COL_SIZE = 1024; ///< default column buffer size static const int VARCHAR_COL_SIZE = 1048576; ///< default column buffer size for VARCHAR columns static const int MAX_COL_SIZE = 8*1048576; ///< hard limit on column buffer size static const int WARN_ROW_SIZE = 32*1048576; ///< warning thresh (NOT a hard limit) on row buffer size CSphVector m_dColumns; SmallStringHash_T m_hColBuffers; void GetSqlError ( SQLSMALLINT iHandleType, SQLHANDLE hHandle ); }; /// MS SQL source implemenation struct CSphSource_MSSQL : public CSphSource_ODBC { explicit CSphSource_MSSQL ( const char * sName ) : CSphSource_ODBC ( sName ) {} virtual void OdbcPostConnect (); }; #endif // USE_ODBC /// XML pipe source implementation class CSphSource_XMLPipe : public CSphSource { public: CSphSource_XMLPipe ( BYTE * dInitialBuf, int iBufLen, const char * sName ); ///< ctor ~CSphSource_XMLPipe (); ///< dtor bool Setup ( FILE * pPipe, const char * sCommand ); ///< memorize the command virtual bool Connect ( CSphString & sError ); ///< run the command and open the pipe virtual void Disconnect (); ///< close the pipe virtual bool IterateStart ( CSphString & ) { return true; } ///< Connect() starts getting documents automatically, so this one is empty virtual bool IterateDocument ( CSphString & sError ); ///< parse incoming chunk and emit document virtual ISphHits * IterateHits ( CSphString & sError ); ///< parse incoming chunk and emit some hits virtual bool HasAttrsConfigured () { return true; } ///< xmlpipe always has some attrs for now virtual bool IterateMultivaluedStart ( int, CSphString & ) { return false; } ///< xmlpipe does not support multi-valued attrs for now virtual bool IterateMultivaluedNext () { return false; } ///< xmlpipe does not support multi-valued attrs for now virtual SphRange_t IterateFieldMVAStart ( int ); virtual bool IterateKillListStart ( CSphString & ) { return false; } virtual bool IterateKillListNext ( SphDocID_t & ) { return false; } private: enum Tag_e { TAG_DOCUMENT = 0, TAG_ID, TAG_GROUP, TAG_TITLE, TAG_BODY }; private: CSphString m_sCommand; ///< my command Tag_e m_eTag; ///< what's our current tag const char * m_pTag; ///< tag name int m_iTagLength; ///< tag name length int m_iBufferSize; ///< buffer size bool m_bEOF; ///< EOF encountered bool m_bWarned; ///< warned of buffer size already int m_iInitialBufLen; ///< initial buffer len FILE * m_pPipe; ///< incoming stream BYTE * m_sBuffer; ///< buffer BYTE * m_pBuffer; ///< current buffer pos BYTE * m_pBufferEnd; ///< buffered end pos int m_iWordPos; ///< current word position ISphHits m_tHits; ///< my hitvector bool m_bHitsReady; private: /// set current tag void SetTag ( const char * sTag ); /// read in some more data /// moves everything from current ptr (m_pBuffer) to the beginng /// reads in as much data as possible to the end /// returns false on EOF bool UpdateBuffer (); /// skips whitespace /// does buffer updates /// returns false on EOF bool SkipWhitespace (); /// check if what's at current pos is either opening/closing current tag (m_pTag) /// return false on failure bool CheckTag ( bool bOpen, CSphString & sError ); /// skips whitespace and opening/closing current tag (m_pTag) /// returns false on failure bool SkipTag ( bool bOpen, CSphString & sError ); /// scan for tag with integer value bool ScanInt ( const char * sTag, DWORD * pRes, CSphString & sError ); /// scan for tag with integer value bool ScanInt ( const char * sTag, uint64_t * pRes, CSphString & 
sError ); /// scan for tag with integer value bool ScanInt ( const char * sTag, int64_t * pRes, CSphString & sError ) { return ScanInt ( sTag, (uint64_t*)pRes, sError ); } /// scan for tag with string value bool ScanStr ( const char * sTag, char * pRes, int iMaxLength, CSphString & sError ); /// check for hits overun hits buffer void CheckHitsCount ( const char * sField ); }; #if USE_LIBEXPAT || USE_LIBXML class CSphConfigSection; CSphSource * sphCreateSourceXmlpipe2 ( const CSphConfigSection * pSource, FILE * pPipe, BYTE * dInitialBuf, int iBufLen, const char * szSourceName, int iMaxFieldLen ); #endif FILE * sphDetectXMLPipe ( const char * szCommand, BYTE * dBuf, int & iBufSize, int iMaxBufSize, bool & bUsePipe2 ); ///////////////////////////////////////////////////////////////////////////// // SEARCH QUERIES ///////////////////////////////////////////////////////////////////////////// /// search query sorting orders enum ESphSortOrder { SPH_SORT_RELEVANCE = 0, ///< sort by document relevance desc, then by date SPH_SORT_ATTR_DESC = 1, ///< sort by document date desc, then by relevance desc SPH_SORT_ATTR_ASC = 2, ///< sort by document date asc, then by relevance desc SPH_SORT_TIME_SEGMENTS = 3, ///< sort by time segments (hour/day/week/etc) desc, then by relevance desc SPH_SORT_EXTENDED = 4, ///< sort by SQL-like expression (eg. "@relevance DESC, price ASC, @id DESC") SPH_SORT_EXPR = 5, ///< sort by arithmetic expression in descending order (eg. "@id + max(@weight,1000)*boost + log(price)") SPH_SORT_TOTAL }; /// search query matching mode enum ESphMatchMode { SPH_MATCH_ALL = 0, ///< match all query words SPH_MATCH_ANY, ///< match any query word SPH_MATCH_PHRASE, ///< match this exact phrase SPH_MATCH_BOOLEAN, ///< match this boolean query SPH_MATCH_EXTENDED, ///< match this extended query SPH_MATCH_FULLSCAN, ///< match all document IDs w/o fulltext query, apply filters SPH_MATCH_EXTENDED2, ///< extended engine V2 (TEMPORARY, WILL BE REMOVED IN 0.9.8-RELEASE) SPH_MATCH_TOTAL }; /// search query relevance ranking mode enum ESphRankMode { SPH_RANK_PROXIMITY_BM25 = 0, ///< default mode, phrase proximity major factor and BM25 minor one (aka SPH03) SPH_RANK_BM25 = 1, ///< statistical mode, BM25 ranking only (faster but worse quality) SPH_RANK_NONE = 2, ///< no ranking, all matches get a weight of 1 SPH_RANK_WORDCOUNT = 3, ///< simple word-count weighting, rank is a weighted sum of per-field keyword occurence counts SPH_RANK_PROXIMITY = 4, ///< phrase proximity (aka SPH01) SPH_RANK_MATCHANY = 5, ///< emulate old match-any weighting (aka SPH02) SPH_RANK_FIELDMASK = 6, ///< sets bits where there were matches SPH_RANK_SPH04 = 7, ///< codename SPH04, phrase proximity + bm25 + head/exact boost SPH_RANK_EXPR = 8, ///< rank by user expression (eg. 
"sum(lcs*user_weight)*1000+bm25") SPH_RANK_TOTAL, SPH_RANK_DEFAULT = SPH_RANK_PROXIMITY_BM25 }; /// search query grouping mode enum ESphGroupBy { SPH_GROUPBY_DAY = 0, ///< group by day SPH_GROUPBY_WEEK = 1, ///< group by week SPH_GROUPBY_MONTH = 2, ///< group by month SPH_GROUPBY_YEAR = 3, ///< group by year SPH_GROUPBY_ATTR = 4, ///< group by attribute value SPH_GROUPBY_ATTRPAIR= 5 ///< group by sequential attrs pair (rendered redundant by 64bit attrs support; removed) }; /// search query filter types enum ESphFilter { SPH_FILTER_VALUES = 0, ///< filter by integer values set SPH_FILTER_RANGE = 1, ///< filter by integer range SPH_FILTER_FLOATRANGE = 2 ///< filter by float range }; /// search query filter class CSphFilterSettings { public: CSphString m_sAttrName; ///< filtered attribute name bool m_bExclude; ///< whether this is "include" or "exclude" filter (default is "include") ESphFilter m_eType; ///< filter type union { SphAttr_t m_uMinValue; ///< range min float m_fMinValue; ///< range min }; union { SphAttr_t m_uMaxValue; ///< range max float m_fMaxValue; ///< range max }; CSphVector m_dValues; ///< integer values set public: CSphFilterSettings (); void SetExternalValues ( const SphAttr_t * pValues, int nValues ); SphAttr_t GetValue ( int iIdx ) const { assert ( iIdx m_dValues; ///< id-value overrides }; /// query selection item struct CSphQueryItem { CSphString m_sExpr; ///< expression to compute CSphString m_sAlias; ///< alias to return ESphAggrFunc m_eAggrFunc; CSphQueryItem() : m_eAggrFunc ( SPH_AGGR_NONE ) {} }; /// known collations enum ESphCollation { SPH_COLLATION_LIBC_CI, SPH_COLLATION_LIBC_CS, SPH_COLLATION_UTF8_GENERAL_CI, SPH_COLLATION_BINARY, SPH_COLLATION_DEFAULT = SPH_COLLATION_LIBC_CI }; /// search query class CSphQuery { public: CSphString m_sIndexes; ///< indexes to search CSphString m_sQuery; ///< cooked query string for the engine (possibly transformed during legacy matching modes fixup) CSphString m_sRawQuery; ///< raw query string from the client for searchd log, agents, etc int m_iOffset; ///< offset into result set (as X in MySQL LIMIT X,Y clause) int m_iLimit; ///< limit into result set (as Y in MySQL LIMIT X,Y clause) DWORD * m_pWeights; ///< user-supplied per-field weights. may be NULL. default is NULL. NOT OWNED, WILL NOT BE FREED in dtor. int m_iWeights; ///< number of user-supplied weights. missing fields will be assigned weight 1. default is 0 ESphMatchMode m_eMode; ///< match mode. default is "match all" ESphRankMode m_eRanker; ///< ranking mode, default is proximity+BM25 CSphString m_sRankerExpr; ///< ranking expression for SPH_RANK_EXPR ESphSortOrder m_eSort; ///< sort mode CSphString m_sSortBy; ///< attribute to sort by int m_iMaxMatches; ///< max matches to retrieve, default is 1000. 
more matches use more memory and CPU time to hold and sort them CSphVector m_dFilters; ///< filters CSphString m_sGroupBy; ///< group-by attribute name ESphGroupBy m_eGroupFunc; ///< function to pre-process group-by attribute value with CSphString m_sGroupSortBy; ///< sorting clause for groups in group-by mode CSphString m_sGroupDistinct; ///< count distinct values for this attribute int m_iCutoff; ///< matches count threshold to stop searching at (default is 0; means to search until all matches are found) int m_iRetryCount; ///< retry count, for distributed queries int m_iRetryDelay; ///< retry delay, for distributed queries bool m_bGeoAnchor; ///< do we have an anchor CSphString m_sGeoLatAttr; ///< latitude attr name CSphString m_sGeoLongAttr; ///< longitude attr name float m_fGeoLatitude; ///< anchor latitude float m_fGeoLongitude; ///< anchor longitude CSphVector m_dIndexWeights; ///< per-index weights CSphVector m_dFieldWeights; ///< per-field weights DWORD m_uMaxQueryMsec; ///< max local index search time, in milliseconds (default is 0; means no limit) CSphString m_sComment; ///< comment to pass verbatim in the log file CSphVector m_dOverrides; ///< per-query attribute value overrides CSphString m_sSelect; ///< select-list (attributes and/or expressions) CSphString m_sOrderBy; ///< order-by clause bool m_bReverseScan; ///< perform scan in reverse order int m_iSQLSelectStart; ///< SQL parser helper int m_iSQLSelectEnd; ///< SQL parser helper public: int m_iOldVersion; ///< version, to fixup old queries int m_iOldGroups; ///< 0.9.6 group filter values count DWORD * m_pOldGroups; ///< 0.9.6 group filter values DWORD m_iOldMinTS; ///< 0.9.6 min timestamp DWORD m_iOldMaxTS; ///< 0.9.6 max timestamp DWORD m_iOldMinGID; ///< 0.9.6 min group id DWORD m_iOldMaxGID; ///< 0.9.6 max group id public: CSphVector m_dItems; ///< parsed select-list ESphCollation m_eCollation; ///< ORDER BY collation bool m_bAgent; ///< agent mode (may need extra cols on output) public: CSphQuery (); ///< ctor, fills defaults ~CSphQuery (); ///< dtor, frees owned stuff /// return index weight from m_dIndexWeights; or 1 by default int GetIndexWeight ( const char * sName ) const; /// parse select list string into items bool ParseSelectList ( CSphString & sError ); }; /// search query meta-info class CSphQueryResultMeta { public: int m_iQueryTime; ///< query time, milliseconds int64_t m_iCpuTime; ///< user time, microseconds int m_iMultiplier; ///< multi-query multiplier, -1 to indicate error struct WordStat_t { int64_t m_iDocs; ///< document count for this term int64_t m_iHits; ///< hit count for this term bool m_bExpanded; ///< is this term from query itself or was expanded WordStat_t() : m_iDocs ( 0 ) , m_iHits ( 0 ) , m_bExpanded ( false ) {} }; SmallStringHash_T m_hWordStats; ///< hash of i-th search term (normalized word form) int m_iMatches; ///< total matches returned (upto MAX_MATCHES) int64_t m_iTotalMatches; ///< total matches found (unlimited) CSphString m_sError; ///< error message CSphString m_sWarning; ///< warning message CSphQueryResultMeta (); ///< ctor virtual ~CSphQueryResultMeta () {} ///< dtor void AddStat ( const CSphString & sWord, int64_t iDocs, int64_t iHits, bool bExpanded ); CSphQueryResultMeta ( const CSphQueryResultMeta & tMeta ); ///< copy ctor CSphQueryResultMeta & operator= ( const CSphQueryResultMeta & tMeta ); ///< copy }; /// search query result (meta-info plus actual matches) class CSphQueryResult : public CSphQueryResultMeta { public: CSphSwapVector m_dMatches; ///< top matching 
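// Usage sketch: filling out a query by hand, the way a client protocol handler
// might. Values are arbitrary; everything not set here keeps the defaults from
// the CSphQuery ctor.
//
//	CSphQuery tQuery;
//	tQuery.m_sRawQuery = tQuery.m_sQuery = "hello world";
//	tQuery.m_eMode = SPH_MATCH_EXTENDED2;
//	tQuery.m_eRanker = SPH_RANK_PROXIMITY_BM25;
//	tQuery.m_eSort = SPH_SORT_EXTENDED;
//	tQuery.m_sSortBy = "@relevance DESC, price ASC, @id DESC";
//	tQuery.m_iOffset = 0;
//	tQuery.m_iLimit = 20;
//	tQuery.m_sGroupBy = "group_id";
//	tQuery.m_eGroupFunc = SPH_GROUPBY_ATTR;
//	tQuery.m_sGroupSortBy = "@weight DESC";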
documents, no more than MAX_MATCHES CSphSchema m_tSchema; ///< result schema const DWORD * m_pMva; ///< pointer to MVA storage const BYTE * m_pStrings; ///< pointer to strings storage CSphVector m_dStorage2Free; /// < aggregated external storage from rt indexes int m_iOffset; ///< requested offset into matches array int m_iCount; ///< count which will be actually served (computed from total, offset and limit) int m_iSuccesses; public: CSphQueryResult (); ///< ctor virtual ~CSphQueryResult (); ///< dtor, which releases all owned stuff void LeakStorages ( CSphQueryResult & tDst ); }; ///////////////////////////////////////////////////////////////////////////// // ATTRIBUTE UPDATE QUERY ///////////////////////////////////////////////////////////////////////////// struct CSphAttrUpdate { CSphVector m_dAttrs; ///< update schema (ie. what attrs to update) CSphVector m_dPool; ///< update values pool CSphVector m_dDocids; ///< document IDs vector CSphVector m_dRows; ///< document attribute's vector, used instead of m_dDocids. CSphVector m_dRowOffset; ///< document row offsets in the pool (1 per doc, i.e. the length is the same as of m_dDocids) }; ///////////////////////////////////////////////////////////////////////////// // FULLTEXT INDICES ///////////////////////////////////////////////////////////////////////////// /// progress info struct CSphIndexProgress { enum Phase_e { PHASE_COLLECT, ///< document collection phase PHASE_SORT, ///< final sorting phase PHASE_COLLECT_MVA, ///< multi-valued attributes collection phase PHASE_SORT_MVA, ///< multi-valued attributes collection phase PHASE_MERGE, ///< index merging PHASE_PREREAD, ///< searchd startup, prereading data PHASE_PRECOMPUTE ///< searchd startup, indexing attributes }; Phase_e m_ePhase; ///< current indexing phase int m_iDocuments; ///< PHASE_COLLECT: documents collected so far int64_t m_iBytes; ///< PHASE_COLLECT: bytes collected so far; ///< PHASE_PREREAD: bytes read so far; int64_t m_iBytesTotal; ///< PHASE_PREREAD: total bytes to read; int64_t m_iAttrs; ///< PHASE_COLLECT_MVA, PHASE_SORT_MVA: attrs processed so far int64_t m_iAttrsTotal; ///< PHASE_SORT_MVA: attrs total SphOffset_t m_iHits; ///< PHASE_SORT: hits sorted so far SphOffset_t m_iHitsTotal; ///< PHASE_SORT: hits total int m_iWords; ///< PHASE_MERGE: words merged so far int m_iDone; ///< generic percent, 0..1000 range CSphIndexProgress () : m_ePhase ( PHASE_COLLECT ) , m_iDocuments ( 0 ) , m_iBytes ( 0 ) , m_iBytesTotal ( 0 ) , m_iAttrs ( 0 ) , m_iAttrsTotal ( 0 ) , m_iHits ( 0 ) , m_iHitsTotal ( 0 ) , m_iWords ( 0 ) {} /// builds a message to print /// WARNING, STATIC BUFFER, NON-REENTRANT const char * BuildMessage() const; }; /// sorting key part types enum ESphSortKeyPart { SPH_KEYPART_ID, SPH_KEYPART_WEIGHT, SPH_KEYPART_INT, SPH_KEYPART_FLOAT, SPH_KEYPART_STRING }; typedef int ( *SphStringCmp_fn )( const BYTE * pStr1, const BYTE * pStr2 ); /// match comparator state struct CSphMatchComparatorState { static const int MAX_ATTRS = 5; ESphSortKeyPart m_eKeypart[MAX_ATTRS]; ///< sort-by key part type CSphAttrLocator m_tLocator[MAX_ATTRS]; ///< sort-by attr locator DWORD m_uAttrDesc; ///< sort order mask (if i-th bit is set, i-th attr order is DESC) DWORD m_iNow; ///< timestamp (for timesegments sorting mode) SphStringCmp_fn m_fnStrCmp; ///< string comparator /// create default empty state CSphMatchComparatorState () : m_uAttrDesc ( 0 ) , m_iNow ( 0 ) , m_fnStrCmp ( NULL ) { for ( int i=0; i=0 && iAttr & dSources, int iMemoryLimit, int iWriteBuffer ) = 0; /// build index by 
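// Layout sketch for an update batch: one entry in m_dAttrs per updated column
// (the update schema), one docid plus one row offset per document, and the
// per-document values packed back to back in m_dPool. For example, updating two
// integer attrs on two documents could look like this (values are made up):
//
//	upd.m_dDocids:    [ 10, 11 ]
//	upd.m_dRowOffset: [ 0, 2 ]				// doc 10's values start at pool[0], doc 11's at pool[2]
//	upd.m_dPool:      [ 5, 100, 6, 200 ]	// (attr1, attr2) for doc 10, then for doc 11
//
// The batch is then handed to CSphIndex::UpdateAttributes() (declared below),
// which returns how many rows it actually found and touched.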
mering current index with given index virtual bool Merge ( CSphIndex * pSource, CSphVector & dFilters, bool bMergeKillLists ) = 0; public: /// check all data files, preload schema, and preallocate enough shared RAM to load memory-cached data virtual bool Prealloc ( bool bMlock, bool bStripPath, CSphString & sWarning ) = 0; /// deallocate all previously preallocated shared data virtual void Dealloc () = 0; /// precache everything which needs to be precached // WARNING, WILL BE CALLED FROM DIFFERENT PROCESS, MUST ONLY MODIFY SHARED MEMORY virtual bool Preread () = 0; /// set new index base path virtual void SetBase ( const char * sNewBase ) = 0; /// set new index base path, and physically rename index files too virtual bool Rename ( const char * sNewBase ) = 0; /// obtain exclusive lock on this index virtual bool Lock () = 0; /// dismiss exclusive lock and unlink lock file virtual void Unlock () = 0; /// relock shared RAM (only on daemonization) virtual bool Mlock () = 0; /// called when index is loaded and prepared to work virtual void PostSetup() = 0; public: virtual bool EarlyReject ( CSphQueryContext * pCtx, CSphMatch & tMatch ) const = 0; virtual const CSphSourceStats & GetStats () const = 0; void SetCacheSize ( int iMaxCachedDocs, int iMaxCachedHits ); virtual bool MultiQuery ( const CSphQuery * pQuery, CSphQueryResult * pResult, int iSorters, ISphMatchSorter ** ppSorters, const CSphVector * pExtraFilters, int iTag=0 ) const = 0; virtual bool MultiQueryEx ( int iQueries, const CSphQuery * ppQueries, CSphQueryResult ** ppResults, ISphMatchSorter ** ppSorters, const CSphVector * pExtraFilters, int iTag=0 ) const = 0; virtual bool GetKeywords ( CSphVector & dKeywords, const char * szQuery, bool bGetStats, CSphString & sError ) const = 0; public: /// updates memory-cached attributes in real time /// returns non-negative amount of actually found and updated records on success /// on failure, -1 is returned and GetLastError() contains error message virtual int UpdateAttributes ( const CSphAttrUpdate & tUpd, int iIndex, CSphString & sError ) = 0; /// saves memory-cached attributes, if there were any updates to them /// on failure, false is returned and GetLastError() contains error message virtual bool SaveAttributes () = 0; virtual DWORD GetAttributeStatus () const = 0; public: /// internal debugging hook, DO NOT USE virtual void DebugDumpHeader ( FILE * fp, const char * sHeaderName, bool bConfig ) = 0; /// internal debugging hook, DO NOT USE virtual void DebugDumpDocids ( FILE * fp ) = 0; /// internal debugging hook, DO NOT USE virtual void DebugDumpHitlist ( FILE * fp, const char * sKeyword, bool bID ) = 0; /// internal debugging hook, DO NOT USE virtual int DebugCheck ( FILE * fp ) = 0; /// getter for name const char * GetName () { return m_sIndexName.cstr(); } public: int64_t m_iTID; bool m_bExpandKeywords; ///< enable automatic query-time keyword expansion (to "( word | =word | *word* )") int m_iExpansionLimit; protected: ProgressCallback_t * m_pProgress; CSphSchema m_tSchema; CSphString m_sLastError; CSphString m_sLastWarning; bool m_bInplaceSettings; int m_iHitGap; int m_iDocinfoGap; float m_fRelocFactor; float m_fWriteFactor; bool m_bKeepFilesOpen; ///< keep files open to avoid race on seamless rotation bool m_bPreloadWordlist; ///< preload wordlists or keep them on disk bool m_bStripperInited; ///< was stripper initialized (old index version (<9) handling) bool m_bEnableStar; ///< enable star-syntax public: bool m_bId32to64; ///< did we convert id32 to id64 on startup protected: 
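// Lifecycle sketch for a plain disk index as the daemon drives it. Error paths
// and warning handling are trimmed, the name and path are made up, and the
// searching part is only hinted at (see sphCreateQueue()/sphFlattenQueue() below
// for the queue-based flow).
//
//	CSphString sWarning;
//	CSphIndex * pIndex = sphCreateIndexPhrase ( "test1", "/var/data/test1" );
//	if ( pIndex->Prealloc ( false, false, sWarning )	// check files, preload schema, reserve shared RAM
//		&& pIndex->Preread ()							// pull in memory-cached data
//		&& pIndex->Lock () )							// obtain the exclusive lock file
//	{
//		// ... create sorters with sphCreateQueue(), run pIndex->MultiQuery(), flatten results ...
//		pIndex->Unlock ();
//	}
//	SafeDelete ( pIndex );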
CSphIndexSettings m_tSettings; ISphTokenizer * m_pTokenizer; CSphDict * m_pDict; int m_iMaxCachedDocs; int m_iMaxCachedHits; CSphString m_sIndexName; }; // update attributes with index pointer attached struct CSphAttrUpdateEx { const CSphAttrUpdate* m_pUpdate; ///< the unchangeable update pool CSphIndex * m_pIndex; ///< the index on which the update should happen CSphString * m_pError; ///< the error, if any int m_iAffected; ///< num of updated rows. CSphAttrUpdateEx() : m_pUpdate ( NULL ) , m_pIndex ( NULL ) , m_pError ( NULL ) , m_iAffected ( 0 ) {} }; ///////////////////////////////////////////////////////////////////////////// /// create phrase fulltext index implemntation CSphIndex * sphCreateIndexPhrase ( const char* szIndexName, const char * sFilename ); /// tell libsphinx to be quiet or not (logs and loglevels to come later) void sphSetQuiet ( bool bQuiet ); /// creates proper queue for given query /// may return NULL on error; in this case, error message is placed in sError /// if the pUpdate is given, creates the updater's queue and perform the index update /// instead of searching ISphMatchSorter * sphCreateQueue ( const CSphQuery * pQuery, const CSphSchema & tSchema, CSphString & sError, bool bComputeItems=true, CSphSchema * pExtra=NULL, CSphAttrUpdateEx* pUpdate=NULL ); /// convert queue to sorted array, and add its entries to result's matches array void sphFlattenQueue ( ISphMatchSorter * pQueue, CSphQueryResult * pResult, int iTag ); /// setup per-keyword read buffer sizes void sphSetReadBuffers ( int iReadBuffer, int iReadUnhinted ); /// check query for expressions bool sphHasExpressions ( const CSphQuery & tQuery, const CSphSchema & tSchema ); /// initialize collation tables void sphCollationInit (); ///////////////////////////////////////////////////////////////////////////// // workaround to suppress C4511/C4512 warnings (copy ctor and assignment operator) in VS 2003 #if _MSC_VER>=1300 && _MSC_VER<1400 #pragma warning(disable:4511) #pragma warning(disable:4512) #endif // suppress C4201 (nameless struct/union is a nonstandard extension) because even min-spec gcc 3.4.6 works ok #if defined(_MSC_VER) #pragma warning(disable:4201) #endif #endif // _sphinx_ // // $Id: sphinx.h 3131 2012-03-01 09:04:19Z deogar $ // sphinx-2.0.4-release/src/sphinxexcerpt.cpp0000644000176700017710000025777711711621267020177 0ustar deogardeogar// // $Id: sphinxexcerpt.cpp 3087 2012-01-30 23:07:35Z shodan $ // // // Copyright (c) 2001-2012, Andrew Aksyonoff // Copyright (c) 2008-2012, Sphinx Technologies Inc // All rights reserved // // This program is free software; you can redistribute it and/or modify // it under the terms of the GNU General Public License. 
You should have // received a copy of the GPL license along with this program; if you // did not, you can find it at http://www.gnu.org/ // #include "sphinx.h" #include "sphinxexcerpt.h" #include "sphinxutils.h" #include "sphinxsearch.h" #include "sphinxquery.h" #include "sphinxint.h" #include ///////////////////////////////////////////////////////////////////////////// // THE EXCERPTS GENERATOR ///////////////////////////////////////////////////////////////////////////// static const int MAX_HIGHLIGHT_WORDS = 256; #define UINT32_MASK 0xffffffffUL #define UINT16_MASK 0xffff typedef uint64_t ZonePacked_t; class ExcerptGen_c { friend class SnippetsQwordSetup; public: explicit ExcerptGen_c ( bool bUtf8 ); ~ExcerptGen_c () {} char * BuildExcerpt ( const ExcerptQuery_t & tQuery ); void TokenizeQuery ( const ExcerptQuery_t &, CSphDict * pDict, ISphTokenizer * pTokenizer, const CSphIndexSettings & tSettings ); void TokenizeDocument ( char * pData, int iDataLen, CSphDict * pDict, ISphTokenizer * pTokenizer, bool bFillMasks, const ExcerptQuery_t & q, const CSphIndexSettings & tSettings ); void SetMarker ( CSphHitMarker * pMarker ) { m_pMarker = pMarker; } void SetExactPhrase ( const ExcerptQuery_t & tQuery ); public: enum Token_e { TOK_NONE = 0, ///< unspecified type, also used as the end marker TOK_WORD, ///< just a word TOK_SPACE, ///< whitespace chars seq TOK_BREAK, ///< non-word chars seq which delimit a phrase part or boundary TOK_SPZ ///< SENTENCE, PARAGRAPH, ZONE }; struct Token_t { Token_e m_eType; ///< token type int m_iStart; ///< token start (index in codepoints array) int m_iLengthCP; ///< token length (in codepoints) int m_iLengthBytes; ///< token length (in bytes) int m_iWeight; ///< token weight DWORD m_uWords; ///< matching query words mask SphWordID_t m_iWordID; ///< token word ID from dictionary SphWordID_t m_iBlendID; ///< blended word ID (eg. "T-mobile" would not tokenize itself, but still shadow "T" and "mobile") DWORD m_uPosition; ///< hit position in document void Reset () { m_eType = TOK_NONE; m_iStart = 0; m_iLengthCP = 0; m_iLengthBytes = 0; m_iWeight = 0; m_uWords = 0; m_iWordID = 0; m_iBlendID = 0; m_uPosition = 0; } }; struct TokenSpan_t { int m_iStart; ///< start index, inclusive int m_iEnd; ///< end index, inclusive int m_iWords; ///< number of TOK_WORDS tokens int m_iQwords; ///< number of words matching query void Reset () { m_iStart = -1; m_iEnd = -2; m_iWords = 0; m_iQwords = 0; } void Add ( int i, bool bQword ) { assert ( m_iStart & GetZones () const { return m_dZones; } const SmallStringHash_T & GetZonesName () const { return m_hZones; } protected: CSphVector m_dTokens; ///< source text tokens CSphVector m_dWords; ///< query words tokens int m_iDocumentWords; int m_iPassageId; CSphString m_sBuffer; // FIXME!!! REMOVE!!! ME!!! 
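// Call-order sketch for this generator. The snippets entry point is assumed to
// wire it up with the index dictionary and tokenizer; the document buffer
// (sDocText/iDocLen) and the ExcerptQuery_t fields are assumed to be prepared by
// the caller.
//
//	ExcerptGen_c tGenerator ( bUtf8 );
//	tGenerator.TokenizeQuery ( tQuery, pDict, pTokenizer, tIndexSettings );	// query keywords + star flags
//	tGenerator.SetExactPhrase ( tQuery );
//	tGenerator.TokenizeDocument ( sDocText, iDocLen, pDict, pTokenizer,
//		true, tQuery, tIndexSettings );											// document tokens + query-word masks
//	char * sResult = tGenerator.BuildExcerpt ( tQuery );						// heap-allocated, caller frees
//	SafeDeleteArray ( sResult );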
CSphVector m_dResult; ///< result holder int m_iResultLen; ///< result codepoints count CSphVector m_dPassages; ///< extracted passages bool m_bExactPhrase; DWORD m_uFoundWords; ///< found words mask int m_iQwordCount; int m_iLastWord; CSphHitMarker * m_pMarker; CSphVector m_dKeywordsBuffer; CSphVector m_dKeywords; CSphVector m_dZones; ///< zones for current document SmallStringHash_T m_hZones; ///< zones names CSphVector m_dZonePos; ///< zones positions (in characters) CSphVector m_dZoneParent; ///< zones parent type bool m_bUtf8; int m_iTotalCP; protected: void CalcPassageWeight ( Passage_t & tPass, const TokenSpan_t & tSpan, int iMaxWords, int iWordCountCoeff ); void UpdateGaps ( Passage_t & tPass, const TokenSpan_t & tSpan, int iMaxWords ); bool ExtractPassages ( const ExcerptQuery_t & q ); bool ExtractPhrases ( const ExcerptQuery_t & q ); void HighlightPhrase ( const ExcerptQuery_t & q, int iTok, int iEnd ); void HighlightAll ( const ExcerptQuery_t & q ); void HighlightStart ( const ExcerptQuery_t & q ); bool HighlightBestPassages ( const ExcerptQuery_t & q ); void ResultEmit ( const char * sLine, bool bHasMacro=false, int iPassageId=0, const char * sPostPassage=NULL ); void ResultEmit ( const Token_t & sTok ); void AddJunk ( int iStart, int iLength, int iBoundary ); void AddBoundary (); void MarkHits (); bool SetupWindow ( TokenSpan_t & tSpan, Passage_t & tPass, int iFrom, int iCpLimit, const ExcerptQuery_t & q ); bool FlushPassage ( const Passage_t & tPass, int iLCSThresh ); }; // find string sFind in first iLimit characters of sBuffer static BYTE * FindString ( BYTE * sBuffer, BYTE * sFind, int iLimit ) { assert ( iLimit > 0 ); assert ( sBuffer ); assert ( sFind ); iLimit++; do { while ( *sBuffer!=*sFind ) if ( !*++sBuffer || !--iLimit ) return NULL; int iSubLimit = iLimit; BYTE * sSubFind = sFind; BYTE * sSubBuffer = sBuffer; while ( *sSubFind && *sSubBuffer && *sSubFind==*sSubBuffer++ ) { sSubFind++; if ( !--iSubLimit ) return NULL; } if ( !*sSubFind ) return sBuffer; } while ( *++sBuffer ); return NULL; } /// hitman used here in snippets typedef Hitman_c<8> HITMAN; /// snippets query words for different cases class ISnippetsQword : public ISphQword { public: CSphString * m_sBuffer; CSphVector * m_dTokens; ISphTokenizer * m_pTokenizer; DWORD * m_uFoundWords; // word information, filled during query word setup int m_iWordLength; int m_iLastIndex; DWORD m_uWordMask; // iterator state CSphMatch m_tMatch; int m_iToken; int m_iChunk; typedef ExcerptGen_c::Token_t Token_t; ISnippetsQword() : m_iToken ( 0 ) , m_iChunk ( 0 ) {} virtual void SeekHitlist ( SphOffset_t ) {} virtual const CSphMatch & GetNextDoc ( DWORD * ) { m_dQwordFields.Set(); if ( ( m_iChunk++ )==0 ) { if ( GetNextHit()!=EMPTY_HIT ) { m_tMatch.m_iDocID = 1; m_iToken--; } else m_tMatch.m_iDocID = 0; } else m_tMatch.m_iDocID = 0; return m_tMatch; } virtual const char * OnSetup ( CSphDict * ) { m_iWordLength = strlen ( m_sDictWord.cstr() ); return m_sDictWord.cstr(); } }; /// simple keyword match on id struct SnippetsQword_Exact_c: public ISnippetsQword { virtual Hitpos_t GetNextHit () { while ( m_iToken < m_dTokens->GetLength() ) { Token_t & tToken = (*m_dTokens)[m_iToken++]; if ( !( tToken.m_eType==ExcerptGen_c::TOK_WORD || tToken.m_eType==ExcerptGen_c::TOK_SPZ ) ) continue; if ( tToken.m_iWordID==m_iWordID || tToken.m_iBlendID==m_iWordID ) { tToken.m_uWords |= m_uWordMask; *m_uFoundWords |= m_uWordMask; return HITMAN::Create ( 0, tToken.m_uPosition, ( m_iToken-1 )==m_iLastIndex ); } } return EMPTY_HIT; } }; /// partial 
matches template < typename COMPARE > struct SnippetsQword_c: public ISnippetsQword { virtual Hitpos_t GetNextHit () { while ( m_iToken < m_dTokens->GetLength() ) { Token_t & tToken = (*m_dTokens)[m_iToken++]; if ( tToken.m_eType!=ExcerptGen_c::TOK_WORD ) continue; m_pTokenizer->SetBuffer ( (BYTE *) &m_sBuffer->cstr() [ tToken.m_iStart ], tToken.m_iLengthBytes ); BYTE * sToken = m_pTokenizer->GetToken(); // OPTIMIZE? token can be memorized and shared between qwords if ( (*(COMPARE *)this).Match ( tToken, sToken ) ) { tToken.m_uWords |= m_uWordMask; *m_uFoundWords |= m_uWordMask; return HITMAN::Create ( 0, tToken.m_uPosition, ( m_iToken-1 )==m_iLastIndex ); } } return EMPTY_HIT; } }; template < typename COMPARE > struct SnippetQword_Star_t : public SnippetsQword_c { virtual const char * OnSetup ( CSphDict * ) { this->m_iWordLength = strlen ( this->m_sWord.cstr() ); return this->m_sWord.cstr(); } }; struct SnippetsQword_StarFront_c : public SnippetQword_Star_t { inline bool Match ( const Token_t & tToken, BYTE * sToken ) { int iOffset = tToken.m_iLengthBytes - m_iWordLength; return iOffset>=0 && memcmp ( m_sWord.cstr(), sToken + iOffset, m_iWordLength )==0; } }; struct SnippetsQword_StarBack_c : public SnippetQword_Star_t { inline bool Match ( const Token_t & tToken, BYTE * sToken ) { return ( tToken.m_iLengthBytes>=m_iWordLength ) && memcmp ( m_sWord.cstr(), sToken, m_iWordLength )==0; } }; struct SnippetsQword_StarBoth_c : public SnippetQword_Star_t { inline bool Match ( const Token_t & tToken, BYTE * sToken ) { return FindString ( sToken, (BYTE *)m_sWord.cstr(), tToken.m_iLengthBytes )!=NULL; } }; struct SnippetsQword_ExactForm_c : public SnippetsQword_c { inline bool Match ( const Token_t & tToken, BYTE * sToken ) { return tToken.m_iBlendID==m_iWordID || ( memcmp ( sToken, m_sDictWord.cstr()+1, m_iWordLength )==0 ); } virtual const char* OnSetup ( CSphDict * pDict ) { // FIXME!!! to match with blended parts it recalculates wordID for word without head '=' part int iLen = m_sWord.Length()-1; BYTE sTmp [ 3*SPH_MAX_WORD_LEN + 16 ]; assert ( iLen>0 && iLen<(int)sizeof(sTmp) ); assert ( m_sWord.Begins ( "=" ) ); assert ( pDict && m_pTokenizer ); memcpy ( sTmp, m_sWord.cstr()+1, iLen ); sTmp[iLen] = '\0'; m_pTokenizer->SetBuffer ( sTmp, iLen ); while ( m_pTokenizer->GetToken()!=NULL ) { if ( m_pTokenizer->TokenIsBlended() ) { m_pTokenizer->SkipBlended(); m_iWordID = pDict->GetWordID ( sTmp ); break; } } return SnippetsQword_c::OnSetup ( pDict ); } }; /// snippets query word setup /// FIXME! 
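// Worked example for the wildcard matchers above, byte-level as they operate on
// tokenizer output, and assuming the star characters themselves are not part of
// m_sWord (so m_iWordLength counts only the literal keyword bytes):
//
//	keyword "*cache" (STAR_FRONT, m_iWordLength=5):
//		token "memcache" -> iOffset = 8-5 = 3, memcmp("cache", token+3, 5)==0  => match
//		token "cached"   -> tail bytes are "ached"                             => no match
//	keyword "cache*" (STAR_BACK):
//		token "cached"   -> leading 5 bytes are "cache"                        => match
//	keyword "*cache*" (STAR_BOTH):
//		token "supercached" -> FindString() locates "cache" anywhere           => match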
throw these away in favor of fastpath ones class SnippetsQwordSetup: public ISphQwordSetup { ExcerptGen_c * m_pGenerator; ISphTokenizer * m_pTokenizer; public: SnippetsQwordSetup ( ExcerptGen_c * pGenerator, ISphTokenizer * pTokenizer ) : m_pGenerator ( pGenerator ) , m_pTokenizer ( pTokenizer ) {} virtual ISphQword * QwordSpawn ( const XQKeyword_t & tWord ) const; virtual bool QwordSetup ( ISphQword * pQword ) const; }; ISphQword * SnippetsQwordSetup::QwordSpawn ( const XQKeyword_t & tWord ) const { if ( tWord.m_sWord.cstr()[0]=='=' ) return new SnippetsQword_ExactForm_c; switch ( tWord.m_uStarPosition ) { case STAR_NONE: return new SnippetsQword_Exact_c; case STAR_FRONT: return new SnippetsQword_StarFront_c; case STAR_BACK: return new SnippetsQword_StarBack_c; case STAR_BOTH: return new SnippetsQword_StarBoth_c; default: assert ( "impossible star position" && 0 ); return NULL; } } bool SnippetsQwordSetup::QwordSetup ( ISphQword * pQword ) const { ISnippetsQword * pWord = dynamic_cast ( pQword ); if ( !pWord ) assert ( "query word setup failed" && 0 ); pWord->m_iLastIndex = m_pGenerator->m_iLastWord; pWord->m_uWordMask = 1 << (m_pGenerator->m_iQwordCount++); pWord->m_dTokens = &(m_pGenerator->m_dTokens); pWord->m_sBuffer = &(m_pGenerator->m_sBuffer); pWord->m_pTokenizer = m_pTokenizer; pWord->m_uFoundWords = &m_pGenerator->m_uFoundWords; pWord->m_iDocs = 1; pWord->m_iHits = 1; pWord->m_bHasHitlist = true; const char * sWord = pWord->OnSetup ( m_pDict ); // add dummy word, used for passage weighting const int iLength = m_pTokenizer->IsUtf8() ? sphUTF8Len ( sWord ) : strlen ( sWord ); m_pGenerator->m_dWords.Add().m_iLengthCP = iLength; m_pGenerator->m_dKeywords.Add().m_iLength = iLength; return true; } ///////////////////////////////////////////////////////////////////////////// inline bool operator < ( const ExcerptGen_c::Token_t & a, const ExcerptGen_c::Token_t & b ) { if ( a.m_iLengthCP==b.m_iLengthCP ) return a.m_iStart > b.m_iStart; return a.m_iLengthCP < b.m_iLengthCP; } inline bool operator < ( const ExcerptGen_c::Passage_t & a, const ExcerptGen_c::Passage_t & b ) { if ( a.GetWeight()==b.GetWeight() ) return a.m_iCodes < b.m_iCodes; return a.GetWeight() < b.GetWeight(); } ExcerptGen_c::ExcerptGen_c ( bool bUtf8 ) { m_iQwordCount = 0; m_bExactPhrase = false; m_pMarker = NULL; m_uFoundWords = 0; m_bUtf8 = bUtf8; m_iTotalCP = 0; } void ExcerptGen_c::AddBoundary() { Token_t & tLast = m_dTokens.Add(); tLast.Reset(); tLast.m_eType = TOK_BREAK; } void ExcerptGen_c::AddJunk ( int iStart, int iLength, int iBoundary ) { assert ( iLength>0 ); #ifdef PARANOID assert ( iLength<=m_sBuffer.Length() ); assert ( iStart+iLength<=m_sBuffer.Length() ); #endif int iChunkStart = iStart; int iSaved = 0; for ( int i = iStart; i < iStart+iLength; i++ ) if ( sphIsSpace ( m_sBuffer.cstr () [i] )!=sphIsSpace ( m_sBuffer.cstr () [iChunkStart] ) ) { Token_t & tLast = m_dTokens.Add(); tLast.Reset(); tLast.m_eType = TOK_SPACE; tLast.m_iStart = iChunkStart; tLast.m_iLengthBytes = tLast.m_iLengthCP = i - iChunkStart; if ( m_bUtf8 ) tLast.m_iLengthCP = sphUTF8Len ( m_sBuffer.cstr() + tLast.m_iStart, tLast.m_iLengthBytes ); m_iTotalCP += tLast.m_iLengthCP; iChunkStart = i; iSaved += tLast.m_iLengthBytes; if ( iBoundary!=-1 && iSaved > ( iBoundary-iStart ) ) { AddBoundary(); iBoundary = -1; } } Token_t & tLast = m_dTokens.Add(); tLast.Reset(); tLast.m_eType = TOK_SPACE; tLast.m_iStart = iChunkStart; tLast.m_iLengthBytes = tLast.m_iLengthCP = iStart + iLength - iChunkStart; if ( m_bUtf8 ) tLast.m_iLengthCP = sphUTF8Len ( 
m_sBuffer.cstr() + tLast.m_iStart, tLast.m_iLengthBytes ); m_iTotalCP += tLast.m_iLengthCP; if ( iBoundary!=-1 ) AddBoundary(); } void ExcerptGen_c::TokenizeQuery ( const ExcerptQuery_t & tQuery, CSphDict * pDict, ISphTokenizer * pTokenizer, const CSphIndexSettings & tSettings ) { // tokenize query words int iWordsLength = strlen ( tQuery.m_sWords.cstr() ); m_dKeywords.Reserve ( MAX_HIGHLIGHT_WORDS ); BYTE * sWord; int iKwIndex = 0; int uPosition = 0; pTokenizer->SetBuffer ( (BYTE *)tQuery.m_sWords.cstr(), iWordsLength ); while ( ( sWord = pTokenizer->GetToken() )!=NULL ) { SphWordID_t iWord = pDict->GetWordID ( sWord ); bool bIsStopWord = false; if ( !iWord ) bIsStopWord = pDict->IsStopWord ( sWord ); if ( !pTokenizer->TokenIsBlended() ) { uPosition += pTokenizer->GetOvershortCount(); if ( pTokenizer->GetBoundary() ) uPosition += tSettings.m_iBoundaryStep; if ( iWord || bIsStopWord ) uPosition = bIsStopWord ? uPosition+tSettings.m_iStopwordStep : uPosition+1; } if ( iWord ) { Token_t & tLast = m_dWords.Add(); tLast.m_eType = TOK_WORD; tLast.m_iWordID = iWord; tLast.m_iLengthBytes = tLast.m_iLengthCP = strlen ( (const char *)sWord ); if ( m_bUtf8 ) tLast.m_iLengthCP = sphUTF8Len ( (const char *)sWord ); m_iTotalCP += tLast.m_iLengthCP; tLast.m_uPosition = uPosition; // store keyword Keyword_t & kwLast = m_dKeywords.Add(); kwLast.m_iLength = tLast.m_iLengthCP; // find stars bool bStarBack = ( *pTokenizer->GetTokenEnd()=='*' ); bool bStarFront = ( pTokenizer->GetTokenStart()!=pTokenizer->GetBufferPtr() ) && ( pTokenizer->GetTokenStart()[-1]=='*' ); kwLast.m_uStar = ( bStarFront ? STAR_FRONT : 0 ) | ( bStarBack ? STAR_BACK : 0 ); // store token const int iEndIndex = iKwIndex + tLast.m_iLengthBytes + 1; m_dKeywordsBuffer.Resize ( iEndIndex ); kwLast.m_iWord = iKwIndex; strcpy ( &m_dKeywordsBuffer [ iKwIndex ], (const char *)sWord ); // NOLINT iKwIndex = iEndIndex; if ( m_dWords.GetLength()==MAX_HIGHLIGHT_WORDS ) break; } } } static int FindTagEnd ( const char * sData ) { assert ( *sData=='<' ); const char * s = sData+1; // we just scan until EOLN or tag end while ( *s && *s!='>' ) { // exit on duplicate if ( *s=='<' ) return -1; if ( *s=='\'' || *s=='"' ) s = (const char *)SkipQuoted ( (const BYTE *)s ); else s++; } if ( !*s ) return -1; return s-sData; } uint64_t sphPackZone ( DWORD uPosition, int iSiblingIndex, int iZoneType ) { assert ( iSiblingIndex>=0 && iSiblingIndex=0 && iZoneType & hZones ) { CSphString sZone; sZone.SetBinary ( sZoneName, iZoneNameLen ); int * pZoneIndex = hZones ( sZone ); if ( pZoneIndex ) return *pZoneIndex; int iZone = hZones.GetLength(); hZones.Add ( iZone, sZone ); return iZone; } // FIXME! 
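// Packed-zone layout sketch, inferred from the UINT32_MASK/UINT16_MASK constants
// and the ">>32" / "& UINT16_MASK" unpacking used further down; treat the exact
// bit assignment as an assumption rather than a definitive statement.
//
//	bits 63..32  hit position         read back as ( uPacked>>32 ) & UINT32_MASK
//	bits 31..16  sibling zone index   read back as ( uPacked>>16 ) & UINT16_MASK
//	bits 15..0   zone type (id)       read back as   uPacked       & UINT16_MASK
//
// i.e. roughly:
//	return ( (uint64_t)uPosition<<32 )
//		| ( ( (uint64_t)iSiblingIndex & UINT16_MASK )<<16 )
//		| ( iZoneType & UINT16_MASK );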
unify with global static void TokenizeDocument somehow, lots of common code void ExcerptGen_c::TokenizeDocument ( char * pData, int iDataLen, CSphDict * pDict, ISphTokenizer * pTokenizer, bool bFillMasks, const ExcerptQuery_t & q, const CSphIndexSettings & tSettings ) { assert ( q.m_sStripMode!="retain" ); bool bQueryMode = q.m_bHighlightQuery; int iSPZ = q.m_iPassageBoundary; m_iTotalCP = 0; m_iDocumentWords = 0; m_dTokens.Reserve ( Max ( iDataLen/4, 256 ) ); // len/tok ratio ranged 2.8 to 3.2 on my testing data m_sBuffer = pData; pTokenizer->SetBuffer ( (BYTE*)pData, iDataLen ); const char * pStartPtr = pTokenizer->GetBufferPtr (); const char * pLastTokenEnd = pStartPtr; assert ( pStartPtr && pLastTokenEnd ); CSphVector dZoneStack; CSphVector dExactPhrase; if ( m_bExactPhrase ) dExactPhrase.Reserve ( m_dWords.GetLength() ); BYTE * sWord; DWORD uPosition = 0; // hit position in document SphWordID_t iBlendID = 0; const char * pBlendedEnd = NULL; while ( ( sWord = pTokenizer->GetToken() )!=NULL ) { if ( pTokenizer->TokenIsBlended() ) { if ( pBlendedEndGetTokenEnd() ) { iBlendID = pDict->GetWordID ( sWord ); pBlendedEnd = pTokenizer->GetTokenEnd(); } continue; } uPosition += pTokenizer->GetOvershortCount(); const char * pTokenStart = pTokenizer->GetTokenStart (); if ( pTokenStart!=pStartPtr && pTokenStart>pLastTokenEnd ) { AddJunk ( pLastTokenEnd - pStartPtr, pTokenStart - pLastTokenEnd, pTokenizer->GetBoundary() ? pTokenizer->GetBoundaryOffset() : -1 ); pLastTokenEnd = pTokenStart; } // handle SPZ tokens GE then needed // add SENTENCE, PARAGRAPH, ZONE token, do junks and tokenizer and pLastTokenEnd fix up // FIXME!!! it heavily depends on such this attitude MAGIC_CODE_SENTENCE < MAGIC_CODE_PARAGRAPH < MAGIC_CODE_ZONE if ( *sWord==MAGIC_CODE_SENTENCE || *sWord==MAGIC_CODE_PARAGRAPH || *sWord==MAGIC_CODE_ZONE ) { // SPZ token has position and could be last token too uPosition += ( iSPZ && *sWord>=iSPZ ); if ( iSPZ && *sWord>=iSPZ && ( m_dTokens.GetLength()==0 || m_dTokens.Last().m_eType!=TOK_SPZ ) ) { BYTE * sWordSPZ = sWord; if ( (*sWord)==MAGIC_CODE_SENTENCE ) sWordSPZ = (BYTE *)MAGIC_WORD_SENTENCE; else if ( (*sWord)==MAGIC_CODE_PARAGRAPH ) sWordSPZ = (BYTE *)MAGIC_WORD_PARAGRAPH; Token_t & tLast = m_dTokens.Add(); tLast.Reset(); tLast.m_eType = TOK_SPZ; tLast.m_iWordID = pDict->GetWordID ( sWordSPZ ); tLast.m_uPosition = uPosition; if ( *sWord==MAGIC_CODE_SENTENCE ) { tLast.m_iStart = pTokenStart-pStartPtr; tLast.m_iLengthBytes = tLast.m_iLengthCP = 1; m_iTotalCP++; } // SPZ token has position and could be last token too m_iLastWord = m_dTokens.GetLength(); pLastTokenEnd = pTokenizer->GetTokenEnd(); // fix it up to prevent adding last chunk on exit } if ( *sWord==MAGIC_CODE_ZONE ) { const char * pEnd = pTokenizer->GetBufferPtr(); const char * pTagStart = pEnd; while ( *pEnd && *pEnd!=MAGIC_CODE_ZONE ) pEnd++; pEnd++; // skip zone token too pTokenizer->SetBufferPtr ( pEnd ); pLastTokenEnd = pEnd; // fix it up to prevent adding last chunk on exit // span's management if ( *pTagStart!='/' ) // open zone { // zone stack management int iSelf = m_dZones.GetLength(); dZoneStack.Add ( iSelf ); // add zone itself int iZone = FindAddZone ( pTagStart, pEnd-pTagStart-1, m_hZones ); m_dZones.Add ( sphPackZone ( uPosition, iSelf, iZone ) ); // zone position in characters m_dZonePos.Add ( pTagStart-pStartPtr ); // for open zone the parent is the zone itself m_dZoneParent.Add ( iZone ); } else // close zone { #ifndef NDEBUG // lets check open - close tags match assert ( dZoneStack.GetLength() && 
dZoneStack.Last()>32 ) & UINT32_MASK ); assert ( iZone==(int)( uOpenPacked & UINT16_MASK ) ); // check for zone's types match; m_dZones[iOpen] = sphPackZone ( uOpenPos, iClose, iZone ); m_dZones.Add ( sphPackZone ( uPosition, iOpen, iZone ) ); // zone position in characters m_dZonePos.Add ( pTagStart-pStartPtr ); // for close zone the parent is the previous zone on stack int iParentZone = dZoneStack.GetLength()>2 ? dZoneStack[dZoneStack.GetLength()-2] : 0; uint64_t uParentPacked = m_dZones.GetLength() && iParentZoneGetTokenEnd (); SphWordID_t iExactID = 0; if ( bQueryMode && tSettings.m_bIndexExactWords ) { BYTE sBuf [ 3*SPH_MAX_WORD_LEN+4 ]; int iBytes = pLastTokenEnd - pTokenStart; if ( iBytes+2>(int)sizeof(sBuf) ) iBytes = (int)sizeof(sBuf)-2; memcpy ( sBuf + 1, sWord, iBytes ); sBuf[0] = MAGIC_WORD_HEAD_NONSTEMMED; sBuf[iBytes+1] = '\0'; iExactID = pDict->GetWordIDNonStemmed ( sBuf ); } SphWordID_t iWord = pDict->GetWordID ( sWord ); if ( pTokenizer->GetBoundary() ) uPosition += tSettings.m_iBoundaryStep; bool bIsStopWord = false; if ( !iWord ) bIsStopWord = pDict->IsStopWord ( sWord ); if ( iWord || bIsStopWord ) uPosition = bIsStopWord ? uPosition+tSettings.m_iStopwordStep : uPosition+1; if ( !pTokenizer->TokenIsBlendedPart() ) iBlendID = 0; Token_t & tLast = m_dTokens.Add(); tLast.m_eType = ( iWord || bIsStopWord ) ? TOK_WORD : TOK_SPACE; tLast.m_uPosition = ( iWord || bIsStopWord ) ? uPosition : 0; tLast.m_iStart = pTokenStart - pStartPtr; tLast.m_iLengthBytes = tLast.m_iLengthCP = pLastTokenEnd - pTokenStart; if ( m_bUtf8 && ( iWord || bIsStopWord ) ) tLast.m_iLengthCP = sphUTF8Len ( pTokenStart, tLast.m_iLengthBytes ); m_iTotalCP += tLast.m_iLengthCP; tLast.m_iWordID = iWord; tLast.m_iBlendID = iBlendID; tLast.m_uWords = 0; if ( iWord || bIsStopWord ) m_iDocumentWords++; m_iLastWord = iWord ? m_dTokens.GetLength() - 1 : m_iLastWord; // fill word mask tLast.m_uWords = 0; if ( bFillMasks && iWord ) { bool bMatch = false; int iOffset; ARRAY_FOREACH ( nWord, m_dWords ) { const char * sKeyword = &m_dKeywordsBuffer [ m_dKeywords[nWord].m_iWord ]; const Token_t & tToken = m_dWords[nWord]; switch ( m_dKeywords[nWord].m_uStar ) { case STAR_NONE: bMatch = ( iWord==tToken.m_iWordID || iExactID==tToken.m_iWordID ); break; case STAR_FRONT: iOffset = tLast.m_iLengthBytes - tToken.m_iLengthBytes; bMatch = ( iOffset>=0 ) && ( memcmp ( sKeyword, sWord + iOffset, tToken.m_iLengthBytes )==0 ); break; case STAR_BACK: bMatch = ( tLast.m_iLengthBytes>=tToken.m_iLengthBytes ) && ( memcmp ( sKeyword, sWord, tToken.m_iLengthBytes )==0 ); break; case STAR_BOTH: bMatch = strstr ( (const char *)sWord, sKeyword )!=NULL; break; } if ( bMatch ) { tLast.m_uWords |= 1UL<GetBufferEnd() ) { int iOffset = pTokenizer->GetBoundary() ? 
pTokenizer->GetBoundaryOffset() : -1; AddJunk ( pLastTokenEnd - pStartPtr, pTokenizer->GetBufferEnd () - pLastTokenEnd, iOffset ); } Token_t & tLast = m_dTokens.Add(); tLast.Reset(); } void ExcerptGen_c::MarkHits () { assert ( m_pMarker ); // mark CSphVector dMarked; dMarked.Reserve ( m_dTokens.GetLength() ); m_pMarker->Mark ( dMarked ); // fix-up word masks int iMarked = dMarked.GetLength(); int iTokens = m_dTokens.GetLength(); int i = 0, k = 0; while ( i < iTokens ) { // sync while ( k < iMarked && m_dTokens[i].m_uPosition > dMarked[k].m_uPosition ) k++; if ( k==iMarked ) // no more marked hits, clear tail { for ( ; i < iTokens; i++ ) m_dTokens[i].m_uWords = 0; break; } // clear false matches while ( dMarked[k].m_uPosition > m_dTokens[i].m_uPosition ) { m_dTokens[i++].m_uWords = 0; assert ( i=1 ); while ( dMarked[k].m_uSpan-- ) { i++; while ( i < iTokens && !m_dTokens[i].m_uPosition ) i++; } } } void ExcerptGen_c::SetExactPhrase ( const ExcerptQuery_t & tQuery ) { m_bExactPhrase = tQuery.m_bExactPhrase && ( m_dWords.GetLength()>1 ); } char * ExcerptGen_c::BuildExcerpt ( const ExcerptQuery_t & tQuery ) { m_iPassageId = tQuery.m_iPassageId; if ( tQuery.m_bHighlightQuery ) MarkHits(); // assign word weights ARRAY_FOREACH ( i, m_dWords ) m_dWords[i].m_iWeight = m_dWords[i].m_iLengthCP; // FIXME! should obtain freqs from dict // reset result m_dResult.Reserve ( 16384 ); m_dResult.Resize ( 0 ); m_iResultLen = 0; // do highlighting if ( ( tQuery.m_iLimit<=0 || tQuery.m_iLimit>m_iTotalCP ) && ( tQuery.m_iLimitWords<=0 || tQuery.m_iLimitWords>m_iDocumentWords ) ) { HighlightAll ( tQuery ); } else { if ( !( ExtractPassages ( tQuery ) && HighlightBestPassages ( tQuery ) ) ) { if ( !tQuery.m_bAllowEmpty ) HighlightStart ( tQuery ); } } // alloc, fill and return the result m_dResult.Add ( 0 ); char * pRes = new char [ m_dResult.GetLength() ]; memcpy ( pRes, &m_dResult[0], m_dResult.GetLength() ); m_dResult.Reset (); return pRes; } void ExcerptGen_c::HighlightPhrase ( const ExcerptQuery_t & q, int iTok, int iEnd ) { int iPhrase = 0; for ( ; iTok<=iEnd; iTok++ ) { bool bQWord = m_dTokens[iTok].m_uWords!=0; if ( bQWord && iPhrase==0 ) ResultEmit ( q.m_sBeforeMatch.cstr(), q.m_bHasBeforePassageMacro, m_iPassageId, q.m_sBeforeMatchPassage.cstr() ); ResultEmit ( m_dTokens[iTok] ); iPhrase += bQWord ? 
1 : 0; if ( bQWord && iPhrase==m_dWords.GetLength() ) { ResultEmit ( q.m_sAfterMatch.cstr(), q.m_bHasAfterPassageMacro, m_iPassageId++, q.m_sAfterMatchPassage.cstr() ); iPhrase = 0; } } } void ExcerptGen_c::HighlightAll ( const ExcerptQuery_t & q ) { bool bOpen = false; const int iMaxTok = m_dTokens.GetLength()-1; // skip last one, it's TOK_NONE if ( m_bExactPhrase ) { HighlightPhrase ( q, 0, iMaxTok ); } else { // bag of words for ( int iTok=0; iTok=m_dTokens.GetLength() ) break; } ResultEmit ( q.m_sChunkSeparator.cstr() ); } void ExcerptGen_c::ResultEmit ( const char * sLine, bool bHasMacro, int iPassageId, const char * sPostPassage ) { // plain old emit while ( sLine && *sLine ) { assert ( (*(BYTE*)sLine)<128 ); m_dResult.Add ( *sLine++ ); m_iResultLen++; } if ( !bHasMacro ) return; char sBuf[16]; int iPassLen = snprintf ( sBuf, sizeof(sBuf), "%d", iPassageId ); for ( int i=0; i=0 ); assert ( tSpan.m_iWords==iWord+1 ); // calc final weight tPass.m_iQwordsWeight = 0; tPass.m_iQwordCount = 0; DWORD uWords = tPass.m_uQwords; for ( int iWord=0; uWords; uWords >>= 1, iWord++ ) if ( uWords & 1 ) { tPass.m_iQwordsWeight += m_dWords[iWord].m_iWeight; tPass.m_iQwordCount++; } tPass.m_iMaxLCS *= iMaxWords; tPass.m_iQwordCount *= iWordCountCoeff; } void ExcerptGen_c::UpdateGaps ( Passage_t & tPass, const TokenSpan_t & tSpan, int iMaxWords ) { tPass.m_iMinGap = iMaxWords-1; tPass.m_iAroundBefore = tPass.m_iAroundAfter = 0; DWORD uQwords = 0; int iWord = -1; for ( int iTok=tSpan.m_iStart; iTok<=tSpan.m_iEnd; iTok++ ) { Token_t & tTok = m_dTokens[iTok]; if ( tTok.m_eType!=TOK_WORD ) continue; iWord++; if ( tTok.m_uWords ) { tPass.m_iMinGap = Min ( tPass.m_iMinGap, iWord ); tPass.m_iMinGap = Min ( tPass.m_iMinGap, tSpan.m_iWords-1-iWord ); } uQwords |= tTok.m_uWords; tPass.m_iAroundBefore += ( uQwords==0 ); tPass.m_iAroundAfter = ( tTok.m_uWords ? 
0 : tPass.m_iAroundAfter+1 ); } assert ( tPass.m_iMinGap>=0 ); } static int GetWordsLimit ( const ExcerptQuery_t & q, int iQwords ) { int iSoftLimit = 2*q.m_iAround + iQwords; if ( q.m_iLimitWords ) return Min ( iSoftLimit, q.m_iLimitWords ); return iSoftLimit; } bool ExcerptGen_c::SetupWindow ( TokenSpan_t & tSpan, Passage_t & tPass, int i, int iCpLimit, const ExcerptQuery_t & q ) { assert ( i>=0 && i iCpLimit ) || tSpan.m_iWords>=GetWordsLimit ( q, tSpan.m_iQwords ) || tToken.m_eType==TOK_SPZ ) { tPass.m_iTokens += ( tToken.m_eType==TOK_SPZ && tToken.m_iLengthBytes>0 ); // only MAGIC_CODE_SENTENCE has length return ( tToken.m_eType==TOK_SPZ ); } // got token, update passage tPass.m_iTokens++; tPass.m_iCodes += tToken.m_iLengthCP; if ( tToken.m_eType==TOK_WORD ) tSpan.Add ( i, m_dTokens[i].m_uWords!=0 ); } return false; } bool ExcerptGen_c::FlushPassage ( const Passage_t & tPass, int iLCSThresh ) { if (!( tPass.m_uQwords && tPass.m_iMaxLCS>=iLCSThresh )) return false; // if it's the very first one, do add if ( !m_dPassages.GetLength() ) { m_dPassages.Add ( tPass ); return true; } // check if it's new or better Passage_t & tLast = m_dPassages.Last(); if ( ( tPass.m_iStartLimit<=tLast.m_iStartLimit && tLast.m_iEndLimit<=tPass.m_iEndLimit ) || ( tLast.m_iStartLimit<=tPass.m_iStartLimit && tPass.m_iEndLimit<=tLast.m_iEndLimit ) ) { int iPassPre = tPass.m_iStartLimit - tPass.m_iStart + 1; int iPassPost = tPass.m_iStart + tPass.m_iTokens - tPass.m_iEndLimit + 1; float fPassGap = (float)Max ( iPassPre, iPassPost ) / (float)Min ( iPassPre, iPassPost ); int iLastPre = tLast.m_iStartLimit - tLast.m_iStart + 1; int iLastPost = tLast.m_iStart + tLast.m_iTokens - tLast.m_iEndLimit + 1; float fLastGap = (float)Max ( iLastPre, iLastPost ) / (float)Min ( iLastPre, iLastPost ); // centered snippet wins last passage if ( tLast.GetWeight() iCpLimit || tSpan.m_iWords > GetWordsLimit ( q, tSpan.m_iQwords ) ) && tPass.m_iTokens!=1 ) { if ( m_dTokens[tPass.m_iStart].m_eType==TOK_WORD ) { // remove heading word from wordspan assert ( m_dTokens[tSpan.m_iStart].m_eType==TOK_WORD ); if ( m_dTokens[tSpan.m_iStart].m_uWords ) { tSpan.m_iQwords--; bQwordsChanged = true; } tSpan.m_iStart++; if ( tSpan.m_iStart > tSpan.m_iEnd ) { tSpan.Reset(); } else { tSpan.m_iWords--; while ( m_dTokens[tSpan.m_iStart].m_eType!=TOK_WORD ) tSpan.m_iStart++; } } tPass.m_iCodes -= m_dTokens[tPass.m_iStart].m_iLengthCP; tPass.m_iTokens--; tPass.m_iStart++; } } return m_dPassages.GetLength()!=0; } bool ExcerptGen_c::ExtractPhrases ( const ExcerptQuery_t & ) { int iMaxWords = 100; int iLCSThresh = m_bExactPhrase ? 
m_dWords.GetLength()*iMaxWords : 0; int iStart = 0; DWORD uWords = 0; ARRAY_FOREACH ( iTok, m_dTokens ) { // phrase boundary found, go flush if ( m_dTokens[iTok].m_eType==TOK_BREAK || m_dTokens[iTok].m_eType==TOK_NONE ) { int iEnd = iTok - 1; // emit non-empty phrases with matching words as passages if ( iStart=iLCSThresh ) { tPass.m_iWords = tSpan.m_iWords; m_dPassages.Add ( tPass ); } } if ( m_dTokens[iTok].m_eType==TOK_NONE ) break; iStart = iTok + 1; uWords = 0; } // just an incoming token if ( m_dTokens[iTok].m_eType==TOK_WORD ) uWords |= m_dTokens[iTok].m_uWords; } return m_dPassages.GetLength()!=0; } struct PassageOrder_fn { inline bool IsLess ( const ExcerptGen_c::Passage_t & a, const ExcerptGen_c::Passage_t & b ) const { return a.m_iStart < b.m_iStart; } }; bool ExcerptGen_c::HighlightBestPassages ( const ExcerptQuery_t & tQuery ) { assert ( m_dPassages.GetLength() ); // needed for "slightly outta limit" check below int iKeywordsLength = 0; ARRAY_FOREACH ( i, m_dKeywords ) iKeywordsLength += m_dKeywords[i].m_iLength; // our limits int iMaxPassages = tQuery.m_iLimitPassages ? Min ( m_dPassages.GetLength(), tQuery.m_iLimitPassages ) : m_dPassages.GetLength(); int iMaxWords = tQuery.m_iLimitWords ? tQuery.m_iLimitWords : INT_MAX; int iMaxCp = tQuery.m_iLimit ? tQuery.m_iLimit : INT_MAX; // our best passages CSphVector dShow; DWORD uWords = 0; // mask of words in dShow so far int iTotalCodes = 0; int iTotalWords = 0; bool bAroundComply = true; CSphVector dWeights ( m_dPassages.GetLength() ); ARRAY_FOREACH ( i, m_dPassages ) dWeights[i] = m_dPassages[i].m_iQwordsWeight; // collect enough best passages to show all keywords and max out the limits // don't care much if we're going over limits in this loop, it will be tightened below bool bAll = false; while ( dShow.GetLength() < iMaxPassages ) { // get next best passage int iBest = -1; ARRAY_FOREACH ( i, m_dPassages ) { if ( m_dPassages[i].m_iCodes && ( iBest==-1 || m_dPassages[iBest] < m_dPassages[i] ) ) iBest = i; } if ( iBest<0 ) break; Passage_t & tBest = m_dPassages[iBest]; // does this passage fit the limits? bool bFits = ( iTotalCodes + tBest.m_iCodes<=iMaxCp ) && ( iTotalWords + tBest.m_iWords<=iMaxWords ); bAroundComply &= ( Max ( tBest.m_iAroundBefore, tBest.m_iAroundAfter )<=tQuery.m_iAround ); // all words will be shown and we're outta limit if ( uWords==m_uFoundWords && !bFits ) { // there might be just enough space to partially display this passage if ( ( iTotalCodes + iKeywordsLength )<=tQuery.m_iLimit ) dShow.Add ( tBest ); break; } // save it, despite limits or whatever, we'll tighten everything in the loop below dShow.Add ( tBest ); uWords |= tBest.m_uQwords; iTotalWords += tBest.m_iWords; iTotalCodes += tBest.m_iCodes; tBest.m_iCodes = 0; // no longer needed here, abusing to mark displayed passages // we just managed to show all words? 
do one final re-weighting run if ( !bAll && uWords==m_uFoundWords ) { bAll = true; ARRAY_FOREACH ( i, m_dPassages ) m_dPassages[i].m_iQwordsWeight = dWeights[i]; } // if we're already showing all words, re-weighting is not needed any more if ( bAll ) continue; // re-weight passages, adjust for new mask of shown words ARRAY_FOREACH ( i, m_dPassages ) { if ( !m_dPassages[i].m_iCodes ) continue; DWORD uMask = tBest.m_uQwords; for ( int iWord=0; uMask; iWord++, uMask >>= 1 ) if ( ( uMask & 1 ) && ( m_dPassages[i].m_uQwords & ( 1UL< iMaxCp || iTotalWords > iMaxWords ) && !tQuery.m_bUseBoundaries ) { // trim passages bool bFirst = true; bool bDone = false; int iCodes = iTotalCodes; while ( !bDone ) { // drop one token from each passage starting from the least relevant for ( int i=dShow.GetLength(); i > 0; i-- ) { Passage_t & tPassage = dShow[i-1]; int iFirst = tPassage.m_iStart; int iLast = tPassage.m_iStart + tPassage.m_iTokens - 1; if ( iFirst!=tPassage.m_iStartLimit && ( bFirst || iLast==tPassage.m_iEndLimit ) ) { // drop first if ( ( tQuery.m_bForceAllWords && m_dTokens[tPassage.m_iStart].m_uWords==0 ) || !tQuery.m_bForceAllWords ) tPassage.m_iStart++; tPassage.m_iTokens--; tPassage.m_iCodes -= m_dTokens[iFirst].m_iLengthCP; iTotalCodes -= m_dTokens[iFirst].m_iLengthCP; iTotalWords -= ( m_dTokens[iFirst].m_eType==TOK_WORD ); } else if ( iLast!=tPassage.m_iEndLimit ) { // drop last if ( ( tQuery.m_bForceAllWords && m_dTokens[tPassage.m_iStart+tPassage.m_iTokens-1].m_uWords==0 ) || !tQuery.m_bForceAllWords ) tPassage.m_iTokens--; tPassage.m_iCodes -= m_dTokens[iLast].m_iLengthCP; iTotalCodes -= m_dTokens[iLast].m_iLengthCP; iTotalWords -= ( m_dTokens[iLast].m_eType==TOK_WORD ); } if ( iTotalCodes<=iMaxCp && iTotalWords<=iMaxWords ) { bDone = true; break; } } if ( iTotalCodes==iCodes ) break; // couldn't reduce anything iCodes = iTotalCodes; bFirst = !bFirst; } } // if passages still don't fit start dropping least significant ones, limit is sacred. 
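The passage-fitting stage here works in two steps: the trimming loop above shaves tokens off alternating ends of the shown passages (staying outside each passage's keyword span), and the loop that follows drops whole least-significant passages until both the code-point and the word limits are met. Below is a minimal, simplified sketch of that second step only; SketchPassage and DropToLimits are hypothetical stand-ins for the real Passage_t and ExcerptGen_c machinery, and the up-front sort by weight is a simplification of the incremental best-passage selection done in the original code.

#include <algorithm>
#include <cstdio>
#include <vector>

// hypothetical stand-in for Passage_t; only the fields needed for limit checks
struct SketchPassage
{
	int m_iCodes;	// code points in the passage
	int m_iWords;	// words in the passage
	int m_iWeight;	// relevance weight, higher is better
};

// keep the most relevant passages that still fit both limits ("limit is sacred")
static void DropToLimits ( std::vector<SketchPassage> & dShow, int iMaxCp, int iMaxWords )
{
	// most relevant first, so the least significant ones sit at the tail
	std::sort ( dShow.begin(), dShow.end(),
		[] ( const SketchPassage & a, const SketchPassage & b ) { return a.m_iWeight > b.m_iWeight; } );

	int iTotalCodes = 0, iTotalWords = 0;
	for ( const SketchPassage & tPass : dShow )
	{
		iTotalCodes += tPass.m_iCodes;
		iTotalWords += tPass.m_iWords;
	}

	// drop least significant passages from the tail until we fit
	while ( !dShow.empty() && ( iTotalCodes>iMaxCp || iTotalWords>iMaxWords ) )
	{
		iTotalCodes -= dShow.back().m_iCodes;
		iTotalWords -= dShow.back().m_iWords;
		dShow.pop_back();
	}
}

int main ()
{
	std::vector<SketchPassage> dShow = { { 120, 18, 7 }, { 90, 14, 9 }, { 60, 10, 3 } };
	DropToLimits ( dShow, 220, 40 );
	printf ( "kept %d passage(s)\n", (int)dShow.size() ); // kept 2 passage(s)
	return 0;
}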
while ( ( iTotalCodes > iMaxCp || iTotalWords > iMaxWords ) && !tQuery.m_bForceAllWords ) { iTotalCodes -= dShow.Last().m_iCodes; iTotalWords -= dShow.Last().m_iWords; dShow.RemoveFast ( dShow.GetLength()-1 ); } if ( !dShow.GetLength() ) return false; // sort passages in the document order if ( !tQuery.m_bWeightOrder ) dShow.Sort ( PassageOrder_fn() ); /// show int iLast = -1; bool bEmitZones = tQuery.m_bEmitZones && m_dZones.GetLength(); ARRAY_FOREACH ( i, dShow ) { int iTok = dShow[i].m_iStart; int iEnd = iTok + dShow[i].m_iTokens - 1; if ( ( iLast>=0 && iLast1+iLast || tQuery.m_bWeightOrder ) { ResultEmit ( tQuery.m_sChunkSeparator.cstr() ); // find and emit most enclosing zone if ( bEmitZones ) { int iHighlightStart = m_dTokens[iTok].m_iStart; int iZone = FindSpan ( m_dZonePos, iHighlightStart ); if ( iZone!=-1 ) { int iParent = m_dZoneParent[iZone]; m_hZones.IterateStart(); while ( m_hZones.IterateNext() ) { if ( m_hZones.IterateGet()!=iParent ) continue; ResultEmit ( "<" ); ResultEmit ( m_hZones.IterateGetKey().cstr() ); ResultEmit ( ">" ); break; } } } } if ( m_bExactPhrase ) HighlightPhrase ( tQuery, iTok, iEnd ); else { while ( iTok<=iEnd ) { if ( iTok>iLast || tQuery.m_bWeightOrder ) { if ( m_dTokens[iTok].m_uWords ) { ResultEmit ( tQuery.m_sBeforeMatch.cstr(), tQuery.m_bHasBeforePassageMacro, m_iPassageId, tQuery.m_sBeforeMatchPassage.cstr() ); ResultEmit ( m_dTokens[iTok] ); ResultEmit ( tQuery.m_sAfterMatch.cstr(), tQuery.m_bHasAfterPassageMacro, m_iPassageId++, tQuery.m_sAfterMatchPassage.cstr() ); } else ResultEmit ( m_dTokens[iTok] ); } iTok++; } } iLast = tQuery.m_bWeightOrder ? iEnd : Max ( iLast, iEnd ); } if ( m_dTokens[iLast].m_eType!=TOK_NONE && m_dTokens[iLast+1].m_eType!=TOK_NONE ) ResultEmit ( tQuery.m_sChunkSeparator.cstr() ); return true; } ////////////////////////////////////////////////////////////////////////// // FAST PATH FOR FULL DOCUMENT HIGHLIGHTING ////////////////////////////////////////////////////////////////////////// struct DocQueryZonePair_t { int m_iDoc; int m_iQuery; bool operator<( const DocQueryZonePair_t & b ) const { return m_iDoc( const DocQueryZonePair_t & b ) const { return m_iDoc>b.m_iDoc; } bool operator==( const DocQueryZonePair_t & b ) const { return m_iDoc==b.m_iDoc; } }; /// hit-in-zone check implementation for the matching engine class SnippetZoneChecker_c : public ISphZoneCheck { private: struct ZoneHits_t { CSphVector m_dOpen; CSphVector m_dClose; }; CSphVector m_dZones; public: SnippetZoneChecker_c ( const CSphVector & dDocZones, const SmallStringHash_T & hDocNames, const CSphVector & dQueryZones ) { if ( !dQueryZones.GetLength() ) return; CSphVector dCheckedZones; ARRAY_FOREACH ( i, dQueryZones ) { int * pZone = hDocNames ( dQueryZones[i] ); if ( pZone ) { DocQueryZonePair_t & tPair = dCheckedZones.Add (); tPair.m_iDoc = *pZone; tPair.m_iQuery = i; } } dCheckedZones.Sort(); m_dZones.Resize ( dQueryZones.GetLength() ); ARRAY_FOREACH ( i, dDocZones ) { uint64_t uZonePacked = dDocZones[i]; DWORD uPos = (DWORD)( ( uZonePacked >>32 ) & UINT32_MASK ); int iSibling = (int)( ( uZonePacked>>16 ) & UINT16_MASK ); int iZone = (int)( uZonePacked & UINT16_MASK ); assert ( iSibling>=0 && iSibling close zone position // + zone type not in query zones if ( iSibling<=i || uPos>=( ( dDocZones[iSibling]>>32 ) & UINT32_MASK ) ) continue; DocQueryZonePair_t tRefZone; tRefZone.m_iDoc = iZone; const DocQueryZonePair_t * pPair = dCheckedZones.BinarySearch ( tRefZone ); if ( !pPair ) continue; uint64_t uClosePacked = dDocZones[iSibling]; DWORD uClosePos 
= ( (int)( uClosePacked>>32 ) & UINT32_MASK ); ZoneHits_t & tZone = m_dZones[pPair->m_iQuery]; tZone.m_dOpen.Add ( uPos ); tZone.m_dClose.Add ( uClosePos ); } #ifndef NDEBUG ARRAY_FOREACH ( i, m_dZones ) { const ZoneHits_t & tZone = m_dZones[i]; assert ( tZone.m_dOpen.GetLength()==tZone.m_dClose.GetLength() ); const Hitpos_t * pHit = tZone.m_dOpen.Begin()+1; const Hitpos_t * pMax = tZone.m_dOpen.Begin()+tZone.m_dOpen.GetLength(); for ( ; pHitm_uHitpos ); int iOpen = FindSpan ( m_dZones[iZone].m_dOpen, uPos ); return ( iOpen>=0 && uPos<=m_dZones[iZone].m_dClose[iOpen] ) ? SPH_ZONE_FOUND : SPH_ZONE_NO_SPAN; } }; ////////////////////////////////////////////////////////////////////////// /// mini-index for a single document /// keeps query words /// keeps hit lists for every query keyword class SnippetsDocIndex_c : public ISphNoncopyable { public: // document related DWORD m_uLastPos; CSphVector< CSphVector > m_dDocHits; // query parsing result XQQuery_t m_tQuery; protected: // query keywords and parsing stuff CSphVector m_dQueryWords; CSphVector m_dStarWords; CSphVector m_dStarBuffer; bool m_bQueryMode; bool m_bSentence; bool m_bParagraph; BYTE m_sTmpWord [ 3*SPH_MAX_WORD_LEN + 16 ]; public: explicit SnippetsDocIndex_c ( bool bQueryMode ); void SetupHits (); int FindWord ( SphWordID_t iWordID, const BYTE * sWord, int iWordLen ) const; void AddHits ( SphWordID_t iWordID, const BYTE * sWord, int iWordLen, DWORD uPosition ); bool Parse ( const char * sQuery, ISphTokenizer * pTokenizer, CSphDict * pDict, const CSphSchema * pSchema, CSphString & sError, int iStopwordStep ); int GetSPZ () const; protected: bool MatchStar ( const ExcerptGen_c::Keyword_t & tTok, const BYTE * sWord, int iWordLen ) const; void AddWord ( SphWordID_t iWordID ); void AddWord ( const char * sWord, int iStarPosition ); void ExtractWords ( XQNode_t * pNode, CSphDict * pDict ); }; SnippetsDocIndex_c::SnippetsDocIndex_c ( bool bQueryMode ) : m_uLastPos ( 0 ) , m_bQueryMode ( bQueryMode ) , m_bSentence ( false ) , m_bParagraph ( false ) {} void SnippetsDocIndex_c::SetupHits () { m_dDocHits.Resize ( m_dQueryWords.GetLength() + m_dStarWords.GetLength() ); m_uLastPos = 0; } bool SnippetsDocIndex_c::MatchStar ( const ExcerptGen_c::Keyword_t & tTok, const BYTE * sWord, int iWordLen ) const { assert ( tTok.m_uStar!=STAR_NONE ); const BYTE * sKeyword = m_dStarBuffer.Begin() + tTok.m_iWord; switch ( tTok.m_uStar ) { case STAR_FRONT: { int iOffset = iWordLen - tTok.m_iLength; return ( iOffset>=0 ) && ( memcmp ( sKeyword, sWord + iOffset, tTok.m_iLength )==0 ); } case STAR_BACK: return ( iWordLen>=tTok.m_iLength ) && ( memcmp ( sKeyword, sWord, tTok.m_iLength )==0 ); case STAR_BOTH: return strstr ( (const char *)sWord, (const char *)sKeyword )!=NULL; } return false; } int SnippetsDocIndex_c::FindWord ( SphWordID_t iWordID, const BYTE * sWord, int iWordLen ) const { const SphWordID_t * pQueryID = iWordID ? m_dQueryWords.BinarySearch ( iWordID ) : NULL; if ( pQueryID ) return pQueryID - m_dQueryWords.Begin(); if ( sWord && iWordLen ) ARRAY_FOREACH ( i, m_dStarWords ) if ( MatchStar ( m_dStarWords[i], sWord, iWordLen ) ) return i + m_dQueryWords.GetLength(); return -1; } void SnippetsDocIndex_c::AddHits ( SphWordID_t iWordID, const BYTE * sWord, int iWordLen, DWORD uPosition ) { assert ( m_dDocHits.GetLength()==m_dQueryWords.GetLength()+m_dStarWords.GetLength() ); const SphWordID_t * pQueryWord = ( iWordID ? 
m_dQueryWords.BinarySearch ( iWordID ) : NULL ); if ( pQueryWord ) { m_dDocHits [ pQueryWord - m_dQueryWords.Begin() ].Add ( uPosition ); return; } if ( sWord && iWordLen ) ARRAY_FOREACH ( i, m_dStarWords ) if ( MatchStar ( m_dStarWords[i], sWord, iWordLen ) ) m_dDocHits [ m_dQueryWords.GetLength() + i ].Add ( uPosition ); } bool SnippetsDocIndex_c::Parse ( const char * sQuery, ISphTokenizer * pTokenizer, CSphDict * pDict, const CSphSchema * pSchema, CSphString & sError, int iStopwordStep ) { if ( !m_bQueryMode ) { // parse bag-of-words query int iQueryLen = strlen ( sQuery ); // FIXME!!! get length as argument pTokenizer->SetBuffer ( (BYTE *)sQuery, iQueryLen ); BYTE * sWord = NULL; // FIXME!!! add warning on query words overflow while ( ( sWord = pTokenizer->GetToken() )!=NULL && ( m_dQueryWords.GetLength() + m_dStarWords.GetLength() )GetWordID ( sWord ); if ( !uWordID ) continue; bool bStarBack = ( *pTokenizer->GetTokenEnd()=='*' ); bool bStarFront = ( pTokenizer->GetTokenStart()!=pTokenizer->GetBufferPtr() ) && ( pTokenizer->GetTokenStart()[-1]=='*' ); int uStar = ( bStarFront ? STAR_FRONT : 0 ) | ( bStarBack ? STAR_BACK : 0 ); if ( uStar ) AddWord ( (const char *)sWord, uStar ); else AddWord ( uWordID ); } } else { // parse extended query if ( !sphParseExtendedQuery ( m_tQuery, sQuery, pTokenizer, pSchema, pDict, iStopwordStep ) ) { sError = m_tQuery.m_sParseError; return false; } m_tQuery.m_pRoot->ClearFieldMask(); ExtractWords ( m_tQuery.m_pRoot, pDict ); if ( m_bSentence ) { strncpy ( (char *)m_sTmpWord, MAGIC_WORD_SENTENCE, sizeof(m_sTmpWord) ); AddWord ( pDict->GetWordID ( m_sTmpWord ) ); } if ( m_bParagraph ) { strncpy ( (char *)m_sTmpWord, MAGIC_WORD_PARAGRAPH, sizeof(m_sTmpWord) ); AddWord ( pDict->GetWordID ( m_sTmpWord ) ); } // should be in sync with ExtRanker_c constructor ARRAY_FOREACH ( i, m_tQuery.m_dZones ) { snprintf ( (char *)m_sTmpWord, sizeof(m_sTmpWord), "%c%s", MAGIC_CODE_ZONE, m_tQuery.m_dZones[i].cstr() ); AddWord ( pDict->GetWordID ( m_sTmpWord ) ); } } // all ok, remove dupes, and return m_dQueryWords.Uniq(); assert ( !m_dStarWords.GetLength() || m_dStarBuffer.GetLength() ); return true; } int SnippetsDocIndex_c::GetSPZ () const { // with sentence in query we should consider SENTECE, PARAGRAPH, ZONE // with paragraph in query we should consider PARAGRAPH, ZONE // with zone in query we should consider ZONE if ( m_bSentence ) return MAGIC_CODE_SENTENCE; if ( m_bParagraph ) return MAGIC_CODE_PARAGRAPH; if ( m_tQuery.m_dZones.GetLength() ) return MAGIC_CODE_ZONE; return 0; } void SnippetsDocIndex_c::AddWord ( SphWordID_t iWordID ) { assert ( iWordID ); m_dQueryWords.Add ( iWordID ); } void SnippetsDocIndex_c::AddWord ( const char * sWord, int iStarPosition ) { int iLen = strlen ( sWord ); int iOff = m_dStarBuffer.GetLength(); m_dStarBuffer.Resize ( iOff+iLen+1 ); // reserve space for word + trailing zero memcpy ( &m_dStarBuffer[iOff], sWord, iLen ); m_dStarBuffer[iOff+iLen] = 0; ExcerptGen_c::Keyword_t & tTok = m_dStarWords.Add(); tTok.m_iWord = iOff; tTok.m_iLength = iLen; tTok.m_uStar = iStarPosition; } void SnippetsDocIndex_c::ExtractWords ( XQNode_t * pNode, CSphDict * pDict ) { m_bSentence |= ( pNode->GetOp()==SPH_QUERY_SENTENCE ); m_bParagraph |= ( pNode->GetOp()==SPH_QUERY_PARAGRAPH ); ARRAY_FOREACH ( i, pNode->m_dWords ) { const XQKeyword_t & tWord = pNode->m_dWords[i]; if ( tWord.m_uStarPosition ) { AddWord ( tWord.m_sWord.cstr(), tWord.m_uStarPosition ); } else { strncpy ( (char *)m_sTmpWord, tWord.m_sWord.cstr(), sizeof(m_sTmpWord) ); SphWordID_t 
iWordID = pDict->GetWordID ( m_sTmpWord ); if ( iWordID ) AddWord ( iWordID ); } } ARRAY_FOREACH ( i, pNode->m_dChildren ) ExtractWords ( pNode->m_dChildren[i], pDict ); } ////////////////////////////////////////////////////////////////////////// /// document token processor functor traits class TokenFunctorTraits_c : public ISphNoncopyable, public ExcerptQuery_t { public: CSphVector m_dZones; SmallStringHash_T m_hZones; CSphVector m_dResult; SnippetsDocIndex_c & m_tContainer; ISphTokenizer * m_pTokenizer; CSphDict * m_pDict; const char * m_pDoc; int m_iBoundaryStep; int m_iStopwordStep; bool m_bIndexExactWords; int m_iDocLen; int m_iMatchesCount; explicit TokenFunctorTraits_c ( SnippetsDocIndex_c & tContainer, ISphTokenizer * pTokenizer, CSphDict * pDict, const ExcerptQuery_t & tQuery, const CSphIndexSettings & tSettingsIndex, const char * sDoc, int iDocLen ) : m_tContainer ( tContainer ) , m_pTokenizer ( pTokenizer ) , m_pDict ( pDict ) , m_pDoc ( NULL ) , m_iBoundaryStep ( tSettingsIndex.m_iBoundaryStep ) , m_iStopwordStep ( tSettingsIndex.m_iStopwordStep ) , m_bIndexExactWords ( tSettingsIndex.m_bIndexExactWords ) , m_iDocLen ( iDocLen ) , m_iMatchesCount ( 0 ) { assert ( m_pTokenizer && m_pDict ); ExcerptQuery_t::operator = ( tQuery ); m_pTokenizer->SetBuffer ( (BYTE*)sDoc, m_iDocLen ); m_pDoc = m_pTokenizer->GetBufferPtr(); } ~TokenFunctorTraits_c () {} void ResultEmit ( const char * pSrc, int iLen, bool bHasPassageMacro=false, int iPassageId=0, const char * pPost=NULL, int iPostLen=0 ) { if ( iLen>0 ) { int iOutLen = m_dResult.GetLength(); m_dResult.Resize ( iOutLen+iLen ); memcpy ( &m_dResult[iOutLen], pSrc, iLen ); } if ( !bHasPassageMacro ) return; char sBuf[16]; int iPassLen = snprintf ( sBuf, sizeof(sBuf), "%d", iPassageId ); int iOutLen = m_dResult.GetLength(); m_dResult.Resize ( iOutLen + iPassLen + iPostLen ); if ( iPassLen ) memcpy ( m_dResult.Begin()+iOutLen, sBuf, iPassLen ); if ( iPostLen ) memcpy ( m_dResult.Begin()+iOutLen+iPassLen, pPost, iPostLen ); } virtual void OnOverlap ( int iStart, int iLen ) = 0; virtual void OnSkipHtml ( int iStart, int iLen ) = 0; virtual void OnToken ( int iStart, int iLen, const BYTE * sWord, DWORD uPosition, const CSphVector & dWordids ) = 0; virtual void OnSPZ ( BYTE iSPZ, DWORD uPosition, char * sZoneName ) = 0; virtual void OnTail ( int iStart, int iLen ) = 0; virtual void OnFinish () = 0; virtual const CSphVector * GetHitlist ( const XQKeyword_t & tWord ) const = 0; }; /// functor that processes tokens and collects matching keyword hits into mini-index class HitCollector_c : public TokenFunctorTraits_c { public: mutable BYTE m_sTmpWord [ 3*SPH_MAX_WORD_LEN + 16 ]; SphWordID_t m_uSentenceID; SphWordID_t m_uParagraphID; public: explicit HitCollector_c ( SnippetsDocIndex_c & tContainer, ISphTokenizer * pTokenizer, CSphDict * pDict, const ExcerptQuery_t & tQuery, const CSphIndexSettings & tSettingsIndex, const char * sDoc, int iDocLen ) : TokenFunctorTraits_c ( tContainer, pTokenizer, pDict, tQuery, tSettingsIndex, sDoc, iDocLen ) { strncpy ( (char *)m_sTmpWord, MAGIC_WORD_SENTENCE, sizeof(m_sTmpWord) ); m_uSentenceID = pDict->GetWordID ( m_sTmpWord ); strncpy ( (char *)m_sTmpWord, MAGIC_WORD_PARAGRAPH, sizeof(m_sTmpWord) ); m_uParagraphID = pDict->GetWordID ( m_sTmpWord ); m_tContainer.SetupHits(); } virtual ~HitCollector_c () {} virtual void OnToken ( int, int iLen, const BYTE * sWord, DWORD uPosition, const CSphVector & dWordids ) { bool bReal = false; ARRAY_FOREACH ( i, dWordids ) if ( dWordids[i] ) { m_tContainer.AddHits ( 
dWordids[i], sWord, iLen, uPosition ); bReal = true; } m_tContainer.m_uLastPos = bReal ? uPosition : m_tContainer.m_uLastPos; } virtual void OnSPZ ( BYTE iSPZ, DWORD uPosition, char * sZoneName ) { switch ( iSPZ ) { case MAGIC_CODE_SENTENCE: m_tContainer.AddHits ( m_uSentenceID, NULL, 0, uPosition ); break; case MAGIC_CODE_PARAGRAPH: m_tContainer.AddHits ( m_uParagraphID, NULL, 0, uPosition ); break; case MAGIC_CODE_ZONE: assert ( m_dZones.GetLength() ); assert ( ( ( m_dZones.Last()>>32 ) & UINT32_MASK )==uPosition ); assert ( sZoneName ); m_tContainer.AddHits ( m_pDict->GetWordID ( (BYTE *)sZoneName ), NULL, 0, uPosition ); break; default: assert ( 0 && "impossible SPZ" ); } m_tContainer.m_uLastPos = uPosition; } virtual const CSphVector * GetHitlist ( const XQKeyword_t & tWord ) const { int iWord = -1; if ( tWord.m_uStarPosition ) { iWord = m_tContainer.FindWord ( 0, (const BYTE *)tWord.m_sWord.cstr(), tWord.m_sWord.Length() ); } else { strncpy ( (char *)m_sTmpWord, tWord.m_sWord.cstr(), sizeof(m_sTmpWord) ); SphWordID_t iWordID = m_pDict->GetWordID ( m_sTmpWord ); if ( iWordID ) iWord = m_tContainer.FindWord ( iWordID, NULL, 0 ); } if ( iWord!=-1 ) return m_tContainer.m_dDocHits.Begin()+iWord; else return NULL; } virtual void OnOverlap ( int, int ) {} virtual void OnSkipHtml ( int, int ) {} virtual void OnTail ( int, int ) {} virtual void OnFinish () {} }; /// functor that matches tokens against query words from mini-index and highlights them class HighlightPlain_c : public TokenFunctorTraits_c { protected: int m_iBeforeLen; int m_iAfterLen; int m_iBeforePostLen; int m_iAfterPostLen; public: HighlightPlain_c ( SnippetsDocIndex_c & tContainer, ISphTokenizer * pTokenizer, CSphDict * pDict, const ExcerptQuery_t & tQuery, const CSphIndexSettings & tSettingsIndex, const char * sDoc, int iDocLen ) : TokenFunctorTraits_c ( tContainer, pTokenizer, pDict, tQuery, tSettingsIndex, sDoc, iDocLen ) , m_iBeforeLen ( tQuery.m_sBeforeMatch.Length() ) , m_iAfterLen ( tQuery.m_sAfterMatch.Length() ) , m_iBeforePostLen ( tQuery.m_sBeforeMatchPassage.Length() ) , m_iAfterPostLen ( tQuery.m_sAfterMatchPassage.Length() ) { m_dResult.Reserve ( m_iDocLen ); } virtual ~HighlightPlain_c () {} virtual void OnOverlap ( int iStart, int iLen ) { assert ( m_pDoc ); assert ( iStart>=0 && m_pDoc+iStart+iLen<=m_pTokenizer->GetBufferEnd() ); ResultEmit ( m_pDoc+iStart, iLen ); } virtual void OnSkipHtml ( int iStart, int iLen ) { assert ( m_pDoc ); assert ( iStart>=0 && m_pDoc+iStart+iLen<=m_pTokenizer->GetBufferEnd() ); ResultEmit ( m_pDoc+iStart, iLen ); } virtual void OnToken ( int iStart, int iLen, const BYTE * sWord, DWORD, const CSphVector & dWordids ) { assert ( m_pDoc ); assert ( iStart>=0 && m_pDoc+iStart+iLen<=m_pTokenizer->GetBufferEnd() ); bool bMatch = m_tContainer.FindWord ( dWordids[0], sWord , iLen )!=-1; // the primary one; need this for star matching for ( int i=1; i=0 && m_pDoc+iStart+iLen<=m_pTokenizer->GetBufferEnd() ); ResultEmit ( m_pDoc+iStart, iLen ); } virtual void OnFinish () {} virtual const CSphVector * GetHitlist ( const XQKeyword_t & ) const { return NULL; } }; /// functor that matches tokens against hit positions from mini-index and highlights them class HighlightQuery_c : public HighlightPlain_c { public: const SphHitMark_t * m_pHit; const SphHitMark_t * m_pHitEnd; public: HighlightQuery_c ( SnippetsDocIndex_c & tContainer, ISphTokenizer * pTokenizer, CSphDict * pDict, const ExcerptQuery_t & tQuery, const CSphIndexSettings & tSettingsIndex, const char * sDoc, int iDocLen, const CSphVector 
& dHits ) : HighlightPlain_c ( tContainer, pTokenizer, pDict, tQuery, tSettingsIndex, sDoc, iDocLen ) , m_pHit ( dHits.Begin() ) , m_pHitEnd ( dHits.Begin()+dHits.GetLength() ) {} virtual ~HighlightQuery_c () {} virtual void OnToken ( int iStart, int iLen, const BYTE *, DWORD uPosition, const CSphVector & ) { assert ( m_pDoc ); assert ( iStart>=0 && m_pDoc+iStart+iLen<=m_pTokenizer->GetBufferEnd() ); // fast forward until next potentially matching hit (hits are sorted by position) while ( m_pHitm_uPosition+m_pHit->m_uSpan<=uPosition ) m_pHit++; // marker folding, emit "before" marker at span start only if ( m_pHitm_uPosition ) { ResultEmit ( m_sBeforeMatch.cstr(), m_iBeforeLen, m_bHasBeforePassageMacro, m_iPassageId, m_sBeforeMatchPassage.cstr(), m_iBeforePostLen ); m_iMatchesCount++; } // emit token itself ResultEmit ( m_pDoc+iStart, iLen ); // marker folding, emit "after" marker at span end only if ( m_pHitm_uPosition+m_pHit->m_uSpan-1 ) ResultEmit ( m_sAfterMatch.cstr(), m_iAfterLen, m_bHasAfterPassageMacro, m_iPassageId++, m_sAfterMatchPassage.cstr(), m_iAfterPostLen ); } }; // make zone name lowercase static void CopyZoneName ( CSphVector & dName, const char * sZone, int iLen ) { dName.Resize ( iLen+1 ); char * pDst = dName.Begin(); const char * pEnd = sZone + iLen; while ( sZone & dZoneStack, CSphVector & dZoneName #ifndef NDEBUG , const char * pBuf , CSphVector & dZonePos #endif ) { CSphVector & dZones = tFunctor.m_dZones; SmallStringHash_T & hZones = tFunctor.m_hZones; // span's management if ( *pStart!='/' ) // open zone { #ifndef NDEBUG // zone position in characters dZonePos.Add ( pStart-pBuf ); #endif // zone stack management int iSelf = dZones.GetLength(); dZoneStack.Add ( iSelf ); // add zone itself int iZoneNameLen = pEnd-pStart-1; CopyZoneName ( dZoneName, pStart, iZoneNameLen ); int iZone = FindAddZone ( dZoneName.Begin(), iZoneNameLen, hZones ); dZones.Add ( sphPackZone ( uPosition, iSelf, iZone ) ); } else // close zone { #ifndef NDEBUG // lets check open - close tags match assert ( dZoneStack.GetLength() && dZoneStack.Last()>32 ) & UINT32_MASK ); assert ( iZone==(int)( uOpenPacked & UINT16_MASK ) ); // check for zone's types match; dZones[iOpen] = sphPackZone ( uOpenPos, iClose, iZone ); dZones.Add ( sphPackZone ( uPosition, iOpen, iZone ) ); #ifndef NDEBUG // zone position in characters dZonePos.Add ( pStart-pBuf ); #endif // pop up current zone from zone's stack dZoneStack.Resize ( dZoneStack.GetLength()-1 ); } } /// tokenize document using a given functor static void TokenizeDocument ( TokenFunctorTraits_c & tFunctor, const CSphHTMLStripper * pStripper ) { ISphTokenizer * pTokenizer = tFunctor.m_pTokenizer; CSphDict * pDict = tFunctor.m_pDict; const char * pStartPtr = pTokenizer->GetBufferPtr (); const char * pLastTokenEnd = pStartPtr; const char * pBufferEnd = pTokenizer->GetBufferEnd(); assert ( pStartPtr && pLastTokenEnd ); bool bRetainHtml = tFunctor.m_sStripMode=="retain"; int iSPZ = tFunctor.m_iPassageBoundary; int uPosition = 0; BYTE * sWord = NULL; SphWordID_t iBlendID = 0; SphWordID_t iBlendedExactID = 0; const char * pBlendedEnd = NULL; CSphVector dZoneStack; CSphVector dZoneName ( 16+3*SPH_MAX_WORD_LEN ); BYTE sExactBuf [ 3*SPH_MAX_WORD_LEN+4 ]; BYTE sNonStemmed [ 3*SPH_MAX_WORD_LEN+3]; // FIXME!!! 
replace by query SPZ extraction pass if ( !iSPZ && ( bRetainHtml && tFunctor.m_bHighlightQuery ) ) iSPZ = MAGIC_CODE_ZONE; #ifndef NDEBUG CSphVector dZonePos; #endif const bool bUtf8 = pTokenizer->IsUtf8(); while ( ( sWord = pTokenizer->GetToken() )!=NULL ) { if ( pTokenizer->TokenIsBlended() ) { if ( pBlendedEndGetTokenEnd() ) { iBlendID = pDict->GetWordID ( sWord ); iBlendedExactID = 0; pBlendedEnd = pTokenizer->GetTokenEnd(); if ( tFunctor.m_bHighlightQuery && tFunctor.m_bIndexExactWords ) { int iLen = strlen ( (const char *)sWord ); if ( iLen+2>(int)sizeof(sExactBuf) ) iLen = (int)sizeof(sExactBuf)-2; memcpy ( sExactBuf + 1, sWord, iLen ); sExactBuf[0] = MAGIC_WORD_HEAD_NONSTEMMED; sExactBuf[iLen+1] = '\0'; iBlendedExactID = pDict->GetWordIDNonStemmed ( sExactBuf ); } } continue; } uPosition += pTokenizer->GetOvershortCount(); const char * pTokenStart = pTokenizer->GetTokenStart (); if ( pTokenStart>pLastTokenEnd ) { tFunctor.OnOverlap ( pLastTokenEnd-pStartPtr, pTokenStart - pLastTokenEnd ); pLastTokenEnd = pTokenStart; } if ( bRetainHtml && *pTokenStart=='<' ) { const CSphHTMLStripper::StripperTag_t * pTag = NULL; const BYTE * sZoneName = NULL; const char * pEndSPZ = NULL; int iZoneNameLen = 0; if ( iSPZ && pStripper && pTokenStart+2IsValidTagStart ( *(pTokenStart+1) ) || pTokenStart[1]=='/') ) { pEndSPZ = (const char *)pStripper->FindTag ( (const BYTE *)pTokenStart+1, &pTag, &sZoneName, &iZoneNameLen ); } // regular HTML markup - keep it int iTagEnd = FindTagEnd ( pTokenStart ); if ( iTagEnd!=-1 ) { assert ( pTokenStart+iTagEndGetBufferEnd() ); tFunctor.OnSkipHtml ( pTokenStart-pStartPtr, iTagEnd+1 ); pTokenizer->SetBufferPtr ( pTokenStart+iTagEnd+1 ); pLastTokenEnd = pTokenStart+iTagEnd+1; // fix it up to prevent adding last chunk on exit } if ( pTag ) // (!S)PZ fix-up { pEndSPZ += ( pEndSPZ+1<=pBufferEnd && ( *pEndSPZ )!='\0' ); // skip closing angle bracket, if any assert ( pTag->m_bPara || pTag->m_bZone ); assert ( pTag->m_bPara || pEndSPZ[0]=='\0' || pEndSPZ[-1]=='>' ); // should be at tag's end assert ( pEndSPZ && pEndSPZ<=pBufferEnd ); uPosition++; // handle paragraph boundaries if ( pTag->m_bPara ) { tFunctor.OnSPZ ( MAGIC_CODE_PARAGRAPH, uPosition, NULL ); } else if ( pTag->m_bZone ) // handle zones { #ifndef NDEBUG AddZone ( pTokenStart+1, pTokenStart+2+iZoneNameLen, uPosition, tFunctor, dZoneStack, dZoneName, pStartPtr, dZonePos ); #else AddZone ( pTokenStart+1, pTokenStart+2+iZoneNameLen, uPosition, tFunctor, dZoneStack, dZoneName ); #endif tFunctor.OnSPZ ( MAGIC_CODE_ZONE, uPosition, dZoneName.Begin() ); } } if ( iTagEnd ) continue; } // handle SPZ tokens GE then needed // add SENTENCE, PARAGRAPH, ZONE token, do junks and tokenizer and pLastTokenEnd fix up // FIXME!!! 
it heavily depends on such attitude MAGIC_CODE_SENTENCE < MAGIC_CODE_PARAGRAPH < MAGIC_CODE_ZONE if ( *sWord==MAGIC_CODE_SENTENCE || *sWord==MAGIC_CODE_PARAGRAPH || *sWord==MAGIC_CODE_ZONE ) { // SPZ token has position and could be last token too uPosition += ( iSPZ && *sWord>=iSPZ ); if ( *sWord==MAGIC_CODE_ZONE ) { const char * pZoneEnd = pTokenizer->GetBufferPtr(); const char * pZoneStart = pZoneEnd; while ( *pZoneEnd && *pZoneEnd!=MAGIC_CODE_ZONE ) pZoneEnd++; pZoneEnd++; // skip zone token too pTokenizer->SetBufferPtr ( pZoneEnd ); pLastTokenEnd = pZoneEnd; // fix it up to prevent adding last chunk on exit #ifndef NDEBUG AddZone ( pZoneStart, pZoneEnd, uPosition, tFunctor, dZoneStack, dZoneName, pStartPtr, dZonePos ); #else AddZone ( pZoneStart, pZoneEnd, uPosition, tFunctor, dZoneStack, dZoneName ); #endif } if ( iSPZ && *sWord>=iSPZ ) { tFunctor.OnSPZ ( *sWord, uPosition, dZoneName.Begin() ); } continue; } pLastTokenEnd = pTokenizer->GetTokenEnd (); int iWordLen = pLastTokenEnd - pTokenStart; // build wordids vector // (exact form, blended, substrings all yield multiple ids) // TODO! only doing exact currently; add everything else (blended/star) here too CSphVector dWordids; dWordids.Add ( 0 ); // will be fixed up later with "primary" wordid if ( tFunctor.m_bHighlightQuery && tFunctor.m_bIndexExactWords ) { int iBytes = iWordLen; if ( iBytes+2>(int)sizeof(sExactBuf) ) iBytes = (int)sizeof(sExactBuf)-2; memcpy ( sExactBuf + 1, sWord, iBytes ); sExactBuf[0] = MAGIC_WORD_HEAD_NONSTEMMED; sExactBuf[iBytes+1] = '\0'; dWordids.Add ( pDict->GetWordIDNonStemmed ( sExactBuf ) ); } int iNonStemmedLen = iWordLen; if ( iNonStemmedLen+1>(int)sizeof(sNonStemmed) ) iNonStemmedLen = sizeof(sNonStemmed)-1; memcpy ( sNonStemmed, sWord, iNonStemmedLen ); sNonStemmed[iNonStemmedLen] = '\0'; // must be last because it can change (stem) sWord SphWordID_t iWord = pDict->GetWordID ( sWord ); dWordids[0] = iWord; // compute position if ( pTokenizer->GetBoundary() ) uPosition += tFunctor.m_iBoundaryStep; bool bIsStopWord = false; if ( !iWord ) bIsStopWord = pDict->IsStopWord ( sWord ); if ( iWord || bIsStopWord ) uPosition += bIsStopWord ? tFunctor.m_iStopwordStep : 1; ExcerptGen_c::Token_t tDocTok; tDocTok.m_eType = ( iWord || bIsStopWord ) ? ExcerptGen_c::TOK_WORD : ExcerptGen_c::TOK_SPACE; tDocTok.m_uPosition = ( iWord || bIsStopWord ) ? 
uPosition : 0; tDocTok.m_iStart = pTokenStart - pStartPtr; tDocTok.m_iLengthBytes = tDocTok.m_iLengthCP = iWordLen; if ( bUtf8 && ( iWord || bIsStopWord ) ) tDocTok.m_iLengthCP = sphUTF8Len ( pTokenStart, tDocTok.m_iLengthBytes ); if ( !pTokenizer->TokenIsBlendedPart() ) { iBlendID = 0; iBlendedExactID = 0; } else { dWordids.Add ( iBlendID ); if ( iBlendedExactID ) dWordids.Add ( iBlendedExactID ); } // match & emit // star match needs non-stemmed word tFunctor.OnToken ( tDocTok.m_iStart, tDocTok.m_iLengthBytes, sNonStemmed, tDocTok.m_uPosition, dWordids ); } // last space if any if ( pLastTokenEnd!=pTokenizer->GetBufferEnd() ) tFunctor.OnTail ( pLastTokenEnd-pStartPtr, pTokenizer->GetBufferEnd() - pLastTokenEnd ); tFunctor.OnFinish(); } /// snippets query words for different cases class SnippetsFastQword_c : public ISphQword { public: const CSphVector * m_pHits; CSphMatch m_tMatch; DWORD m_uLastPos; public: explicit SnippetsFastQword_c ( const CSphVector * pHits ) : m_pHits ( pHits ) , m_uLastPos ( 0 ) {} virtual ~SnippetsFastQword_c () {} void Setup ( DWORD uLastPos ) { m_iDocs = 0; m_iHits = 0; m_uLastPos = uLastPos; if ( m_pHits ) { m_iDocs = 1; m_iHits = m_pHits->GetLength(); m_uMatchHits = 0; m_bHasHitlist = true; } } bool HasHits () const { return m_pHits && m_uMatchHits<(DWORD)m_pHits->GetLength(); } virtual const CSphMatch & GetNextDoc ( DWORD * ) { m_dQwordFields.Set(); m_tMatch.m_iDocID = !m_tMatch.m_iDocID && HasHits() ? 1 : 0; return m_tMatch; } virtual Hitpos_t GetNextHit () { if ( !HasHits() ) return EMPTY_HIT; int iPosition = *( m_pHits->Begin() + m_uMatchHits++ ); return HITMAN::Create ( 0, iPosition, (m_uLastPos==(DWORD)iPosition) ); } virtual void SeekHitlist ( SphOffset_t ) {} }; /// snippets query word setup class SnippetsFastQwordSetup_c : public ISphQwordSetup { public: const HitCollector_c * m_pHiglighter; public: explicit SnippetsFastQwordSetup_c ( const HitCollector_c * pHiglighter ) { m_pHiglighter = pHiglighter; } virtual ~SnippetsFastQwordSetup_c () {} virtual ISphQword * QwordSpawn ( const XQKeyword_t & tWord ) const { return new SnippetsFastQword_c ( m_pHiglighter->GetHitlist ( tWord ) ); } virtual bool QwordSetup ( ISphQword * pQword ) const { SnippetsFastQword_c * pWord = dynamic_cast ( pQword ); assert ( pWord ); pWord->Setup ( m_pHiglighter->m_tContainer.m_uLastPos ); return true; } }; inline bool operator < ( const SphHitMark_t & a, const SphHitMark_t & b ) { return a.m_uPosition < b.m_uPosition; } static char * HighlightAllFastpath ( const ExcerptQuery_t & tQuerySettings, const CSphIndexSettings & tIndexSettings, const char * sDoc, int iDocLen, CSphDict * pDict, ISphTokenizer * pTokenizer, const CSphHTMLStripper * pStripper, const CSphSchema * pSchema, CSphString & sError, ISphTokenizer * pQueryTokenizer ) { ExcerptQuery_t tFixedSettings ( tQuerySettings ); // exact_phrase is replaced by query_mode=1 + "query words" if ( tQuerySettings.m_bExactPhrase ) { if ( !tQuerySettings.m_bHighlightQuery && tQuerySettings.m_sWords.Length() && strchr ( tQuerySettings.m_sWords.cstr(), 0x22 )==NULL ) tFixedSettings.m_sWords.SetSprintf ( "\"%s\"", tQuerySettings.m_sWords.cstr() ); tFixedSettings.m_bHighlightQuery = true; } bool bRetainHtml = ( tFixedSettings.m_sStripMode=="retain" ); // adjust tokenizer for markup-retaining mode if ( bRetainHtml ) pTokenizer->AddSpecials ( "<" ); // create query and hit lists container, parse query SnippetsDocIndex_c tContainer ( tFixedSettings.m_bHighlightQuery ); if ( !tContainer.Parse ( tFixedSettings.m_sWords.cstr(), 
pQueryTokenizer, pDict, pSchema, sError, tIndexSettings.m_iStopwordStep ) ) return NULL; // fast-path collects no passages but that flag says what SPZ should we collect if ( tFixedSettings.m_bHighlightQuery && !tFixedSettings.m_iPassageBoundary ) tFixedSettings.m_iPassageBoundary = tContainer.GetSPZ(); // do highlighting if ( !tFixedSettings.m_bHighlightQuery ) { // simple bag of words query // do just one tokenization pass over the document, matching and highlighting keywords HighlightPlain_c tHighlighter ( tContainer, pTokenizer, pDict, tFixedSettings, tIndexSettings, sDoc, iDocLen ); TokenizeDocument ( tHighlighter, NULL ); if ( !tHighlighter.m_iMatchesCount && tFixedSettings.m_bAllowEmpty ) tHighlighter.m_dResult.Reset(); // add trailing zero, and return tHighlighter.m_dResult.Add ( 0 ); return (char*) tHighlighter.m_dResult.LeakData(); } else { // query with syntax // do two passes over document // 1st pass will tokenize document, match keywords, and store positions into docindex // 2nd pass will highlight matching positions only (with some matching engine aid) // do the 1st pass HitCollector_c tHitCollector ( tContainer, pTokenizer, pDict, tFixedSettings, tIndexSettings, sDoc, iDocLen ); TokenizeDocument ( tHitCollector, pStripper ); // prepare for the 2nd pass (that is, extract matching hits) SnippetZoneChecker_c tZoneChecker ( tHitCollector.m_dZones, tHitCollector.m_hZones, tContainer.m_tQuery.m_dZones ); SnippetsFastQwordSetup_c tQwordSetup ( &tHitCollector ); tQwordSetup.m_pDict = pDict; tQwordSetup.m_eDocinfo = SPH_DOCINFO_EXTERN; tQwordSetup.m_pWarning = &sError; tQwordSetup.m_pZoneChecker = &tZoneChecker; CSphScopedPtr pMarker ( CSphHitMarker::Create ( tContainer.m_tQuery.m_pRoot, tQwordSetup ) ); if ( !pMarker.Ptr() ) return NULL; CSphVector dMarked; pMarker->Mark ( dMarked ); // we just collected matching spans into dMarked, but! // certain spans might not match all words within the span // for instance, (one NEAR/3 two) could return a 5-word span // but we do have full matching keywords list in tContainer // so let's post-process and break down such spans // FIXME! what about phrase spans vs stopwords? they will be split now if ( !tQuerySettings.m_bExactPhrase ) ARRAY_FOREACH ( i, dMarked ) { if ( dMarked[i].m_uSpan==1 ) continue; CSphVector dMatched; for ( int j=0; j<(int)dMarked[i].m_uSpan; j++ ) { // OPTIMZE? we can premerge all dochits vectors once const int iPos = dMarked[i].m_uPosition + j; ARRAY_FOREACH ( k, tContainer.m_dDocHits ) if ( tContainer.m_dDocHits[k].BinarySearch ( iPos ) ) { dMatched.Add ( iPos ); break; } } // this is something that must never happen // we got a span out of the matching engine that does not match any keywords?! 
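Once every surviving span has been broken down into single-position keyword hits (the post-processing above), the code a bit further below folds adjacent hits back into contiguous spans, so that a run of consecutive matching positions is wrapped in one before/after marker pair instead of one per word. A self-contained sketch of that fold step follows; SketchHit and FoldHits are hypothetical names standing in for SphHitMark_t and the in-place pointer loop used in the original. The assert right after this sketch guards the invariant stated just above: a span returned by the matching engine must contain at least one query keyword.

#include <cstdio>
#include <vector>

// hypothetical stand-in for SphHitMark_t
struct SketchHit
{
	unsigned m_uPosition;
	unsigned m_uSpan;
};

// fold a position-sorted list of hits into contiguous spans: a hit that starts
// exactly where the previous span ends is merged into that span
static void FoldHits ( std::vector<SketchHit> & dHits )
{
	if ( dHits.size()<2 )
		return;

	size_t iOut = 0; // last emitted folded span
	for ( size_t iIn = 1; iIn < dHits.size(); iIn++ )
	{
		if ( dHits[iIn].m_uPosition == dHits[iOut].m_uPosition + dHits[iOut].m_uSpan )
			dHits[iOut].m_uSpan += dHits[iIn].m_uSpan;	// extend the current span
		else
			dHits[++iOut] = dHits[iIn];					// start a new span
	}
	dHits.resize ( iOut+1 );
}

int main ()
{
	std::vector<SketchHit> dHits = { { 3, 1 }, { 4, 1 }, { 5, 1 }, { 9, 1 } };
	FoldHits ( dHits );
	for ( const SketchHit & tHit : dHits )
		printf ( "pos=%u span=%u\n", tHit.m_uPosition, tHit.m_uSpan ); // pos=3 span=3, then pos=9 span=1
	return 0;
}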
assert ( dMatched.GetLength() ); if ( !dMatched.GetLength() ) { dMarked.RemoveFast ( i-- ); // remove, rescan continue; } // append all matching keywords as 1-long spans ARRAY_FOREACH ( j, dMatched ) { SphHitMark_t & tMarked = dMarked.Add(); tMarked.m_uPosition = dMatched[j]; tMarked.m_uSpan = 1; } // this swaps current span with the last 1-long span we added // which is by definition okay; so we need not rescan it dMarked.RemoveFast ( i ); } dMarked.Uniq(); // we just exploded spans into actual matching hits // now lets fold marked and matched hits back into contiguous spans // so that we could highlight such spans instead of every individual word SphHitMark_t * pOut = dMarked.Begin(); // last emitted folded token SphHitMark_t * pIn = dMarked.Begin() + 1; // next token to process SphHitMark_t * pMax = dMarked.Begin() + dMarked.GetLength(); while ( pInm_uPosition==( pOut->m_uPosition + pOut->m_uSpan ) ) { pOut->m_uSpan += pIn->m_uSpan; pIn++; } else { *++pOut = *pIn++; } } if ( dMarked.GetLength()>1 ) dMarked.Resize ( pOut - dMarked.Begin() + 1 ); // 2nd pass HighlightQuery_c tHighlighter ( tContainer, pTokenizer, pDict, tFixedSettings, tIndexSettings, sDoc, iDocLen, dMarked ); TokenizeDocument ( tHighlighter, pStripper ); if ( !tHighlighter.m_iMatchesCount && tFixedSettings.m_bAllowEmpty ) tHighlighter.m_dResult.Reset(); // add trailing zero, and return tHighlighter.m_dResult.Add ( 0 ); return (char*) tHighlighter.m_dResult.LeakData(); } } ///////////////////////////////////////////////////////////////////////////// ExcerptQuery_t::ExcerptQuery_t () : m_sBeforeMatch ( "" ) , m_sAfterMatch ( "" ) , m_sChunkSeparator ( " ... " ) , m_sStripMode ( "index" ) , m_iLimit ( 256 ) , m_iLimitWords ( 0 ) , m_iLimitPassages ( 0 ) , m_iAround ( 5 ) , m_iPassageId ( 1 ) , m_iPassageBoundary ( 0 ) , m_bRemoveSpaces ( false ) , m_bExactPhrase ( false ) , m_bUseBoundaries ( false ) , m_bWeightOrder ( false ) , m_bHighlightQuery ( false ) , m_bForceAllWords ( false ) , m_iLoadFiles ( 0 ) , m_bAllowEmpty ( false ) , m_bEmitZones ( false ) , m_iRawFlags ( -1 ) , m_iSize ( 0 ) , m_iSeq ( 0 ) , m_iNext ( -2 ) , m_sRes ( NULL ) , m_bHasBeforePassageMacro ( false ) , m_bHasAfterPassageMacro ( false ) { } ///////////////////////////////////////////////////////////////////////////// char * sphBuildExcerpt ( ExcerptQuery_t & tOptions, CSphDict * pDict, ISphTokenizer * pTokenizer, const CSphSchema * pSchema, CSphIndex * pIndex, CSphString & sError, const CSphHTMLStripper * pStripper, ISphTokenizer * pQueryTokenizer ) { if ( tOptions.m_sStripMode=="retain" && !( tOptions.m_iLimit==0 && tOptions.m_iLimitPassages==0 && tOptions.m_iLimitWords==0 ) ) { sError = "html_strip_mode=retain requires that all limits are zero"; return NULL; } if ( !tOptions.m_sWords.cstr()[0] ) tOptions.m_bHighlightQuery = false; char * pData = const_cast ( tOptions.m_sSource.cstr() ); CSphFixedVector pBuffer ( 0 ); int iDataLen = tOptions.m_sSource.Length(); if ( tOptions.m_iLoadFiles ) { CSphAutofile tFile; if ( tFile.Open ( tOptions.m_sSource.cstr(), SPH_O_READ, sError )<0 ) return NULL; // will this ever trigger? time will tell; email me if it does! 
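With load_files enabled, the whole source document is pulled into a single in-memory buffer and every later offset is a plain int, which is why the check just below rejects sources of 2 GB and larger and why the buffer gets an explicit trailing zero. A rough standalone sketch of that guard using only standard C I/O follows; LoadSnippetSource is a hypothetical helper, not the CSphAutofile API used in the original, and ftell is accepted here as a sketch-level substitute for a 64-bit file size call.

#include <climits>
#include <cstdio>
#include <string>
#include <vector>

// read a whole file into memory for snippeting; refuse anything that does not
// fit into int-sized offsets, since the snippet paths index the buffer with int
static bool LoadSnippetSource ( const char * sPath, std::vector<char> & dBuf, std::string & sError )
{
	FILE * pFile = fopen ( sPath, "rb" );
	if ( !pFile )
	{
		sError = std::string ( "failed to open " ) + sPath;
		return false;
	}

	fseek ( pFile, 0, SEEK_END );
	long long iSize = ftell ( pFile ); // sketch-level; a real implementation would use a 64-bit file API
	fseek ( pFile, 0, SEEK_SET );

	if ( iSize<0 || iSize+1>=(long long)INT_MAX )
	{
		fclose ( pFile );
		sError = std::string ( sPath ) + " too big for snippet (over 2 GB)";
		return false;
	}

	dBuf.resize ( (size_t)iSize+1 );
	size_t iRead = ( iSize>0 ) ? fread ( &dBuf[0], 1, (size_t)iSize, pFile ) : 0;
	fclose ( pFile );

	if ( iRead!=(size_t)iSize )
	{
		sError = std::string ( "failed to read " ) + sPath;
		return false;
	}

	dBuf[(size_t)iSize] = '\0'; // snippet code expects a zero-terminated buffer
	return true;
}

int main ( int argc, char ** argv )
{
	if ( argc<2 )
		return 1;
	std::vector<char> dBuf;
	std::string sError;
	if ( !LoadSnippetSource ( argv[1], dBuf, sError ) )
	{
		printf ( "error: %s\n", sError.c_str() );
		return 1;
	}
	printf ( "loaded %d byte(s)\n", (int)dBuf.size()-1 );
	return 0;
}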
if ( tFile.GetSize()+1>=(SphOffset_t)INT_MAX ) { sError.SetSprintf ( "%s too big for snippet (over 2 GB)", pData ); return NULL; } int iFileSize = (int)tFile.GetSize(); if ( iFileSize<=0 ) { static char sEmpty[] = ""; return sEmpty; } iDataLen = iFileSize+1; pBuffer.Reset ( iDataLen ); if ( !tFile.Read ( pBuffer.Begin(), iFileSize, sError ) ) return NULL; pBuffer[iFileSize] = 0; pData = pBuffer.Begin(); } // strip if we have to if ( pStripper && ( tOptions.m_sStripMode=="strip" || tOptions.m_sStripMode=="index" ) ) pStripper->Strip ( (BYTE*)pData ); if ( tOptions.m_sStripMode!="retain" ) pStripper = NULL; // FIXME!!! check on real data (~100 Mb) as stripper changes len iDataLen = strlen ( pData ); // fast path that highlights entire document if (!( tOptions.m_iLimitPassages || ( tOptions.m_iLimitWords && tOptions.m_iLimitWordsGetSettings(), pData, iDataLen, pDict, pTokenizer, pStripper, pSchema, sError, pQueryTokenizer ); } if ( !tOptions.m_bHighlightQuery ) { // legacy highlighting ExcerptGen_c tGenerator ( pTokenizer->IsUtf8() ); tGenerator.TokenizeQuery ( tOptions, pDict, pTokenizer, pIndex->GetSettings() ); tGenerator.SetExactPhrase ( tOptions ); tGenerator.TokenizeDocument ( pData, iDataLen, pDict, pTokenizer, true, tOptions, pIndex->GetSettings() ); return tGenerator.BuildExcerpt ( tOptions ); } XQQuery_t tQuery; if ( !sphParseExtendedQuery ( tQuery, tOptions.m_sWords.cstr(), pQueryTokenizer, pSchema, pDict, pIndex->GetSettings().m_iStopwordStep ) ) { sError = tQuery.m_sParseError; return NULL; } tQuery.m_pRoot->ClearFieldMask(); ExcerptGen_c tGenerator ( pTokenizer->IsUtf8() ); tGenerator.TokenizeDocument ( pData, iDataLen, pDict, pTokenizer, false, tOptions, pIndex->GetSettings() ); CSphScopedPtr pZoneChecker ( new SnippetZoneChecker_c ( tGenerator.GetZones(), tGenerator.GetZonesName(), tQuery.m_dZones ) ); SnippetsQwordSetup tSetup ( &tGenerator, pTokenizer ); CSphString sWarning; tSetup.m_pDict = pDict; tSetup.m_pIndex = pIndex; tSetup.m_eDocinfo = SPH_DOCINFO_EXTERN; tSetup.m_pWarning = &sWarning; tSetup.m_pZoneChecker = pZoneChecker.Ptr(); CSphScopedPtr pMarker ( CSphHitMarker::Create ( tQuery.m_pRoot, tSetup ) ); if ( !pMarker.Ptr() ) { sError = sWarning; return NULL; } tGenerator.SetMarker ( pMarker.Ptr() ); return tGenerator.BuildExcerpt ( tOptions ); } // // $Id: sphinxexcerpt.cpp 3087 2012-01-30 23:07:35Z shodan $ // sphinx-2.0.4-release/Makefile.am0000644000176700017710000000051211574674605016014 0ustar deogardeogarif USE_LIBSTEMMER SUBDIRS = libstemmer_c src test doc else SUBDIRS = src test doc endif EXTRA_DIST = api storage sphinx.conf.in sphinx-min.conf.in example.sql sysconf_DATA = sphinx.conf.dist sphinx-min.conf.dist example.sql install-data-hook: mkdir -p $(DESTDIR)$(localstatedir)/data && mkdir -p $(DESTDIR)$(localstatedir)/log sphinx-2.0.4-release/smoke.sh0000755000176700017710000000230511723113351015415 0ustar deogardeogar#!/bin/sh SHELL='/bin/sh' jobs=$(grep -c processor /proc/cpuinfo) [ "z$FAILLOG" = "z" ] && FAILLOG="/tmp/faillog" LINE="-------------------------------\n" if [ q"1" = q"$jobs" ] ; then jobs="" else jobs="-j$jobs" fi die() { cat $FAILLOG echo $LINE [ ! 
"z$2" = "z" ] && { eval $2; echo "$LINE"; } echo "$1" [ -e "$FAILLOG" ] && rm $FAILLOG exit 1 } cmd1 () { echo "Executing: $2\n">$FAILLOG eval $2 1>>$1 2>&1 || die "$3" "$4" } cmd () { cmd1 "$FAILLOG" "$1" "$2" "$3" } cmd "mysql -u root test < example.sql" "Documents setup failed" for CONFARGS in "--with-debug" "--with-debug --enable-id64"; do BANNER="testing $CONFARGS build" cmd "$SHELL ./configure $CONFARGS" "$BANNER: configure failed" cmd "make clean" "$BANNER: make clean failed" cmd "make $jobs" "$BANNER: make failed" cmd1 "/dev/null" "cd ./test; php ubertest.php t -u test --strict-verbose --no-demo" "$BANNER: regression suite failed" "cat report.txt; rm report.txt" cd .. cmd "cd ./src; ./tests" "$BANNER: unit tests failed" cd .. cmd "cd ./api/libsphinxclient; ./smoke_test.sh" "$BANNER: C API tests failed" cd ../../ done make clean 1>/dev/null 2>&1 [ -e "$FAILLOG" ] && rm "$FAILLOG" echo "all ok\n" exit 0 sphinx-2.0.4-release/doc/0000755000176700017710000000000011724063141014507 5ustar deogardeogarsphinx-2.0.4-release/doc/Makefile.am0000644000176700017710000000215211574674605016563 0ustar deogardeogarman1_MANS = searchd.1 search.1 indexer.1 indextool.1 spelldump.1 # apt-get install docbook-xsl # apt-get install xsltproc # DOCBOOKXSL = /usr/share/xml/docbook/stylesheet/docbook-xsl XSLTARGSMAN = $(DOCBOOKXSL)/manpages/docbook.xsl all: @echo "All docs are already pre-built by developer." @echo "If you want to rebuild them, install docbook-xsl" @echo "and xsltproc and then run 'make docs' instead of simple 'make'." docs: sphinx.html sphinx.txt my_manpages my_manpages: manpages.xml xsltproc $(XSLTARGSMAN) manpages.xml sphinx.html: sphinx.xml sed -r 's///g;s/<\/b>/<\/emphasis>/g;s/(fixed|bug) #([0-9]+)/\1 \#\2<\/ulink>/g' sphinx.xml \ | xsltproc sphinx.xsl - \ | sed -r 's/\xA0/\ /g;s/\xA9/\©/g;s/((<\/(li|dt|dt|head|div)>)+)/\1\n/g;s/<\/a>//g;s/<\/head>/\n sphinx-2.0.4-release/doc/mk.cmd0000755000176700017710000000250111655247360015616 0ustar deogardeogar@echo off if "%1" EQU "chunked" ( set XSLTARGS=^ --stringparam toc.section.depth 1 ^ --stringparam generate.section.toc.level 2 ^ --stringparam chunk.first.sections 1 ^ --stringparam chunk.section.depth 2 ^ --stringparam base.dir chunked/ ^ --stringparam use.id.as.filename 1 ^ %DOCBOOKXSL%/html/chunk.xsl ) else ( set XSLTARGS=^ --stringparam toc.section.depth 4 ^ %DOCBOOKXSL%/html/docbook.xsl ) type sphinx.xml ^ | perl -pe "s///g" ^ | perl -pe "s/<\/b>/<\/emphasis>/g" ^ | perl -pe "s/(fixed|bug) #(\d+)/\1 #\2<\/ulink>/" ^ | xsltproc ^ --stringparam section.autolabel 1 ^ --stringparam section.label.includes.component.label 1 ^ %XSLTARGS% ^ - ^ | perl -pe "s/\xA0/\ /g" ^ | perl -pe "s/\xA9/\©/g" ^ | perl -pe "s/\xEF/\ï/g" ^ | perl -pe "s/((<\/(li|dt|dt|head|div)>)+)/\1\n/g" ^ | perl -pe "s/<\/a>//g" ^ | perl -pe "s/<\/head>/\n

    Table of Contents

    1. Introduction
    1.1. About
    1.2. Sphinx features
    1.3. Where to get Sphinx
    1.4. License
    1.5. Credits
    1.6. History
    2. Installation
    2.1. Supported systems
    2.2. Required tools
    2.3. Installing Sphinx on Linux
    2.4. Installing Sphinx on Windows
    2.5. Known installation issues
    2.6. Quick Sphinx usage tour
    3. Indexing
    3.1. Data sources
    3.2. Full-text fields
    3.3. Attributes
    3.4. MVA (multi-valued attributes)
    3.5. Indexes
    3.6. Restrictions on the source data
    3.7. Charsets, case folding, and translation tables
    3.8. SQL data sources (MySQL, PostgreSQL)
    3.9. xmlpipe data source
    3.10. xmlpipe2 data source
    3.11. Live index updates
    3.12. Delta index updates
    3.13. Index merging
    4. Real-time indexes
    4.1. RT indexes overview
    4.2. Known caveats with RT indexes
    4.3. RT index internals
    4.4. Binary logging
    5. Searching
    5.1. Matching modes
    5.2. Boolean query syntax
    5.3. Extended query syntax
    5.4. Search results ranking
    5.5. Expressions, functions, and operators
    5.5.1. Operators
    5.5.2. Numeric functions
    5.5.3. Date and time functions
    5.5.4. Type conversion functions
    5.5.5. Comparison functions
    5.5.6. Miscellaneous functions
    5.6. Sorting modes
    5.7. Grouping (clustering) search results
    5.8. Distributed searching
    5.9. searchd query log formats
    5.9.1. Plain log format
    5.9.2. SphinxQL log format
    5.10. MySQL protocol support and SphinxQL
    5.11. Multi-queries
    5.12. Collations
    5.13. User-defined functions (UDF)
    6. Command line tools reference
    6.1. indexer command reference
    6.2. searchd command reference
    6.3. search command reference
    6.4. spelldump command reference
    6.5. indextool command reference
    7. SphinxQL reference
    7.1. SELECT syntax
    7.2. SHOW META syntax
    7.3. SHOW WARNINGS syntax
    7.4. SHOW STATUS syntax
    7.5. INSERT and REPLACE syntax
    7.6. DELETE syntax
    7.7. SET syntax
    7.8. SET TRANSACTION syntax
    7.9. BEGIN, COMMIT, and ROLLBACK syntax
    7.10. CALL SNIPPETS syntax
    7.11. CALL KEYWORDS syntax
    7.12. SHOW TABLES syntax
    7.13. DESCRIBE syntax
    7.14. CREATE FUNCTION syntax
    7.15. DROP FUNCTION syntax
    7.16. SHOW VARIABLES syntax
    7.17. SHOW COLLATION syntax
    7.18. UPDATE syntax
    7.19. ATTACH INDEX syntax
    7.20. FLUSH RTINDEX syntax
    7.21. Multi-statement queries
    7.22. Comment syntax
    7.23. List of SphinxQL reserved keywords
    7.24. SphinxQL upgrade notes, version 2.0.1-beta
    8. API reference
    8.1. General API functions
    8.1.1. GetLastError
    8.1.2. GetLastWarning
    8.1.3. SetServer
    8.1.4. SetRetries
    8.1.5. SetConnectTimeout
    8.1.6. SetArrayResult
    8.1.7. IsConnectError
    8.2. General query settings
    8.2.1. SetLimits
    8.2.2. SetMaxQueryTime
    8.2.3. SetOverride
    8.2.4. SetSelect
    8.3. Full-text search query settings
    8.3.1. SetMatchMode
    8.3.2. SetRankingMode
    8.3.3. SetSortMode
    8.3.4. SetWeights
    8.3.5. SetFieldWeights
    8.3.6. SetIndexWeights
    8.4. Result set filtering settings
    8.4.1. SetIDRange
    8.4.2. SetFilter
    8.4.3. SetFilterRange
    8.4.4. SetFilterFloatRange
    8.4.5. SetGeoAnchor
    8.5. GROUP BY settings
    8.5.1. SetGroupBy
    8.5.2. SetGroupDistinct
    8.6. Querying
    8.6.1. Query
    8.6.2. AddQuery
    8.6.3. RunQueries
    8.6.4. ResetFilters
    8.6.5. ResetGroupBy
    8.7. Additional functionality
    8.7.1. BuildExcerpts
    8.7.2. UpdateAttributes
    8.7.3. BuildKeywords
    8.7.4. EscapeString
    8.7.5. Status
    8.7.6. FlushAttributes
    8.8. Persistent connections
    8.8.1. Open
    8.8.2. Close
    9. MySQL storage engine (SphinxSE)
    9.1. SphinxSE overview
    9.2. Installing SphinxSE
    9.2.1. Compiling MySQL 5.0.x with SphinxSE
    9.2.2. Compiling MySQL 5.1.x with SphinxSE
    9.2.3. Checking SphinxSE installation
    9.3. Using SphinxSE
    9.4. Building snippets (excerpts) via MySQL
    10. Reporting bugs
    11. sphinx.conf options reference
    11.1. Data source configuration options
    11.1.1. type
    11.1.2. sql_host
    11.1.3. sql_port
    11.1.4. sql_user
    11.1.5. sql_pass
    11.1.6. sql_db
    11.1.7. sql_sock
    11.1.8. mysql_connect_flags
    11.1.9. mysql_ssl_cert, mysql_ssl_key, mysql_ssl_ca
    11.1.10. odbc_dsn
    11.1.11. sql_query_pre
    11.1.12. sql_query
    11.1.13. sql_joined_field
    11.1.14. sql_query_range
    11.1.15. sql_range_step
    11.1.16. sql_query_killlist
    11.1.17. sql_attr_uint
    11.1.18. sql_attr_bool
    11.1.19. sql_attr_bigint
    11.1.20. sql_attr_timestamp
    11.1.21. sql_attr_str2ordinal
    11.1.22. sql_attr_float
    11.1.23. sql_attr_multi
    11.1.24. sql_attr_string
    11.1.25. sql_attr_str2wordcount
    11.1.26. sql_column_buffers
    11.1.27. sql_field_string
    11.1.28. sql_field_str2wordcount
    11.1.29. sql_file_field
    11.1.30. sql_query_post
    11.1.31. sql_query_post_index
    11.1.32. sql_ranged_throttle
    11.1.33. sql_query_info
    11.1.34. xmlpipe_command
    11.1.35. xmlpipe_field
    11.1.36. xmlpipe_field_string
    11.1.37. xmlpipe_field_wordcount
    11.1.38. xmlpipe_attr_uint
    11.1.39. xmlpipe_attr_bigint
    11.1.40. xmlpipe_attr_bool
    11.1.41. xmlpipe_attr_timestamp
    11.1.42. xmlpipe_attr_str2ordinal
    11.1.43. xmlpipe_attr_float
    11.1.44. xmlpipe_attr_multi
    11.1.45. xmlpipe_attr_multi_64
    11.1.46. xmlpipe_attr_string
    11.1.47. xmlpipe_fixup_utf8
    11.1.48. mssql_winauth
    11.1.49. mssql_unicode
    11.1.50. unpack_zlib
    11.1.51. unpack_mysqlcompress
    11.1.52. unpack_mysqlcompress_maxsize
    11.2. Index configuration options
    11.2.1. type
    11.2.2. source
    11.2.3. path
    11.2.4. docinfo
    11.2.5. mlock
    11.2.6. morphology
    11.2.7. dict
    11.2.8. index_sp
    11.2.9. index_zones
    11.2.10. min_stemming_len
    11.2.11. stopwords
    11.2.12. wordforms
    11.2.13. exceptions
    11.2.14. min_word_len
    11.2.15. charset_type
    11.2.16. charset_table
    11.2.17. ignore_chars
    11.2.18. min_prefix_len
    11.2.19. min_infix_len
    11.2.20. prefix_fields
    11.2.21. infix_fields
    11.2.22. enable_star
    11.2.23. ngram_len
    11.2.24. ngram_chars
    11.2.25. phrase_boundary
    11.2.26. phrase_boundary_step
    11.2.27. html_strip
    11.2.28. html_index_attrs
    11.2.29. html_remove_elements
    11.2.30. local
    11.2.31. agent
    11.2.32. agent_blackhole
    11.2.33. agent_connect_timeout
    11.2.34. agent_query_timeout
    11.2.35. preopen
    11.2.36. ondisk_dict
    11.2.37. inplace_enable
    11.2.38. inplace_hit_gap
    11.2.39. inplace_docinfo_gap
    11.2.40. inplace_reloc_factor
    11.2.41. inplace_write_factor
    11.2.42. index_exact_words
    11.2.43. overshort_step
    11.2.44. stopword_step
    11.2.45. hitless_words
    11.2.46. expand_keywords
    11.2.47. blend_chars
    11.2.48. blend_mode
    11.2.49. rt_mem_limit
    11.2.50. rt_field
    11.2.51. rt_attr_uint
    11.2.52. rt_attr_bigint
    11.2.53. rt_attr_float
    11.2.54. rt_attr_multi
    11.2.55. rt_attr_multi_64
    11.2.56. rt_attr_timestamp
    11.2.57. rt_attr_string
    11.3. indexer program configuration options
    11.3.1. mem_limit
    11.3.2. max_iops
    11.3.3. max_iosize
    11.3.4. max_xmlpipe2_field
    11.3.5. write_buffer
    11.3.6. max_file_field_buffer
    11.3.7. on_file_field_error
    11.4. searchd program configuration options
    11.4.1. listen
    11.4.2. address
    11.4.3. port
    11.4.4. log
    11.4.5. query_log
    11.4.6. query_log_format
    11.4.7. read_timeout
    11.4.8. client_timeout
    11.4.9. max_children
    11.4.10. pid_file
    11.4.11. max_matches
    11.4.12. seamless_rotate
    11.4.13. preopen_indexes
    11.4.14. unlink_old
    11.4.15. attr_flush_period
    11.4.16. ondisk_dict_default
    11.4.17. max_packet_size
    11.4.18. mva_updates_pool
    11.4.19. crash_log_path
    11.4.20. max_filters
    11.4.21. max_filter_values
    11.4.22. listen_backlog
    11.4.23. read_buffer
    11.4.24. read_unhinted
    11.4.25. max_batch_queries
    11.4.26. subtree_docs_cache
    11.4.27. subtree_hits_cache
    11.4.28. workers
    11.4.29. dist_threads
    11.4.30. binlog_path
    11.4.31. binlog_flush
    11.4.32. binlog_max_log_size
    11.4.33. collation_server
    11.4.34. collation_libc_locale
    11.4.35. plugin_dir
    11.4.36. mysql_version_string
    11.4.37. rt_flush_period
    11.4.38. thread_stack
    11.4.39. expansion_limit
    11.4.40. compat_sphinxql_magics
    11.4.41. watchdog
    11.4.42. prefork_rotation_throttle
    A. Sphinx revision history
    A.1. Version 2.0.4-release, 02 mar 2012
    A.2. Version 2.0.3-release, 23 dec 2011
    A.3. Version 2.0.2-beta, 15 nov 2011
    A.4. Version 2.0.1-beta, 22 apr 2011
    A.5. Version 1.10-beta, 19 jul 2010
    A.6. Version 0.9.9-release, 02 dec 2009
    A.7. Version 0.9.9-rc2, 08 apr 2009
    A.8. Version 0.9.9-rc1, 17 nov 2008
    A.9. Version 0.9.8.1, 30 oct 2008
    A.10. Version 0.9.8, 14 jul 2008
    A.11. Version 0.9.7, 02 apr 2007
    A.12. Version 0.9.7-rc2, 15 dec 2006
    A.13. Version 0.9.7-rc1, 26 oct 2006
    A.14. Version 0.9.6, 24 jul 2006
    A.15. Version 0.9.6-rc1, 26 jun 2006

    Chapter 1. Introduction

    1.1. About

    Sphinx is a full-text search engine, publicly distributed under GPL version 2. Commercial licensing (eg. for embedded use) is available upon request.

    Technically, Sphinx is a standalone software package that provides fast and relevant full-text search functionality to client applications. It was specially designed to integrate well with SQL databases storing the data, and to be easily accessed by scripting languages. However, Sphinx does not depend on nor require any specific database to function.

    Applications can access the Sphinx search daemon (searchd) using any of three different access methods: a) via the native search API (SphinxAPI), b) via Sphinx's own implementation of the MySQL network protocol (using a small SQL subset called SphinxQL), or c) via MySQL server with a pluggable storage engine (SphinxSE).

    Official native SphinxAPI implementations for PHP, Perl, Ruby, and Java are included within the distribution package. The API is very lightweight, so porting it to a new language is known to take a few hours or days. Third-party API ports and plugins exist for Perl, C#, Haskell, Ruby-on-Rails, and possibly other languages and frameworks.

    Starting version 1.10-beta, Sphinx supports two different indexing backends: "disk" index backend, and "realtime" (RT) index backend. Disk indexes support online full-text index rebuilds, but online updates can only be done on non-text (attribute) data. RT indexes additionally allow for online full-text index updates. Previous versions only supported disk indexes.

    Data can be loaded into disk indexes using a so-called data source. Built-in sources can fetch data directly from MySQL, PostgreSQL, any ODBC-compliant database (MS SQL, Oracle, etc), or a pipe in a custom XML format. Adding new data source drivers (eg. to natively support other DBMSes) is designed to be as easy as possible. RT indexes, as of 1.10-beta, can only be populated using SphinxQL.

    As for the name, Sphinx is an acronym which is officially decoded as SQL Phrase Index. Yes, I know about CMU's Sphinx project.

    1.2. Sphinx features

    Key Sphinx features are:

    • high indexing and searching performance;

    • advanced indexing and querying tools (flexible and feature-rich text tokenizer, querying language, several different ranking modes, etc);

    • advanced result set post-processing (SELECT with expressions, WHERE, ORDER BY, GROUP BY etc over text search results);

    • proven scalability up to billions of documents, terabytes of data, and thousands of queries per second;

    • easy integration with SQL and XML data sources, and SphinxAPI, SphinxQL, or SphinxSE search interfaces;

    • easy scaling with distributed searches.

    To expand a bit, Sphinx:

    • has high indexing speed (up to 10-15 MB/sec per core on an internal benchmark);

    • has high search speed (up to 150-250 queries/sec per core against 1,000,000 documents, 1.2 GB of data on an internal benchmark);

    • has high scalability (biggest known cluster indexes over 3,000,000,000 documents, and busiest one peaks over 50,000,000 queries/day);

    • provides good relevance ranking through combination of phrase proximity ranking and statistical (BM25) ranking;

    • provides distributed searching capabilities;

    • provides document excerpts (snippets) generation;

    • provides searching from within application with SphinxAPI or SphinxQL interfaces, and from within MySQL with pluggable SphinxSE storage engine;

    • supports boolean, phrase, word proximity and other types of queries;

    • supports multiple full-text fields per document (up to 32 by default);

    • supports multiple additional attributes per document (ie. groups, timestamps, etc);

    • supports stopwords;

    • supports morphological word forms dictionaries;

    • supports tokenizing exceptions;

    • supports both single-byte encodings and UTF-8;

    • supports stemming (stemmers for English, Russian and Czech are built-in; stemmers for French, Spanish, Portuguese, Italian, Romanian, German, Dutch, Swedish, Norwegian, Danish, Finnish, and Hungarian are available by building the third-party libstemmer library);

    • supports MySQL natively (all types of tables, including MyISAM, InnoDB, NDB, Archive, etc are supported);

    • supports PostgreSQL natively;

    • supports ODBC compliant databases (MS SQL, Oracle, etc) natively;

    • ...has 50+ other features not listed here; refer to the API and configuration manual!

    1.3. Where to get Sphinx

    Sphinx is available through its official Web site at http://sphinxsearch.com/.

    Currently, Sphinx distribution tarball includes the following software:

    • indexer: a utility which creates fulltext indexes;

    • search: a simple command-line (CLI) test utility which searches through fulltext indexes;

    • searchd: a daemon which enables external software (eg. Web applications) to search through fulltext indexes;

    • sphinxapi: a set of searchd client API libraries for popular Web scripting languages (PHP, Python, Perl, Ruby).

    • spelldump: a simple command-line tool to extract the items from an ispell or MySpell (as bundled with OpenOffice) format dictionary to help customize your index, for use with wordforms.

    • indextool: a utility to dump miscellaneous debug information about the index, added in version 0.9.9-rc2.

    1.4. License

    This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. See COPYING file for details.

    This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.

    You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

    Non-GPL licensing (for OEM/ISV embedded use) can also be arranged, please contact us to discuss commercial licensing possibilities.

    1.5. Credits

    Author

    Sphinx initial author (and a benevolent dictator ever since):

    Team

    Past and present employees of Sphinx Technologies Inc who should be noted for their work on Sphinx (in alphabetical order):

    • Alexander Klimenko

    • Alexey Dvoichenkov

    • Alexey Vinogradov

    • Ilya Kuznetsov

    • Stanislav Klinov

    Contributors

    People who contributed to Sphinx and their contributions (in no particular order):

    • Robert "coredev" Bengtsson (Sweden), initial version of PostgreSQL data source

    • Len Kranendonk, Perl API

    • Dmytro Shteflyuk, Ruby API

    Many other people have contributed ideas, bug reports, fixes, etc. Thank you!

    1.6. History

    Sphinx development was started back in 2001, because I didn't manage to find an acceptable search solution (for a database driven Web site) which would meet my requirements. Actually, each and every important aspect was a problem:

    • search quality (ie. good relevance)

      • statistical ranking methods performed rather badly, especially on large collections of small documents (forums, blogs, etc)

    • search speed

      • especially if searching for phrases which contain stopwords, as in "to be or not to be"

    • moderate disk and CPU requirements when indexing

      • important in a shared hosting environment, not to mention the indexing speed.

    Despite the amount of time passed and numerous improvements made in the other solutions, there's still no solution which I personally would be eager to migrate to.

    Considering that and a lot of positive feedback received from Sphinx users during last years, the obvious decision is to continue developing Sphinx (and, eventually, to take over the world).

    Chapter 2. Installation

    2.1. Supported systems

    Most modern UNIX systems with a C++ compiler should be able to compile and run Sphinx without any modifications.

    Currently known systems Sphinx has been successfully running on are:

    • Linux 2.4.x, 2.6.x (many various distributions)

    • Windows 2000, XP, 7

    • FreeBSD 4.x, 5.x, 6.x, 7.x, 8.x

    • NetBSD 1.6, 3.0

    • Solaris 9, 11

    • Mac OS X

    CPU architectures known to work include i386 (aka x86), amd64 (aka x86_64), SPARC64, and ARM.

    Chances are good that Sphinx should work on other Unix platforms and/or CPU architectures just as well. Please report any other platforms that worked for you!

    All platforms are production quality. There are no principal functional limitations on any platform.

    2.2. Required tools

    On UNIX, you will need the following tools to build and install Sphinx:

    • a working C++ compiler. GNU gcc is known to work.

    • a good make program. GNU make is known to work.

    On Windows, you will need Microsoft Visual C/C++ Studio .NET 2005 or above. Other compilers/environments will probably work as well, but for the time being, you will have to build the makefile (or other environment-specific project files) manually.

    2.3. Installing Sphinx on Linux

    1. Extract everything from the distribution tarball (haven't you already?) and go to the sphinx subdirectory. (We are using version 2.0.1-beta here for the sake of example only; be sure to change this to a specific version you're using.)

      $ tar xzvf sphinx-2.0.1-beta.tar.gz
      $ cd sphinx

    2. Run the configuration program:

      $ ./configure

      There are a number of options that can be passed to configure. The complete listing may be obtained by using the --help switch. The most important ones are:

      • --prefix, which specifies where to install Sphinx; such as --prefix=/usr/local/sphinx (all of the examples use this prefix)

      • --with-mysql, which specifies where to look for MySQL include and library files, if auto-detection fails;

      • --with-pgsql, which specifies where to look for PostgreSQL include and library files.
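
      For example, a build with MySQL support installed under the prefix used throughout this manual could be configured as follows (a sketch; adjust the paths and options to your system):

      $ ./configure --prefix=/usr/local/sphinx --with-mysql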

    3. Build the binaries:

      $ make

    4. Install the binaries in the directory of your choice: (defaults to /usr/local/bin/ on *nix systems, but is overridden with configure --prefix)

      $ make install

    2.4. Installing Sphinx on Windows

    Installing Sphinx on a Windows server is often easier than installing it in a Linux environment; unless you are preparing code patches, you can use the pre-compiled binary files from the Downloads area on the website.

    1. Extract everything from the .zip file you have downloaded - sphinx-2.0.1-beta-win32.zip, or sphinx-2.0.1-beta-win32-pgsql.zip if you need PostgreSQL support as well. (We are using version 2.0.1-beta here for the sake of example only; be sure to change this to the specific version you're using.) You can use Windows Explorer in Windows XP and up to extract the files, or a freeware package like 7Zip to open the archive.

      For the remainder of this guide, we will assume that the folders are unzipped into C:\Sphinx, such that searchd.exe can be found in C:\Sphinx\bin\searchd.exe. If you decide to use any different location for the folders or configuration file, please change it accordingly.

    2. Edit the contents of sphinx.conf.in - specifically entries relating to @CONFDIR@ - to paths suitable for your system.

    3. Install the searchd system as a Windows service:

      C:\Sphinx\bin> C:\Sphinx\bin\searchd --install --config C:\Sphinx\sphinx.conf.in --servicename SphinxSearch

    4. The searchd service will now be listed in the Services panel within the Management Console, available from Administrative Tools. It will not have been started, as you will need to configure it and build your indexes with indexer before starting the service. A guide to do this can be found under Quick tour.

      During the next steps of the install (which involve running indexer pretty much as you would on Linux) you may find that you get an error relating to libmysql.dll not being found. If you have MySQL installed, you should find a copy of this library in your Windows directory, or sometimes in Windows\System32, or failing that in the MySQL core directories. If you do receive an error please copy libmysql.dll into the bin directory.

    2.5. Known installation issues

    If configure fails to locate MySQL headers and/or libraries, try checking for and installing mysql-devel package. On some systems, it is not installed by default.

    If make fails with a message which looks like

    /bin/sh: g++: command not found
    make[1]: *** [libsphinx_a-sphinx.o] Error 127
    

    try checking for and installing gcc-c++ package.

    If you are getting compile-time errors which look like

    sphinx.cpp:67: error: invalid application of `sizeof' to
        incomplete type `Private::SizeError<false>'
    

    this means that some compile-time type size check failed. The most probable reason is that off_t type is less than 64-bit on your system. As a quick hack, you can edit sphinx.h and replace off_t with DWORD in a typedef for SphOffset_t, but note that this will prohibit you from using full-text indexes larger than 2 GB. Even if the hack helps, please report such issues, providing the exact error message and compiler/OS details, so I could properly fix them in next releases.

    If you keep getting any other error, or the suggestions above do not seem to help you, please don't hesitate to contact me.

    2.6. Quick Sphinx usage tour

    All the example commands below assume that you installed Sphinx in /usr/local/sphinx, so searchd can be found in /usr/local/sphinx/bin/searchd.

    To use Sphinx, you will need to:

    1. Create a configuration file.

      Default configuration file name is sphinx.conf. All Sphinx programs look for this file in current working directory by default.

      Sample configuration file, sphinx.conf.dist, which has all the options documented, is created by configure. Copy and edit that sample file to make your own configuration: (assuming Sphinx is installed into /usr/local/sphinx/)

      $ cd /usr/local/sphinx/etc
      $ cp sphinx.conf.dist sphinx.conf
      $ vi sphinx.conf

      The sample configuration file is set up to index the documents table from the MySQL database test; the example.sql sample data file is provided to populate that table with a few documents for testing purposes:

      $ mysql -u test < /usr/local/sphinx/etc/example.sql

    2. Run the indexer to create full-text index from your data:

      $ cd /usr/local/sphinx/etc
      $ /usr/local/sphinx/bin/indexer --all

    3. Query your newly created index!

    To query the index from command line, use search utility:

    $ cd /usr/local/sphinx/etc
    $ /usr/local/sphinx/bin/search test

    To query the index from your PHP scripts, you need to:

    1. Run the search daemon which your script will talk to:

      $ cd /usr/local/sphinx/etc
      $ /usr/local/sphinx/bin/searchd

    2. Run the attached PHP API test script (to ensure that the daemon was successfully started and is ready to serve the queries):

      $ cd sphinx/api
      $ php test.php test

    3. Include the API (it's located in api/sphinxapi.php) into your own scripts and use it.
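
      For illustration, here is a minimal sketch of such a script. It assumes the default API port 9312 and the sample index name test1 from sphinx.conf.dist; adjust both, and the include path, to your setup.

      <?php
      require ( "sphinxapi.php" );              // adjust the path to api/sphinxapi.php as needed

      $cl = new SphinxClient ();
      $cl->SetServer ( "localhost", 9312 );     // where searchd listens for API connections
      $result = $cl->Query ( "test", "test1" ); // keyword(s), index name

      if ( $result===false )
          echo "Query failed: " . $cl->GetLastError() . "\n";
      else
          print_r ( $result );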

    Happy searching!

    Chapter 3. Indexing

    3.1. Data sources

    The data to be indexed can generally come from very different sources: SQL databases, plain text files, HTML files, mailboxes, and so on. From Sphinx's point of view, the data it indexes is a set of structured documents, each of which has the same set of fields and attributes. This is similar to SQL, where each row would correspond to a document, and each column to either a field or an attribute.

    Depending on what source Sphinx should get the data from, different code is required to fetch the data and prepare it for indexing. This code is called a data source driver (or simply a driver or data source for brevity).

    At the time of this writing, there are built-in drivers for MySQL, PostgreSQL, MS SQL (on Windows), and ODBC. There is also a generic driver called xmlpipe, which runs a specified command and reads the data from its stdout. See Section 3.9, “xmlpipe data source” section for the format description.

    There can be as many sources per index as necessary. They will be sequentially processed in the very same order which was specified in the index definition. All the documents coming from those sources will be merged as if they were coming from a single source.
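
    For example, an index built from two sources might be declared like this (the source and index names here are placeholders):

    index posts
    {
        source = posts_main
        source = posts_archive
        path = /usr/local/sphinx/data/posts
        # ... all the other settings
    }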

    3.2. Full-text fields

    Full-text fields (or just fields for brevity) are the textual document contents that get indexed by Sphinx, and can be (quickly) searched for keywords.

    Fields are named, and you can limit your searches to a single field (eg. search through "title" only) or a subset of fields (eg. "title" and "abstract" only). Sphinx index format generally supports up to 256 fields. However, up to version 2.0.1-beta indexes were forcibly limited to 32 fields, because of certain complications in the matching engine. Full support for up to 256 fields was added in version 2.0.2-beta.

    Note that the original contents of the fields are not stored in the Sphinx index. The text that you send to Sphinx gets processed, and a full-text index (a special data structure that enables quick searches for a keyword) gets built from that text. But the original text contents are then simply discarded. Sphinx assumes that you store those contents elsewhere anyway.

    Moreover, it is impossible to fully reconstruct the original text, because the specific whitespace, capitalization, punctuation, etc will all be lost during indexing. It is theoretically possible to partially reconstruct a given document from the Sphinx full-text index, but that would be a slow process (especially if the CRC dictionary is used, which does not even store the original keywords and works with their hashes instead).

    3.3. Attributes

    Attributes are additional values associated with each document that can be used to perform additional filtering and sorting during search.

    It is often desired to additionally process full-text search results based not only on matching document ID and its rank, but on a number of other per-document values as well. For instance, one might need to sort news search results by date and then relevance, or search through products within specified price range, or limit blog search to posts made by selected users, or group results by month. To do that efficiently, Sphinx allows to attach a number of additional attributes to each document, and store their values in the full-text index. It's then possible to use stored values to filter, sort, or group full-text matches.

    Attributes, unlike the fields, are not full-text indexed. They are stored in the index, but it is not possible to search them as full-text, and attempting to do so results in an error.

    For example, it is impossible to use the extended matching mode expression @column 1 to match documents where column is 1, if column is an attribute, and this is still true even if the numeric digits are normally indexed.

    Attributes can be used for filtering, though, to restrict returned rows, as well as sorting or result grouping; it is entirely possible to sort results purely based on attributes, and ignore the search relevance tools. Additionally, attributes are returned from the search daemon, while the indexed text is not.

    A good example for attributes would be a forum posts table. Assume that only title and content fields need to be full-text searchable - but that sometimes it is also required to limit search to a certain author or a sub-forum (ie. search only those rows that have some specific values of author_id or forum_id columns in the SQL table); or to sort matches by post_date column; or to group matching posts by month of the post_date and calculate per-group match counts.

    This can be achieved by specifying all the mentioned columns (excluding title and content, which are full-text fields) as attributes, indexing them, and then using API calls to set up filtering, sorting, and grouping. Here is an example.

    Example sphinx.conf part:

    ...
    sql_query = SELECT id, title, content, \
    	author_id, forum_id, post_date FROM my_forum_posts
    sql_attr_uint = author_id
    sql_attr_uint = forum_id
    sql_attr_timestamp = post_date
    ...
    

    Example application code (in PHP):

    // only search posts by author whose ID is 123
    $cl->SetFilter ( "author_id", array ( 123 ) );
    
    // only search posts in sub-forums 1, 3 and 7
    $cl->SetFilter ( "forum_id", array ( 1,3,7 ) );
    
    // sort found posts by posting date in descending order
    $cl->SetSortMode ( SPH_SORT_ATTR_DESC, "post_date" );
    

    Attributes are named. Attribute names are case insensitive. Attributes are not full-text indexed; they are stored in the index as is. Currently supported attribute types are:

    • unsigned integers (1-bit to 32-bit wide);

    • UNIX timestamps;

    • floating point values (32-bit, IEEE 754 single precision);

    • string ordinals (specially computed integers);

    • strings (since 1.10-beta);

    • MVA, multi-value attributes (variable-length lists of 32-bit unsigned integers).

    The complete set of per-document attribute values is sometimes referred to as docinfo. Docinfos can either be

    • stored separately from the main full-text index data ("extern" storage, in .spa file), or

    • attached to each occurence of document ID in full-text index data ("inline" storage, in .spd file).

    When using extern storage, a copy of the .spa file (with all the attribute values for all the documents) is kept in RAM by searchd at all times. This is for performance reasons; random disk I/O would be too slow. On the contrary, inline storage does not require any additional RAM at all, but that comes at the cost of greatly inflating the index size: remember that it copies all attribute values every time the document ID is mentioned, and that is exactly as many times as there are different keywords in the document. Inline may be the only viable option if you have only a few attributes and need to work with big datasets in limited RAM. However, in most cases extern storage makes both indexing and searching much more efficient.

    Search-time memory requirements for extern storage are (1+number_of_attrs)*number_of_docs*4 bytes, ie. 10 million docs with 2 groups and 1 timestamp will take (1+2+1)*10M*4 = 160 MB of RAM. This is PER DAEMON, not per query. searchd will allocate 160 MB on startup, read the data and keep it shared between queries. The children will NOT allocate any additional copies of this data.

    3.4. MVA (multi-valued attributes)

    MVAs, or multi-valued attributes, are an important special type of per-document attributes in Sphinx. MVAs let you attach sets of numeric values to every document. That is useful to implement article tags, product categories, etc. Filtering and group-by (but not sorting) on MVA attributes is supported.

    As of version 2.0.2-beta, MVA values can either be unsigned 32-bit integers (UNSIGNED INTEGER) or signed 64-bit integers (BIGINT). Up to version 2.0.1-beta, only the unsigned 32-bit values were supported.

    The set size is not limited; you can have an arbitrary number of values attached to each document as long as RAM permits (the .spm file that contains the MVA values will be precached in RAM by searchd). The source data can be taken either from a separate query, or from a document field; see the source type in sql_attr_multi. In the first case the query will have to return pairs of document ID and MVA values; in the second, the field will be parsed for integer values. There are absolutely no requirements as to incoming data order; the values will be automatically grouped by document ID (and internally sorted within the same ID) during indexing anyway.
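
    For example, a minimal sketch of the "query" source type in sphinx.conf could look like this (the tags table and its columns are placeholders):

    sql_attr_multi = uint tag from query; SELECT id, tag FROM tags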

    When filtering, a document will match the filter on an MVA attribute if any of the values satisfy the filtering condition. (Therefore, documents that pass through exclude filters will not contain any of the forbidden values.) When grouping by an MVA attribute, a document will contribute to as many groups as there are different MVA values associated with that document. For instance, if the collection contains exactly 1 document having a 'tag' MVA with values 5, 7, and 11, grouping on 'tag' will produce 3 groups with '@count' equal to 1 and '@groupby' key values of 5, 7, and 11 respectively. Also note that grouping by MVA might lead to duplicate documents in the result set: because each document can participate in many groups, it can be chosen as the best one in more than one group, leading to duplicate IDs. The PHP API historically uses an ordered hash on the document ID for the resulting rows; so you'll also need to use SetArrayResult() in order to employ group-by on MVA with the PHP API.

    3.5. Indexes

    To be able to answer full-text search queries fast, Sphinx needs to build a special data structure optimized for such queries from your text data. This structure is called index; and the process of building index from text is called indexing.

    Different index types are well suited for different tasks. For example, a disk-based tree-based index would be easy to update (ie. insert new documents to existing index), but rather slow to search. Therefore, Sphinx architecture allows for different index types to be implemented easily.

    The only index type which is implemented in Sphinx at the moment is designed for maximum indexing and searching speed. This comes at a cost of updates being really slow; theoretically, it might be slower to update this type of index than to reindex it from scratch. However, this very frequently can be worked around with multiple indexes, see Section 3.11, “Live index updates” for details.

    It is planned to implement more index types, including the type which would be updateable in real time.

    There can be as many indexes per configuration file as necessary. The indexer utility can reindex either all of them (if the --all option is specified), or a certain explicitly specified subset. The searchd utility will serve all the specified indexes, and the clients can specify which indexes to search at run time.
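
    For example (index names are placeholders), you could rebuild everything, or just the two indexes that actually changed, while a running searchd keeps serving queries:

    $ indexer --all --rotate
    $ indexer main delta --rotate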

    3.6. Restrictions on the source data

    There are a few different restrictions imposed on the source data which is going to be indexed by Sphinx, of which the single most important one is:

    ALL DOCUMENT IDS MUST BE UNIQUE UNSIGNED NON-ZERO INTEGER NUMBERS (32-BIT OR 64-BIT, DEPENDING ON BUILD TIME SETTINGS).

    If this requirement is not met, different bad things can happen. For instance, Sphinx can crash with an internal assertion while indexing; or produce strange results when searching due to conflicting IDs. Also, a 1000-pound gorilla might eventually come out of your display and start throwing barrels at you. You've been warned.

    3.7. Charsets, case folding, and translation tables

    When indexing, Sphinx fetches documents from the specified sources, splits the text into words, and does case folding so that "Abc", "ABC" and "abc" would be treated as the same word (or, to be pedantic, term).

    To do that properly, Sphinx needs to know

    • what encoding is the source text in;

    • what characters are letters and what are not;

    • what letters should be folded to what letters.

    This should be configured on a per-index basis using charset_type and charset_table options. charset_type specifies whether the document encoding is single-byte (SBCS) or UTF-8. charset_table specifies the table that maps letter characters to their case folded versions. The characters that are not in the table are considered to be non-letters and will be treated as word separators when indexing or searching through this index.
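
    A minimal sketch of these two settings for a UTF-8 index of English text might look like this (it merely illustrates the syntax; see the configuration reference for the full default tables):

    index example
    {
        # ...
        charset_type = utf-8
        charset_table = 0..9, A..Z->a..z, _, a..z
    }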

    Note that while default tables do not include space character (ASCII code 0x20, Unicode U+0020) as a letter, it's in fact perfectly legal to do so. This can be useful, for instance, for indexing tag clouds, so that space-separated word sets would index as a single search query term.

    Default tables currently include English and Russian characters. Please do submit your tables for other languages!

    3.8. SQL data sources (MySQL, PostgreSQL)

    With all the SQL drivers, indexing generally works as follows.

    Most options, such as database user/host/password, are straightforward. However, there are a few subtle things, which are discussed in more detail here.

    Ranged queries

    The main query, which needs to fetch all the documents, can impose a read lock on the whole table and stall concurrent queries (eg. INSERTs to a MyISAM table), waste a lot of memory for the result set, etc. To avoid this, Sphinx supports so-called ranged queries. With ranged queries, Sphinx first fetches min and max document IDs from the table, and then substitutes different ID intervals into the main query text and runs the modified query to fetch another chunk of documents. Here's an example.

    Example 3.1. Ranged query usage example

    # in sphinx.conf
    
    sql_query_range	= SELECT MIN(id),MAX(id) FROM documents
    sql_range_step = 1000
    sql_query = SELECT * FROM documents WHERE id>=$start AND id<=$end
    

    If the table contains document IDs from 1 to, say, 2345, then sql_query would be run three times:

    1. with $start replaced with 1 and $end replaced with 1000;

    2. with $start replaced with 1001 and $end replaced with 2000;

    3. with $start replaced with 2001 and $end replaced with 2345.

    Obviously, that's not much of a difference for a 2,000-row table, but when it comes to indexing a 10-million-row MyISAM table, ranged queries might be of some help.

    sql_post vs. sql_post_index

    The difference between the post-query and the post-index query is that the post-query is run immediately when Sphinx has received all the documents, but further indexing may still fail for some other reason. On the contrary, by the time the post-index query gets executed, it is guaranteed that the indexing was successful. The database connection is dropped and re-established in between because the sorting phase can be very lengthy and would just time out otherwise.
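
    For example (the table names here are placeholders), a post-query might drop a helper table created in sql_query_pre, while the post-index query records the biggest indexed document ID using the $maxid macro:

    sql_query_post = DROP TABLE IF EXISTS my_tmp_documents
    sql_query_post_index = REPLACE INTO counters ( id, val ) \
        VALUES ( 'max_indexed_id', $maxid )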

    3.9. xmlpipe data source

    The xmlpipe data source was designed to enable users to plug data into Sphinx without having to implement new data source drivers themselves. It is limited to 2 fixed fields and 2 fixed attributes, and is now deprecated in favor of Section 3.10, “xmlpipe2 data source”. For new streams, use xmlpipe2.

    To use xmlpipe, configure the data source in your configuration file as follows:

    source example_xmlpipe_source
    {
        type = xmlpipe
        xmlpipe_command = perl /www/mysite.com/bin/sphinxpipe.pl
    }
    

    The indexer will run the command specified in xmlpipe_command, and then read, parse, and index the data it prints to stdout. More formally, it opens a pipe to the given command and then reads from that pipe.

    indexer will expect one or more documents in custom XML format. Here's the example document stream, consisting of two documents:

    Example 3.2. XMLpipe document stream

    <document>
    <id>123</id>
    <group>45</group>
    <timestamp>1132223498</timestamp>
    <title>test title</title>
    <body>
    this is my document body
    </body>
    </document>
    
    <document>
    <id>124</id>
    <group>46</group>
    <timestamp>1132223498</timestamp>
    <title>another test</title>
    <body>
    this is another document
    </body>
    </document>
    


    The legacy xmlpipe driver uses a built-in parser which is pretty fast but really strict and does not actually fully support XML. It requires that all the fields be present, formatted exactly as in this example, and occur in exactly the same order. The only optional field is timestamp; it defaults to 1.

    3.10. xmlpipe2 data source

    xmlpipe2 lets you pass arbitrary full-text and attribute data to Sphinx in yet another custom XML format. It also allows you to specify the schema (ie. the set of fields and attributes) either in the XML stream itself, or in the source settings.

    When indexing xmlpipe2 source, indexer runs the given command, opens a pipe to its stdout, and expects well-formed XML stream. Here's sample stream data:

    Example 3.3. xmlpipe2 document stream

    <?xml version="1.0" encoding="utf-8"?>
    <sphinx:docset>
    
    <sphinx:schema>
    <sphinx:field name="subject"/> 
    <sphinx:field name="content"/>
    <sphinx:attr name="published" type="timestamp"/>
    <sphinx:attr name="author_id" type="int" bits="16" default="1"/>
    </sphinx:schema>
    
    <sphinx:document id="1234">
    <content>this is the main content <![CDATA[[and this <cdata> entry
    must be handled properly by xml parser lib]]></content>
    <published>1012325463</published>
    <subject>note how field/attr tags can be
    in <b class="red">randomized</b> order</subject>
    <misc>some undeclared element</misc>
    </sphinx:document>
    
    <sphinx:document id="1235">
    <subject>another subject</subject>
    <content>here comes another document, and i am given to understand,
    that in-document field order must not matter, sir</content>
    <published>1012325467</published>
    </sphinx:document>
    
    <!-- ... even more sphinx:document entries here ... -->
    
    <sphinx:killlist>
    <id>1234</id>
    <id>4567</id>
    </sphinx:killlist>
    
    </sphinx:docset>
    


    Arbitrary fields and attributes are allowed. They also can occur in the stream in arbitrary order within each document; the order is ignored. There is a restriction on maximum field length; fields longer than 2 MB will be truncated to 2 MB (this limit can be changed in the source).

    The schema, ie. the complete fields and attributes list, must be declared before any document can be parsed. This can be done either in the configuration file using xmlpipe_field and xmlpipe_attr_XXX settings, or right in the stream using the <sphinx:schema> element. <sphinx:schema> is optional. It is only allowed to occur as the very first sub-element in <sphinx:docset>. If there is no in-stream schema definition, settings from the configuration file will be used. Otherwise, stream settings take precedence.
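
    For example, a configuration-file schema matching the stream above might be sketched as follows (the xmlpipe_command value is a placeholder; it should print the XML document stream to stdout):

    source example_xmlpipe2
    {
        type = xmlpipe2
        xmlpipe_command = cat /home/sphinx/documents.xml

        xmlpipe_field = subject
        xmlpipe_field = content
        xmlpipe_attr_timestamp = published
        xmlpipe_attr_uint = author_id
    }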

    Unknown tags (which were declared neither as fields nor as attributes) will be ignored with a warning. In the example above, <misc> will be ignored. All embedded tags and their attributes (such as <b> in <subject> in the example above) will be silently ignored.

    Support for incoming stream encodings depends on whether iconv is installed on the system. xmlpipe2 is parsed using libexpat parser that understands US-ASCII, ISO-8859-1, UTF-8 and a few UTF-16 variants natively. Sphinx configure script will also check for libiconv presence, and utilize it to handle other encodings. libexpat also enforces the requirement to use UTF-8 charset on Sphinx side, because the parsed data it returns is always in UTF-8.

    XML elements (tags) recognized by xmlpipe2 (and their attributes where applicable) are:

    sphinx:docset

    Mandatory top-level element, denotes and contains xmlpipe2 document set.

    sphinx:schema

    Optional element, must either occur as the very first child of sphinx:docset, or never occur at all. Declares the document schema. Contains field and attribute declarations. If present, overrides per-source settings from the configuration file.

    sphinx:field

    Optional element, child of sphinx:schema. Declares a full-text field. Known attributes are:

    • "name", specifies the XML element name that will be treated as a full-text field in the subsequent documents.

    • "attr", specifies whether to also index this field as a string or word count attribute. Possible values are "string" and "wordcount". Introduced in version 1.10-beta.

    sphinx:attr

    Optional element, child of sphinx:schema. Declares an attribute. Known attributes are:

    • "name", specifies the element name that should be treated as an attribute in the subsequent documents.

    • "type", specifies the attribute type. Possible values are "int", "timestamp", "str2ordinal", "bool", "float" and "multi".

    • "bits", specifies the bit size for "int" attribute type. Valid values are 1 to 32.

    • "default", specifies the default value for this attribute that should be used if the attribute's element is not present in the document.

    sphinx:document

    Mandatory element, must be a child of sphinx:docset. Contains arbitrary other elements with field and attribute values to be indexed, as declared either using sphinx:field and sphinx:attr elements or in the configuration file. The only known attribute is "id" that must contain the unique integer document ID.

    sphinx:killlist

    Optional element, child of sphinx:docset. Contains a number of "id" elements whose contents are document IDs to be put into a kill-list for this index.

    3.11. Live index updates

    There are two major approaches to maintaining the full-text index contents up to date. Note, however, that both these approaches deal with the task of full-text data updates, and not attribute updates. Instant attribute updates are supported since version 0.9.8. Refer to UpdateAttributes() API call description for details.

    First, you can use disk-based indexes, partition them manually, and only rebuild the smaller partitions (so-called "deltas") frequently. By minimizing the rebuild size, you can reduce the average indexing lag to something as low as 30-60 seconds. This approach was the only one available in versions 0.9.x. On huge collections it actually might be the most efficient one. Refer to Section 3.12, “Delta index updates” for details.

    Second, versions 1.x (starting with 1.10-beta) add support for so-called real-time indexes (RT indexes for short) that allow on-the-fly updates of the full-text data. Updates on an RT index can appear in the search results in 1-2 milliseconds, ie. 0.001-0.002 seconds. However, RT indexes are less efficient for bulk indexing huge amounts of data. Refer to Chapter 4, Real-time indexes for details.

    3.12. Delta index updates

    There's a frequent situation when the total dataset is too big to be reindexed from scratch often, but the amount of new records is rather small. Example: a forum with a 1,000,000 archived posts, but only 1,000 new posts per day.

    In this case, "live" (almost real time) index updates could be implemented using so called "main+delta" scheme.

    The idea is to set up two sources and two indexes, with one "main" index for the data which only changes rarely (if ever), and one "delta" for the new documents. In the example above, 1,000,000 archived posts would go to the main index, and newly inserted 1,000 posts/day would go to the delta index. Delta index could then be reindexed very frequently, and the documents can be made available to search in a matter of minutes.

    Specifying which documents should go to what index and reindexing main index could also be made fully automatic. One option would be to make a counter table which would track the ID which would split the documents, and update it whenever the main index is reindexed.

    Example 3.4. Fully automated live updates

    # in MySQL
    CREATE TABLE sph_counter
    (
        counter_id INTEGER PRIMARY KEY NOT NULL,
        max_doc_id INTEGER NOT NULL
    );
    
    # in sphinx.conf
    source main
    {
        # ...
        sql_query_pre = SET NAMES utf8
        sql_query_pre = REPLACE INTO sph_counter SELECT 1, MAX(id) FROM documents
        sql_query = SELECT id, title, body FROM documents \
            WHERE id<=( SELECT max_doc_id FROM sph_counter WHERE counter_id=1 )
    }
    
    source delta : main
    {
        sql_query_pre = SET NAMES utf8
        sql_query = SELECT id, title, body FROM documents \
            WHERE id>( SELECT max_doc_id FROM sph_counter WHERE counter_id=1 )
    }
    
    index main
    {
        source = main
        path = /path/to/main
        # ... all the other settings
    }
    
    # note how all other settings are copied from main,
    # but source and path are overridden (they MUST be)
    index delta : main
    {
        source = delta
        path = /path/to/delta
    }
    


    Note how we're overriding sql_query_pre in the delta source. We need to explicitly have that override. Otherwise the REPLACE query would be run when indexing the delta source too, effectively nullifying it. However, when we issue the directive in the inherited source for the first time, it removes all inherited values, so the encoding setup is also lost. So sql_query_pre in the delta cannot just be empty; we need to issue the encoding setup query explicitly once again.
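
    With such a setup, keeping the delta fresh is just a matter of re-running indexer periodically, eg. from cron (the schedule and install path below are examples only), and rebuilding or merging the main index much less frequently:

    # reindex delta every 5 minutes, telling the running searchd to pick it up
    */5 * * * * /usr/local/sphinx/bin/indexer --rotate delta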

    3.13. Index merging

    Merging two existing indexes can be more efficient than indexing the data from scratch, and is desirable in some cases (such as merging 'main' and 'delta' indexes instead of simply reindexing 'main' in the 'main+delta' partitioning scheme). So indexer has an option to do that. Merging the indexes is normally faster than reindexing but still not instant on huge indexes. Basically, it will need to read the contents of both indexes once and write the result once. Merging a 100 GB and a 1 GB index, for example, will result in 202 GB of IO (but that's still likely less than indexing from scratch requires).

    The basic command syntax is as follows:

    indexer --merge DSTINDEX SRCINDEX [--rotate]
    

    Only the DSTINDEX index will be affected: the contents of SRCINDEX will be merged into it. --rotate switch will be required if DSTINDEX is already being served by searchd. The initially devised usage pattern is to merge a smaller update from SRCINDEX into DSTINDEX. Thus, when merging the attributes, values from SRCINDEX will win if duplicate document IDs are encountered. Note, however, that the "old" keywords will not be automatically removed in such cases. For example, if there's a keyword "old" associated with document 123 in DSTINDEX, and a keyword "new" associated with it in SRCINDEX, document 123 will be found by both keywords after the merge. You can supply an explicit condition to remove documents from DSTINDEX to mitigate that; the relevant switch is --merge-dst-range:

    indexer --merge main delta --merge-dst-range deleted 0 0
    

    This switch lets you apply filters to the destination index along with merging. There can be several filters; all of their conditions must be met in order to include the document in the resulting merged index. In the example above, the filter passes only those records where 'deleted' is 0, eliminating all records that were flagged as deleted (for instance, using an UpdateAttributes() call).

    Chapter 4. Real-time indexes

    Real-time indexes (or RT indexes for brevity) are a new backend that lets you insert, update, or delete documents (rows) on the fly. RT indexes were added in version 1.10-beta. While querying of RT indexes is possible using any of the SphinxAPI, SphinxQL, or SphinxSE, updating them is only possible via SphinxQL at the moment. Full SphinxQL reference is available in Chapter 7, SphinxQL reference.

    4.1. RT indexes overview

    RT indexes should be declared in sphinx.conf, just as every other index type. Notable differences from the regular, disk-based indexes are that a) data sources are not required and are ignored, and b) you should explicitly enumerate all the text fields, not just attributes. Here's an example:

    Example 4.1. RT index declaration

    index rt
    {
    	type = rt
    	path = /usr/local/sphinx/data/rt
    	rt_field = title
    	rt_field = content
    	rt_attr_uint = gid
    }
    

    As of 2.0.1-beta and above, RT indexes are production quality, despite a few missing features.

    RT index can be accessed using MySQL protocol. INSERT, REPLACE, DELETE, and SELECT statements against RT index are supported. For instance, this is an example session with the sample index above:

    $ mysql -h 127.0.0.1 -P 9306
    Welcome to the MySQL monitor.  Commands end with ; or \g.
    Your MySQL connection id is 1
    Server version: 1.10-dev (r2153)
    
    Type 'help;' or '\h' for help. Type '\c' to clear the buffer.
    
    mysql> INSERT INTO rt VALUES ( 1, 'first record', 'test one', 123 );
    Query OK, 1 row affected (0.05 sec)
    
    mysql> INSERT INTO rt VALUES ( 2, 'second record', 'test two', 234 );
    Query OK, 1 row affected (0.00 sec)
    
    mysql> SELECT * FROM rt;
    +------+--------+------+
    | id   | weight | gid  |
    +------+--------+------+
    |    1 |      1 |  123 |
    |    2 |      1 |  234 |
    +------+--------+------+
    2 rows in set (0.02 sec)
    
    mysql> SELECT * FROM rt WHERE MATCH('test');
    +------+--------+------+
    | id   | weight | gid  |
    +------+--------+------+
    |    1 |   1643 |  123 |
    |    2 |   1643 |  234 |
    +------+--------+------+
    2 rows in set (0.01 sec)
    
    mysql> SELECT * FROM rt WHERE MATCH('@title test');
    Empty set (0.00 sec)
    

    Both partial and batch INSERT syntaxes are supported, ie. you can specify a subset of columns, and insert several rows at a time. Deletions are also possible using DELETE statement; the only currently supported syntax is DELETE FROM <index> WHERE id=<id>. REPLACE is also supported, enabling you to implement updates.

    mysql> INSERT INTO rt ( id, title ) VALUES ( 3, 'third row' ), ( 4, 'fourth entry' );
    Query OK, 2 rows affected (0.01 sec)
    
    mysql> SELECT * FROM rt;
    +------+--------+------+
    | id   | weight | gid  |
    +------+--------+------+
    |    1 |      1 |  123 |
    |    2 |      1 |  234 |
    |    3 |      1 |    0 |
    |    4 |      1 |    0 |
    +------+--------+------+
    4 rows in set (0.00 sec)
    
    mysql> DELETE FROM rt WHERE id=2;
    Query OK, 0 rows affected (0.00 sec)
    
    mysql> SELECT * FROM rt WHERE MATCH('test');
    +------+--------+------+
    | id   | weight | gid  |
    +------+--------+------+
    |    1 |   1500 |  123 |
    +------+--------+------+
    1 row in set (0.00 sec)
    
    mysql> INSERT INTO rt VALUES ( 1, 'first record on steroids', 'test one', 123 );
    ERROR 1064 (42000): duplicate id '1'
    
    mysql> REPLACE INTO rt VALUES ( 1, 'first record on steroids', 'test one', 123 );
    Query OK, 1 row affected (0.01 sec)
    
    mysql> SELECT * FROM rt WHERE MATCH('steroids');
    +------+--------+------+
    | id   | weight | gid  |
    +------+--------+------+
    |    1 |   1500 |  123 |
    +------+--------+------+
    1 row in set (0.01 sec)
    

    Data stored in RT index should survive clean shutdown. When binary logging is enabled, it should also survive crash and/or dirty shutdown, and recover on subsequent startup.

    4.2. Known caveats with RT indexes

    As of 1.10-beta, RT indexes are a beta quality feature: while no major, showstopper-class issues are known, there still are a few known usage quirks. Those quirks are listed in this section.

    • Prefix and infix indexing are not supported yet.

    • MVAs are not supported yet.

    • Disk chunks optimization routine is not implemented yet.

    • On initial index creation, attributes are reordered by type, in the following order: uint, bigint, float, timestamp, string. So when using INSERT without an explicit column names list, specify all uint column values first, then bigint, etc.

    • The default, conservative RAM chunk limit (rt_mem_limit) of 32M can lead to poor performance on bigger indexes; you should raise it to 256..1024M if you're planning to index gigabytes.

    • High DELETE/REPLACE rate can lead to kill-list fragmentation and impact searching performance.

    • No transaction size limits are currently imposed; too many concurrent INSERT/REPLACE transactions might therefore consume a lot of RAM.

    • In case of a damaged binlog, recovery will stop on the first damaged transaction, even though it's technically possible to keep looking further for subsequent undamaged transactions, and recover those. This mid-file damage case (due to flaky HDD/CDD/tape?) is supposed to be extremely rare, though.

    • Multiple INSERTs grouped in a single transaction perform better than equivalent single-row transactions and are recommended for batch loading of data.

    4.3. RT index internals

    RT index is internally chunked. It keeps a so-called RAM chunk that stores all the most recent changes. RAM chunk memory usage is rather strictly limited with per-index rt_mem_limit directive. Once RAM chunk grows over this limit, a new disk chunk is created from its data, and RAM chunk is reset. Thus, while most changes on the RT index will be performed in RAM only and complete instantly (in milliseconds), those changes that overflow the RAM chunk will stall for the duration of disk chunk creation (a few seconds).

    Disk chunks are, in fact, just regular disk-based indexes. But they're a part of an RT index and automatically managed by it, so you need not configure nor manage them manually. Because a new disk chunk is created every time the RAM chunk overflows the limit, and because the in-memory chunk format is close to the on-disk format, the disk chunks will be approximately rt_mem_limit bytes in size each.

    Generally, it is better to set the limit higher, to minimize both the frequency of flushes and the index fragmentation (number of disk chunks). For instance, on a dedicated search server that handles a big RT index, it is advisable to set rt_mem_limit to 1-2 GB. A global limit on all indexes is also planned, but not yet implemented as of 1.10-beta.
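
    As a rough sketch (the index name, path, and field/attribute names are just examples mirroring the rt index used earlier, and the limit should be tuned to your data volume), an RT index with a raised RAM chunk limit could be declared like this:

    index rt
    {
        type          = rt
        path          = /var/data/rt
        rt_mem_limit  = 512M
        rt_field      = title
        rt_field      = content
        rt_attr_uint  = gid
    }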

    Disk chunk full-text index data can not actually be modified, so full-text field changes (ie. row deletions and updates) suppress a previous row version from a disk chunk using a kill-list, but do not actually physically purge the data. Therefore, on workloads with a high full-text update ratio the index might eventually get polluted by these previous row versions, and searching performance would degrade. Physical index purging that would improve the performance is planned, but not yet implemented as of 1.10-beta.

    Data in RAM chunk gets saved to disk on clean daemon shutdown, and then loaded back on startup. However, on daemon or server crash, updates from RAM chunk might be lost. To prevent that, binary logging of transactions can be used; see Section 4.4, “Binary logging” for details.

    Full-text changes in RT index are transactional. They are stored in a per-thread accumulator until COMMIT, then applied at once. Bigger batches per single COMMIT should result in faster indexing.
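
    For example, a minimal SphinxQL sketch of batching several inserts into one transaction (the rows themselves are hypothetical):

    BEGIN;
    INSERT INTO rt ( id, title, content, gid ) VALUES ( 10, 'tenth row', 'batched text', 1 );
    INSERT INTO rt ( id, title, content, gid ) VALUES ( 11, 'eleventh row', 'batched text', 1 );
    COMMIT;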

    4.4. Binary logging

    Binary logs are essentially a recovery mechanism. With binary logs enabled, searchd writes every given transaction to the binlog file, and uses that for recovery after an unclean shutdown. On clean shutdown, RAM chunks are saved to disk, and then all the binlog files are unlinked.

    During normal operation, a new binlog file will be opened every time the binlog_max_log_size limit is reached. Older, already closed binlog files are kept until all of the transactions stored in them (from all indexes) are flushed as a disk chunk. Setting the limit to 0 pretty much prevents the binlog from being unlinked at all while searchd is running; however, it will still be unlinked on clean shutdown. (This is the default behavior as of 2.0.3-release: binlog_max_log_size defaults to 0.)

    There are 3 different binlog flushing strategies, controlled by the binlog_flush directive which takes the values of 0, 1, or 2. 0 means to flush the log to OS and sync it to disk every second; 1 means flush and sync every transaction; and 2 (the default mode) means flush every transaction but sync every second. Sync is relatively slow because it has to perform physical disk writes, so mode 1 is the safest (every committed transaction is guaranteed to be written on disk) but the slowest. Flushing the log to OS prevents data loss on searchd crashes but not on system crashes.

    On recovery after an unclean shutdown, binlogs are replayed and all logged transactions since the last good on-disk state are restored. Transactions are checksummed, so in case of binlog file corruption garbage data will not be replayed; such a broken transaction will be detected and, currently, will stop replay. Transactions also start with a magic marker and are timestamped, so in case of binlog damage in the middle of the file, it's technically possible to skip broken transactions and keep replaying from the next good one, and/or to replay transactions only up to a given timestamp (point-in-time recovery), but none of that is implemented yet as of 1.10-beta.

    One unwanted side effect of binlogs is that actively updating a small RT index that fully fits into the RAM chunk will lead to an ever-growing binlog that can never be unlinked until clean shutdown. Binlogs are essentially append-only deltas against the last known good saved state on disk, and unless the RAM chunk gets saved, they can not be unlinked. An ever-growing binlog is not very good for disk use and crash recovery time. Starting with 2.0.1-beta you can configure searchd to perform a periodic RAM chunk flush to fix that problem using the rt_flush_period directive. With periodic flushes enabled, searchd will keep a separate thread that checks whether the RT indexes' RAM chunks need to be written back to disk. Once that happens, the respective binlogs can be (and are) safely unlinked.

    Note that rt_flush_period only controls the frequency at which the checks happen. There are no guarantees that a particular RAM chunk will get saved. For instance, it does not make sense to regularly re-save a huge RAM chunk that only gets a few rows' worth of updates. The search daemon determines whether to actually perform the flush using a few heuristics.
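
    As a hedged sketch (the specific values below are purely illustrative and should be tuned per installation), the binlog and flush related settings discussed above all live in the searchd section of the configuration file:

    searchd
    {
        # flush per transaction, sync once per second (mode 2, the default)
        binlog_flush        = 2
        # start a new binlog file at roughly 256 MB
        binlog_max_log_size = 256M
        # check RAM chunks for periodic flushing once per hour (value in seconds)
        rt_flush_period     = 3600
    }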

    Chapter 5. Searching

    5.1. Matching modes

    So-called matching modes are a legacy feature that used to provide (very) limited query syntax and ranking support. Currently, they are deprecated in favor of full-text query language and so-called rankers. Starting with version 0.9.9-release, it is thus strongly recommended to use SPH_MATCH_EXTENDED and proper query syntax rather than any other legacy mode. All those other modes are actually internally converted to extended syntax anyway. SphinxAPI still defaults to SPH_MATCH_ALL but that is for compatibility reasons only.

    There are the following matching modes available:

    • SPH_MATCH_ALL, matches all query words (default mode);

    • SPH_MATCH_ANY, matches any of the query words;

    • SPH_MATCH_PHRASE, matches query as a phrase, requiring perfect match;

    • SPH_MATCH_BOOLEAN, matches query as a boolean expression (see Section 5.2, “Boolean query syntax”);

    • SPH_MATCH_EXTENDED, matches query as an expression in Sphinx internal query language (see Section 5.3, “Extended query syntax”);

    • SPH_MATCH_EXTENDED2, an alias for SPH_MATCH_EXTENDED;

    • SPH_MATCH_FULLSCAN, matches the query, forcibly using the "full scan" mode as described below. Note that any query terms will be ignored: filters, filter ranges, and grouping will still be applied, but no text matching will be performed.

    SPH_MATCH_EXTENDED2 was used during 0.9.8 and 0.9.9 development cycle, when the internal matching engine was being rewritten (for the sake of additional functionality and better performance). By 0.9.9-release, the older version was removed, and SPH_MATCH_EXTENDED and SPH_MATCH_EXTENDED2 are now just aliases.

    The SPH_MATCH_FULLSCAN mode will be automatically activated in place of the specified matching mode when the following conditions are met:

    1. The query string is empty (ie. its length is zero).

    2. docinfo storage is set to extern.

    In full scan mode, all the indexed documents will be considered matching. Such queries will still apply filters, sorting, and group by, but will not perform any full-text searching. This can be useful to unify full-text and non-full-text searching code, or to offload the SQL server (there are cases when Sphinx scans will perform better than analogous MySQL queries). An example of using the full scan mode might be to find posts in a forum. By selecting the forum's user ID via SetFilter() but not actually providing any search text, Sphinx will match every document (i.e. every post) where SetFilter() would match - in this case providing every post from that user. By default this will be ordered by relevancy, followed by Sphinx document ID in ascending order (earliest first).
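
    The same idea can be expressed in SphinxQL as a quick sketch: a SELECT without a MATCH() clause is a full scan, so a query like the following (assuming a hypothetical posts index with a user_id attribute, and docinfo set to extern) would return every post by the given user:

    SELECT * FROM posts WHERE user_id=123 LIMIT 20;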

    5.2. Boolean query syntax

    Boolean queries allow the following special operators to be used:

    • explicit operator AND:

      hello & world
    • operator OR:

      hello | world
    • operator NOT:

      hello -world
      hello !world
      

    • grouping:

      ( hello world )

    Here's an example query which uses all these operators:

    Example 5.1. Boolean query example

    ( cat -dog ) | ( cat -mouse )
    


    There is always an implicit AND operator, so the "hello world" query actually means "hello & world".

    OR operator precedence is higher than AND, so "looking for cat | dog | mouse" means "looking for ( cat | dog | mouse )" and not "(looking for cat) | dog | mouse".

    Queries like "-dog", which implicitly include all documents from the collection, can not be evaluated. This is both for technical and performance reasons. Technically, Sphinx does not always keep a list of all IDs. Performance-wise, when the collection is huge (ie. 10-100M documents), evaluating such queries could take very long.

    5.3. Extended query syntax

    The following special operators and modifiers can be used when using the extended matching mode:

    • operator OR:

      hello | world
    • operator NOT:

      hello -world
      hello !world
      

    • field search operator:

      @title hello @body world
    • field position limit modifier (introduced in version 0.9.9-rc1):

      @body[50] hello
    • multiple-field search operator:

      @(title,body) hello world
    • all-field search operator:

      @* hello
    • phrase search operator:

      "hello world"
    • proximity search operator:

      "hello world"~10
    • quorum matching operator:

      "the world is a wonderful place"/3
    • strict order operator (aka operator "before"):

      aaa << bbb << ccc
    • exact form modifier (introduced in version 0.9.9-rc1):

      raining =cats and =dogs
    • field-start and field-end modifier (introduced in version 0.9.9-rc2):

      ^hello world$
    • NEAR, generalized proximity operator (introduced in version 2.0.1-beta):

      hello NEAR/3 world NEAR/4 "my test"
    • SENTENCE operator (introduced in version 2.0.1-beta):

      all SENTENCE words SENTENCE "in one sentence"
    • PARAGRAPH operator (introduced in version 2.0.1-beta):

      "Bill Gates" PARAGRAPH "Steve Jobs"
    • zone limit operator:

      ZONE:(h3,h4) only in these titles

    Here's an example query that uses some of these operators:

    Example 5.2. Extended matching mode: query example

    "hello world" @title "example program"~5 @body python -(php|perl) @* code
    


    The full meaning of this search is:

    • Find the words 'hello' and 'world' adjacently in any field in a document;

    • Additionally, the same document must also contain the words 'example' and 'program' in the title field, with up to, but not including, 5 words between the words in question (e.g. "example PHP program" would be matched, whereas "example script to introduce outside data into the correct context for your program" would not, because the two terms have 5 or more words between them);

    • Additionally, the same document must contain the word 'python' in the body field, but not contain either 'php' or 'perl';

    • Additionally, the same document must contain the word 'code' in any field.

    There is always an implicit AND operator, so "hello world" means that both "hello" and "world" must be present in a matching document.

    OR operator precedence is higher than AND, so "looking for cat | dog | mouse" means "looking for ( cat | dog | mouse )" and not "(looking for cat) | dog | mouse".

    The field limit operator limits subsequent searching to a given field. Normally, the query will fail with an error message if the given field name does not exist in the searched index. However, that can be suppressed by specifying the "@@relaxed" option at the very beginning of the query:

    @@relaxed @nosuchfield my query
    

    This can be helpful when searching through heterogeneous indexes with different schemas.

    The field position limit, introduced in version 0.9.9-rc1, additionally restricts the searching to the first N positions within a given field (or fields). For example, "@body[50] hello" will not match the documents where the keyword 'hello' only occurs at position 51 or later in the body.

    Proximity distance is specified in words, adjusted for word count, and applies to all words within quotes. For instance, the "cat dog mouse"~5 query means that there must be a span of less than 8 words which contains all 3 words; ie. the "CAT aaa bbb ccc DOG eee fff MOUSE" document will not match this query, because that span is exactly 8 words long.

    Quorum matching operator introduces a kind of fuzzy matching. It will only match those documents that pass a given threshold of given words. The example above ("the world is a wonderful place"/3) will match all documents that have at least 3 of the 6 specified words.

    The strict order operator (aka operator "before"), introduced in version 0.9.9-rc2, will match the document only if its argument keywords occur in the document exactly in the query order. For instance, the "black << cat" query (without quotes) will match the document "black and white cat" but not the "that cat was black" document. The order operator has the lowest priority. It can be applied both to individual keywords and to more complex expressions; ie. this is a valid query:

    (bag of words) << "exact phrase" << red|green|blue
    

    Exact form keyword modifier, introduced in version 0.9.9-rc1, will match the document only if the keyword occurred in exactly the specified form. The default behaviour is to match the document if the stemmed keyword matches. For instance, "runs" query will match both the document that contains "runs" and the document that contains "running", because both forms stem to just "run" - while "=runs" query will only match the first document. Exact form operator requires index_exact_words option to be enabled. This is a modifier that affects the keyword and thus can be used within operators such as phrase, proximity, and quorum operators.

    Field-start and field-end keyword modifiers, introduced in version 0.9.9-rc2, will make the keyword match only if it occurred at the very start or the very end of a fulltext field, respectively. For instance, the query "^hello world$" (with quotes and thus combining phrase operator and start/end modifiers) will only match documents that contain at least one field that has exactly these two keywords.

    Starting with 0.9.9-rc1, arbitrarily nested brackets and negations are allowed. However, the query must be possible to compute without involving an implicit list of all documents:

    // correct query
    aaa -(bbb -(ccc ddd))
    
    // queries that are non-computable
    -aaa
    aaa | -bbb
    

    The NEAR operator, added in 2.0.1-beta, is a generalized version of the proximity operator. The syntax is NEAR/N, it is case-sensitive, and no spaces are allowed between the NEAR keyword, the slash sign, and the distance value.

    The original proximity operator only worked on sets of keywords. NEAR is more generic and can accept arbitrary subexpressions as its two arguments, matching the document when both subexpressions are found within N words of each other, no matter in which order. NEAR is left associative and has the same (lowest) precedence as BEFORE.

    You should also note how a (one NEAR/7 two NEAR/7 three) query using NEAR is not really equivalent to a ("one two three"~7) one using the keyword proximity operator. The difference here is that the proximity operator allows for up to 6 non-matching words between all the 3 matching words, but the version with NEAR is less restrictive: it would allow for up to 6 words between 'one' and 'two' and then for up to 6 more between that two-word match and the 'three' keyword.

    The SENTENCE and PARAGRAPH operators, added in 2.0.1-beta, match the document when both their arguments are within the same sentence or the same paragraph of text, respectively. The arguments can be either keywords, or phrases, or instances of the same operator. Here are a few examples:

    one SENTENCE two
    one SENTENCE "two three"
    one SENTENCE "two three" SENTENCE four
    

    The order of the arguments within the sentence or paragraph does not matter. These operators only work on indexes built with index_sp (sentence and paragraph indexing feature) enabled, and revert to a mere AND otherwise. Refer to the index_sp directive documentation for the notes on what's considered a sentence and a paragraph.

    ZONE limit operator, added in 2.0.1-beta, is quite similar to field limit operator, but restricts matching to a given in-field zone or a list of zones. Note that the subsequent subexpressions are not required to match in a single contiguous span of a given zone, and may match in multiple spans. For instance, (ZONE:th hello world) query will match this example document:

    <th>Table 1. Local awareness of Hello Kitty brand.</th>
    .. some table data goes here ..
    <th>Table 2. World-wide brand awareness.</th>
    

    ZONE operator affects the query until the next field or ZONE limit operator, or the closing parenthesis. It only works on the indexes built with zones support (see Section 11.2.9, “index_zones”) and will be ignored otherwise.

    5.4. Search results ranking

    Ranking overview

    Ranking (aka weighting) of the search results can be defined as a process of computing a so-called relevance (aka weight) for every given matched document with regards to a given query that matched it. So relevance is in the end just a number attached to every document that estimates how relevant the document is to the query. Search results can then be sorted based on this number and/or some additional parameters, so that the most sought after results would come up higher on the results page.

    There is no single standard one-size-fits-all way to rank any document in any scenario. Moreover, there can not ever be such a way, because relevance is subjective. As in, what seems relevant to you might not seem relevant to me. Hence, in the general case it's not just hard to compute; it's theoretically impossible.

    So ranking in Sphinx is configurable. It has a notion of a so-called ranker. A ranker can formally be defined as a function that takes a document and a query as its input and produces a relevance value as output. In layman's terms, a ranker controls exactly how (that is, using which specific algorithm) Sphinx will assign weights to the document.

    Previously, this ranking function was rigidly bound to the matching mode. So in the legacy matching modes (that is, SPH_MATCH_ALL, SPH_MATCH_ANY, SPH_MATCH_PHRASE, and SPH_MATCH_BOOLEAN) you can not choose the ranker. You can only do that in the SPH_MATCH_EXTENDED mode. (Which is the only mode in SphinxQL and the suggested mode in SphinxAPI anyway.) To choose a non-default ranker you can either use SetRankingMode() with SphinxAPI, or the OPTION ranker clause in a SELECT statement when using SphinxQL.

    As a sidenote, legacy matching modes are internally implemented via the unified syntax anyway. When you use one of those modes, Sphinx just internally adjusts the query and sets the associated ranker, then executes the query using the very same unified code path.

    Available rankers

    Sphinx ships with a number of built-in rankers suited for different purposes. A number of them use two factors, phrase proximity (aka LCS) and BM25. Phrase proximity works on the keyword positions, while BM25 works on the keyword frequencies. Basically, the better the degree of the phrase match between the document body and the query, the higher is the phrase proximity (it maxes out when the document contains the entire query as a verbatim quote). And BM25 is higher when the document contains more rare words. We'll save the detailed discussion for later.

    Currently implemented rankers are:

    • SPH_RANK_PROXIMITY_BM25, the default ranking mode that uses and combines both phrase proximity and BM25 ranking.

    • SPH_RANK_BM25, statistical ranking mode which uses BM25 ranking only (similar to most other full-text engines). This mode is faster but may result in worse quality on queries which contain more than 1 keyword.

    • SPH_RANK_NONE, no ranking mode. This mode is obviously the fastest. A weight of 1 is assigned to all matches. This is sometimes called boolean searching that just matches the documents but does not rank them.

    • SPH_RANK_WORDCOUNT, ranking by the keyword occurrences count. This ranker computes the per-field keyword occurrence counts, then multiplies them by field weights, and sums the resulting values.

    • SPH_RANK_PROXIMITY, added in version 0.9.9-rc1, returns raw phrase proximity value as a result. This mode is internally used to emulate SPH_MATCH_ALL queries.

    • SPH_RANK_MATCHANY, added in version 0.9.9-rc1, returns rank as it was computed in SPH_MATCH_ANY mode earlier, and is internally used to emulate SPH_MATCH_ANY queries.

    • SPH_RANK_FIELDMASK, added in version 0.9.9-rc2, returns a 32-bit mask with the N-th bit corresponding to the N-th fulltext field, numbering from 0. The bit will only be set when the respective field has any keyword occurrences satisfying the query.

    • SPH_RANK_SPH04, added in version 1.10-beta, is generally based on the default SPH_RANK_PROXIMITY_BM25 ranker, but additionally boosts the matches when they occur in the very beginning or the very end of a text field. Thus, if a field equals the exact query, SPH04 should rank it higher than a field that contains the exact query but is not equal to it. (For instance, when the query is "Hyde Park", a document entitled "Hyde Park" should be ranked higher than one entitled "Hyde Park, London" or "The Hyde Park Cafe".)

    • SPH_RANK_EXPR, added in version 2.0.2-beta, lets you specify the ranking formula at run time. It exposes a number of internal text factors and lets you define how the final weight should be computed from those factors. You can find more details about its syntax and a reference of the available factors in a subsection below.

    You should specify the SPH_RANK_ prefix and use capital letters only when using the SetRankingMode() call from the SphinxAPI. The API ports expose these as global constants. Using SphinxQL syntax, the prefix should be omitted and the ranker name is case insensitive. Example:

    // SphinxAPI
    $client->SetRankingMode ( SPH_RANK_SPH04 );
    
    // SphinxQL
    mysql_query ( "SELECT ... OPTION ranker=sph04" );
    

    Legacy matching modes rankers

    Legacy matching modes automatically select a ranker as follows:

    • SPH_MATCH_ALL uses SPH_RANK_PROXIMITY ranker;

    • SPH_MATCH_ANY uses SPH_RANK_MATCHANY ranker;

    • SPH_MATCH_PHRASE uses SPH_RANK_PROXIMITY ranker;

    • SPH_MATCH_BOOLEAN uses SPH_RANK_NONE ranker.

    Expression based ranker (SPH_RANK_EXPR)

    Expression ranker, added in version 2.0.2-beta, lets you change the ranking formula on the fly, on a per-query basis. For a quick kickoff, this is how you emulate PROXIMITY_BM25 ranker using the expression based one:

    SELECT *, WEIGHT() FROM myindex WHERE MATCH('hello world')
    OPTION ranker=expr('sum(lcs*user_weight)*1000+bm25')
    

    The output of this query will not change if you omit the OPTION clause, because the default ranker (PROXIMITY_BM25) behaves exactly as specified in the ranker formula above. But the expression ranker is somewhat more flexible than just that and provides access to many more factors.

    The ranking formula is an arbitrary arithmetic expression that can use constants, document attributes, built-in functions and operators (described in Section 5.5, “Expressions, functions, and operators”), and also a few ranking-specific things that are only accessible in a ranking formula. Namely, those are field aggregation functions, field-level, and document-level ranking factors.

    A document-level factor is a numeric value computed by the ranking engine for every matched document with regards to the current query. (So it differs from a plain document attribute in that the attribute does not depend on the full text query, while factors might.) Those factors can be used anywhere in the ranking expression. Currently implemented document-level factors are:

    • bm25 (integer), a document-level BM25 estimate (computed without keyword occurrence filtering).

    • max_lcs (integer), a query-level maximum possible value that the sum(lcs*user_weight) expression can ever take. This can be useful for weight boost scaling. For instance, the MATCHANY ranker formula uses this to guarantee that a full phrase match in any field ranks higher than any combination of partial matches in all fields.

    • field_mask (integer), a document-level 32-bit mask of matched fields.

    • query_word_count (integer), the number of unique keywords in a query, adjusted for a number of excluded keywords. For instance, both (one one one one) and (one !two) queries should assign a value of 1 to this factor, because there is just one unique non-excluded keyword.

    • doc_word_count (integer), the number of unique keywords matched in the entire document.

    A field-level factor is a numeric value computed by the ranking engine for every matched in-document text field with regards to the current query. As more than one field can be matched by a query, but the final weight needs to be a single integer value, these values need to be folded into a single one. To achieve that, field-level factors can only be used within a field aggregation function; they can not be used elsewhere in the expression. For example, you can not use (lcs+bm25) as your ranking expression, as lcs takes multiple values (one in every matched field). You should use (sum(lcs)+bm25) instead; that expression sums lcs over all matching fields, and then adds bm25 to that per-field sum. Currently implemented field-level factors are:

    • lcs (integer), the length of a maximum verbatim match between the document and the query, counted in words. LCS stands for Longest Common Subsequence (or Subset). Takes a minimum value of 1 when only stray keywords were matched in a field, and a maximum value of the query keywords count when the entire query was matched in a field verbatim (in the exact query keywords order). For example, if the query is 'hello world' and the field contains these two words quoted from the query (that is, adjacent to each other, and exactly in the query order), lcs will be 2. For example, if the query is 'hello world program' and the field contains 'hello world', lcs will be 2. Note that any subset of the query keywords works, not just a subset of adjacent keywords. For example, if the query is 'hello world program' and the field contains 'hello (test program)', lcs will be 2 just as well, because both 'hello' and 'program' matched in the same respective positions as they were in the query. Finally, if the query is 'hello world program' and the field contains 'hello world program', lcs will be 3. (Hopefully that is unsurprising at this point.)

    • user_weight (integer), the user specified per-field weight (refer to SetFieldWeights() in SphinxAPI and OPTION field_weights in SphinxQL respectively). The weights default to 1 if not specified explicitly.

    • hit_count (integer), the number of keyword occurrences that matched in the field. Note that a single keyword may occur multiple times. For example, if 'hello' occurs 3 times in a field and 'world' occurs 5 times, hit_count will be 8.

    • word_count (integer), the number of unique keywords matched in the field. For example, if 'hello' and 'world' occur anywhere in a field, word_count will be 2, regardless of how many times either keyword occurs.

    • tf_idf (float), the sum of TF*IDF over all the keywords matched in the field. IDF is the Inverse Document Frequency, a floating point value between 0 and 1 that describes how frequent the keyword is (basically, 0 for a keyword that occurs in every indexed document, and 1 for a unique keyword that occurs in just a single document). TF is the Term Frequency, the number of matched keyword occurrences in the field. As a side note, tf_idf is actually computed by summing IDF over all matched occurrences. That's by construction equivalent to summing TF*IDF over all matched keywords.

    • min_hit_pos (integer), the position of the first matched keyword occurrence, counted in words. Indexing begins from position 1.

    • min_best_span_pos (integer), the position of the first maximum LCS occurrence span. For example, assume that our query was 'hello world program' and the 'hello world' subphrase was matched twice in the field, at positions 13 and 21. Assume that 'hello' and 'world' additionally occurred elsewhere in the field, but never next to each other and thus never as a subphrase match. In that case, min_best_span_pos will be 13. Note how for single-keyword queries min_best_span_pos will always equal min_hit_pos.

    • exact_hit (boolean), whether a query was an exact match of the entire current field. Used in the SPH04 ranker.

    A field aggregation function is a single argument function that takes an expression with field-level factors, iterates it over all the matched fields, and computes the final results. Currently implemented field aggregation functions are:

    • sum, sums the argument expression over all matched fields. For instance, sum(1) should return the number of matched fields.

    Expressions for the built-in rankers

    Most of the other rankers can actually be emulated with the expression based ranker. You just need to pass a proper expression; see the sketch after this list for an example. Such emulation is, of course, going to be slower than using the built-in, compiled ranker, but might still be of interest if you want to fine-tune your ranking formula starting with one of the existing ones. Also, the formulas define the nitty-gritty ranker details in a nicely readable fashion.

    • SPH_RANK_PROXIMITY_BM25 = sum(lcs*user_weight)*1000+bm25

    • SPH_RANK_BM25 = bm25

    • SPH_RANK_NONE = 1

    • SPH_RANK_WORDCOUNT = sum(hit_count*user_weight)

    • SPH_RANK_PROXIMITY = sum(lcs*user_weight)

    • SPH_RANK_MATCHANY = sum((word_count+(lcs-1)*max_lcs)*user_weight)

    • SPH_RANK_FIELDMASK = field_mask

    • SPH_RANK_SPH04 = sum((4*lcs+2*(min_hit_pos==1)+exact_hit)*user_weight)*1000+bm25
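
    For instance, here is a hedged SphinxQL sketch (the index name is hypothetical) that emulates the MATCHANY ranker by passing its formula from the list above to the expression based ranker:

    SELECT *, WEIGHT() FROM myindex WHERE MATCH('hello world')
    OPTION ranker=expr('sum((word_count+(lcs-1)*max_lcs)*user_weight)');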

    5.5. Expressions, functions, and operators

    Sphinx lets you use arbitrary arithmetic expressions both via SphinxQL and SphinxAPI, involving attribute values, internal attributes (document ID and relevance weight), arithmetic operations, a number of built-in functions, and user-defined functions. This section documents the supported operators and functions. Here's the complete reference list for quick access.

    5.5.1. Operators

    Arithmetic operators: +, -, *, /, %, DIV, MOD

    The standard arithmetic operators. Arithmetic calculations involving those can be performed in three different modes: (a) using single-precision, 32-bit IEEE 754 floating point values (the default), (b) using signed 32-bit integers, (c) using 64-bit signed integers. The expression parser will automatically switch to integer mode if there are no operations that result in a floating point value. Otherwise, it will use the default floating point mode. For instance, a+b will be computed using 32-bit integers if both arguments are 32-bit integers; or using 64-bit integers if both arguments are integers but one of them is 64-bit; or in floats otherwise. However, a/b or sqrt(a) will always be computed in floats, because these operations return a result of non-integer type. To avoid the former, you can use either the IDIV(a,b) or the a DIV b form. Also, a*b will not be automatically promoted to 64-bit when the arguments are 32-bit. To enforce 64-bit results, you can use BIGINT(). (But note that if there are non-integer operations, BIGINT() will simply be ignored.)
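
    As a small sketch (the products index and its price and qty attributes are hypothetical), the following query forces integer division in two equivalent ways and promotes a multiplication to 64 bits:

    SELECT id, IDIV(price, 100) AS bucket1, price DIV 100 AS bucket2,
        BIGINT(price)*qty AS total
    FROM products LIMIT 10;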

    Comparison operators: <, >, <=, >=, =, <>

    Comparison operators (eg. = or <=) return 1.0 when the condition is true and 0.0 otherwise. For instance, (a=b)+3 will evaluate to 4 when attribute 'a' is equal to attribute 'b', and to 3 when 'a' is not. Unlike MySQL, the equality comparisons (ie. = and <> operators) introduce a small equality threshold (1e-6 by default). If the difference between compared values is within the threshold, they will be considered equal.

    Boolean operators: AND, OR, NOT

    Boolean operators (AND, OR, NOT) were introduced in 0.9.9-rc2 and behave as usual. They are left-associative and have the lowest priority compared to other operators. NOT has higher priority than AND and OR, but nevertheless lower than any other operator. AND and OR have the same priority, so using brackets is recommended to avoid confusion in complex expressions.

    Bitwise operators: &, |

    These operators perform bitwise AND and OR respectively. The operands must be of integer types. Introduced in version 1.10-beta.

    5.5.2. Numeric functions

    ABS()

    Returns the absolute value of the argument.

    CEIL()

    Returns the smallest integer value greater than or equal to the argument.

    COS()

    Returns the cosine of the argument.

    EXP()

    Returns the exponent of the argument (e=2.718... to the power of the argument).

    FIBONACCI()

    Returns the N-th Fibonacci number, where N is the integer argument. That is, arguments of 0 and up will generate the values 0, 1, 1, 2, 3, 5, 8, 13 and so on. Note that the computations are done using 32-bit integer math and thus numbers 48th and up will be returned modulo 2^32.

    FLOOR()

    Returns the largest integer value less than or equal to the argument.

    IDIV()

    Returns the result of an integer division of the first argument by the second argument. Both arguments must be of an integer type.

    LN()

    Returns the natural logarithm of the argument (with the base of e=2.718...).

    LOG10()

    Returns the common logarithm of the argument (with the base of 10).

    LOG2()

    Returns the binary logarithm of the argument (with the base of 2).

    MAX()

    Returns the bigger of two arguments.

    MIN()

    Returns the smaller of two arguments.

    POW()

    Returns the first argument raised to the power of the second argument.

    SIN()

    Returns the sine of the argument.

    SQRT()

    Returns the square root of the argument.

    5.5.3. Date and time functions

    DAY()

    Returns the integer day of month (in 1..31 range) from a timestamp argument, according to the current timezone. Introduced in version 2.0.1-beta.

    MONTH()

    Returns the integer month (in 1..12 range) from a timestamp argument, according to the current timezone. Introduced in version 2.0.1-beta.

    NOW()

    Returns the current timestamp as an INTEGER. Introduced in version 0.9.9-rc1.

    YEAR()

    Returns the integer year (in 1969..2038 range) from a timestamp argument, according to the current timezone. Introduced in version 2.0.1-beta.

    YEARMONTH()

    Returns the integer year and month code (in 196912..203801 range) from a timestamp argument, according to the current timezone. Introduced in version 2.0.1-beta.

    YEARMONTHDAY()

    Returns the integer year, month, and date code (in 19691231..20380119 range) from a timestamp argument, according to the current timezone. Introduced in version 2.0.1-beta.

    5.5.4. Type conversion functions

    BIGINT()

    Forcibly promotes the integer argument to 64-bit type, and does nothing on floating point argument. It's intended to help enforce evaluation of certain expressions (such as a*b) in 64-bit mode even though all the arguments are 32-bit. Introduced in version 0.9.9-rc1.

    SINT()

    Forcibly reinterprets its 32-bit unsigned integer argument as signed, and also expands it to 64-bit type (because 32-bit type is unsigned). It's easily illustrated by the following example: 1-2 normally evaluates to 4294967295, but SINT(1-2) evaluates to -1. Introduced in version 1.10-beta.

    5.5.5. Comparison functions

    IF()

    IF() behavior is slightly different than that of its MySQL counterpart. It takes 3 arguments, checks whether the 1st argument is equal to 0.0, and returns the 2nd argument if it is not zero, or the 3rd one when it is. Note that unlike comparison operators, IF() does not use a threshold! Therefore, it's safe to use comparison results as its 1st argument, but arithmetic operators might produce unexpected results. For instance, the following two calls will produce different results even though they are logically equivalent:

    IF ( sqrt(3)*sqrt(3)-3<>0, a, b )
    IF ( sqrt(3)*sqrt(3)-3, a, b )
    

    In the first case, the comparison operator <> will return 0.0 (false) because of the threshold, and IF() will always return 'b' as a result. In the second one, the same sqrt(3)*sqrt(3)-3 expression will be compared with zero without a threshold by the IF() function itself. But its value will be slightly different from zero because of the limited precision of floating point calculations. Because of that, the comparison with 0.0 done by IF() will not pass, and the second variant will return 'a' as a result.

    IN()

    IN(expr,val1,val2,...), introduced in version 0.9.9-rc1, takes 2 or more arguments, and returns 1 if the 1st argument (expr) is equal to any of the other arguments (val1..valN), or 0 otherwise. Currently, all the checked values (but not the expression itself!) are required to be constant. (It's technically possible to implement arbitrary expressions too, and that might be implemented in the future.) Constants are pre-sorted and then binary search is used, so IN() will be very quick even against a big arbitrary list of constants. Starting with 0.9.9-rc2, the first argument can also be an MVA attribute. In that case, IN() will return 1 if any of the MVA values is equal to any of the other arguments. Starting with 2.0.1-beta, IN() also supports the IN(expr,@uservar) syntax to check whether the value belongs to the list in the given global user variable.

    INTERVAL()

    INTERVAL(expr,point1,point2,point3,...), introduced in version 0.9.9-rc1, takes 2 or more arguments, and returns the index of the argument that is less than the first argument: it returns 0 if expr<point1, 1 if point1<=expr<point2, and so on. It is required that point1<point2<...<pointN for this function to work correctly.
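
    A brief sketch combining both functions (the products index and its group_id and price attributes are hypothetical):

    SELECT id, IN(group_id, 1, 5, 7) AS in_short_list,
        INTERVAL(price, 100, 500, 1000) AS price_band
    FROM products LIMIT 10;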

    5.5.6. Miscellaneous functions

    CRC32()

    Returns the CRC32 value of a string argument. Introduced in version 2.0.1-beta.

    GEODIST()

    The GEODIST(lat1,long1,lat2,long2) function, introduced in version 0.9.9-rc2, computes geosphere distance between two given points specified by their coordinates. Note that both latitudes and longitudes must be in radians and the result will be in meters. You can use an arbitrary expression as any of the four coordinates. An optimized path will be selected when one pair of the arguments refers directly to a pair of attributes and the other one is constant.
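
    For example, here is a hedged sketch of a nearest-first search, assuming a hypothetical places index whose lat_rad and lon_rad attributes already store coordinates in radians (the literal coordinates are arbitrary):

    SELECT *, GEODIST(lat_rad, lon_rad, 0.6595, -2.1366) AS dist
    FROM places ORDER BY dist ASC LIMIT 20;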

    5.6. Sorting modes

    There are the following result sorting modes available:

    • SPH_SORT_RELEVANCE mode, that sorts by relevance in descending order (best matches first);

    • SPH_SORT_ATTR_DESC mode, that sorts by an attribute in descending order (bigger attribute values first);

    • SPH_SORT_ATTR_ASC mode, that sorts by an attribute in ascending order (smaller attribute values first);

    • SPH_SORT_TIME_SEGMENTS mode, that sorts by time segments (last hour/day/week/month) in descending order, and then by relevance in descending order;

    • SPH_SORT_EXTENDED mode, that sorts by SQL-like combination of columns in ASC/DESC order;

    • SPH_SORT_EXPR mode, that sorts by an arithmetic expression.

    SPH_SORT_RELEVANCE ignores any additional parameters and always sorts matches by relevance rank. All other modes require an additional sorting clause, with the syntax depending on specific mode. SPH_SORT_ATTR_ASC, SPH_SORT_ATTR_DESC and SPH_SORT_TIME_SEGMENTS modes require simply an attribute name. SPH_SORT_RELEVANCE is equivalent to sorting by "@weight DESC, @id ASC" in extended sorting mode, SPH_SORT_ATTR_ASC is equivalent to "attribute ASC, @weight DESC, @id ASC", and SPH_SORT_ATTR_DESC to "attribute DESC, @weight DESC, @id ASC" respectively.

    SPH_SORT_TIME_SEGMENTS mode

    In SPH_SORT_TIME_SEGMENTS mode, attribute values are split into so-called time segments, and then sorted by time segment first, and by relevance second.

    The segments are calculated according to the current timestamp at the time when the search is performed, so the results would change over time. The segments are as follows:

    • last hour,

    • last day,

    • last week,

    • last month,

    • last 3 months,

    • everything else.

    These segments are hardcoded, but it is trivial to change them if necessary.

    This mode was added to support searching through blogs, news headlines, etc. When using time segments, recent records would be ranked higher because of the segment, but within the same segment, more relevant records would be ranked higher - unlike sorting by just the timestamp attribute, which would not take relevance into account at all.

    SPH_SORT_EXTENDED mode

    In SPH_SORT_EXTENDED mode, you can specify an SQL-like sort expression with up to 5 attributes (including internal attributes), eg:

    @relevance DESC, price ASC, @id DESC
    

    Both internal attributes (that are computed by the engine on the fly) and user attributes that were configured for this index are allowed. Internal attribute names must start with magic @-symbol; user attribute names can be used as is. In the example above, @relevance and @id are internal attributes and price is user-specified.

    Known internal attributes are:

    • @id (match ID)

    • @weight (match weight)

    • @rank (match weight)

    • @relevance (match weight)

    • @random (return results in random order)

    @rank and @relevance are just additional aliases to @weight.

    SPH_SORT_EXPR mode

    Expression sorting mode lets you sort the matches by an arbitrary arithmetic expression, involving attribute values, internal attributes (@id and @weight), arithmetic operations, and a number of built-in functions. Here's an example:

    $cl->SetSortMode ( SPH_SORT_EXPR,
    	"@weight + ( user_karma + ln(pageviews) )*0.1" );
    

    The operators and functions supported in the expressions are discussed in a separate section, Section 5.5, “Expressions, functions, and operators”.

    5.7. Grouping (clustering) search results

    Sometimes it could be useful to group (or in other terms, cluster) search results and/or count per-group match counts - for instance, to draw a nice graph of how many matching blog posts there were per month; or to group Web search results by site; or to group matching forum posts by author; etc.

    In theory, this could be performed by doing only the full-text search in Sphinx and then using found IDs to group on SQL server side. However, in practice doing this with a big result set (10K-10M matches) would typically kill performance.

    To avoid that, Sphinx offers so-called grouping mode. It is enabled with SetGroupBy() API call. When grouping, all matches are assigned to different groups based on group-by value. This value is computed from specified attribute using one of the following built-in functions:

    • SPH_GROUPBY_DAY, extracts year, month and day in YYYYMMDD format from timestamp;

    • SPH_GROUPBY_WEEK, extracts year and first day of the week number (counting from year start) in YYYYNNN format from timestamp;

    • SPH_GROUPBY_MONTH, extracts month in YYYYMM format from timestamp;

    • SPH_GROUPBY_YEAR, extracts year in YYYY format from timestamp;

    • SPH_GROUPBY_ATTR, uses attribute value itself for grouping.

    The final search result set then contains one best match per group. Grouping function value and per-group match count are returned along as "virtual" attributes named @group and @count respectively.

    The result set is sorted by group-by sorting clause, with the syntax similar to SPH_SORT_EXTENDED sorting clause syntax. In addition to @id and @weight, group-by sorting clause may also include:

    • @group (groupby function value),

    • @count (amount of matches in group).

    The default mode is to sort by groupby value in descending order, ie. by "@group desc".

    On completion, the total_found result parameter would contain the total number of matching groups over the whole index.

    WARNING: grouping is done in fixed memory and thus its results are only approximate; so there might be more groups reported in total_found than actually present. @count might also be underestimated. To reduce inaccuracy, one should raise max_matches. If max_matches allows storing all the found groups, the results will be 100% correct.

    For example, if sorting by relevance and grouping by "published" attribute with SPH_GROUPBY_DAY function, then the result set will contain

    • one most relevant match per each day when there were any matches published,

    • with day number and per-day match count attached,

    • sorted by day number in descending order (ie. recent days first).

    Starting with version 0.9.9-rc2, aggregate functions (AVG(), MIN(), MAX(), SUM()) are supported through SetSelect() API call when using GROUP BY.
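
    In SphinxQL terms, a grouping query with an aggregate might look like the following sketch (the products index and its vendor_id and price attributes are hypothetical):

    SELECT *, AVG(price) AS avg_price, COUNT(*) FROM products
    WHERE MATCH('ipod') GROUP BY vendor_id;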

    5.8. Distributed searching

    To scale well, Sphinx has distributed searching capabilities. Distributed searching is useful to improve query latency (ie. search time) and throughput (ie. max queries/sec) in multi-server, multi-CPU or multi-core environments. This is essential for applications which need to search through huge amounts of data (ie. billions of records and terabytes of text).

    The key idea is to horizontally partition (HP) the searched data across search nodes and then process it in parallel.

    Partitioning is done manually. You should

    • setup several instances of Sphinx programs (indexer and searchd) on different servers;

    • make the instances index (and search) different parts of data;

    • configure a special distributed index on some of the searchd instances;

    • and query this index.

    This index only contains references to other local and remote indexes - so it can not be directly reindexed; instead, you should reindex the indexes that it references.
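
    A minimal sketch of such a distributed index definition might look as follows (host names and index names are hypothetical; 9312 is simply the conventional searchd port):

    index dist_main
    {
        type  = distributed
        local = part00
        agent = box1.example.com:9312:part01
        agent = box2.example.com:9312:part02
    }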

    When searchd receives a query against distributed index, it does the following:

    1. connects to configured remote agents;

    2. issues the query;

    3. sequentially searches configured local indexes (while the remote agents are searching);

    4. retrieves remote agents' search results;

    5. merges all the results together, removing the duplicates;

    6. sends the merged results to the client.

    From the application's point of view, there are no differences between searching through a regular index or a distributed index at all. That is, distributed indexes are fully transparent to the application, and actually there's no way to tell whether the index you queried was distributed or local. (Even though as of 0.9.9 Sphinx does not allow combining searching through distributed indexes with anything else, this constraint will be lifted in the future.)

    Any searchd instance could serve both as a master (which aggregates the results) and a slave (which only does local searching) at the same time. This has a number of uses:

    1. every machine in a cluster could serve as a master which searches the whole cluster, and search requests could be balanced between masters to achieve a kind of HA (high availability) in case any of the nodes fails;

    2. if running within a single multi-CPU or multi-core machine, there would be only 1 searchd instance querying itself as an agent and thus utilizing all CPUs/cores.

    It is scheduled to implement better HA support which would allow specifying which agents mirror each other, doing health checks, keeping track of alive agents, load-balancing requests, etc.

    5.9. searchd query log formats

    In version 2.0.1-beta and above, two query log formats are supported. Previous versions only supported a custom plain text format, which is still the default one. However, while it might be more convenient for manual monitoring and review, the plain text format only logs search queries but not the other types of requests, does not always contain the complete search query data, and is harder (and sometimes impossible) to replay for benchmarking purposes. The new sphinxql format alleviates that. It aims to be complete and automatable, even though at the cost of brevity and readability.

    5.9.1. Plain log format

    By default, searchd logs all successfully executed search queries into a query log file. Here's an example:

    [Fri Jun 29 21:17:58 2007] 0.004 sec [all/0/rel 35254 (0,20)] [lj] test
    [Fri Jun 29 21:20:34 2007] 0.024 sec [all/0/rel 19886 (0,20) @channel_id] [lj] test
    

    This log format is as follows:

    [query-date] query-time [match-mode/filters-count/sort-mode
        total-matches (offset,limit) @groupby-attr] [index-name] query
    

    Match mode can take one of the following values:

    • "all" for SPH_MATCH_ALL mode;

    • "any" for SPH_MATCH_ANY mode;

    • "phr" for SPH_MATCH_PHRASE mode;

    • "bool" for SPH_MATCH_BOOLEAN mode;

    • "ext" for SPH_MATCH_EXTENDED mode;

    • "ext2" for SPH_MATCH_EXTENDED2 mode;

    • "scan" if the full scan mode was used, either by being specified with SPH_MATCH_FULLSCAN, or if the query was empty (as documented under Matching Modes)

    Sort mode can take one of the following values:

    • "rel" for SPH_SORT_RELEVANCE mode;

    • "attr-" for SPH_SORT_ATTR_DESC mode;

    • "attr+" for SPH_SORT_ATTR_ASC mode;

    • "tsegs" for SPH_SORT_TIME_SEGMENTS mode;

    • "ext" for SPH_SORT_EXTENDED mode.

    Additionally, if searchd was started with --iostats, there will be a block of data right after the list of searched index(es).

    A query log entry might take the form of:

    [Fri Jun 29 21:17:58 2007] 0.004 sec [all/0/rel 35254 (0,20)] [lj]
       [ios=6 kb=111.1 ms=0.5] test
    

    This additional block is information regarding the I/O operations performed during the search: the number of file I/O operations carried out, the amount of data in kilobytes read from the index files, and the time spent on I/O operations (although there is a background processing component, the bulk of this time is the I/O operation time).

    5.9.2. SphinxQL log format

    This is a new log format introduced in 2.0.1-beta, with the goals being to log everything and then some, and to do so in a format that is easy to automate (for instance, to automatically replay). The new format can either be enabled via the query_log_format directive in the configuration file, or switched back and forth on the fly with the SET GLOBAL query_log_format=... statement via SphinxQL. In the new format, the example from the previous section would look as follows. (Wrapped below for readability, but with just one query per line in the actual log.)

    /* Fri Jun 29 21:17:58.609 2007 2011 conn 2 wall 0.004 found 35254 */
    SELECT * FROM lj WHERE MATCH('test') OPTION ranker=proximity;
    
    /* Fri Jun 29 21:20:34 2007.555 conn 3 wall 0.024 found 19886 */
    SELECT * FROM lj WHERE MATCH('test') GROUP BY channel_id
    OPTION ranker=proximity;
    

    Note that all requests would be logged in this format, including those sent via SphinxAPI and SphinxSE, not just those sent via SphinxQL. Also note that this kind of logging works only with plain log files and will not work if you use 'syslog' for logging.

    The features of SphinxQL log format compared to the default text one are as follows.

    • All request types should be logged. (This is still work in progress.)

    • Full statement data will be logged where possible.

    • Errors and warnings are logged.

    • The log should be automatically replayable via SphinxQL.

    • Additional performance counters (currently, per-agent distributed query times) are logged.

    Every request (whether via SphinxAPI or SphinxQL) must result in exactly one log line. All request types, including INSERT, CALL SNIPPETS, etc. will eventually get logged (though as of the time of this writing, that is a work in progress). Every log line must be a valid SphinxQL statement that reconstructs the full request, except if the logged request is too big and needs shortening for performance reasons. Additional messages, counters, etc. can be logged in the comments section after the request.

    5.10. MySQL protocol support and SphinxQL

    Starting with version 0.9.9-rc2, Sphinx searchd daemon supports MySQL binary network protocol and can be accessed with regular MySQL API. For instance, 'mysql' CLI client program works well. Here's an example of querying Sphinx using MySQL client:

    $ mysql -P 9306
    Welcome to the MySQL monitor.  Commands end with ; or \g.
    Your MySQL connection id is 1
    Server version: 0.9.9-dev (r1734)
    
    Type 'help;' or '\h' for help. Type '\c' to clear the buffer.
    
    mysql> SELECT * FROM test1 WHERE MATCH('test') 
        -> ORDER BY group_id ASC OPTION ranker=bm25;
    +------+--------+----------+------------+
    | id   | weight | group_id | date_added |
    +------+--------+----------+------------+
    |    4 |   1442 |        2 | 1231721236 |
    |    2 |   2421 |      123 | 1231721236 |
    |    1 |   2421 |      456 | 1231721236 |
    +------+--------+----------+------------+
    3 rows in set (0.00 sec)
    

    Note that mysqld was not even running on the test machine. Everything was handled by searchd itself.

    The new access method is supported in addition to the native APIs, which all still work perfectly well. In fact, both access methods can be used at the same time. Also, the native API is still the default access method. MySQL protocol support needs to be additionally configured. This is a matter of a one-line config change, adding a new listener with mysql41 specified as the protocol:

    listen = localhost:9306:mysql41
    

    Just supporting the protocol and not the SQL syntax would be useless, so Sphinx now also supports a subset of SQL that we dubbed SphinxQL. It supports standard querying of all the index types with SELECT, modifying RT indexes with INSERT, REPLACE, and DELETE, and much more. The full SphinxQL reference is available in Chapter 7, SphinxQL reference.

    5.11. Multi-queries

    Multi-queries, or query batches, let you send multiple queries to Sphinx in one go (more formally, one network request).

    Two API methods that implement the multi-query mechanism are AddQuery() and RunQueries(). You can also run multiple queries with SphinxQL, see Section 7.21, “Multi-statement queries”. (In fact, a regular Query() call is internally implemented as a single AddQuery() call immediately followed by a RunQueries() call.) AddQuery() captures the current state of all the query settings set by previous API calls, and memorizes the query. RunQueries() actually sends all the memorized queries, and returns multiple result sets. There are no restrictions on the queries at all, except just a sanity check on the number of queries in a single batch (see Section 11.4.25, “max_batch_queries”).

    Why use multi-queries? Generally, it all boils down to performance. First, by sending requests to searchd in a batch instead of one by one, you always save a bit by doing less network roundtrips. Second, and somewhat more important, sending queries in a batch enables searchd to perform certain internal optimizations. As new types of optimizations are being added over time, it generally makes sense to pack all the queries into batches where possible, so that simply upgrading Sphinx to a new version would automatically enable new optimizations. In the case when there aren't any possible batch optimizations to apply, queries will be processed one by one internally.

    Why (or rather when) not use multi-queries? Multi-queries require all the queries in a batch to be independent, and sometimes they aren't. That is, sometimes query B is based on query A results, and so can only be set up after executing query A. For instance, you might want to display results from a secondary index if and only if there were no results found in a primary index. Or maybe just specify an offset into the 2nd result set based on the number of matches in the 1st result set. In that case, you will have to use separate queries (or separate batches).

    As of 0.9.10, there are two major optimizations to be aware of: common query optimization (available since 0.9.8); and common subtree optimization (available since 0.9.10).

    Common query optimization means that searchd will identify all those queries in a batch where only the sorting and group-by settings differ, and only perform searching once. For instance, if a batch consists of 3 queries, all of them are for "ipod nano", but 1st query requests top-10 results sorted by price, 2nd query groups by vendor ID and requests top-5 vendors sorted by rating, and 3rd query requests max price, full-text search for "ipod nano" will only be performed once, and its results will be reused to build 3 different result sets.

    So-called faceted searching is a particularly important case that benefits from this optimization. Indeed, faceted searching can be implemented by running a number of queries, one to retrieve search results themselves, and a few other ones with same full-text query but different group-by settings to retrieve all the required groups of results (top-3 authors, top-5 vendors, etc). And as long as full-text query and filtering settings stay the same, common query optimization will trigger, and greatly improve performance.
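
    As a sketch of how such a faceted batch might look through the PHP API (the index and attribute names below are hypothetical), the same full-text query is submitted several times with only the group-by settings changed, which lets common query optimization kick in:

    require ( "sphinxapi.php" );
    $cl = new SphinxClient ();
    $cl->SetMatchMode ( SPH_MATCH_EXTENDED );

    // query 1: the search results themselves, no grouping
    $cl->AddQuery ( "ipod nano", "products" );

    // query 2: top authors facet (same query and filters, different group-by)
    $cl->SetGroupBy ( "author_id", SPH_GROUPBY_ATTR, "@count desc" );
    $cl->AddQuery ( "ipod nano", "products" );

    // query 3: top vendors facet
    $cl->SetGroupBy ( "vendor_id", SPH_GROUPBY_ATTR, "@count desc" );
    $cl->AddQuery ( "ipod nano", "products" );

    $results = $cl->RunQueries ();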

    Common subtree optimization is even more interesting. It lets searchd exploit similarities between batched full-text queries. It identifies common full-text query parts (subtrees) in all queries, and caches them between queries. For instance, look at the following query batch:

    barack obama president
    barack obama john mccain
    barack obama speech
    

    There's a common two-word part ("barack obama") that can be computed only once, then cached and shared across the queries. And common subtree optimization does just that. Per-query cache size is strictly controlled by the subtree_docs_cache and subtree_hits_cache directives (so that caching all sixteen gazillions of documents that match "i am" does not exhaust the RAM and instantly kill your server).
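
    A hedged sphinx.conf sketch of these two directives follows; the cache sizes are purely illustrative and should be tuned to your data:

    searchd
    {
        # per-query caches for the common subtree optimization (sizes are illustrative)
        subtree_docs_cache = 4M
        subtree_hits_cache = 8M
    }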

    Here's a code sample (in PHP) that fires the same query in 3 different sorting modes:

    require ( "sphinxapi.php" );
    $cl = new SphinxClient ();
    $cl->SetMatchMode ( SPH_MATCH_EXTENDED );
    
    $cl->SetSortMode ( SPH_SORT_RELEVANCE );
    $cl->AddQuery ( "the", "lj" );
    $cl->SetSortMode ( SPH_SORT_EXTENDED, "published desc" );
    $cl->AddQuery ( "the", "lj" );
    $cl->SetSortMode ( SPH_SORT_EXTENDED, "published asc" );
    $cl->AddQuery ( "the", "lj" );
    $res = $cl->RunQueries();
    

    How to tell whether the queries in the batch were actually optimized? If they were, the respective query log entries will have a "multiplier" field that specifies how many queries were processed together:

    [Sun Jul 12 15:18:17.000 2009] 0.040 sec x3 [ext/0/rel 747541 (0,20)] [lj] the
    [Sun Jul 12 15:18:17.000 2009] 0.040 sec x3 [ext/0/ext 747541 (0,20)] [lj] the
    [Sun Jul 12 15:18:17.000 2009] 0.040 sec x3 [ext/0/ext 747541 (0,20)] [lj] the
    

    Note the "x3" field. It means that this query was optimized and processed in a sub-batch of 3 queries. For reference, this is what the regular log would look like if the queries were not batched:

    [Sun Jul 12 15:18:17.062 2009] 0.059 sec [ext/0/rel 747541 (0,20)] [lj] the
    [Sun Jul 12 15:18:17.156 2009] 0.091 sec [ext/0/ext 747541 (0,20)] [lj] the
    [Sun Jul 12 15:18:17.250 2009] 0.092 sec [ext/0/ext 747541 (0,20)] [lj] the
    

    Note how per-query time in the multi-query case was improved by a factor of 1.5x to 2.3x, depending on the particular sorting mode. In fact, for both common query and common subtree optimizations, there have been reports of 3x and even greater improvements, and that's from production instances, not just synthetic tests.

    5.12. Collations

    Introduced to Sphinx in version 2.0.1-beta to supplement string sorting, collations essentially affect the string attribute comparisons. They specify both the character set encoding and the strategy that Sphinx uses to compare strings when doing ORDER BY or GROUP BY with a string attribute involved.

    String attributes are stored as is when indexing, and no character set or language information is attached to them. That's okay as long as Sphinx only needs to store and return the strings to the calling application verbatim. But when you ask Sphinx to sort by a string value, that request immediately becomes quite ambiguous.

    First, single-byte (ASCII, or ISO-8859-1, or Windows-1251) strings need to be processed differently than UTF-8 ones, which may encode every character with a variable number of bytes. So we need to know the character set type in order to interpret the raw bytes as meaningful characters properly.

    Second, we additionally need to know the language-specific string sorting rules. For instance, when sorting according to US rules in en_US locale, the accented character 'ï' (small letter i with diaeresis) should be placed somewhere after 'z'. However, when sorting with French rules and fr_FR locale in mind, it should be placed between 'i' and 'j'. And some other set of rules might choose to ignore accents at all, allowing 'ï' and 'i' to be mixed arbitrarily.

    Third, but not least, we might need case-sensitive sorting in some scenarios and case-insensitive sorting in some others.

    Collations combine all of the above: the character set, the language rules, and the case sensitivity. Sphinx currently provides the following four collations.

    1. libc_ci

    2. libc_cs

    3. utf8_general_ci

    4. binary

    The first two collations rely on several standard C library (libc) calls and can thus support any locale that is installed on your system. They provide case-insensitive (_ci) and case-sensitive (_cs) comparisons respectively. By default they will use the C locale, effectively resorting to bytewise comparisons. To change that, you need to specify a different available locale using the collation_libc_locale directive. The list of locales available on your system can usually be obtained with the locale command:

    $ locale -a
    C
    en_AG
    en_AU.utf8
    en_BW.utf8
    en_CA.utf8
    en_DK.utf8
    en_GB.utf8
    en_HK.utf8
    en_IE.utf8
    en_IN
    en_NG
    en_NZ.utf8
    en_PH.utf8
    en_SG.utf8
    en_US.utf8
    en_ZA.utf8
    en_ZW.utf8
    es_ES
    fr_FR
    POSIX
    ru_RU.utf8
    ru_UA.utf8
    

    The specific list of the system locales may vary. Consult your OS documentation to install additional needed locales.
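
    For instance, a sphinx.conf fragment that picks a libc-based collation with a specific locale might look as follows; the locale name is illustrative and must actually be installed on the system:

    searchd
    {
        collation_server      = libc_ci
        collation_libc_locale = fr_FR
    }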

    The utf8_general_ci and binary collations are built into Sphinx. The first one is a generic collation for UTF-8 data (without any so-called language tailoring); it should behave similarly to the utf8_general_ci collation in MySQL. The second one is a simple bytewise comparison.

    Collations can be overridden via SphinxQL on a per-session basis using the SET collation_connection statement. All subsequent SphinxQL queries will use this collation. SphinxAPI and SphinxSE queries will use the server default collation, as specified by the collation_server configuration directive. Sphinx currently defaults to the libc_ci collation.
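
    For example, switching the current SphinxQL session to the built-in UTF-8 collation and then sorting on a string attribute could look like this (the index and attribute names are hypothetical):

    mysql> SET collation_connection = utf8_general_ci;
    Query OK, 0 rows affected (0.00 sec)

    mysql> SELECT * FROM test1 ORDER BY title ASC LIMIT 10;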

    Collations should affect all string attribute comparisons, including those within ORDER BY and GROUP BY, so differently ordered or grouped results can be returned depending on the collation chosen.

    5.13. User-defined functions (UDF)

    Starting with 2.0.1-beta, Sphinx supports User-Defined Functions, or UDF for short. They can be loaded and unloaded dynamically into searchd without having to restart the daemon, and used in expressions when searching. UDF features at a glance are as follows.

    • Functions can take integer (both 32-bit and 64-bit), float, string, or MVA arguments.

    • Functions can return integer or float values.

    • Functions can check the argument number, types, and names and raise errors.

    • Only simple functions (that is, non-aggregate ones) are currently supported.

    User-defined functions need your OS to support dynamically loadable libraries (aka shared objects). Most modern OSes are eligible, including Linux, Windows, MacOS, Solaris, BSD and others. (The internal testing has been done on Linux and Windows.) The UDF libraries must reside in the directory specified by the plugin_dir directive, and the server must be configured to use workers = threads mode. Relative paths to the library files are not allowed. Once the library is successfully built and copied to the trusted location, you can then dynamically install and deinstall the functions using the CREATE FUNCTION and DROP FUNCTION statements respectively. A single library can contain multiple functions. A library gets loaded when you first install a function from it, and unloaded when you deinstall all the functions from that library.
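
    For illustration, installing and removing a UDF via SphinxQL might look like this; the library file name and the function name below are illustrative and depend on how you build and name your library:

    mysql> CREATE FUNCTION myfunc RETURNS INT SONAME 'udfexample.so';
    Query OK, 0 rows affected (0.00 sec)

    mysql> DROP FUNCTION myfunc;
    Query OK, 0 rows affected (0.00 sec)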

    The library functions that will implement a UDF visible to SQL statements need to follow C calling convention, and a simple naming convention. Sphinx source distribution provides a sample file, src/udfexample.c, that defines a few simple functions showing how to work with integer, string, and MVA arguments; you can use that one as a foundation for your new functions. It includes the UDF interface header file, src/sphinxudf.h, that defines the required types and structures. sphinxudf.h header is standalone, that is, does not require any other parts of Sphinx source to compile.

    Every function that you intend to use in your SELECT statements requires at least two corresponding C/C++ functions: the initialization call, and the function call itself. You can also optionally define the deinitialization call if your function requires any post-query cleanup. (For instance, if you were allocating any memory in either the initialization call or the function calls.) Function names in SQL are case insensitive; C function names are not, and they need to be all lowercase. Mistakes in the function name prevent UDFs from loading. You also have to pay special attention to the calling convention used when compiling, the list and the types of arguments, and the return type of the main function call. Mistakes in any of these are likely to crash the server, or, in the best case, produce unexpected results. Last but not least, all functions need to be thread-safe.

    Let's assume for the sake of example that your UDF name in SphinxQL will be MYFUNC. The initialization, main, and deinitialization functions would then need to be named as follows and take the following arguments:

    /// initialization function
    /// called once during query initialization
    /// returns 0 on success
    /// returns non-zero and fills error_message buffer on failure
    int myfunc_init ( SPH_UDF_INIT * init, SPH_UDF_ARGS * args,
                      char * error_message );
    
    /// main call function
    /// returns the computed value
    /// writes non-zero value into error_flag to indicate errors
    RETURN_TYPE myfunc ( SPH_UDF_INIT * init, SPH_UDF_ARGS * args,
                         char * error_flag );
    
    /// optional deinitialization function
    /// called once to cleanup once query processing is done
    void myfunc_deinit ( SPH_UDF_INIT * init );
    

    The two mentioned structures, SPH_UDF_INIT and SPH_UDF_ARGS, are defined in the src/sphinxudf.h interface header and documented there. RETURN_TYPE of the main function must be one of the following:

    • int for the functions that return INT.

    • sphinx_int64_t for the functions that return BIGINT.

    • float for the functions that return FLOAT.

    The calling sequence is as follows. myfunc_init() is called once when initializing the query. It can return a non-zero code to indicate a failure; in that case the query is not executed, and the error message from the error_message buffer is returned. Otherwise, myfunc() is called for every row, and myfunc_deinit() is then called when the query ends. myfunc() can indicate an error by writing a non-zero byte value to error_flag; in that case, it will not be called for any subsequent rows, and a default value of 0 will be substituted. Sphinx might or might not choose to terminate such queries early; neither behavior is currently guaranteed.
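
    Below is a minimal, hedged sketch of a MYFUNC implementation in C that simply adds two integer arguments. It follows the conventions described above; the SPH_UDF_ARGS field names used here (arg_count, arg_values) are taken from the bundled src/udfexample.c and src/sphinxudf.h and should be double-checked against your Sphinx version:

    #include "sphinxudf.h"
    #include <string.h>

    /// initialization: check that we got exactly two arguments
    /// (a production UDF should also verify args->arg_types; see src/udfexample.c)
    int myfunc_init ( SPH_UDF_INIT * init, SPH_UDF_ARGS * args, char * error_message )
    {
        if ( args->arg_count!=2 )
        {
            // the error_message buffer is assumed to hold at least 256 bytes here
            strncpy ( error_message, "MYFUNC() requires exactly 2 integer arguments", 255 );
            return 1;
        }
        return 0;
    }

    /// main call: return the sum of the two integer arguments
    int myfunc ( SPH_UDF_INIT * init, SPH_UDF_ARGS * args, char * error_flag )
    {
        return *(int*)args->arg_values[0] + *(int*)args->arg_values[1];
    }

    /// optional cleanup; nothing to do in this sketch
    void myfunc_deinit ( SPH_UDF_INIT * init )
    {
    }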

    Chapter 6. Command line tools reference

    As mentioned elsewhere, Sphinx is not a single program called 'sphinx', but a collection of 4 separate programs which collectively form Sphinx. This section covers these tools and how to use them.

    6.1. indexer command reference

    indexer is the first of the two principal tools that make up Sphinx. Invoked from either the command line directly, or as part of a larger script, indexer is solely responsible for gathering the data that will be searchable.

    The calling syntax for indexer is as follows:

    indexer [OPTIONS] [indexname1 [indexname2 [...]]]
    

    Essentially you list the different possible indexes (that you would later make available to search) in sphinx.conf, so when calling indexer, at a minimum you need to tell it which index (or indexes) you want to index.

    If sphinx.conf contained details on 2 indexes, mybigindex and mysmallindex, you could do the following:

    $ indexer mybigindex
    $ indexer mysmallindex mybigindex
    

    As part of the configuration file, sphinx.conf, you specify one or more indexes for your data. You might call indexer to reindex one of them ad-hoc, or you can tell it to process all indexes; you are not limited to just one or all at once, as you can always pick some combination of the available indexes.

    The majority of the options for indexer are given in the configuration file, however there are some options you might need to specify on the command line as well, as they can affect how the indexing operation is performed. These options are:

    • --config <file> (-c <file> for short) tells indexer to use the given file as its configuration. Normally, it will look for sphinx.conf in the installation directory (e.g. /usr/local/sphinx/etc/sphinx.conf if installed into /usr/local/sphinx), followed by the current directory you are in when calling indexer from the shell. This is most useful in shared environments where the binary files are installed somewhere like /usr/local/sphinx/ but you want to provide users with the ability to make their own custom Sphinx set-ups, or if you want to run multiple instances on a single server. In cases like those you could allow them to create their own sphinx.conf files and pass them to indexer with this option. For example:

      $ indexer --config /home/myuser/sphinx.conf myindex
      

    • --all tells indexer to update every index listed in sphinx.conf, instead of listing individual indexes. This would be useful in small configurations, or cron-type or maintenance jobs where the entire index set will get rebuilt each day, or week, or whatever period is best. Example usage:

      $ indexer --config /home/myuser/sphinx.conf --all
      

    • --rotate is used for rotating indexes. Unless you are in the situation where you can take the search function offline without troubling users, you will almost certainly need to keep search running whilst indexing new documents. --rotate creates a second index, parallel to the first (in the same place, simply including .new in the filenames). Once complete, indexer notifies searchd by sending it the SIGHUP signal, and searchd will attempt to rename the indexes (renaming the existing ones to include .old and renaming the .new to replace them), and then start serving from the newer files. Depending on the setting of seamless_rotate, there may be a slight delay in being able to search the newer indexes. Example usage:

      $ indexer --rotate --all
      

    • --quiet tells indexer not to output anything, unless there is an error. Again, most used for cron-type, or other script jobs where the output is irrelevant or unnecessary, except in the event of some kind of error. Example usage:

      $ indexer --rotate --all --quiet
      

    • --noprogress does not display progress details as they occur; instead, the final status details (such as documents indexed, speed of indexing and so on) are only reported at the completion of indexing. In instances where the script is not being run on a console (or 'tty'), this will be on by default. Example usage:

      $ indexer --rotate --all --noprogress
      

    • --buildstops <outputfile.txt> <N> reviews the index source, as if it were indexing the data, and produces a list of the terms that are being indexed. In other words, it produces a list of all the searchable terms that are becoming part of the index. Note: it does not update the index in question, it simply processes the data 'as if' it were indexing, including running queries defined with sql_query_pre or sql_query_post. outputfile.txt will contain the list of words, one per line, sorted by frequency with most frequent first, and N specifies the maximum number of words that will be listed; if N is large enough to encompass every word in the index, all words will be returned. Such a dictionary list could be used for client application features around "Did you mean..." functionality, usually in conjunction with --buildfreqs, below. Example:

      $ indexer myindex --buildstops word_freq.txt 1000
      

      This would produce a document in the current directory, word_freq.txt, with the 1,000 most common words in 'myindex', ordered by most common first. Note that the file will pertain to the last index indexed when multiple indexes or --all are specified (i.e. the last one listed in the configuration file).

    • --buildfreqs works with --buildstops (and is ignored if --buildstops is not specified). As --buildstops provides the list of words used within the index, --buildfreqs adds the quantity present in the index, which would be useful in establishing whether certain words should be considered stopwords if they are too prevalent. It will also help with developing "Did you mean..." features where you can see how much more common a given word is compared to another, similar one. Example:

      $ indexer myindex --buildstops word_freq.txt 1000 --buildfreqs
      

      This would produce the word_freq.txt as above; however, after each word there would be the number of times it occurred in the index in question.

    • --merge <dst-index> <src-index> is used for physically merging indexes together, for example if you have a main+delta scheme, where the main index rarely changes, but the delta index is rebuilt frequently, and --merge would be used to combine the two. The operation moves from right to left - the contents of src-index get examined and physically combined with the contents of dst-index and the result is left in dst-index. In pseudo-code, it might be expressed as: dst-index += src-index. An example:

      $ indexer --merge main delta --rotate
      

      In the above example, where main is the master, rarely modified index, and delta is the more frequently modified one, you might use the above to call indexer to combine the contents of the delta into the main index and rotate the indexes.

    • --merge-dst-range <attr> <min> <max> runs the given range filter upon merging. Specifically, as the merge is applied to the destination index (as part of --merge, and is ignored if --merge is not specified), indexer will also filter the documents ending up in the destination index, and only documents that pass the given filter will end up in the final index. This could be used, for example, in an index where there is a 'deleted' attribute, where 0 means 'not deleted'. Such an index could be merged with:

      $ indexer --merge main delta --merge-dst-range deleted 0 0
      

      Any documents marked as deleted (value 1) would be removed from the newly-merged destination index. The option can be added several times to the command line, to add successive filters to the merge, all of which must be met in order for a document to become part of the final index.

    • --dump-rows <FILE> dumps rows fetched by SQL source(s) into the specified file, in a MySQL compatible syntax. Resulting dumps are the exact representation of data as received by indexer and help to reproduce indexing-time issues.

    • --verbose guarantees that every row that caused problems indexing (duplicate, zero, or missing document ID; or file field IO issues; etc) will be reported. By default, this option is off, and problem summaries may be reported instead.

    • --sighup-each is useful when you are rebuilding many big indexes, and want each one rotated into searchd as soon as possible. With --sighup-each, indexer will send a SIGHUP signal to searchd after successfully completing the work on each index. (The default behavior is to send a single SIGHUP after all the indexes are built.)

    • --print-queries prints out SQL queries that indexer sends to the database, along with SQL connection and disconnection events. That is useful to diagnose and fix problems with SQL sources.

    6.2. searchd command reference

    searchd is the second of the two principal tools that make up Sphinx. searchd is the part of the system which actually handles searches; it functions as a server and is responsible for receiving queries, processing them and returning a dataset back to the different APIs for client applications.

    Unlike indexer, searchd is not designed to be run from a regular script or by calling it on the command line, but instead as a daemon invoked from init.d (on Unix/Linux type systems) or as a service (on Windows-type systems), so not all of the command line options will always apply; their availability will be build-dependent.

    Calling searchd is simply a case of:

    $ searchd [OPTIONS]
    

    The options available to searchd on all builds are:

    • --help (-h for short) lists all of the parameters that can be called in your particular build of searchd.

    • --config <file> (-c <file> for short) tells searchd to use the given file as its configuration, just as with indexer above.

    • --stop is used to asynchronously stop searchd, using the details of the PID file as specified in the sphinx.conf file, so you may also need to confirm to searchd which configuration file to use with the --config option. NB, calling --stop will also make sure any changes applied to the indexes with UpdateAttributes() will be applied to the index files themselves. Example:

      $ searchd --config /home/myuser/sphinx.conf --stop
      

    • --stopwait is used to synchronously stop searchd. --stop essentially tells the running instance to exit (by sending it a SIGTERM) and then immediately returns. --stopwait will also attempt to wait until the running searchd instance actually finishes the shutdown (eg. saves all the pending attribute changes) and exits. Example:

      $ searchd --config /home/myuser/sphinx.conf --stopwait
      

      Possible exit codes are as follows:

      • 0 on success;

      • 1 if connection to running searchd daemon failed;

      • 2 if daemon reported an error during shutdown;

      • 3 if daemon crashed during shutdown.

    • --status command is used to query running searchd instance status, using the connection details from the (optionally) provided configuration file. It will try to connect to the running instance using the first configured UNIX socket or TCP port. On success, it will query for a number of status and performance counter values and print them. You can use Status() API call to access the very same counters from your application. Examples:

      $ searchd --status
      $ searchd --config /home/myuser/sphinx.conf --status
      

    • --pidfile is used to explicitly state a PID file, where the process information is stored regarding searchd, used for inter-process communications (for example, indexer will need to know the PID to contact searchd for rotating indexes). Normally, searchd would use a PID file if running in regular mode (i.e. not with --console), but it is possible that you will be running it in console mode whilst the index is being updated and rotated, for which a PID file will be needed.

      $ searchd --config /home/myuser/sphinx.conf --pidfile /home/myuser/sphinx.pid
      

    • --console is used to force searchd into console mode; typically it will be running as a conventional server application, and will aim to dump information into the log files (as specified in sphinx.conf). Sometimes though, when debugging issues in the configuration or the daemon itself, or trying to diagnose hard-to-track-down problems, it may be easier to force it to dump information directly to the console/command line from which it is being called. Running in console mode also means that the process will not be forked (so searches are done in sequence) and logs will not be written to. (It should be noted that console mode is not the intended method for running searchd.) You can invoke it as such:

      $ searchd --config /home/myuser/sphinx.conf --console
      

    • --logdebug enables additional debug output in the daemon log. Should only be needed rarely, to assist with debugging issues that could not be easily reproduced on request.

    • --iostats is used in conjunction with the logging options (the query_log will need to have been activated in sphinx.conf) to provide more detailed information on a per-query basis as to the input/output operations carried out in the course of that query, with a slight performance hit and of course bigger logs. Further details are available under the query log format section. You might start searchd thus:

      $ searchd --config /home/myuser/sphinx.conf --iostats
      

    • --cpustats is used to provide actual CPU time report (in addition to wall time) in both query log file (for every given query) and status report (aggregated). It depends on clock_gettime() system call and might therefore be unavailable on certain systems. You might start searchd thus:

      $ searchd --config /home/myuser/sphinx.conf --cpustats
      

    • --port portnumber (-p for short) is used to specify the port that searchd should listen on, usually for debugging purposes. This will usually default to 9312, but sometimes you need to run it on a different port. Specifying it on the command line will override anything specified in the configuration file. The valid range is 0 to 65535, but ports numbered 1024 and below usually require a privileged account in order to run. An example of usage:

      $ searchd --port 9313
      

    • --listen ( address ":" port | port | path ) [ ":" protocol ] (or -l for short) works as --port, but allows you to specify not just a port, but a full IP address and port, or a Unix-domain socket path, that searchd will listen on. In other words, you can specify either an IP address (or hostname) and port number, just a port number, or a Unix socket path. If you specify a port number but not the address, searchd will listen on all network interfaces. A Unix path is identified by a leading slash. As the last param you can also specify a protocol handler (listener) to be used for connections on this socket. Supported protocol values are 'sphinx' (Sphinx 0.9.x API protocol) and 'mysql41' (MySQL protocol used since 4.1 up to at least 5.1).

    • --index <index> (or -i <index> for short) forces this instance of searchd only to serve the specified index. Like --port, above, this is usually for debugging purposes; more long-term changes would generally be applied to the configuration file itself. Example usage:

      $ searchd --index myindex
      

    • --strip-path strips the path names from all the file names referenced from the index (stopwords, wordforms, exceptions, etc). This is useful for picking up indexes built on another machine with possibly different path layouts.

    • --replay-flags=<OPTIONS> switch, added in version 2.0.2-beta, can be used to specify a list of extra binary log replay options. The supported options are:

      • accept-desc-timestamp, ignore descending transaction timestamps and replay such transactions anyway (the default behavior is to exit with an error).

      Example:

      $ searchd --replay-flags=accept-desc-timestamp
      

    There are some options for searchd that are specific to Windows platforms, concerning handling searchd as a service; these are only available on Windows binaries.

    Note that on Windows searchd will default to --console mode, unless you install it as a service.

    • --install installs searchd as a service into the Microsoft Management Console (Control Panel / Administrative Tools / Services). Any other parameters specified on the command line where --install is specified will also become part of the command line on future starts of the service. For example, as part of calling searchd, you will likely also need to specify the configuration file with --config, and you would do that as well as specifying --install. Once called, the usual start/stop facilities will become available via the management console, so any methods you could use for starting, stopping and restarting services would also apply to searchd. Example:

      C:\WINDOWS\system32> C:\Sphinx\bin\searchd.exe --install
         --config C:\Sphinx\sphinx.conf
      

      If you wanted to have the I/O stats every time you started searchd, you would specify its option on the same line as the --install command thus:

      C:\WINDOWS\system32> C:\Sphinx\bin\searchd.exe --install
         --config C:\Sphinx\sphinx.conf --iostats
      

    • --delete removes the service from the Microsoft Management Console and other places where services are registered, after it was previously installed with --install. Note, this does not uninstall the software or delete the indexes. It means the service will not be called from the services systems, and will not be started on the machine's next start. If currently running as a service, the current instance will not be terminated (until the next reboot, or until searchd is called with --stop). If the service was installed with a custom name (with --servicename), the same name will need to be specified with --servicename when calling to uninstall. Example:

      C:\WINDOWS\system32> C:\Sphinx\bin\searchd.exe --delete
      

    • --servicename <name> applies the given name to searchd when installing or deleting the service, as would appear in the Management Console; this will default to searchd, but if being deployed on servers where multiple administrators may log into the system, or a system with multiple searchd instances, a more descriptive name may be applicable. Note that unless combined with --install or --delete, this option does not do anything. Example:

      C:\WINDOWS\system32> C:\Sphinx\bin\searchd.exe --install
         --config C:\Sphinx\sphinx.conf --servicename SphinxSearch
      

    • --ntservice is the option that is passed by the Management Console to searchd to invoke it as a service on Windows platforms. It would not normally be necessary to call this directly; this would normally be called by Windows when the service would be started, although if you wanted to call this as a regular service from the command-line (as the complement to --console) you could do so in theory.

    Last but not least, like every other daemon, searchd supports a number of signals.

    SIGTERM

    Initiates a clean shutdown. New queries will not be handled; but queries that are already started will not be forcibly interrupted.

    SIGHUP

    Initiates index rotation. Depending on the value of the seamless_rotate setting, new queries might be shortly stalled, or clients might receive temporary errors.

    SIGUSR1

    Forces reopen of searchd log and query log files, letting you implement log file rotation.
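
    For example, assuming searchd writes its PID to /usr/local/sphinx/var/searchd.pid (the path is illustrative and depends on your pid_file setting), log reopening and index rotation could be triggered from the shell like this:

    $ kill -SIGUSR1 `cat /usr/local/sphinx/var/searchd.pid`
    $ kill -SIGHUP `cat /usr/local/sphinx/var/searchd.pid`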

    6.3. search command reference

    search is one of the helper tools within the Sphinx package. Whereas searchd is responsible for searches in a server-type environment, search is aimed at testing the index quickly from the command line, without building a framework to make the connection to the server and process its response.

    Note: search is not intended to be deployed as part of a client application; it is strongly recommended you do not write an interface to search instead of searchd, and none of the bundled client APIs support this method. (In any event, search will reload files each time, whereas searchd will cache them in memory for performance.)

    That said, many types of query that you could build in the APIs could also be made with search; however, for very complex searches it may be easier to construct them using a small script and the corresponding API. Additionally, some newer features may be available in the searchd system that have not yet been brought into search.

    The calling syntax for search is as follows:

    search [OPTIONS] word1 [word2 [word3 [...]]]
    

    When calling search, it is not necessary to have searchd running; simply make sure that the account running the search program has read access to the configuration file and the index files.

    The default behaviour is to apply a search for word1 (AND word2 AND word3... as specified) to all fields in all indexes as given in the configuration file. If constructing the equivalent in the API, this would be equivalent to passing SPH_MATCH_ALL to SetMatchMode, and specifying * as the indexes to query as part of Query.

    There are many options available to search. Firstly, the general options:

    • --config <file> (-c <file> for short) tells search to use the given file as its configuration, just as with indexer above.

    • --index <index> (-i <index> for short) tells search to limit searching to the specified index only; normally it would attempt to search all of the physical indexes listed in sphinx.conf, not any distributed ones.

    • --stdin tells search to accept the query from the standard input, rather than the command line. This can be useful for testing purposes whereby you could feed input via pipes and from scripts.

    Options for setting matches:

    • --any (-a for short) changes the matching mode to match any of the words as part of the query (word1 OR word2 OR word3). In the API this would be equivalent to passing SPH_MATCH_ANY to SetMatchMode.

    • --phrase (-p for short) changes the matching mode to match all of the words as part of the query, and do so in the phrase given (not including punctuation). In the API this would be equivalent to passing SPH_MATCH_PHRASE to SetMatchMode.

    • --boolean (-b for short) changes the matching mode to Boolean matching. Note that if you use Boolean syntax on the command line, you may need to escape the symbols (with a backslash) to stop the shell/command line processor from interpreting them; for example, ampersands need to be escaped on a Unix/Linux system so the shell does not send the search process to the background. This can also be avoided by using --stdin, described above. In the API this would be equivalent to passing SPH_MATCH_BOOLEAN to SetMatchMode.

    • --ext (-e for short) changes the matching mode to extended matching which provides various text querying operators. In the API this would be equivalent to passing SPH_MATCH_EXTENDED to SetMatchMode.

    • --filter <attr> <v> (-f <attr> <v> for short) filters the results so that only documents where the given attribute (attr) matches the given value (v) are returned. For example, --filter deleted 0 only matches documents with an attribute called 'deleted' whose value is 0. You can also add multiple filters on the command line by specifying --filter multiple times; however, if you apply a second filter to the same attribute, it will override the first defined filter.

    Options for handling the results:

    • --limit <count> (-l count for short) limits the total number of matches returned to the number given. If a 'group' is specified, this will be the number of grouped results. This defaults to 20 results if not specified (as do the APIs).

    • --offset <count> (-o <count> for short) offsets the result list by the number of places set by the count; this would be used for pagination through results, where if you have 20 results per 'page', the second page would begin at offset 20, the third page at offset 40, etc.

    • --group <attr> (-g <attr> for short) specifies that results should be grouped together based on the attribute specified. Like the GROUP BY clause in SQL, it will combine all results where the attribute given matches, and returns a set of results where each returned result is the best from each group. Unless otherwise specified, this will be the best match on relevance.

    • --groupsort <expr> (-gs <expr> for short) instructs that when results are grouped with --group, the expression given in <expr> shall determine the order of the groups. Note, this does not specify which is the best item within the group, only the order in which the groups themselves shall be returned.

    • --sortby <clause> (-s <clause> for short) specifies that results should be sorted in the order listed in <clause>. This allows you to specify the order you wish results to be presented in, ordering by different columns. For example, you could say --sortby "@weight DESC entrytime DESC" to sort entries first by weight (or relevance) and where two or more entries have the same weight, to then sort by the time with the highest time (newest) first. You will usually need to put the items in quotes (--sortby "@weight DESC") or use commas (--sortby @weight,DESC) to avoid the items being treated separately. Additionally, like the regular sorting modes, if --group (grouping) is being used, this will state how to establish the best match within each group.

    • --sortexpr expr (-S expr for short) specifies that the search results should be presented in an order determined by an arithmetic expression, stated in expr. For example: --sortexpr "@weight + ( user_karma + ln(pageviews) )*0.1" (again noting that this will have to be quoted to avoid the shell dealing with the asterisk). Extended sort mode is discussed in more detail under the SPH_SORT_EXTENDED entry under the Sorting modes section of the manual.

    • --sort=date specifies that the results should be sorted by descending (i.e. most recent first) date. This requires that there is an attribute in the index that is set as a timestamp.

    • --rsort=date specifies that the results should be sorted by ascending (i.e. oldest first) date. This requires that there is an attribute in the index that is set as a timestamp.

    • --sort=ts specifies that the results should be sorted by timestamp in groups; it will return all of the documents whose timestamp is within the last hour, then sorted within that bracket for relevance. After, it would return the documents from the last day, sorted by relevance, then the last week and then the last month. It is discussed in more detail under the SPH_SORT_TIME_SEGMENTS entry under the Sorting modes section of the manual.

    Other options:

    • --noinfo (-q for short) instructs search not to look up data in your SQL database. Specifically, for debugging with MySQL and search, you can provide it with a query to look up the full article based on the returned document ID. It is explained in more detail under the sql_query_info directive.
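
    Putting several of the above options together, an ad-hoc test run might look like this (the configuration path, index name, and attribute are illustrative):

    $ search --config /home/myuser/sphinx.conf --index myindex --any ipod nano
    $ search --config /home/myuser/sphinx.conf --index myindex --filter deleted 0 --limit 10 ipod nano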

    6.4. spelldump command reference

    spelldump is one of the helper tools within the Sphinx package.

    It is used to extract the contents of a dictionary file that uses ispell or MySpell format, which can help build word lists for wordforms - all of the possible forms are pre-built for you.

    Its general usage is:

    spelldump [options] <dictionary> <affix> [result] [locale-name]
    

    The two main parameters are the dictionary's main file and its affix file; usually these are named as [language-prefix].dict and [language-prefix].aff and will be available with most common Linux distributions, as well as various places online.

    [result] specifies where the dictionary data should be output to, and [locale-name] additionally specifies the locale details you wish to use.

    There is an additional option, -c [file], which specifies a file for case conversion details.

    Examples of its usage are:

    spelldump en.dict en.aff
    spelldump ru.dict ru.aff ru.txt ru_RU.CP1251
    spelldump ru.dict ru.aff ru.txt .1251
    

    The results file will contain a list of all the words in the dictionary in alphabetical order, output in the format of a wordforms file, which you can use to customise for your specific circumstances. An example of the result file:

    zone > zone
    zoned > zoned
    zoning > zoning
    

    6.5. indextool command reference

    indextool is one of the helper tools within the Sphinx package, introduced in version 0.9.9-rc2. It is used to dump miscellaneous debug information about the physical index. (Additional functionality such as index verification is planned in the future, hence the indextool name rather than just indexdump.) Its general usage is:

    indextool <command> [options]
    

    The only currently available option applies to all commands and lets you specify the configuration file:

    • --config <file> (-c <file> for short) overrides the built-in config file names.

    The commands are as follows:

    • --dumpheader FILENAME.sph quickly dumps the provided index header file without touching any other index files or even the configuration file. The report provides a breakdown of all the index settings, in particular the entire attribute and field list. Prior to 0.9.9-rc2, this command was present in the CLI search utility.

    • --dumpconfig FILENAME.sph dumps the index definition from the given index header file in (almost) compliant sphinx.conf file format. Added in version 2.0.1-beta.

    • --dumpheader INDEXNAME dumps the index header by index name, looking up the header path in the configuration file.

    • --dumpdocids INDEXNAME dumps document IDs by index name. It takes the data from the attribute (.spa) file and therefore requires docinfo=extern to work.

    • --dumphitlist INDEXNAME KEYWORD dumps all the hits (occurrences) of a given keyword in a given index, with the keyword specified as text.

    • --dumphitlist INDEXNAME --wordid ID dumps all the hits (occurrences) of a given keyword in a given index, with the keyword specified as an internal numeric ID.

    • --htmlstrip INDEXNAME filters stdin using HTML stripper settings for a given index, and prints the filtering results to stdout. Note that the settings will be taken from sphinx.conf, and not the index header.

    • --check INDEXNAME checks the index data files for consistency errors that might be introduced either by bugs in indexer and/or hardware faults. Starting with version 2.0.2-beta, --check also works on RT indexes, but checks disk chunks only.

    • --strip-path strips the path names from all the file names referenced from the index (stopwords, wordforms, exceptions, etc). This is useful for checking indexes built on another machine with possibly different path layouts.
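
    For example, typical invocations combining a command with an explicit configuration file might be (the configuration path and index name are illustrative):

    $ indextool --dumpheader myindex --config /home/myuser/sphinx.conf
    $ indextool --check myindex --config /home/myuser/sphinx.conf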

    Chapter 7. SphinxQL reference

    SphinxQL is our SQL dialect that exposes all of the search daemon functionality using a standard SQL syntax with a few Sphinx-specific extensions. Everything available via the SphinxAPI is also available via SphinxQL, but not vice versa; for instance, writes into RT indexes are only available via SphinxQL. This chapter documents the supported SphinxQL statement syntax.

    7.1. SELECT syntax

    SELECT
        select_expr [, select_expr ...]
        FROM index [, index2 ...]
        [WHERE where_condition]
        [GROUP BY {col_name | expr_alias}]
        [ORDER BY {col_name | expr_alias} {ASC | DESC} [, ...]]
        [WITHIN GROUP ORDER BY {col_name | expr_alias} {ASC | DESC}]
        [LIMIT offset, row_count]
        [OPTION opt_name = opt_value [, ...]]
    

    SELECT statement was introduced in version 0.9.9-rc2. Its syntax is based upon regular SQL but adds several Sphinx-specific extensions and has a few omissions (such as (currently) missing support for JOINs). Specifically,

    • Column list clause. Column names, arbitrary expressions, and star ('*') are all allowed (i.e. SELECT @id, group_id*123+456 AS expr1 FROM test1 will work). Unlike in regular SQL, all computed expressions must be aliased with a valid identifier. Starting with version 2.0.1-beta, AS is optional. Special names such as @id and @weight should currently be used with a leading at-sign. This at-sign requirement will be lifted in the future.

    • FROM clause. FROM clause should contain the list of indexes to search through. Unlike in regular SQL, comma means enumeration of full-text indexes as in Query() API call rather than JOIN.

    • WHERE clause. This clause will map both to fulltext query and filters. Comparison operators (=, !=, <, >, <=, >=), IN, AND, NOT, and BETWEEN are all supported and map directly to filters. OR is not supported yet but will be in the future. MATCH('query') is supported and maps to fulltext query. Query will be interpreted according to full-text query language rules. There must be at most one MATCH() in the clause. Starting with version 2.0.1-beta, {col_name | expr_alias} [NOT] IN @uservar condition syntax is supported. (Refer to Section 7.7, “SET syntax” for a discussion of global user variables.)

    • GROUP BY clause. Currently only supports grouping by a single column. The column however can be a computed expression:

      SELECT *, group_id*1000+article_type AS gkey FROM example GROUP BY gkey
      

      Aggregate functions (AVG(), MIN(), MAX(), SUM()) in column list clause are supported. Arguments to aggregate functions can be either plain attributes or arbitrary expressions. COUNT(*) is implicitly supported as using GROUP BY will add @count column to result set. Explicit support might be added in the future. COUNT(DISTINCT attr) is supported. Currently there can be at most one COUNT(DISTINCT) per query and an argument needs to be an attribute. Both current restrictions on COUNT(DISTINCT) might be lifted in the future.

      SELECT *, AVG(price) AS avgprice, COUNT(DISTINCT storeid)
      FROM products
      WHERE MATCH('ipod')
      GROUP BY vendorid
      

      Starting with 2.0.1-beta, GROUP BY on a string attribute is supported, with respect for current collation (see Section 5.12, “Collations”).

    • WITHIN GROUP ORDER BY clause. This is a Sphinx specific extension that lets you control how the best row within a group will be selected. The syntax matches that of the regular ORDER BY clause:

      SELECT *, INTERVAL(posted,NOW()-7*86400,NOW()-86400) AS timeseg
      FROM example WHERE MATCH('my search query')
      GROUP BY siteid
      WITHIN GROUP ORDER BY @weight DESC
      ORDER BY timeseg DESC, @weight DESC
      

      Starting with 2.0.1-beta, WITHIN GROUP ORDER BY on a string attribute is supported, with respect for current collation (see Section 5.12, “Collations”).

    • ORDER BY clause. Unlike in regular SQL, only column names (not expressions) are allowed and explicit ASC and DESC are required. The columns however can be computed expressions:

      SELECT *, @weight*10+docboost AS skey FROM example ORDER BY skey
      

      Starting with 2.0.1-beta, ORDER BY on a string attribute is supported, with respect for current collation (see Section 5.12, “Collations”).

      Starting with 2.0.2-beta, ORDER BY RAND() syntax is supported. Note that this syntax is actually going to randomize the weight values and then order matches by those randomized weights.

    • LIMIT clause. Both LIMIT N and LIMIT M,N forms are supported. Unlike in regular SQL (but like in Sphinx API), an implicit LIMIT 0,20 is present by default.

    • OPTION clause. This is a Sphinx specific extension that lets you control a number of per-query options. The syntax is:

      OPTION <optionname>=<value> [ , ... ]
      

      Supported options and respectively allowed values are:

      • 'ranker' - any of 'proximity_bm25', 'bm25', 'none', 'wordcount', 'proximity', 'matchany', or 'fieldmask'

      • 'max_matches' - integer (per-query max matches value)

      • 'cutoff' - integer (max found matches threshold)

      • 'max_query_time' - integer (max search time threshold, msec)

      • 'retry_count' - integer (distributed retries count)

      • 'retry_delay' - integer (distributed retry delay, msec)

      • 'field_weights' - a named integer list (per-field user weights for ranking)

      • 'index_weights' - a named integer list (per-index user weights for ranking)

      • 'reverse_scan' - 0 or 1, lets you control the order in which full-scan query processes the rows

      • 'comment' - string, user comment that gets copied to a query log file

      Example:

      SELECT * FROM test WHERE MATCH('@title hello @body world')
      OPTION ranker=bm25, max_matches=3000,
          field_weights=(title=10, body=3)
      

    7.2. SHOW META syntax

    SHOW META
    

    SHOW META shows additional meta-information about the latest query such as query time and keyword statistics:

    mysql> SELECT * FROM test1 WHERE MATCH('test|one|two');
    +------+--------+----------+------------+
    | id   | weight | group_id | date_added |
    +------+--------+----------+------------+
    |    1 |   3563 |      456 | 1231721236 |
    |    2 |   2563 |      123 | 1231721236 |
    |    4 |   1480 |        2 | 1231721236 |
    +------+--------+----------+------------+
    3 rows in set (0.01 sec)
    
    mysql> SHOW META;
    +---------------+-------+
    | Variable_name | Value |
    +---------------+-------+
    | total         | 3     |
    | total_found   | 3     |
    | time          | 0.005 |
    | keyword[0]    | test  |
    | docs[0]       | 3     |
    | hits[0]       | 5     |
    | keyword[1]    | one   |
    | docs[1]       | 1     |
    | hits[1]       | 2     |
    | keyword[2]    | two   |
    | docs[2]       | 1     |
    | hits[2]       | 2     |
    +---------------+-------+
    12 rows in set (0.00 sec)
    

    7.3. SHOW WARNINGS syntax

    SHOW WARNINGS
    

    SHOW WARNINGS statement, introduced in version 0.9.9-rc2, can be used to retrieve the warning produced by the latest query. The error message will be returned along with the query itself:

    mysql> SELECT * FROM test1 WHERE MATCH('@@title hello') \G
    ERROR 1064 (42000): index test1: syntax error, unexpected TOK_FIELDLIMIT
    near '@title hello'
    
    mysql> SELECT * FROM test1 WHERE MATCH('@title -hello') \G
    ERROR 1064 (42000): index test1: query is non-computable (single NOT operator)
    
    mysql> SELECT * FROM test1 WHERE MATCH('"test doc"/3') \G
    *************************** 1. row ***************************
            id: 4
        weight: 2500
      group_id: 2
    date_added: 1231721236
    1 row in set, 1 warning (0.00 sec)
    
    mysql> SHOW WARNINGS \G
    *************************** 1. row ***************************
      Level: warning
       Code: 1000
    Message: quorum threshold too high (words=2, thresh=3); replacing quorum operator
             with AND operator
    1 row in set (0.00 sec)
    

    7.4. SHOW STATUS syntax

    SHOW STATUS, introduced in version 0.9.9-rc2, displays a number of useful performance counters. IO and CPU counters will only be available if searchd was started with --iostats and --cpustats switches respectively.

    mysql> SHOW STATUS;
    +--------------------+-------+
    | Variable_name      | Value |
    +--------------------+-------+
    | uptime             | 216   |
    | connections        | 3     |
    | maxed_out          | 0     |
    | command_search     | 0     |
    | command_excerpt    | 0     |
    | command_update     | 0     |
    | command_keywords   | 0     |
    | command_persist    | 0     |
    | command_status     | 0     |
    | agent_connect      | 0     |
    | agent_retry        | 0     |
    | queries            | 10    |
    | dist_queries       | 0     |
    | query_wall         | 0.075 |
    | query_cpu          | OFF   |
    | dist_wall          | 0.000 |
    | dist_local         | 0.000 |
    | dist_wait          | 0.000 |
    | query_reads        | OFF   |
    | query_readkb       | OFF   |
    | query_readtime     | OFF   |
    | avg_query_wall     | 0.007 |
    | avg_query_cpu      | OFF   |
    | avg_dist_wall      | 0.000 |
    | avg_dist_local     | 0.000 |
    | avg_dist_wait      | 0.000 |
    | avg_query_reads    | OFF   |
    | avg_query_readkb   | OFF   |
    | avg_query_readtime | OFF   |
    +--------------------+-------+
    29 rows in set (0.00 sec)
    

    7.5. INSERT and REPLACE syntax

    {INSERT | REPLACE} INTO index [(column, ...)]
    	VALUES (value, ...)
    	[, (...)]
    

    INSERT statement, introduced in version 1.10-beta, is only supported for RT indexes. It inserts new rows (documents) into an existing index, with the provided column values.

    ID column must be present in all cases. Rows with duplicate IDs will not be overwritten by INSERT; use REPLACE to do that.

    index is the name of RT index into which the new row(s) should be inserted. The optional column names list lets you only explicitly specify values for some of the columns present in the index. All the other columns will be filled with their default values (0 for scalar types, empty string for text types).

    Expressions are not currently supported in INSERT and values should be explicitly specified.

    Multiple rows can be inserted using a single INSERT statement by providing several comma-separated, parens-enclosed lists of row values.
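
    For illustration, assuming an RT index named rt with title and content full-text fields and a group_id attribute (all names are hypothetical):

    INSERT INTO rt ( id, title, content, group_id )
    VALUES ( 1, 'my first document', 'document body text', 10 ),
           ( 2, 'my second document', 'more body text', 20 );

    REPLACE INTO rt ( id, title, content, group_id )
    VALUES ( 1, 'my first document, updated', 'updated body text', 10 );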

    7.6. DELETE syntax

    DELETE FROM index WHERE {id = value | id IN (val1 [, val2 [, ...]])}
    

    DELETE statement, introduced in version 1.10-beta, is only supported for RT indexes. It deletes existing rows (documents) from an existing index based on ID.

    index is the name of RT index from which the row should be deleted. value is the row ID to be deleted. Support for batch id IN (2,3,5) syntax was added in version 2.0.1-beta.

    Additional types of WHERE conditions (such as conditions on attributes, etc) are planned, but not supported yet as of 1.10-beta.
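
    Examples, assuming the same hypothetical RT index named rt:

    DELETE FROM rt WHERE id = 1;
    DELETE FROM rt WHERE id IN (2, 3, 5);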

    7.7. SET syntax

    SET [GLOBAL] server_variable_name = value
    SET GLOBAL @user_variable_name = (int_val1 [, int_val2, ...])
    SET NAMES value
    SET @@dummy_variable = ignored_value
    

    SET statement, introduced in version 1.10-beta, modifies a variable value. The variable names are case-insensitive. No variable value changes survive server restart.

    SET NAMES statement and SET @@variable_name syntax, both introduced in version 2.0.2-beta, do nothing. They were implemented to maintain compatibility with 3rd party MySQL client libraries, connectors, and frameworks that may need to run this statement when connecting.

    There are the following classes of the variables:

    1. per-session server variable (1.10-beta and above)

    2. global server variable (2.0.1-beta and above)

    3. global user variable (2.0.1-beta and above)

    Global user variables are shared between concurrent sessions. Currently, the only supported value type is the list of BIGINTs, and these variables can only be used along with IN() for filtering purposes. The intended usage scenario is uploading huge lists of values to searchd (once) and reusing them (many times) later, saving on network overheads. Example:

    // in session 1
    mysql> SET GLOBAL @myfilter=(2,3,5,7,11,13);
    Query OK, 0 rows affected (0.00 sec)
    
    // later in session 2
    mysql> SELECT * FROM test1 WHERE group_id IN @myfilter;
    +------+--------+----------+------------+-----------------+------+
    | id   | weight | group_id | date_added | title           | tag  |
    +------+--------+----------+------------+-----------------+------+
    |    3 |      1 |        2 | 1299338153 | another doc     | 15   |
    |    4 |      1 |        2 | 1299338153 | doc number four | 7,40 |
    +------+--------+----------+------------+-----------------+------+
    2 rows in set (0.02 sec)
    

    Per-session and global server variables affect certain server settings in the respective scope. Known per-session server variables are:

    AUTOCOMMIT = {0 | 1}

    Whether any data modification statement should be implicitly wrapped by BEGIN and COMMIT. Introduced in version 1.10-beta.

    COLLATION_CONNECTION = collation_name

    Selects the collation to be used for ORDER BY or GROUP BY on string values in the subsequent queries. Refer to Section 5.12, “Collations” for a list of known collation names. Introduced in version 2.0.1-beta.

    CHARACTER_SET_RESULTS = charset_name

    Does nothing; a placeholder to support frameworks, clients, and connectors that attempt to automatically enforce a charset when connecting to a Sphinx server. Introduced in version 2.0.1-beta.

    SQL_AUTO_IS_NULL = value

    Does nothing; a placeholder to support frameworks, clients, and connectors that attempt to automatically enforce a charset when connecting to a Sphinx server. Introduced in version 2.0.2-beta.

    SQL_MODE = value

    Does nothing; a placeholder to support frameworks, clients, and connectors that attempt to automatically enforce a charset when connecting to a Sphinx server. Introduced in version 2.0.2-beta.

    Known global server variables are:

    QUERY_LOG_FORMAT = {plain | sphinxql}

    Changes the current log format. Introduced in version 2.0.1-beta.

    LOG_LEVEL = {info | debug | debugv | debugvv}

    Changes the current log verboseness level. Introduced in version 2.0.1-beta.

    Examples:

    mysql> SET autocommit=0;
    Query OK, 0 rows affected (0.00 sec)
    
    mysql> SET GLOBAL query_log_format=sphinxql;
    Query OK, 0 rows affected (0.00 sec)
    

    7.8. SET TRANSACTION syntax

    SET TRANSACTION ISOLATION LEVEL { READ UNCOMMITTED
    	| READ COMMITTED
    	| REPEATABLE READ
    	| SERIALIZABLE }
    

    SET TRANSACTION statement, introduced in version 2.0.2-beta, does nothing. It was implemented to maintain compatibility with 3rd party MySQL client libraries, connectors, and frameworks that may need to run this statement when connecting.

    Example:

    mysql> SET TRANSACTION ISOLATION LEVEL READ UNCOMMITTED;
    Query OK, 0 rows affected (0.00 sec)
    

    7.9. BEGIN, COMMIT, and ROLLBACK syntax

    START TRANSACTION | BEGIN
    COMMIT
    ROLLBACK
    SET AUTOCOMMIT = {0 | 1}
    

    BEGIN, COMMIT, and ROLLBACK statements were introduced in version 1.10-beta. BEGIN statement (or its START TRANSACTION alias) forcibly commits pending transaction, if any, and begins a new one. COMMIT statement commits the current transaction, making all its changes permanent. ROLLBACK statement rolls back the current transaction, canceling all its changes. SET AUTOCOMMIT controls the autocommit mode in the active session.

    AUTOCOMMIT is set to 1 by default, meaning that every statement that performs any changes on any index is implicitly wrapped in BEGIN and COMMIT.

    Transactions are limited to a single RT index, and also limited in size. They are atomic, consistent, overly isolated, and durable. Overly isolated means that the changes are not only invisible to the concurrent transactions but even to the current session itself.
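
    For illustration, here is a minimal console session against an RT index such as the rt index described in Section 7.13, “DESCRIBE syntax” (row counts and timings are illustrative):

    mysql> BEGIN;
    Query OK, 0 rows affected (0.00 sec)
    
    mysql> INSERT INTO rt (id, title, content, gid) VALUES (10, 'hello', 'hello world', 1);
    Query OK, 1 rows affected (0.00 sec)
    
    mysql> COMMIT;
    Query OK, 0 rows affected (0.00 sec)
    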

    7.10. CALL SNIPPETS syntax

    CALL SNIPPETS(data, index, query[, opt_value AS opt_name[, ...]])
    

    CALL SNIPPETS statement, introduced in version 1.10-beta, builds a snippet from provided data and query, using specified index settings.

    data is the source data to extract a snippet from. It can be a single string, or a list of strings enclosed in parentheses. index is the name of the index from which to take the text processing settings. query is the full-text query to build snippets for. Additional options are documented in Section 8.7.1, “BuildExcerpts”. Usage example:

    CALL SNIPPETS('this is my document text', 'test1', 'hello world',
        5 AS around, 200 AS limit);
    CALL SNIPPETS(('this is my document text','this is my another text'), 'test1', 'hello world',
        5 AS around, 200 AS limit);
    CALL SNIPPETS(('data/doc1.txt','data/doc2.txt','/home/sphinx/doc3.txt'), 'test1', 'hello world',
        5 AS around, 200 AS limit, 1 AS load_files);
    

    7.11. CALL KEYWORDS syntax

    CALL KEYWORDS(text, index, [hits])
    

    CALL KEYWORDS statement, introduced in version 1.10-beta, splits text into particular keywords. It returns tokenized and normalized forms of the keywords, and, optionally, keyword statistics.

    text is the text to break down to keywords. index is the name of the index from which to take the text processing settings. hits is an optional boolean parameter that specifies whether to return document and hit occurrence statistics.
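
    Usage examples, following the same pattern as the CALL SNIPPETS examples above (the test1 index name is taken from the earlier samples):

    CALL KEYWORDS('hello test one', 'test1');
    CALL KEYWORDS('hello test one', 'test1', 1);
    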

    7.12. SHOW TABLES syntax

    SHOW TABLES
    

    SHOW TABLES statement, introduced in version 2.0.1-beta, enumerates all currently active indexes along with their types. As of 2.0.1-beta, the existing index types are local, distributed, and rt. Example:

    mysql> SHOW TABLES;
    +-------+-------------+
    | Index | Type        |
    +-------+-------------+
    | dist1 | distributed |
    | rt    | rt          |
    | test1 | local       |
    | test2 | local       |
    +-------+-------------+
    4 rows in set (0.00 sec)
    

    7.13. DESCRIBE syntax

    {DESC | DESCRIBE} index
    

    DESCRIBE statement, introduced in version 2.0.1-beta, lists index columns and their associated types. Columns are document ID, full-text fields, and attributes. The order matches that in which fields and attributes are expected by INSERT and REPLACE statements. As of 2.0.1-beta, column types are field, integer, timestamp, ordinal, bool, float, bigint, string, and mva. The ID column will be typed either integer or bigint depending on whether the binaries were built with 32-bit or 64-bit document ID support. Example:

    mysql> DESC rt;
    +---------+---------+
    | Field   | Type    |
    +---------+---------+
    | id      | integer |
    | title   | field   |
    | content | field   |
    | gid     | integer |
    +---------+---------+
    4 rows in set (0.00 sec)
    

    7.14. CREATE FUNCTION syntax

    CREATE FUNCTION udf_name
    	RETURNS {INT | BIGINT | FLOAT}
    	SONAME 'udf_lib_file'
    

    CREATE FUNCTION statement, introduced in version 2.0.1-beta, installs a user-defined function (UDF) with the given name and type from the given library file. The library file must reside in a trusted plugin_dir directory. On success, the function is available for use in all subsequent queries that the server receives. Example:

    mysql> CREATE FUNCTION avgmva RETURNS INT SONAME 'udfexample.dll';
    Query OK, 0 rows affected (0.03 sec)
    
    mysql> SELECT *, AVGMVA(tag) AS q from test1;
    +------+--------+---------+-----------+
    | id   | weight | tag     | q         |
    +------+--------+---------+-----------+
    |    1 |      1 | 1,3,5,7 | 4.000000  |
    |    2 |      1 | 2,4,6   | 4.000000  |
    |    3 |      1 | 15      | 15.000000 |
    |    4 |      1 | 7,40    | 23.500000 |
    +------+--------+---------+-----------+
    

    7.15. DROP FUNCTION syntax

    DROP FUNCTION udf_name
    

    DROP FUNCTION statement, introduced in version 2.0.1-beta, uninstalls a user-defined function (UDF) with the given name. On success, the function is no longer available for use in subsequent queries. Pending concurrent queries will not be affected and the library unload, if necessary, will be postponed until those queries complete. Example:

    mysql> DROP FUNCTION avgmva;
    Query OK, 0 rows affected (0.00 sec)
    

    7.16. SHOW VARIABLES syntax

    SHOW [{GLOBAL | SESSION}] VARIABLES
    

    SHOW VARIABLES statement was added in version 2.0.1-beta to improve compatibility with 3rd party MySQL connectors and frameworks that automatically execute this statement.

    In version 2.0.1-beta, it did nothing.

    Starting from version 2.0.2-beta, it returns the current values of a few server-wide variables. Also, support for GLOBAL and SESSION clauses was added.

    mysql> SHOW GLOBAL VARIABLES;
    +----------------------+----------+
    | Variable_name        | Value    |
    +----------------------+----------+
    | autocommit           | 1        |
    | collation_connection | libc_ci  |
    | query_log_format     | sphinxql |
    | log_level            | info     |
    +----------------------+----------+
    4 rows in set (0.00 sec)
    

    7.17. SHOW COLLATION syntax

    SHOW COLLATION
    

    Added in version 2.0.1-beta, this is currently a placeholder query that does nothing and reports success. This is done to keep compatibility with frameworks and connectors that automatically execute this statement.

    mysql> SHOW COLLATION;
    Query OK, 0 rows affected (0.00 sec)
    

    7.18. UPDATE syntax

    UPDATE index SET col1 = newval1 [, ...] WHERE where_condition
    

    UPDATE statement was added in version 2.0.1-beta. Multiple attributes and values can be specified in a single statement. Both RT and disk indexes are supported.

    As of version 2.0.2-beta, all attribute types (int, bigint, float, MVA) except strings can be updated. Previously, some of the types were not supported.

    where_condition (also added in 2.0.2-beta) has the same syntax as in the SELECT statement (see Section 7.1, “SELECT syntax” for details).

    When out-of-range values are assigned to 32-bit attributes, they will be trimmed to their lower 32 bits without a warning. For example, if you try to update the 32-bit unsigned int with a value of 4294967297, the value of 1 will actually be stored, because the lower 32 bits of 4294967297 (0x100000001 in hex) amount to 1 (0x00000001 in hex).

    MVA value sets for updating (and also for INSERT or REPLACE, refer to Section 7.5, “INSERT and REPLACE syntax”) must be specified as comma-separated lists in parentheses. To erase the MVA value, just assign () to it.

    mysql> UPDATE myindex SET enabled=0 WHERE id=123;
    Query OK, 1 rows affected (0.00 sec)
    
    mysql> UPDATE myindex
      SET bigattr=-100000000000,
        fattr=3465.23,
        mvattr1=(3,6,4),
        mvattr2=()
      WHERE MATCH('hehe') AND enabled=1;
    Query OK, 148 rows affected (0.01 sec)
    

    7.19. ATTACH INDEX syntax

    ATTACH INDEX diskindex TO RTINDEX rtindex
    

    ATTACH INDEX statement, added in version 2.0.2-beta, lets you move data from a regular disk index to a RT index.

    After a successful ATTACH, the data originally stored in the source disk index becomes a part of the target RT index, and the source disk index becomes unavailable (until the next rebuild). ATTACH does not result in any index data changes. Basically, it just renames the files (making the source index a new disk chunk of the target RT index) and updates the metadata. So it is generally a quick operation, which frequently completes in under a second.

    Note that when an index is attached to an empty RT index, the fields, attributes, and text processing settings (tokenizer, wordforms, etc) from the source index are copied over and take effect. The respective parts of the RT index definition from the configuration file will be ignored.

    As of 2.0.2-beta, ATTACH INDEX comes with a number of restrictions. Most notably, the target RT index is currently required to be empty, making ATTACH INDEX a one-time conversion operation only. Those restrictions may be lifted in future releases, as we add the needed functionality to the RT indexes. The complete list is as follows.

    • Target RT index needs to be empty.

    • Source disk index needs to have index_sp=0, boundary_step=0, stopword_step=1, dict=crc settings.

    • Source disk index needs to have an empty index_zones setting.

    mysql> DESC rt;
    +-----------+---------+
    | Field     | Type    |
    +-----------+---------+
    | id        | integer |
    | testfield | field   |
    | testattr  | uint    |
    +-----------+---------+
    3 rows in set (0.00 sec)
    
    mysql> SELECT * FROM rt;
    Empty set (0.00 sec)
    
    mysql> SELECT * FROM disk WHERE MATCH('test');
    +------+--------+----------+------------+
    | id   | weight | group_id | date_added |
    +------+--------+----------+------------+
    |    1 |   1304 |        1 | 1313643256 |
    |    2 |   1304 |        1 | 1313643256 |
    |    3 |   1304 |        1 | 1313643256 |
    |    4 |   1304 |        1 | 1313643256 |
    +------+--------+----------+------------+
    4 rows in set (0.00 sec)
    
    mysql> ATTACH INDEX disk TO RTINDEX rt;
    Query OK, 0 rows affected (0.00 sec)
    
    mysql> DESC rt;
    +------------+-----------+
    | Field      | Type      |
    +------------+-----------+
    | id         | integer   |
    | title      | field     |
    | content    | field     |
    | group_id   | uint      |
    | date_added | timestamp |
    +------------+-----------+
    5 rows in set (0.00 sec)
    
    mysql> SELECT * FROM rt WHERE MATCH('test');
    +------+--------+----------+------------+
    | id   | weight | group_id | date_added |
    +------+--------+----------+------------+
    |    1 |   1304 |        1 | 1313643256 |
    |    2 |   1304 |        1 | 1313643256 |
    |    3 |   1304 |        1 | 1313643256 |
    |    4 |   1304 |        1 | 1313643256 |
    +------+--------+----------+------------+
    4 rows in set (0.00 sec)
    
    mysql> SELECT * FROM disk WHERE MATCH('test');
    ERROR 1064 (42000): no enabled local indexes to search
    

    7.20. FLUSH RTINDEX syntax

    FLUSH RTINDEX rtindex
    

    FLUSH RTINDEX statement, added in version 2.0.2-beta, forcibly flushes RT index RAM chunk contents to disk.

    Backing up a RT index is as simple as copying over its data files, followed by the binary log. However, recovering from that backup means that all the transactions in the log since the last successful RAM chunk write would need to be replayed. Those writes normally happen either on a clean shutdown, or periodically with a (big enough!) interval between writes specified in rt_flush_period directive. So such a backup made at an arbitrary point in time just might end up with way too much binary log data to replay.

    FLUSH RTINDEX forcibly writes the RAM chunk contents to disk, and also causes the subsequent cleanup of (now-redundant) binary log files. Thus, recovering from a backup made just after FLUSH RTINDEX should be almost instant.

    mysql> FLUSH RTINDEX rt;
    Query OK, 0 rows affected (0.05 sec)
    

    7.21. Multi-statement queries

    Starting with version 2.0.1-beta, SphinxQL supports multi-statement queries, or batches. Possible inter-statement optimizations described in Section 5.11, “Multi-queries” apply to SphinxQL just as well. The batched queries should be separated by a semicolon. Your MySQL client library needs to support the MySQL multi-query mechanism and multiple result sets. For instance, the mysqli interface in PHP and DBI/DBD libraries in Perl are known to work.

    Here's a PHP sample showing how to utilize mysqli interface with Sphinx.

    <?php
    
    $link = mysqli_connect ( "127.0.0.1", "root", "", "", 9306 );
    if ( mysqli_connect_errno() )
        die ( "connect failed: " . mysqli_connect_error() );
    
    $batch = "SELECT * FROM test1 ORDER BY group_id ASC;";
    $batch .= "SELECT * FROM test1 ORDER BY group_id DESC";
    
    if ( !mysqli_multi_query ( $link, $batch ) )
        die ( "query failed" );
    
    do
    {
        // fetch and print result set
        if ( $result = mysqli_store_result($link) )
        {
            while ( $row = mysqli_fetch_row($result) )
                printf ( "id=%s\n", $row[0] );
            mysqli_free_result($result);
        }
    
        // print divider
        if ( mysqli_more_results($link) )
            printf ( "------\n" );
    
    } while ( mysqli_next_result($link) );
    

    Its output with the sample test1 index included with Sphinx is as follows.

    $ php test_multi.php
    id=1
    id=2
    id=3
    id=4
    ------
    id=3
    id=4
    id=1
    id=2
    

    The following statements can currently be used in a batch: SELECT, SHOW WARNINGS, SHOW STATUS, and SHOW META. An arbitrary sequence of these statements is allowed. The result sets returned should match those that would be returned if the batched queries were sent one by one.

    7.22. Comment syntax

    Since version 2.0.1-beta, SphinxQL supports C-style comment syntax. Everything from an opening /* sequence to a closing */ sequence is ignored. Comments can span multiple lines, can not nest, and should not get logged. MySQL specific /*! ... */ comments are also currently ignored. (Comment support was added primarily for better compatibility with mysqldump-produced dumps, rather than to improve general query interoperability between Sphinx and MySQL.)

    SELECT /*! SQL_CALC_FOUND_ROWS */ col1 FROM table1 WHERE ...
    

    7.23. List of SphinxQL reserved keywords

    A complete alphabetical list of keywords that are currently reserved in SphinxQL syntax (and therefore can not be used as identifiers).

    AND
    AS
    ASC
    AVG
    BEGIN
    BETWEEN
    BY
    CALL
    COLLATION
    COMMIT
    COUNT
    DELETE
    DESC
    DESCRIBE
    DISTINCT
    FALSE
    FROM
    GLOBAL
    GROUP
    ID
    IN
    INSERT
    INTO
    LIMIT
    MATCH
    MAX
    META
    MIN
    NOT
    NULL
    OPTION
    OR
    ORDER
    REPLACE
    ROLLBACK
    SELECT
    SET
    SHOW
    START
    STATUS
    SUM
    TABLES
    TRANSACTION
    TRUE
    UPDATE
    VALUES
    VARIABLES
    WARNINGS
    WEIGHT
    WHERE
    WITHIN
    

    7.24. SphinxQL upgrade notes, version 2.0.1-beta

    This section only applies to existing applications that use SphinxQL versions prior to 2.0.1-beta.

    In previous versions, SphinxQL just wrapped around SphinxAPI and inherited its magic columns and column set quirks. Essentially, SphinxQL queries could return (slightly) different columns and in a (slightly) different order than it was explicitly requested in the query. Namely, weight magic column (which is not a real column in any index) was added at all times, and GROUP BY related @count, @group, and @distinct magic columns were conditionally added when grouping. Also, the order of columns (attributes) in the result set was actually taken from the index rather than the query. (So if you asked for columns C, B, A in your query but they were in the A, B, C order in the index, they would have been returned in the A, B, C order.)

    In version 2.0.1-beta, we fixed that. SphinxQL is now more SQL compliant (and will be brought into as much compliance with standard SQL syntax as possible going forward). This is not yet a breaking change, because searchd supports the compat_sphinxql_magics directive, which flips between the old "compatibility" mode and the new "compliance" mode. However, the compatibility mode support is going to be removed in the future, so it's strongly advised to update SphinxQL applications and switch to the compliance mode.

    The important changes are as follows:

    • @ID magic name is deprecated in favor of ID. Document ID is considered an attribute.

    • WEIGHT is no longer implicitly returned, because it is not actually a column (an index attribute), but rather an internal function computed per each row (a match). You have to explicitly ask for it, using the WEIGHT() function. (The requirement to alias the result will be lifted in the next release.)

      SELECT id, WEIGHT() w FROM myindex WHERE MATCH('test')
      

    • You can now use quoted reserved keywords as aliases. The quote character is backtick ("`", ASCII code 96 decimal, 60 hex). One particularly useful example would be returning weight column like the old mode:

      SELECT id, WEIGHT() `weight` FROM myindex WHERE MATCH('test')
      

    • The column order is now different and should now match the one explicitly defined in the query. So if you are accessing columns based on their position in the result set rather than by name (for instance, by using mysql_fetch_row() rather than mysql_fetch_assoc() in PHP), check and fix the order of columns in your queries.

    • SELECT * returns the columns in index order, as it used to, including the ID column. However, SELECT * does not automatically return WEIGHT(). To update such queries in case you access columns by names, simply add it to the query:

      SELECT *, WEIGHT() `weight` FROM myindex WHERE MATCH('test')
      

      Otherwise, i.e., in case you rely on column order, select ID, weight, and then other columns:

      SELECT id, *, WEIGHT() `weight` FROM myindex WHERE MATCH('test')
      

    • Magic @count and @distinct attributes are no longer implicitly returned. You now have to explicitly ask for them when using GROUP BY. (Also note that you currently have to alias them; that requirement will be lifted in the future.)

      SELECT gid, COUNT(*) q FROM myindex WHERE MATCH('test')
      GROUP BY gid ORDER BY q DESC
      

    Chapter 8. API reference

    There are a number of native searchd client API implementations for Sphinx. As of the time of this writing, we officially support our own PHP, Python, and Java implementations. There are also third party free, open-source API implementations for Perl, Ruby, and C++.

    The reference API implementation is in PHP, because (we believe) Sphinx is used with PHP more widely than with any other language. This reference documentation is in turn based on the reference PHP API, and all code samples in this section will be given in PHP.

    However, all other APIs provide the same methods and implement the very same network protocol. Therefore the documentation does apply to them as well. There might be minor differences as to the method naming conventions or specific data structures used. But the provided functionality must not differ across languages.

    8.1. General API functions

    8.1.1. GetLastError

    Prototype: function GetLastError()

    Returns the last error message, as a string, in human-readable format. If there were no errors during the previous API call, an empty string is returned.

    You should call it when any other function (such as Query()) fails (typically, the failing function returns false). The returned string will contain the error description.

    The error message is not reset by this call; so you can safely call it several times if needed.

    8.1.2. GetLastWarning

    Prototype: function GetLastWarning ()

    Returns the last warning message, as a string, in human-readable format. If there were no warnings during the previous API call, an empty string is returned.

    You should call it to verify whether your request (such as Query()) was completed but with warnings. For instance, a search query against a distributed index might complete successfully even if several remote agents timed out. In that case, a warning message would be produced.

    The warning message is not reset by this call; so you can safely call it several times if needed.
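
    As a sketch, a typical calling pattern in PHP (assuming $cl is an already configured SphinxClient instance and the test1 index exists) might look like this:

    $res = $cl->Query ( "test query", "test1" );
    if ( $res===false )
    {
        // the call itself failed; report the error
        print "query failed: " . $cl->GetLastError() . "\n";
    }
    else
    {
        // the call succeeded, but there still might be warnings
        if ( $cl->GetLastWarning() )
            print "WARNING: " . $cl->GetLastWarning() . "\n";
    }
    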

    8.1.3. SetServer

    Prototype: function SetServer ( $host, $port )

    Sets searchd host name and TCP port. All subsequent requests will use the new host and port settings. Default host and port are 'localhost' and 9312, respectively.
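
    For example, a minimal client setup (assuming the bundled sphinxapi.php is available in the include path) could look like this:

    require ( "sphinxapi.php" );
    
    $cl = new SphinxClient ();
    $cl->SetServer ( "localhost", 9312 ); // explicitly use the default host and port
    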

    8.1.4. SetRetries

    Prototype: function SetRetries ( $count, $delay=0 )

    Sets distributed retry count and delay.

    On temporary failures searchd will attempt up to $count retries per agent. $delay is the delay between the retries, in milliseconds. Retries are disabled by default. Note that this call will not make the API itself retry on temporary failure; it only tells searchd to do so. Currently, the list of temporary failures includes all kinds of connect() failures and maxed out (too busy) remote agents.

    8.1.5. SetConnectTimeout

    Prototype: function SetConnectTimeout ( $timeout )

    Sets the time allowed to spend connecting to the server before giving up.

    Under some circumstances, the server can be delayed in responding, either due to network delays, or a query backlog. In either instance, this allows the client application programmer some degree of control over how their program interacts with searchd when it is not available, and can ensure that the client application does not fail due to exceeding the script execution limits (especially in PHP).

    In the event of a failure to connect, an appropriate error code should be returned back to the application in order for application-level error handling to advise the user.

    8.1.6. SetArrayResult

    Prototype: function SetArrayResult ( $arrayresult )

    PHP specific. Controls matches format in the search results set (whether matches should be returned as an array or a hash).

    $arrayresult argument must be boolean. If $arrayresult is false (the default mode), matches will be returned in PHP hash format with document IDs as keys, and other information (weight, attributes) as values. If $arrayresult is true, matches will be returned as a plain array with complete per-match information including document ID.

    Introduced along with GROUP BY support on MVA attributes. Group-by-MVA result sets may contain duplicate document IDs. Thus they need to be returned as plain arrays, because hashes will only keep one entry per document ID.
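
    A short sketch of the difference between the two modes (assuming $cl is a configured SphinxClient and the test1 index exists; error handling omitted):

    // default (hash) mode: document IDs are the hash keys
    $cl->SetArrayResult ( false );
    $res = $cl->Query ( "test", "test1" );
    foreach ( $res["matches"] as $docid=>$match )
        print "$docid: weight={$match['weight']}\n";
    
    // array mode: the ID is stored inside each match entry
    $cl->SetArrayResult ( true );
    $res = $cl->Query ( "test", "test1" );
    foreach ( $res["matches"] as $match )
        print "{$match['id']}: weight={$match['weight']}\n";
    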

    8.1.7. IsConnectError

    Prototype: function IsConnectError ()

    Checks whether the last error was a network error on API side, or a remote error reported by searchd. Returns true if the last connection attempt to searchd failed on API side, false otherwise (if the error was remote, or there were no connection attempts at all). Introduced in version 0.9.9-rc1.
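
    One possible use, sketched in PHP, is falling back to a backup searchd instance when the connection itself fails (the host names are hypothetical):

    $res = $cl->Query ( "test query", "test1" );
    if ( $res===false && $cl->IsConnectError() )
    {
        // network-level failure; retry against a backup server
        $cl->SetServer ( "backup.example.com", 9312 );
        $res = $cl->Query ( "test query", "test1" );
    }
    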

    8.2. General query settings

    8.2.1. SetLimits

    Prototype: function SetLimits ( $offset, $limit, $max_matches=0, $cutoff=0 )

    Sets offset into server-side result set ($offset) and amount of matches to return to client starting from that offset ($limit). Can additionally control maximum server-side result set size for current query ($max_matches) and the threshold amount of matches to stop searching at ($cutoff). All parameters must be non-negative integers.

    The first two parameters to SetLimits() are identical in behavior to the MySQL LIMIT clause. They instruct searchd to return at most $limit matches starting from match number $offset. The default offset and limit settings are 0 and 20, that is, to return the first 20 matches.

    max_matches setting controls how many matches searchd will keep in RAM while searching. All matching documents will be normally processed, ranked, filtered, and sorted even if max_matches is set to 1. But only the best N documents are stored in memory at any given moment for performance and RAM usage reasons, and this setting controls that N. Note that there are two places where the max_matches limit is enforced. The per-query limit is controlled by this API call, but there is also a per-server limit controlled by the max_matches setting in the config file. To prevent RAM usage abuse, the server will not allow setting the per-query limit higher than the per-server limit.

    You can't retrieve more than max_matches matches to the client application. The default limit is set to 1000. Normally, you should not need to go over this limit. One thousand records is enough to present to the end user. And if you're thinking about pulling the results into the application for further sorting or filtering, that would be much more efficient if performed on the Sphinx side.

    $cutoff setting is intended for advanced performance control. It tells searchd to forcibly stop search query once $cutoff matches had been found and processed.
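
    For example, classic pagination with 20 matches per page could be sketched as follows ($page is assumed to be a 1-based page number coming from the application):

    $page = 3;      // 1-based page number
    $per_page = 20; // matches per page
    $cl->SetLimits ( ($page-1)*$per_page, $per_page, 1000 );
    $res = $cl->Query ( "test query", "test1" );
    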

    8.2.2. SetMaxQueryTime

    Prototype: function SetMaxQueryTime ( $max_query_time )

    Sets maximum search query time, in milliseconds. Parameter must be a non-negative integer. Default value is 0 which means "do not limit".

    Similar to $cutoff setting from SetLimits(), but limits elapsed query time instead of processed matches count. Local search queries will be stopped once that much time has elapsed. Note that if you're performing a search which queries several local indexes, this limit applies to each index separately.

    8.2.3. SetOverride

    Prototype: function SetOverride ( $attrname, $attrtype, $values )

    Sets temporary (per-query) per-document attribute value overrides. Only supports scalar attributes. $values must be a hash that maps document IDs to overridden attribute values. Introduced in version 0.9.9-rc1.

    Override feature lets you temporarily update attribute values for some documents within a single query, leaving all other queries unaffected. This might be useful for personalized data. For example, assume you're implementing a personalized search function that wants to boost the posts that the user's friends recommend. Such data is not just dynamic, but also personal; so you can't simply put it in the index because you don't want everyone's searches affected. Overrides, on the other hand, are local to a single query and invisible to everyone else. So you can, say, set up a "friends_weight" value for every document, defaulting to 0, then temporarily override it with 1 for documents 123, 456 and 789 (recommended by the current user's friends), and use that value when ranking.
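
    The personalized ranking scenario above could be sketched like this (the friends_weight attribute name and the document IDs are just examples):

    // friends_weight defaults to 0 in the index; boost three recommended documents
    $cl->SetOverride ( "friends_weight", SPH_ATTR_INTEGER, array ( 123=>1, 456=>1, 789=>1 ) );
    $cl->SetSelect ( "*, @weight+friends_weight*100 AS myweight" );
    $cl->SetSortMode ( SPH_SORT_EXTENDED, "myweight DESC" );
    $res = $cl->Query ( "test query", "test1" );
    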

    8.2.4. SetSelect

    Prototype: function SetSelect ( $clause )

    Sets the select clause, listing specific attributes to fetch, and expressions to compute and fetch. Clause syntax mimics SQL. Introduced in version 0.9.9-rc1.

    SetSelect() is very similar to the part of a typical SQL query between SELECT and FROM. It lets you choose what attributes (columns) to fetch, and also what expressions over the columns to compute and fetch. A certain difference from SQL is that expressions must always be aliased to a correct identifier (consisting of letters and digits) using the 'AS' keyword. SQL also lets you do that but does not require it. Sphinx enforces aliases so that the computation results can always be returned under a "normal" name in the result set, used in other clauses, etc.

    Everything else is basically identical to SQL. Star ('*') is supported. Functions are supported. An arbitrary number of expressions is supported. Computed expressions can be used for sorting, filtering, and grouping, just like regular attributes.

    Starting with version 0.9.9-rc2, aggregate functions (AVG(), MIN(), MAX(), SUM()) are supported when using GROUP BY.

    Expression sorting (Section 5.6, “SPH_SORT_EXPR mode”) and geodistance functions (Section 8.4.5, “SetGeoAnchor”) are now internally implemented using this computed expressions mechanism, using magic names '@expr' and '@geodist' respectively.

    Example:

    $cl->SetSelect ( "*, @weight+(user_karma+ln(pageviews))*0.1 AS myweight" );
    $cl->SetSelect ( "exp_years, salary_gbp*{$gbp_usd_rate} AS salary_usd,
       IF(age>40,1,0) AS over40" );
    $cl->SetSelect ( "*, AVG(price) AS avgprice" );
    

    8.3. Full-text search query settings

    8.3.1. SetMatchMode

    Prototype: function SetMatchMode ( $mode )

    Sets full-text query matching mode, as described in Section 5.1, “Matching modes”. Parameter must be a constant specifying one of the known modes.

    WARNING: (PHP specific) you must not take the matching mode constant name in quotes, that syntax specifies a string and is incorrect:

    $cl->SetMatchMode ( "SPH_MATCH_ANY" ); // INCORRECT! will not work as expected
    $cl->SetMatchMode ( SPH_MATCH_ANY ); // correct, works OK
    

    8.3.2. SetRankingMode

    Prototype: function SetRankingMode ( $ranker, $rankexpr="" )

    Sets ranking mode (aka ranker). Only available in SPH_MATCH_EXTENDED matching mode. Parameter must be a constant specifying one of the known rankers.

    By default, in the EXTENDED matching mode Sphinx computes two factors which contribute to the final match weight. The major part is a phrase proximity value between the document text and the query. The minor part is so-called BM25 statistical function, which varies from 0 to 1 depending on the keyword frequency within document (more occurrences yield higher weight) and within the whole index (more rare keywords yield higher weight).

    However, in some cases you'd want to compute weight differently - or maybe avoid computing it at all for performance reasons because you're sorting the result set by something else anyway. This can be accomplished by setting the appropriate ranking mode. The list of the modes is available in Section 5.4, “Search results ranking”.

    $rankexpr argument was added in version 2.0.2-beta. It lets you specify a ranking formula to use with the expression based ranker, that is, when $ranker is set to SPH_RANK_EXPR. In all other cases, $rankexpr is ignored.
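
    A short sketch of picking a ranker, including the expression based one (the formula shown emulates the default proximity+BM25 ranker and is only an example):

    $cl->SetMatchMode ( SPH_MATCH_EXTENDED );
    
    // use one of the built-in rankers
    $cl->SetRankingMode ( SPH_RANK_BM25 );
    
    // or supply a custom formula to the expression based ranker (2.0.2-beta and above)
    $cl->SetRankingMode ( SPH_RANK_EXPR, "sum(lcs*user_weight)*1000+bm25" );
    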

    8.3.3. SetSortMode

    Prototype: function SetSortMode ( $mode, $sortby="" )

    Sets matches sorting mode, as described in Section 5.6, “Sorting modes”. Parameter must be a constant specifying one of the known modes.

    WARNING: (PHP specific) you must not take the sorting mode constant name in quotes, that syntax specifies a string and is incorrect:

    $cl->SetSortMode ( "SPH_SORT_ATTR_DESC" ); // INCORRECT! will not work as expected
    $cl->SetSortMode ( SPH_SORT_ATTR_ASC ); // correct, works OK
    

    8.3.4. SetWeights

    Prototype: function SetWeights ( $weights )

    Binds per-field weights in the order of appearance in the index. DEPRECATED, use SetFieldWeights() instead.

    8.3.5. SetFieldWeights

    Prototype: function SetFieldWeights ( $weights )

    Binds per-field weights by name. Parameter must be a hash (associative array) mapping string field names to integer weights.

    Match ranking can be affected by per-field weights. For instance, see Section 5.4, “Search results ranking” for an explanation how phrase proximity ranking is affected. This call lets you specify what non-default weights to assign to different full-text fields.

    The weights must be positive 32-bit integers. The final weight will be a 32-bit integer too. Default weight value is 1. Unknown field names will be silently ignored.

    There is no enforced limit on the maximum weight value at the moment. However, beware that if you set it too high you can start hitting 32-bit wraparound issues. For instance, if you set a weight of 10,000,000 and search in extended mode, then the maximum possible weight will be equal to 10 million (your weight) multiplied by 1 thousand (internal BM25 scaling factor, see Section 5.4, “Search results ranking”) multiplied by 1 or more (phrase proximity rank). The result is at least 10 billion, which does not fit in 32 bits and will wrap around, producing unexpected results.
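
    For instance, to make title matches count ten times more than content matches (the field names are taken from the sample indexes):

    $cl->SetFieldWeights ( array ( "title"=>10, "content"=>1 ) );
    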

    8.3.6. SetIndexWeights

    Prototype: function SetIndexWeights ( $weights )

    Sets per-index weights, and enables weighted summing of match weights across different indexes. Parameter must be a hash (associative array) mapping string index names to integer weights. The default is an empty array, which means to disable weighted summing.

    When a match with the same document ID is found in several different local indexes, by default Sphinx simply chooses the match from the index specified last in the query. This is to support searching through partially overlapping index partitions.

    However, in some cases the indexes are not just partitions, and you might want to sum the weights across the indexes instead of picking one. SetIndexWeights() lets you do that. With summing enabled, the final match weight in the result set will be computed as a sum of the match weight coming from the given index multiplied by the respective per-index weight specified in this call. I.e., if document 123 is found in index A with the weight of 2, and also in index B with the weight of 3, and you called SetIndexWeights ( array ( "A"=>100, "B"=>10 ) ), the final weight returned to the client will be 2*100+3*10 = 230.

    8.4. Result set filtering settings

    8.4.1. SetIDRange

    Prototype: function SetIDRange ( $min, $max )

    Sets an accepted range of document IDs. Parameters must be integers. Defaults are 0 and 0; that combination means to not limit by range.

    After this call, only those records that have document ID between $min and $max (including IDs exactly equal to $min or $max) will be matched.

    8.4.2. SetFilter

    Prototype: function SetFilter ( $attribute, $values, $exclude=false )

    Adds new integer values set filter.

    On this call, additional new filter is added to the existing list of filters. $attribute must be a string with attribute name. $values must be a plain array containing integer values. $exclude must be a boolean value; it controls whether to accept the matching documents (default mode, when $exclude is false) or reject them.

    Only those documents where $attribute column value stored in the index matches any of the values from $values array will be matched (or rejected, if $exclude is true).
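
    A short sketch, reusing the group_id attribute from the sample index (attribute names and values are examples):

    // accept only documents whose group_id is 1, 2, or 3
    $cl->SetFilter ( "group_id", array ( 1, 2, 3 ) );
    
    // and additionally reject documents whose group_id2 is 5
    $cl->SetFilter ( "group_id2", array ( 5 ), true );
    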

    8.4.3. SetFilterRange

    Prototype: function SetFilterRange ( $attribute, $min, $max, $exclude=false )

    Adds new integer range filter.

    On this call, additional new filter is added to the existing list of filters. $attribute must be a string with attribute name. $min and $max must be integers that define the acceptable attribute values range (including the boundaries). $exclude must be a boolean value; it controls whether to accept the matching documents (default mode, when $exclude is false) or reject them.

    Only those documents where $attribute column value stored in the index is between $min and $max (including values that are exactly equal to $min or $max) will be matched (or rejected, if $exclude is true).

    8.4.4. SetFilterFloatRange

    Prototype: function SetFilterFloatRange ( $attribute, $min, $max, $exclude=false )

    Adds new float range filter.

    On this call, additional new filter is added to the existing list of filters. $attribute must be a string with attribute name. $min and $max must be floats that define the acceptable attribute values range (including the boundaries). $exclude must be a boolean value; it controls whether to accept the matching documents (default mode, when $exclude is false) or reject them.

    Only those documents where $attribute column value stored in the index is between $min and $max (including values that are exactly equal to $min or $max) will be matched (or rejected, if $exclude is true).

    8.4.5. SetGeoAnchor

    Prototype: function SetGeoAnchor ( $attrlat, $attrlong, $lat, $long )

    Sets the anchor point for geosphere distance (geodistance) calculations, and enables them.

    $attrlat and $attrlong must be strings that contain the names of latitude and longitude attributes, respectively. $lat and $long are floats that specify anchor point latitude and longitude, in radians.

    Once an anchor point is set, you can use the magic "@geodist" attribute name in your filters and/or sorting expressions. Sphinx will compute the geosphere distance between the given anchor point and the point specified by the latitude and longitude attributes from each full-text match, and attach this value to the resulting match. The latitude and longitude values, both in SetGeoAnchor and the index attribute data, are expected to be in radians. The result will be returned in meters, so a geodistance value of 1000.0 means 1 km. 1 mile is approximately 1609.344 meters.
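
    A sketch of typical usage (the lat_radians and long_radians attribute names and the coordinates are hypothetical; note the conversion from degrees to radians):

    $deg2rad = M_PI/180.0;
    
    // anchor at 55.61958 N, 12.578509 E; the index stores latitude/longitude in radians
    $cl->SetGeoAnchor ( "lat_radians", "long_radians",
        55.61958*$deg2rad, 12.578509*$deg2rad );
    
    // keep only matches within 10 km of the anchor, closest ones first
    $cl->SetFilterFloatRange ( "@geodist", 0.0, 10000.0 );
    $cl->SetSortMode ( SPH_SORT_EXTENDED, "@geodist ASC, @weight DESC" );
    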

    8.5. GROUP BY settings

    8.5.1. SetGroupBy

    Prototype: function SetGroupBy ( $attribute, $func, $groupsort="@group desc" )

    Sets grouping attribute, function, and groups sorting mode; and enables grouping (as described in Section 5.7, “Grouping (clustering) search results ”).

    $attribute is a string that contains group-by attribute name. $func is a constant that chooses a function applied to the attribute value in order to compute group-by key. $groupsort is a clause that controls how the groups will be sorted. Its syntax is similar to that described in Section 5.6, “SPH_SORT_EXTENDED mode”.

    Grouping feature is very similar in nature to the GROUP BY clause from SQL. Results produced by this function call are going to be the same as those produced by the following pseudo code:

    SELECT ... GROUP BY $func($attribute) ORDER BY $groupsort
    

    Note that it's $groupsort that affects the order of matches in the final result set. Sorting mode (see Section 8.3.3, “SetSortMode”) affects the ordering of matches within a group, i.e. which match will be selected as the best one from the group. So you can, for instance, order the groups by match count and select the most relevant match within each group at the same time.

    Starting with version 0.9.9-rc2, aggregate functions (AVG(), MIN(), MAX(), SUM()) are supported through SetSelect() API call when using GROUP BY.

    Starting with version 2.0.1-beta, grouping on string attributes is supported, with respect to current collation.

    8.5.2. SetGroupDistinct

    Prototype: function SetGroupDistinct ( $attribute )

    Sets attribute name for per-group distinct values count calculations. Only available for grouping queries.

    $attribute is a string that contains the attribute name. For each group, all values of this attribute will be stored (as RAM limits permit), then the amount of distinct values will be calculated and returned to the client. This feature is similar to COUNT(DISTINCT) clause in standard SQL; so these Sphinx calls:

    $cl->SetGroupBy ( "category", SPH_GROUPBY_ATTR, "@count desc" );
    $cl->SetGroupDistinct ( "vendor" );
    

    can be expressed using the following SQL clauses:

    SELECT id, weight, all-attributes,
    	COUNT(DISTINCT vendor) AS @distinct,
    	COUNT(*) AS @count
    FROM products
    GROUP BY category
    ORDER BY @count DESC
    

    In the sample pseudo code shown just above, the SetGroupDistinct() call corresponds to the COUNT(DISTINCT vendor) clause only. GROUP BY, ORDER BY, and COUNT(*) clauses are all an equivalent of the SetGroupBy() settings. Both queries will return one matching row for each category. In addition to indexed attributes, matches will also contain the total per-category matches count, and the count of distinct vendor IDs within each category.

    8.6. Querying

    8.6.1. Query

    Prototype: function Query ( $query, $index="*", $comment="" )

    Connects to searchd server, runs given search query with current settings, obtains and returns the result set.

    $query is a query string. $index is an index name (or names) string. Returns false and sets GetLastError() message on general error. Returns search result set on success. Additionally, the contents of $comment are sent to the query log, marked in square brackets, just before the search terms, which can be very useful for debugging. Currently, the comment is limited to 128 characters.

    Default value for $index is "*" that means to query all local indexes. Characters allowed in index names include Latin letters (a-z), numbers (0-9), minus sign (-), and underscore (_); everything else is considered a separator. Therefore, all of the following sample calls are valid and will search the same two indexes:

    $cl->Query ( "test query", "main delta" );
    $cl->Query ( "test query", "main;delta" );
    $cl->Query ( "test query", "main, delta" );
    

    Index specification order matters. If documents with identical IDs are found in two or more indexes, weight and attribute values from the very last matching index will be used for sorting and returning to the client (unless explicitly overridden with SetIndexWeights()). Therefore, in the example above, matches from the "delta" index will always win over matches from "main".

    On success, Query() returns a result set that contains some of the found matches (as requested by SetLimits()) and additional general per-query statistics. The result set is a hash (PHP specific; other languages might utilize other structures instead of hash) with the following keys and values:

    "matches":

    Hash which maps found document IDs to another small hash containing document weight and attribute values (or an array of the similar small hashes if SetArrayResult() was enabled).

    "total":

    Total amount of matches retrieved on the server (i.e., into the server-side result set) by this query. You can retrieve up to this amount of matches from the server for this query text with current query settings.

    "total_found":

    Total amount of matching documents in the index (that were found and processed on the server).

    "words":

    Hash which maps query keywords (case-folded, stemmed, and otherwise processed) to a small hash with per-keyword statistics ("docs", "hits").

    "error":

    Query error message reported by searchd (string, human readable). Empty if there were no errors.

    "warning":

    Query warning message reported by searchd (string, human readable). Empty if there were no warnings.

    It should be noted that Query() carries out the same actions as AddQuery() and RunQueries() without the intermediate steps; it is analogous to a single AddQuery() call, followed by a corresponding RunQueries(), then returning the first array element of matches (from the first, and only, query.)
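
    As a sketch, processing a successful result set in the default (hash) mode might look like this (the group_id attribute name is an example):

    $res = $cl->Query ( "test query", "test1" );
    if ( $res===false )
        die ( "query failed: " . $cl->GetLastError() );
    
    print "total found: {$res['total_found']}\n";
    foreach ( $res["matches"] as $docid=>$match )
        print "$docid: weight={$match['weight']}, group_id={$match['attrs']['group_id']}\n";
    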

    8.6.2. AddQuery

    Prototype: function AddQuery ( $query, $index="*", $comment="" )

    Adds an additional query with the current settings to the multi-query batch. $query is a query string. $index is an index name (or names) string. Additionally, if provided, the contents of $comment are sent to the query log, marked in square brackets, just before the search terms, which can be very useful for debugging. Currently, this is limited to 128 characters. Returns an index into the results array returned from RunQueries().

    Batch queries (or multi-queries) enable searchd to perform internal optimizations if possible. They also reduce network connection overheads and search process creation overheads in all cases. They do not result in any additional overheads compared to simple queries. Thus, if you run several different queries from your web page, you should always consider using multi-queries.

    For instance, running the same full-text query but with different sorting or group-by settings will enable searchd to perform expensive full-text search and ranking operation only once, but compute multiple group-by results from its output.

    This can be a big saver when you need to display not just plain search results but also some per-category counts, such as the amount of products grouped by vendor. Without multi-query, you would have to run several queries which perform essentially the same search and retrieve the same matches, but create result sets differently. With multi-query, you simply pass all these queries in a single batch and Sphinx optimizes the redundant full-text search internally.

    AddQuery() internally saves full current settings state along with the query, and you can safely change them afterwards for subsequent AddQuery() calls. Already added queries will not be affected; there's actually no way to change them at all. Here's an example:

    $cl->SetSortMode ( SPH_SORT_RELEVANCE );
    $cl->AddQuery ( "hello world", "documents" );
    
    $cl->SetSortMode ( SPH_SORT_ATTR_DESC, "price" );
    $cl->AddQuery ( "ipod", "products" );
    
    $cl->AddQuery ( "harry potter", "books" );
    
    $results = $cl->RunQueries ();
    

    With the code above, 1st query will search for "hello world" in "documents" index and sort results by relevance, 2nd query will search for "ipod" in "products" index and sort results by price, and 3rd query will search for "harry potter" in "books" index while still sorting by price. Note that 2nd SetSortMode() call does not affect the first query (because it's already added) but affects both other subsequent queries.

    Additionally, any filters set up before an AddQuery() will fall through to subsequent queries. So, if SetFilter() is called before the first query, the same filter will be in place for the second (and subsequent) queries batched through AddQuery() unless you call ResetFilters() first. Alternatively, you can add additional filters as well.

    The same applies to grouping and sorting options; AddQuery() does not reset any of the current sorting, filtering, or grouping settings, so subsequent queries will reuse them.

    AddQuery() returns an index into an array of results that will be returned from the RunQueries() call. It is simply a sequentially increasing 0-based integer, i.e. the first call will return 0, the second will return 1, and so on. Just a small helper so you won't have to track the indexes manually if you need them.

    8.6.3. RunQueries

    Prototype: function RunQueries ()

    Connects to searchd, runs a batch of all queries added using AddQuery(), obtains and returns the result sets. Returns false and sets GetLastError() message on general error (such as network I/O failure). Returns a plain array of result sets on success.

    Each result set in the returned array is exactly the same as the result set returned from Query().

    Note that the batch query request itself almost always succeeds - unless there's a network error, blocking index rotation in progress, or another general failure which prevents the whole request from being processed.

    However, individual queries within the batch might very well fail. In this case their respective result sets will contain a non-empty "error" message, but no matches or query statistics. In the extreme case all queries within the batch could fail. There still will be no general error reported, because the API was able to successfully connect to searchd, submit the batch, and receive the results - but every result set will have a specific error message.

    8.6.4. ResetFilters

    Prototype: function ResetFilters ()

    Clears all currently set filters.

    This call is only normally required when using multi-queries. You might want to set different filters for different queries in the batch. To do that, you should call ResetFilters() and add new filters using the respective calls.
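
    A sketch of using ResetFilters() within a multi-query batch (index and attribute names are examples):

    // 1st query: restricted to group 1
    $cl->SetFilter ( "group_id", array ( 1 ) );
    $cl->AddQuery ( "test query", "test1" );
    
    // 2nd query: no filters at all
    $cl->ResetFilters ();
    $cl->AddQuery ( "test query", "test1" );
    
    $results = $cl->RunQueries ();
    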

    8.6.5. ResetGroupBy

    Prototype: function ResetGroupBy ()

    Clears all current group-by settings, and disables group-by.

    This call is only normally required when using multi-queries. You can change individual group-by settings using SetGroupBy() and SetGroupDistinct() calls, but you can not disable group-by using those calls. ResetGroupBy() fully resets previous group-by settings and disables group-by mode in the current state, so that subsequent AddQuery() calls can perform non-grouping searches.

    8.7. Additional functionality

    8.7.1. BuildExcerpts

    Prototype: function BuildExcerpts ( $docs, $index, $words, $opts=array() )

    Excerpts (snippets) builder function. Connects to searchd, asks it to generate excerpts (snippets) from given documents, and returns the results.

    $docs is a plain array of strings that carry the documents' contents. $index is an index name string. Different settings (such as charset, morphology, wordforms) from given index will be used. $words is a string that contains the keywords to highlight. They will be processed with respect to index settings. For instance, if English stemming is enabled in the index, "shoes" will be highlighted even if keyword is "shoe". Starting with version 0.9.9-rc1, keywords can contain wildcards, that work similarly to star-syntax available in queries. $opts is a hash which contains additional optional highlighting parameters:

    "before_match":

    A string to insert before a keyword match. Starting with version 1.10-beta, a %PASSAGE_ID% macro can be used in this string. The macro is replaced with an incrementing passage number within a current snippet. Numbering starts at 1 by default but can be overridden with "start_passage_id" option. In a multi-document call, %PASSAGE_ID% would restart at every given document. Default is "<b>".

    "after_match":

    A string to insert after a keyword match. Starting with version 1.10-beta, a %PASSAGE_ID% macro can be used in this string. Default is "</b>".

    "chunk_separator":

    A string to insert between snippet chunks (passages). Default is " ... ".

    "limit":

    Maximum snippet size, in symbols (codepoints). Integer, default is 256.

    "around":

    How many words to pick around each matching keyword block. Integer, default is 5.

    "exact_phrase":

    Whether to highlight exact query phrase matches only instead of individual keywords. Boolean, default is false.

    "single_passage":

    Whether to extract single best passage only. Boolean, default is false.

    "use_boundaries":

    Whether to additionally break passages by phrase boundary characters, as configured in index settings with the phrase_boundary directive. Boolean, default is false.

    "weight_order":

    Whether to sort the extracted passages in order of relevance (decreasing weight), or in order of appearance in the document (increasing position). Boolean, default is false.

    "query_mode":

    Added in version 1.10-beta. Whether to handle $words as a query in extended syntax, or as a bag of words (default behavior). For instance, in query mode ("one two" | "three four") will only highlight and include those occurrences "one two" or "three four" when the two words from each pair are adjacent to each other. In default mode, any single occurrence of "one", "two", "three", or "four" would be highlighted. Boolean, default is false.

    "force_all_words":

    Added in version 1.10-beta. Ignores the snippet length limit until it includes all the keywords. Boolean, default is false.

    "limit_passages":

    Added in version 1.10-beta. Limits the maximum number of passages that can be included into the snippet. Integer, default is 0 (no limit).

    "limit_words":

    Added in version 1.10-beta. Limits the maximum number of keywords that can be included into the snippet. Integer, default is 0 (no limit).

    "start_passage_id":

    Added in version 1.10-beta. Specifies the starting value of %PASSAGE_ID% macro (that gets detected and expanded in before_match, after_match strings). Integer, default is 1.

    "load_files":

    Added in version 1.10-beta. Whether to handle $docs as data to extract snippets from (default behavior), or to treat it as file names, and load data from the specified files on the server side. Starting with version 2.0.1-beta, up to dist_threads worker threads per request will be created to parallelize the work when this flag is enabled. Boolean, default is false. Starting with version 2.0.2-beta, snippet building can also be parallelized between remote agents. Just set the dist_threads config parameter to a value greater than 1, and then invoke the snippet generation over a distributed index which contains only one (the first) local agent and several remote agents.

    "load_files_scattered":

    Added in version 2.0.2-beta. It works only with distributed snippets generation with remote agents. The source files for snippets can be distributed among different agents, and the main daemon will merge together all non-erroneous results. So, if one agent of the distributed index has 'file1.txt', another has 'file2.txt', and you call for the snippets with both these files, Sphinx will merge the results from the agents together, so you will get the snippets from both 'file1.txt' and 'file2.txt'. Boolean, default is false.

    If the "load_files" is also set, the request will return the error in case if any of the files is not available anywhere. Otherwise (if "load_files" is not set) it will just return the empty strings for all absent files. The master instance reset this flag when distributes the snippets among agents. So, for agents the absence of a file is not critical error, but for the master it might be so. If you want to be sure that all snippets are actually created, set both "load_files_scattered" and "load_files". If the absense of some snippets caused by some agents is not critical for you - set just "load_files_scattered", leaving "load_files" not set.

    "html_strip_mode":

    Added in version 1.10-beta. HTML stripping mode setting. Defaults to "index", which means that index settings will be used. The other values are "none" and "strip", that forcibly skip or apply stripping regardless of index settings; and "retain", that retains HTML markup and protects it from highlighting. The "retain" mode can only be used when highlighting full documents and thus requires that no snippet size limits are set. String, allowed values are "none", "strip", "index", and "retain".

    "allow_empty":

    Added in version 1.10-beta. Allows empty string to be returned as highlighting result when a snippet could not be generated (no keywords match, or no passages fit the limit). By default, the beginning of original text would be returned instead of an empty string. Boolean, default is false.

    "passage_boundary":

    Added in version 2.0.1-beta. Ensures that passages do not cross a sentence, paragraph, or zone boundary (when used with an index that has the respective indexing settings enabled). String, allowed values are "sentence", "paragraph", and "zone".

    "emit_zones":

    Added in version 2.0.1-beta. Emits an HTML tag with an enclosing zone name before each passage. Boolean, default is false.

    Snippets extraction algorithm currently favors better passages (with closer phrase matches), and then passages with keywords not yet in the snippet. Generally, it will try to highlight the best match with the query, and it will also try to highlight all the query keywords, as made possible by the limits. In case the document does not match the query, the beginning of the document trimmed down according to the limits will be returned by default. Starting with 1.10-beta, you can also return an empty snippet instead in that case by setting the "allow_empty" option to true.

    Returns false on failure. Returns a plain array of strings with excerpts (snippets) on success.
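
    A short usage sketch (the option values shown are arbitrary):

    $docs = array
    (
        "this is my test text to be highlighted",
        "another test text to be highlighted"
    );
    $opts = array
    (
        "before_match"    => "<b>",
        "after_match"     => "</b>",
        "chunk_separator" => " ... ",
        "limit"           => 60,
        "around"          => 3
    );
    
    $res = $cl->BuildExcerpts ( $docs, "test1", "test text", $opts );
    if ( $res===false )
        die ( "ERROR: " . $cl->GetLastError() );
    foreach ( $res as $snippet )
        print "$snippet\n";
    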

    8.7.2. UpdateAttributes

    Prototype: function UpdateAttributes ( $index, $attrs, $values )

    Instantly updates given attribute values in given documents. Returns number of actually updated documents (0 or more) on success, or -1 on failure.

    $index is a name of the index (or indexes) to be updated. $attrs is a plain array with string attribute names, listing attributes that are updated. $values is a hash where key is document ID, and value is a plain array of new attribute values.

    $index can be either a single index name or a list, like in Query(). Unlike Query(), wildcards are not allowed and all the indexes to update must be specified explicitly. The list of indexes can include distributed index names. Updates on distributed indexes will be pushed to all agents.

    The updates only work with docinfo=extern storage strategy. They are very fast because they're working fully in RAM, but they can also be made persistent: updates are saved on disk on clean searchd shutdown initiated by SIGTERM signal. With additional restrictions, updates are also possible on MVA attributes; refer to mva_updates_pool directive for details.

    Usage example:

    $cl->UpdateAttributes ( "test1", array("group_id"), array(1=>array(456)) );
    $cl->UpdateAttributes ( "products", array ( "price", "amount_in_stock" ),
    	array ( 1001=>array(123,5), 1002=>array(37,11), 1003=>array(25,129) ) );
    

    The first sample statement will update document 1 in index "test1", setting "group_id" to 456. The second one will update documents 1001, 1002 and 1003 in index "products". For document 1001, the new price will be set to 123 and the new amount in stock to 5; for document 1002, the new price will be 37 and the new amount will be 11; etc.

    8.7.3. BuildKeywords

    Prototype: function BuildKeywords ( $query, $index, $hits )

    Extracts keywords from query using tokenizer settings for given index, optionally with per-keyword occurrence statistics. Returns an array of hashes with per-keyword information.

    $query is a query to extract keywords from. $index is a name of the index to get tokenizing settings and keyword occurrence statistics from. $hits is a boolean flag that indicates whether keyword occurrence statistics are required.

    Usage example:

    $keywords = $cl->BuildKeywords ( "this.is.my query", "test1", false );
    

    8.7.4. EscapeString

    Prototype: function EscapeString ( $string )

    Escapes characters that are treated as special operators by the query language parser. Returns an escaped string.

    $string is a string to escape.

    This function might seem redundant because it's trivial to implement in any calling application. However, as the set of special characters might change over time, it makes sense to have an API call that is guaranteed to escape all such characters at all times.

    Usage example:

    $escaped = $cl->EscapeString ( "escaping-sample@query/string" );
    

    8.7.5. Status

    Prototype: function Status ()

    Queries searchd status, and returns an array of status variable name and value pairs.

    Usage example:

    $status = $cl->Status ();
    foreach ( $status as $row )
    	print join ( ": ", $row ) . "\n";
    

    8.7.6. FlushAttributes

    Prototype: function FlushAttributes ()

    Forces searchd to flush pending attribute updates to disk, and blocks until completion. Returns a non-negative internal "flush tag" on success. Returns -1 and sets an error message on error. Introduced in version 1.10-beta.

    Attribute values updated using UpdateAttributes() API call are only kept in RAM until a so-called flush (which writes the current, possibly updated attribute values back to disk). FlushAttributes() call lets you enforce a flush. The call will block until searchd finishes writing the data to disk, which might take seconds or even minutes depending on the total data size (.spa file size). All the currently updated indexes will be flushed.

    The flush tag should be treated as an ever-growing magic number that does not mean anything by itself. It is guaranteed to be non-negative, and it is guaranteed to grow over time, though not necessarily in a sequential fashion; for instance, two calls that return 10 and then 1000 respectively are a valid situation. If two calls to FlushAttributes() return the same tag, it means that there were no actual attribute updates in between them, and therefore the currently flushed state remained the same (for all indexes).

    Usage example:

    $status = $cl->FlushAttributes ();
    if ( $status<0 )
    	print "ERROR: " . $cl->GetLastError(); 
    

    8.8. Persistent connections

    Persistent connections allow you to use a single network connection to run multiple commands that would otherwise require reconnects.

    8.8.1. Open

    Prototype: function Open ()

    Opens a persistent connection to the server.

    8.8.2. Close

    Prototype: function Close ()

    Closes a previously opened persistent connection.
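
    A minimal usage sketch (assuming $cl is an already configured client instance):

    $cl->Open ();
    $res1 = $cl->Query ( "first query", "test1" );
    $res2 = $cl->Query ( "second query", "test1" );
    $cl->Close ();
    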

    Chapter 9. MySQL storage engine (SphinxSE)

    9.1. SphinxSE overview

    SphinxSE is a MySQL storage engine which can be compiled into MySQL server 5.x using its pluggable architecture. It is not available for the MySQL 4.x series. It also requires MySQL 5.0.22 or higher in the 5.0.x series, or MySQL 5.1.12 or higher in the 5.1.x series.

    Despite the name, SphinxSE does not actually store any data itself. It is actually a built-in client which allows MySQL server to talk to searchd, run search queries, and obtain search results. All indexing and searching happen outside MySQL.

    Obvious SphinxSE applications include:

    • easier porting of MySQL FTS applications to Sphinx;

    • allowing Sphinx use with programming languages for which native APIs are not available yet;

    • optimizations when additional Sphinx result set processing on MySQL side is required (eg. JOINs with original document tables, additional MySQL-side filtering, etc).

    9.2. Installing SphinxSE

    You will need to obtain a copy of MySQL sources, prepare those, and then recompile the MySQL binary. MySQL sources (mysql-5.x.yy.tar.gz) can be obtained from the dev.mysql.com Web site.

    For some MySQL versions, delta tarballs with already prepared sources are available from the Sphinx Web site. After unpacking one of those over the original sources, MySQL is ready to be configured and built with Sphinx support.

    If such a tarball is not available, or does not work for you for any reason, you will have to prepare the sources manually. You will need the GNU Autotools framework (autoconf, automake and libtool) installed to do that.

    9.2.1. Compiling MySQL 5.0.x with SphinxSE

    1. copy sphinx.5.0.yy.diff patch file into MySQL sources directory and run

      patch -p1 < sphinx.5.0.yy.diff
      

      If there's no .diff file for the exact version you need to build, try applying the .diff with the closest version number. It is important that the patch applies with no rejects.

    2. in MySQL sources directory, run

      sh BUILD/autorun.sh
      

    3. in MySQL sources directory, create a sql/sphinx directory and copy all files from the mysqlse directory in Sphinx sources there. Example:

      cp -R /root/builds/sphinx-0.9.7/mysqlse /root/builds/mysql-5.0.24/sql/sphinx
      

    4. configure MySQL and enable Sphinx engine:

      ./configure --with-sphinx-storage-engine
      

    5. build and install MySQL:

      make
      make install
      

    9.2.2. Compiling MySQL 5.1.x with SphinxSE

    1. in MySQL sources directory, create a storage/sphinx directory and copy all files from the mysqlse directory in Sphinx sources there. Example:

      cp -R /root/builds/sphinx-0.9.7/mysqlse /root/builds/mysql-5.1.14/storage/sphinx
      

    2. in MySQL sources directory, run

      sh BUILD/autorun.sh
      

    3. configure MySQL and enable Sphinx engine:

      ./configure --with-plugins=sphinx
      

    4. build and install MySQL:

      make
      make install
      

    9.2.3. Checking SphinxSE installation

    To check whether SphinxSE has been successfully compiled into MySQL, launch the newly built server, run the mysql client and issue the SHOW ENGINES query. You should see a list of all available engines. Sphinx should be present and the "Support" column should contain "YES":

    mysql> show engines;
    +------------+----------+-------------------------------------------------------------+
    | Engine     | Support  | Comment                                                     |
    +------------+----------+-------------------------------------------------------------+
    | MyISAM     | DEFAULT  | Default engine as of MySQL 3.23 with great performance      |
      ...
    | SPHINX     | YES      | Sphinx storage engine                                       |
      ...
    +------------+----------+-------------------------------------------------------------+
    13 rows in set (0.00 sec)
    

    9.3. Using SphinxSE

    To search via SphinxSE, you would need to create a special ENGINE=SPHINX "search table", and then SELECT from it with the full-text query put into the WHERE clause for the query column.

    Let's begin with an example create statement and search query:

    CREATE TABLE t1
    (
        id          INTEGER UNSIGNED NOT NULL,
        weight      INTEGER NOT NULL,
        query       VARCHAR(3072) NOT NULL,
        group_id    INTEGER,
        INDEX(query)
    ) ENGINE=SPHINX CONNECTION="sphinx://localhost:9312/test";
    
    SELECT * FROM t1 WHERE query='test it;mode=any';
    

    The first 3 columns of the search table must be of types INTEGER UNSIGNED or BIGINT for the 1st column (document id), INTEGER or BIGINT for the 2nd column (match weight), and VARCHAR or TEXT for the 3rd column (your query), respectively. This mapping is fixed; you can not omit any of these three required columns, move them around, or change their types. Also, the query column must be indexed; all the others must be kept unindexed. Column names are ignored, so you can use arbitrary ones.

    Additional columns must be either INTEGER, TIMESTAMP, BIGINT, VARCHAR, or FLOAT. They will be bound to attributes provided in the Sphinx result set by name, so their names must match attribute names specified in sphinx.conf. If there's no such attribute name in the Sphinx search results, the column will have NULL values.

    Special "virtual" attributes names can also be bound to SphinxSE columns. _sph_ needs to be used instead of @ for that. For instance, to obtain the values of @groupby, @count, or @distinct virtual attributes, use _sph_groupby, _sph_count or _sph_distinct column names, respectively.

    CONNECTION string parameter can be used to specify default searchd host, port and indexes for queries issued using this table. If no connection string is specified in CREATE TABLE, index name "*" (ie. search all indexes) and localhost:9312 are assumed. Connection string syntax is as follows:

    CONNECTION="sphinx://HOST:PORT/INDEXNAME"
    

    You can change the default connection string later:

    ALTER TABLE t1 CONNECTION="sphinx://NEWHOST:NEWPORT/NEWINDEXNAME";
    

    You can also override all these parameters per-query.

    As seen in the example, both the query text and the search options should be put into the WHERE clause on the search query column (ie. 3rd column); the options are separated by semicolons, and their names are separated from values by an equals sign. Any number of options can be specified. Available options are:

    • query - query text;

    • mode - matching mode. Must be one of "all", "any", "phrase", "boolean", or "extended". Default is "all";

    • sort - match sorting mode. Must be one of "relevance", "attr_desc", "attr_asc", "time_segments", or "extended". In all modes besides "relevance" an attribute name (or a sorting clause for "extended") is also required after a colon:

      ... WHERE query='test;sort=attr_asc:group_id';
      ... WHERE query='test;sort=extended:@weight desc, group_id asc';
      

    • offset - offset into result set, default is 0;

    • limit - amount of matches to retrieve from result set, default is 20;

    • index - names of the indexes to search:

      ... WHERE query='test;index=test1;';
      ... WHERE query='test;index=test1,test2,test3;';
      

    • minid, maxid - min and max document ID to match;

    • weights - comma-separated list of weights to be assigned to Sphinx full-text fields:

      ... WHERE query='test;weights=1,2,3;';
      

    • filter, !filter - comma-separated attribute name and a set of values to match:

      # only include groups 1, 5 and 19
      ... WHERE query='test;filter=group_id,1,5,19;';
      
      # exclude groups 3 and 11
      ... WHERE query='test;!filter=group_id,3,11;';
      

    • range, !range - comma-separated attribute name, min and max value to match:

      # include groups from 3 to 7, inclusive
      ... WHERE query='test;range=group_id,3,7;';
      
      # exclude groups from 5 to 25
      ... WHERE query='test;!range=group_id,5,25;';
      

    • maxmatches - per-query max matches value, as in max_matches parameter to SetLimits() API call:

      ... WHERE query='test;maxmatches=2000;';
      

    • cutoff - maximum allowed matches, as in cutoff parameter to SetLimits() API call:

      ... WHERE query='test;cutoff=10000;';
      

    • maxquerytime - maximum allowed query time (in milliseconds), as in SetMaxQueryTime() API call:

      ... WHERE query='test;maxquerytime=1000;';
      

    • groupby - group-by function and attribute, corresponding to SetGroupBy() API call:

      ... WHERE query='test;groupby=day:published_ts;';
      ... WHERE query='test;groupby=attr:group_id;';
      

    • groupsort - group-by sorting clause:

      ... WHERE query='test;groupsort=@count desc;';
      

    • distinct - an attribute to compute COUNT(DISTINCT) for when doing group-by, as in SetGroupDistinct() API call:

      ... WHERE query='test;groupby=attr:country_id;distinct=site_id';
      

    • indexweights - comma-separated list of index names and weights to use when searching through several indexes:

      ... WHERE query='test;indexweights=idx_exact,2,idx_stemmed,1;';
      

    • comment - a string to mark this query in query log (mapping to $comment parameter in Query() API call):

      ... WHERE query='test;comment=marker001;';
      

    • select - a string with expressions to compute (mapping to SetSelect() API call):

      ... WHERE query='test;select=2*a+3*b as myexpr;';
      

    • host, port - remote searchd host name and TCP port, respectively:

      ... WHERE query='test;host=sphinx-test.loc;port=7312;';
      

    • ranker - a ranking function to use with "extended" matching mode, as in SetRankingMode() API call (the only mode that supports full query syntax). Known values are "proximity_bm25", "bm25", "none", "wordcount", "proximity", "matchany", "fieldmask"; and, starting with 2.0.4-release, "expr:EXPRESSION" syntax to support expression-based ranker (where EXPRESSION should be replaced with your specific ranking formula):

      ... WHERE query='test;mode=extended;ranker=bm25;';
      ... WHERE query='test;mode=extended;ranker=expr:sum(lcs);';
      

    • geoanchor - geodistance anchor, as in SetGeoAnchor() API call. Takes 4 parameters, which are the latitude and longitude attribute names and the anchor point coordinates, respectively:

      ... WHERE query='test;geoanchor=latattr,lonattr,0.123,0.456';
      

    One very important note: it is much more efficient to let Sphinx perform sorting, filtering and slicing of the result set than to raise the max matches count and use WHERE, ORDER BY and LIMIT clauses on the MySQL side. This is for two reasons. First, Sphinx does a number of optimizations and performs better than MySQL on these tasks. Second, less data needs to be packed by searchd, transferred and unpacked by SphinxSE.

    Starting with version 0.9.9-rc1, additional query info besides the result set can be retrieved with the SHOW ENGINE SPHINX STATUS statement:

    mysql> SHOW ENGINE SPHINX STATUS;
    +--------+-------+-------------------------------------------------+
    | Type   | Name  | Status                                          |
    +--------+-------+-------------------------------------------------+
    | SPHINX | stats | total: 25, total found: 25, time: 126, words: 2 | 
    | SPHINX | words | sphinx:591:1256 soft:11076:15945                | 
    +--------+-------+-------------------------------------------------+
    2 rows in set (0.00 sec)
    

    This information can also be accessed through status variables. Note that this method does not require super-user privileges.

    mysql> SHOW STATUS LIKE 'sphinx_%';
    +--------------------+----------------------------------+
    | Variable_name      | Value                            |
    +--------------------+----------------------------------+
    | sphinx_total       | 25                               | 
    | sphinx_total_found | 25                               | 
    | sphinx_time        | 126                              | 
    | sphinx_word_count  | 2                                | 
    | sphinx_words       | sphinx:591:1256 soft:11076:15945 | 
    +--------------------+----------------------------------+
    5 rows in set (0.00 sec)
    

    You can perform JOINs between the SphinxSE search table and tables using other engines. Here's an example with "documents" from example.sql:

    mysql> SELECT content, date_added FROM test.documents docs
    -> JOIN t1 ON (docs.id=t1.id) 
    -> WHERE query="one document;mode=any";
    +-------------------------------------+---------------------+
    | content                             | date_added          |
    +-------------------------------------+---------------------+
    | this is my test document number two | 2006-06-17 14:04:28 | 
    | this is my test document number one | 2006-06-17 14:04:28 | 
    +-------------------------------------+---------------------+
    2 rows in set (0.00 sec)
    
    mysql> SHOW ENGINE SPHINX STATUS;
    +--------+-------+---------------------------------------------+
    | Type   | Name  | Status                                      |
    +--------+-------+---------------------------------------------+
    | SPHINX | stats | total: 2, total found: 2, time: 0, words: 2 | 
    | SPHINX | words | one:1:2 document:2:2                        | 
    +--------+-------+---------------------------------------------+
    2 rows in set (0.00 sec)
    

    9.4. Building snippets (excerpts) via MySQL

    Starting with version 0.9.9-rc2, SphinxSE also includes a UDF function that lets you create snippets through MySQL. The functionality is very similar to the BuildExcerpts() API call, but accessible through MySQL+SphinxSE.

    The binary that provides the UDF is named sphinx.so and should be automatically built and installed to the proper location along with SphinxSE itself. If it does not get installed automatically for some reason, look for sphinx.so in the build directory and copy it to the plugins directory of your MySQL instance. After that, register the UDF using the following statement:

    CREATE FUNCTION sphinx_snippets RETURNS STRING SONAME 'sphinx.so';
    

    The function name must be sphinx_snippets; you can not use an arbitrary name. Function arguments are as follows:

    Prototype: function sphinx_snippets ( document, index, words, [options] );

    Document and words arguments can be either strings or table columns. Options must be specified like this: 'value' AS option_name. For a list of supported options, refer to the BuildExcerpts() API call. The only UDF-specific additional option is named 'sphinx' and lets you specify the searchd location (host and port).

    Usage examples:

    SELECT sphinx_snippets('hello world doc', 'main', 'world',
        'sphinx://192.168.1.1/' AS sphinx, true AS exact_phrase,
        '[b]' AS before_match, '[/b]' AS after_match)
    FROM documents;
    
    SELECT title, sphinx_snippets(text, 'index', 'mysql php') AS text
        FROM sphinx, documents
        WHERE query='mysql php' AND sphinx.id=documents.id;
    

    Chapter 10. Reporting bugs

    Unfortunately, Sphinx is not yet 100% bug free (even though I'm working hard towards that), so you might occasionally run into some issues.

    Reporting as much as possible about each bug is very important - because to fix it, I need to be able either to reproduce and debug the bug, or to deduce what's causing it from the information that you provide. So here are some instructions on how to do that.

    Build-time issues

    If Sphinx fails to build for some reason, please do the following:

    1. check that headers and libraries for your DBMS are properly installed (for instance, check that mysql-devel package is present);

    2. report Sphinx version and config file (be sure to remove the passwords!), MySQL (or PostgreSQL) configuration info, gcc version, OS version and CPU type (ie. x86, x86-64, PowerPC, etc):

      mysql_config
      gcc --version
      uname -a
      

    3. report the error message produced by configure or gcc (it should include the error message itself only, not the whole build log).

    Run-time issues

    If Sphinx builds and runs, but there are any problems running it, please do the following:

    1. describe the bug (ie. both the expected behavior and actual behavior) and all the steps necessary to reproduce it;

    2. include Sphinx version and config file (be sure to remove the passwords!), MySQL (or PostgreSQL) version, gcc version, OS version and CPU type (ie. x86, x86-64, PowerPC, etc):

      mysql --version
      gcc --version
      uname -a
      

    3. build, install and run debug versions of all Sphinx programs (this is to enable a lot of additional internal checks, so-called assertions):

      make distclean
      ./configure --with-debug
      make install
      killall -TERM searchd
      

    4. reindex to check if any assertions are triggered (in this case, it's likely that the index is corrupted and causing problems);

    5. if the bug does not reproduce with debug versions, revert to non-debug and mention it in your report;

    6. if the bug could be easily reproduced with a small (1-100 record) part of your database, please provide a gzipped dump of that part;

    7. if the problem is related to searchd, include relevant entries from searchd.log and query.log in your bug report;

    8. if the problem is related to searchd, try running it in console mode and check if it dies with an assertion:

      ./searchd --console
      

    9. if any program dies with an assertion, provide the assertion message.

    Debugging assertions, crashes and hangups

    If any program dies with an assertion, crashes without an assertion or hangs up, you would additionally need to generate a core dump and examine it.

    1. enable core dumps. On most Linux systems, this is done using ulimit:

      ulimit -c 32768
      

    2. run the program and try to reproduce the bug;

    3. if the program crashes (either with or without an assertion), find the core file in the current directory (the program should typically print out a "Segmentation fault (core dumped)" message);

    4. if the program hangs, use kill -SEGV from another console to force it to exit and dump core:

      kill -SEGV HANGED-PROCESS-ID
      

    5. use gdb to examine the core file and obtain a backtrace:

      gdb ./CRASHED-PROGRAM-FILE-NAME CORE-DUMP-FILE-NAME
      (gdb) bt
      (gdb) quit
      

    Note that HANGED-PROCESS-ID, CRASHED-PROGRAM-FILE-NAME and CORE-DUMP-FILE-NAME must all be replaced with specific numbers and file names. For example, a debugging session for a hung searchd would look like this:

    # kill -SEGV 12345
    # ls *core*
    core.12345
    # gdb ./searchd core.12345
    (gdb) bt
    ...
    (gdb) quit
    

    Note that ulimit is not server-wide and only affects the current shell session. This means that you will not have to restore any server-wide limits - but if you relogin, you will have to set ulimit again.

    Core dumps should be placed in the current working directory (and Sphinx programs do not change it), so this is where you would look for them.

    Please do not immediately remove the core file because there could be additional helpful information which could be retrieved from it. You do not need to send me this file (as the debug info there is closely tied to your system) but I might need to ask you a few additional questions about it.

    Chapter 11. sphinx.conf options reference

    Table of Contents

    11.1. Data source configuration options
    11.1.1. type
    11.1.2. sql_host
    11.1.3. sql_port
    11.1.4. sql_user
    11.1.5. sql_pass
    11.1.6. sql_db
    11.1.7. sql_sock
    11.1.8. mysql_connect_flags
    11.1.9. mysql_ssl_cert, mysql_ssl_key, mysql_ssl_ca
    11.1.10. odbc_dsn
    11.1.11. sql_query_pre
    11.1.12. sql_query
    11.1.13. sql_joined_field
    11.1.14. sql_query_range
    11.1.15. sql_range_step
    11.1.16. sql_query_killlist
    11.1.17. sql_attr_uint
    11.1.18. sql_attr_bool
    11.1.19. sql_attr_bigint
    11.1.20. sql_attr_timestamp
    11.1.21. sql_attr_str2ordinal
    11.1.22. sql_attr_float
    11.1.23. sql_attr_multi
    11.1.24. sql_attr_string
    11.1.25. sql_attr_str2wordcount
    11.1.26. sql_column_buffers
    11.1.27. sql_field_string
    11.1.28. sql_field_str2wordcount
    11.1.29. sql_file_field
    11.1.30. sql_query_post
    11.1.31. sql_query_post_index
    11.1.32. sql_ranged_throttle
    11.1.33. sql_query_info
    11.1.34. xmlpipe_command
    11.1.35. xmlpipe_field
    11.1.36. xmlpipe_field_string
    11.1.37. xmlpipe_field_wordcount
    11.1.38. xmlpipe_attr_uint
    11.1.39. xmlpipe_attr_bigint
    11.1.40. xmlpipe_attr_bool
    11.1.41. xmlpipe_attr_timestamp
    11.1.42. xmlpipe_attr_str2ordinal
    11.1.43. xmlpipe_attr_float
    11.1.44. xmlpipe_attr_multi
    11.1.45. xmlpipe_attr_multi_64
    11.1.46. xmlpipe_attr_string
    11.1.47. xmlpipe_fixup_utf8
    11.1.48. mssql_winauth
    11.1.49. mssql_unicode
    11.1.50. unpack_zlib
    11.1.51. unpack_mysqlcompress
    11.1.52. unpack_mysqlcompress_maxsize
    11.2. Index configuration options
    11.2.1. type
    11.2.2. source
    11.2.3. path
    11.2.4. docinfo
    11.2.5. mlock
    11.2.6. morphology
    11.2.7. dict
    11.2.8. index_sp
    11.2.9. index_zones
    11.2.10. min_stemming_len
    11.2.11. stopwords
    11.2.12. wordforms
    11.2.13. exceptions
    11.2.14. min_word_len
    11.2.15. charset_type
    11.2.16. charset_table
    11.2.17. ignore_chars
    11.2.18. min_prefix_len
    11.2.19. min_infix_len
    11.2.20. prefix_fields
    11.2.21. infix_fields
    11.2.22. enable_star
    11.2.23. ngram_len
    11.2.24. ngram_chars
    11.2.25. phrase_boundary
    11.2.26. phrase_boundary_step
    11.2.27. html_strip
    11.2.28. html_index_attrs
    11.2.29. html_remove_elements
    11.2.30. local
    11.2.31. agent
    11.2.32. agent_blackhole
    11.2.33. agent_connect_timeout
    11.2.34. agent_query_timeout
    11.2.35. preopen
    11.2.36. ondisk_dict
    11.2.37. inplace_enable
    11.2.38. inplace_hit_gap
    11.2.39. inplace_docinfo_gap
    11.2.40. inplace_reloc_factor
    11.2.41. inplace_write_factor
    11.2.42. index_exact_words
    11.2.43. overshort_step
    11.2.44. stopword_step
    11.2.45. hitless_words
    11.2.46. expand_keywords
    11.2.47. blend_chars
    11.2.48. blend_mode
    11.2.49. rt_mem_limit
    11.2.50. rt_field
    11.2.51. rt_attr_uint
    11.2.52. rt_attr_bigint
    11.2.53. rt_attr_float
    11.2.54. rt_attr_multi
    11.2.55. rt_attr_multi_64
    11.2.56. rt_attr_timestamp
    11.2.57. rt_attr_string
    11.3. indexer program configuration options
    11.3.1. mem_limit
    11.3.2. max_iops
    11.3.3. max_iosize
    11.3.4. max_xmlpipe2_field
    11.3.5. write_buffer
    11.3.6. max_file_field_buffer
    11.3.7. on_file_field_error
    11.4. searchd program configuration options
    11.4.1. listen
    11.4.2. address
    11.4.3. port
    11.4.4. log
    11.4.5. query_log
    11.4.6. query_log_format
    11.4.7. read_timeout
    11.4.8. client_timeout
    11.4.9. max_children
    11.4.10. pid_file
    11.4.11. max_matches
    11.4.12. seamless_rotate
    11.4.13. preopen_indexes
    11.4.14. unlink_old
    11.4.15. attr_flush_period
    11.4.16. ondisk_dict_default
    11.4.17. max_packet_size
    11.4.18. mva_updates_pool
    11.4.19. crash_log_path
    11.4.20. max_filters
    11.4.21. max_filter_values
    11.4.22. listen_backlog
    11.4.23. read_buffer
    11.4.24. read_unhinted
    11.4.25. max_batch_queries
    11.4.26. subtree_docs_cache
    11.4.27. subtree_hits_cache
    11.4.28. workers
    11.4.29. dist_threads
    11.4.30. binlog_path
    11.4.31. binlog_flush
    11.4.32. binlog_max_log_size
    11.4.33. collation_server
    11.4.34. collation_libc_locale
    11.4.35. plugin_dir
    11.4.36. mysql_version_string
    11.4.37. rt_flush_period
    11.4.38. thread_stack
    11.4.39. expansion_limit
    11.4.40. compat_sphinxql_magics
    11.4.41. watchdog
    11.4.42. prefork_rotation_throttle

    11.1. Data source configuration options

    11.1.1. type

    Data source type. Mandatory, no default value. Known types are mysql, pgsql, mssql, xmlpipe, xmlpipe2, and odbc.

    All other per-source options depend on source type selected by this option. Names of the options used for SQL sources (ie. MySQL, PostgreSQL, MS SQL) start with "sql_"; names of the ones used for xmlpipe and xmlpipe2 start with "xmlpipe_". All source types except xmlpipe are conditional; they might or might not be supported depending on your build settings, installed client libraries, etc. mssql type is currently only available on Windows. odbc type is available both on Windows natively and on Linux through UnixODBC library.

    Example:

    type = mysql
    

    11.1.2. sql_host

    SQL server host to connect to. Mandatory, no default value. Applies to SQL source types (mysql, pgsql, mssql) only.

    In the simplest case when Sphinx resides on the same host with your MySQL or PostgreSQL installation, you would simply specify "localhost". Note that MySQL client library chooses whether to connect over TCP/IP or over UNIX socket based on the host name. Specifically "localhost" will force it to use UNIX socket (this is the default and generally recommended mode) and "127.0.0.1" will force TCP/IP usage. Refer to MySQL manual for more details.

    Example:

    sql_host = localhost
    

    11.1.3. sql_port

    SQL server IP port to connect to. Optional, default is 3306 for mysql source type and 5432 for pgsql type. Applies to SQL source types (mysql, pgsql, mssql) only. Note that it depends on sql_host setting whether this value will actually be used.

    Example:

    sql_port = 3306
    

    11.1.4. sql_user

    SQL user to use when connecting to sql_host. Mandatory, no default value. Applies to SQL source types (mysql, pgsql, mssql) only.

    Example:

    sql_user = test
    

    11.1.5. sql_pass

    SQL user password to use when connecting to sql_host. Mandatory, no default value. Applies to SQL source types (mysql, pgsql, mssql) only.

    Example:

    sql_pass = mysecretpassword
    

    11.1.6. sql_db

    SQL database (in MySQL terms) to use after connecting, and to perform further queries within. Mandatory, no default value. Applies to SQL source types (mysql, pgsql, mssql) only.

    Example:

    sql_db = test
    

    11.1.7. sql_sock

    UNIX socket name to connect to for local SQL servers. Optional, default value is empty (use client library default settings). Applies to SQL source types (mysql, pgsql, mssql) only.

    On Linux, it would typically be /var/lib/mysql/mysql.sock. On FreeBSD, it would typically be /tmp/mysql.sock. Note that it depends on sql_host setting whether this value will actually be used.

    Example:

    sql_sock = /tmp/mysql.sock
    

    11.1.8. mysql_connect_flags

    MySQL client connection flags. Optional, default value is 0 (do not set any flags). Applies to mysql source type only.

    This option must contain an integer value with the sum of the flags. The value will be passed to mysql_real_connect() verbatim. The flags are enumerated in mysql_com.h include file. Flags that are especially interesting in regard to indexing, with their respective values, are as follows:

    • CLIENT_COMPRESS = 32; can use compression protocol

    • CLIENT_SSL = 2048; switch to SSL after handshake

    • CLIENT_SECURE_CONNECTION = 32768; new 4.1 authentication

    For instance, you can specify 2080 (2048+32) to use both compression and SSL, or 32768 to use new authentication only. Initially, this option was introduced to be able to use compression when the indexer and mysqld are on different hosts. Compression on 1 Gbps links is most likely to hurt indexing time though it reduces network traffic, both in theory and in practice. However, enabling compression on 100 Mbps links may improve indexing time significantly (up to 20-30% of the total indexing time improvement was reported). Your mileage may vary.

    Example:

    mysql_connect_flags = 32 # enable compression
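
    To combine compression with SSL, for instance, the directive value would simply be the sum of the two flags (a sketch based on the flag values listed above):

    mysql_connect_flags = 2080 # 2048 (CLIENT_SSL) + 32 (CLIENT_COMPRESS)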
    

    11.1.9. mysql_ssl_cert, mysql_ssl_key, mysql_ssl_ca

    SSL certificate settings to use for connecting to MySQL server. Optional, default values are empty strings (do not use SSL). Applies to mysql source type only.

    These directives let you set up secure SSL connection between indexer and MySQL. The details on creating the certificates and setting up MySQL server can be found in MySQL documentation.

    Example:

    mysql_ssl_cert = /etc/ssl/client-cert.pem
    mysql_ssl_key = /etc/ssl/client-key.pem
    mysql_ssl_ca = /etc/ssl/cacert.pem
    

    11.1.10. odbc_dsn

    ODBC DSN to connect to. Mandatory, no default value. Applies to odbc source type only.

    ODBC DSN (Data Source Name) specifies the credentials (host, user, password, etc) to use when connecting to ODBC data source. The format depends on specific ODBC driver used.

    Example:

    odbc_dsn = Driver={Oracle ODBC Driver};Dbq=myDBName;Uid=myUsername;Pwd=myPassword
    

    11.1.11. sql_query_pre

    Pre-fetch query, or pre-query. Multi-value, optional, default is empty list of queries. Applies to SQL source types (mysql, pgsql, mssql) only.

    Multi-value means that you can specify several pre-queries. They are executed before the main fetch query, and they will be executed exactly in the order of appearance in the configuration file. Pre-query results are ignored.

    Pre-queries are useful in a lot of ways. They are used to set up the encoding, mark records that are going to be indexed, update internal counters, set various per-connection SQL server options and variables, and so on.

    Perhaps the most frequent pre-query usage is to specify the encoding that the server will use for the rows it returns. It must match the encoding that Sphinx expects (as specified by the charset_type and charset_table options). Two MySQL-specific examples of setting the encoding are:

    sql_query_pre = SET CHARACTER_SET_RESULTS=cp1251
    sql_query_pre = SET NAMES utf8
    

    Also specific to MySQL sources, it is useful to disable the query cache (for the indexer connection only) in a pre-query, because indexing queries are not going to be re-run frequently anyway, and there's no sense in caching their results. That can be achieved with:

    sql_query_pre = SET SESSION query_cache_type=OFF
    

    Example:

    sql_query_pre = SET NAMES utf8
    sql_query_pre = SET SESSION query_cache_type=OFF
    

    11.1.12. sql_query

    Main document fetch query. Mandatory, no default value. Applies to SQL source types (mysql, pgsql, mssql) only.

    There can be only one main query. This is the query which is used to retrieve documents from the SQL server. You can specify up to 32 full-text fields (formally, up to SPH_MAX_FIELDS from sphinx.h), and an arbitrary amount of attributes. All of the columns that are neither document ID (the first one) nor attributes will be full-text indexed.

    Document ID MUST be the very first field, and it MUST BE UNIQUE UNSIGNED POSITIVE (NON-ZERO, NON-NEGATIVE) INTEGER NUMBER. It can be either 32-bit or 64-bit, depending on how you built Sphinx; by default Sphinx builds with 32-bit ID support, but the --enable-id64 option to configure allows building with 64-bit document and word ID support.

    Example:

    sql_query = \
    	SELECT id, group_id, UNIX_TIMESTAMP(date_added) AS date_added, \
    		title, content \
    	FROM documents
    

    11.1.13. sql_joined_field

    Joined/payload field fetch query. Multi-value, optional, default is empty list of queries. Applies to SQL source types (mysql, pgsql, mssql) only.

    sql_joined_field lets you use two different features: joined fields, and payloads (payload fields). Its syntax is as follows:

    sql_joined_field = FIELD-NAME 'from'  ( 'query' | 'payload-query' ); \
        QUERY [ ; RANGE-QUERY ]
    

    where

    • FIELD-NAME is a joined/payload field name;

    • QUERY is an SQL query that must fetch values to index.

    • RANGE-QUERY is an optional SQL query that fetches a range of values to index. (Added in version 2.0.1-beta.)

    Joined fields let you avoid JOIN and/or GROUP_CONCAT statements in the main document fetch query (sql_query). This can be useful when SQL-side JOIN is slow, or needs to be offloaded to the Sphinx side, or simply to emulate MySQL-specific GROUP_CONCAT functionality in case your database server does not support it.

    The query must return exactly 2 columns: document ID, and text to append to a joined field. Document IDs can be duplicated, but they must be in ascending order. All the text rows fetched for a given ID will be concatenated together, and the concatenation result will be indexed as the entire contents of a joined field. Rows will be concatenated in the order returned from the query, and separating whitespace will be inserted between them. For instance, if the joined field query returns the following rows:

    ( 1, 'red' )
    ( 1, 'right' )
    ( 1, 'hand' )
    ( 2, 'mysql' )
    ( 2, 'sphinx' )
    

    then the indexing results would be equivalent to that of adding a new text field with a value of 'red right hand' to document 1 and 'mysql sphinx' to document 2.

    Joined fields are only indexed differently. There are no other differences between joined fields and regular text fields.

    Starting with 2.0.1-beta, ranged queries can be used when a single query is not efficient enough or does not work because of the database driver limitations. They work similarly to the ranged queries in the main indexing loop, see Section 3.8, “Ranged queries”. The range will be queried for and fetched upfront once, then multiple queries with different $start and $end substitutions will be run to fetch the actual data.

    Payloads let you create a special field in which, instead of keyword positions, so-called user payloads are stored. Payloads are custom integer values attached to every keyword. They can then be used at search time to affect the ranking.

    The payload query must return exactly 3 columns: document ID; keyword; and integer payload value. Document IDs can be duplicated, but they must be in ascending order. Payloads must be unsigned integers within the 24-bit range, ie. from 0 to 16777215. For reference, payloads are currently internally stored as in-field keyword positions, but that is not guaranteed and might change in the future.

    Currently, the only method to account for payloads is to use the SPH_RANK_PROXIMITY_BM25 ranker. On indexes with payload fields, it will automatically switch to a variant that matches keywords in those fields, computes a sum of matched payloads multiplied by field weights, and adds that sum to the final rank.

    Example:

    sql_joined_field = \
    	tagstext from query; \
    	SELECT docid, CONCAT('tag',tagid) FROM tags ORDER BY docid ASC
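
    A sketch of the ranged and payload variants follows. It assumes a 'ranged-query' source keyword (by analogy with sql_attr_multi) and hypothetical tags and tag_weights tables, so adjust it to your schema:

    sql_joined_field = \
    	tagstext from ranged-query; \
    	SELECT docid, CONCAT('tag',tagid) FROM tags \
    		WHERE docid>=$start AND docid<=$end ORDER BY docid ASC; \
    	SELECT MIN(docid), MAX(docid) FROM tags
    
    sql_joined_field = \
    	tagweights from payload-query; \
    	SELECT docid, tag, weight FROM tag_weights ORDER BY docid ASC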
    

    11.1.14. sql_query_range

    Range query setup. Optional, default is empty. Applies to SQL source types (mysql, pgsql, mssql) only.

    Setting this option enables ranged document fetch queries (see Section 3.8, “Ranged queries”). Ranged queries are useful to avoid notorious MyISAM table locks when indexing lots of data. (They also help with other less notorious issues, such as reduced performance caused by big result sets, or additional resources consumed by InnoDB to serialize big read transactions.)

    The query specified in this option must fetch min and max document IDs that will be used as range boundaries. It must return exactly two integer fields, min ID first and max ID second; the field names are ignored.

    When ranged queries are enabled, sql_query will be required to contain $start and $end macros (because it obviously would be a mistake to index the whole table many times over). Note that the intervals specified by $start..$end will not overlap, so you should not remove document IDs that are exactly equal to $start or $end from your query. The example in Section 3.8, “Ranged queries” illustrates that; note how it uses greater-or-equal and less-or-equal comparisons.

    Example:

    sql_query_range = SELECT MIN(id),MAX(id) FROM documents
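
    For reference, the main fetch query paired with such a range query uses the $start and $end macros like this (a minimal sketch; the column list is illustrative):

    sql_query = \
    	SELECT id, title, content FROM documents \
    	WHERE id>=$start AND id<=$end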
    

    11.1.15. sql_range_step

    Range query step. Optional, default is 1024. Applies to SQL source types (mysql, pgsql, mssql) only.

    Only used when ranged queries are enabled. The full document ID interval fetched by sql_query_range will be walked in steps of this size. For example, if the min and max IDs fetched are 12 and 3456 respectively, and the step is 1000, indexer will call sql_query several times with the following substitutions:

    • $start=12, $end=1011

    • $start=1012, $end=2011

    • $start=2012, $end=3011

    • $start=3012, $end=3456

    Example:

    sql_range_step = 1000
    

    11.1.16. sql_query_killlist

    Kill-list query. Optional, default is empty (no query). Applies to SQL source types (mysql, pgsql, mssql) only. Introduced in version 0.9.9-rc1.

    This query is expected to return a number of 1-column rows, each containing just the document ID. The returned document IDs are stored within an index. The kill-list for a given index suppresses results from other indexes, depending on index order in the query. The intended use is to help implement deletions and updates on existing indexes without rebuilding (or even touching) them, and especially to fight the phantom results problem.

    Let us dissect an example. Assume we have two indexes, 'main' and 'delta'. Assume that documents 2, 3, and 5 were deleted since last reindex of 'main', and documents 7 and 11 were updated (ie. their text contents were changed). Assume that a keyword 'test' occurred in all these mentioned documents when we were indexing 'main'; still occurs in document 7 as we index 'delta'; but does not occur in document 11 any more. We now reindex delta and then search through both these indexes in proper (least to most recent) order:

    $res = $cl->Query ( "test", "main delta" );
    

    First, we need to properly handle deletions. The result set should not contain documents 2, 3, or 5. Second, we also need to avoid phantom results. Unless we do something about it, document 11 will appear in search results! It will be found in 'main' (but not 'delta'). And it will make it to the final result set unless something stops it.

    The kill-list, or K-list for short, is that something. The kill-list attached to 'delta' will suppress the specified rows from all the preceding indexes, in this case just 'main'. So to get the expected results, we should put all the updated and deleted document IDs into it.

    Example:

    sql_query_killlist = \
    	SELECT id FROM documents WHERE updated_ts>=@last_reindex UNION \
    	SELECT id FROM documents_deleted WHERE deleted_ts>=@last_reindex
    

    11.1.17. sql_attr_uint

    Unsigned integer attribute declaration. Multi-value (there might be multiple attributes declared), optional. Applies to SQL source types (mysql, pgsql, mssql) only.

    The column value should fit into 32-bit unsigned integer range. Values outside this range will be accepted but wrapped around. For instance, -1 will be wrapped around to 2^32-1 or 4,294,967,295.

    You can specify bit count for integer attributes by appending ':BITCOUNT' to the attribute name (see example below). Attributes with a size less than the default 32 bits, ie. bitfields, perform slower. But they require less RAM when using extern storage: such bitfields are packed together in 32-bit chunks in the .spa attribute data file. Bit size settings are ignored if using inline storage.

    Example:

    sql_attr_uint = group_id
    sql_attr_uint = forum_id:9 # 9 bits for forum_id
    

    11.1.18. sql_attr_bool

    Boolean attribute declaration. Multi-value (there might be multiple attributes declared), optional. Applies to SQL source types (mysql, pgsql, mssql) only. Equivalent to sql_attr_uint declaration with a bit count of 1.

    Example:

    sql_attr_bool = is_deleted # will be packed to 1 bit
    

    11.1.19. sql_attr_bigint

    64-bit signed integer attribute declaration. Multi-value (there might be multiple attributes declared), optional. Applies to SQL source types (mysql, pgsql, mssql) only. Note that unlike sql_attr_uint, these values are signed. Introduced in version 0.9.9-rc1.

    Example:

    sql_attr_bigint = my_bigint_id
    

    11.1.20. sql_attr_timestamp

    UNIX timestamp attribute declaration. Multi-value (there might be multiple attributes declared), optional. Applies to SQL source types (mysql, pgsql, mssql) only.

    Timestamps can store date and time in the range of Jan 01, 1970 to Jan 19, 2038 with a precision of one second. The expected column value should be a timestamp in UNIX format, ie. 32-bit unsigned integer number of seconds elapsed since midnight, January 01, 1970, GMT. Timestamps are internally stored and handled as integers everywhere. But in addition to working with timestamps as integers, it's also legal to use them along with different date-based functions, such as time segments sorting mode, or day/week/month/year extraction for GROUP BY.

    Note that DATE or DATETIME column types in MySQL can not be directly used as timestamp attributes in Sphinx; you need to explicitly convert such columns using the UNIX_TIMESTAMP function (if the data is in range).

    Note that timestamps can not represent dates before January 01, 1970, and UNIX_TIMESTAMP() in MySQL will not return anything expected for them. If you only need to work with dates, not times, consider the TO_DAYS() function in MySQL instead.

    Example:

    # sql_query = ... UNIX_TIMESTAMP(added_datetime) AS added_ts ...
    sql_attr_timestamp = added_ts
    

    11.1.21. sql_attr_str2ordinal

    Ordinal string number attribute declaration. Multi-value (there might be multiple attributes declared), optional. Applies to SQL source types (mysql, pgsql, mssql) only.

    This attribute type (so-called ordinal, for brevity) is intended to allow sorting by string values, but without storing the strings themselves. When indexing ordinals, string values are fetched from the database, temporarily stored, sorted, and then replaced by their respective ordinal numbers in the array of sorted strings. So, the ordinal number is an integer such that sorting by it produces the same result as sorting by the original strings lexicographically.

    Earlier versions could consume a lot of RAM for indexing ordinals. Starting with revision r1112, ordinal accumulation and sorting also run in fixed memory (at the cost of using additional temporary disk space), and honor the mem_limit setting.

    Ideally the strings should be sorted differently, depending on the encoding and locale. For instance, if the strings are known to be Russian text in KOI8R encoding, sorting the bytes 0xE0, 0xE1, and 0xE2 should produce 0xE1, 0xE2 and 0xE0, because in KOI8R value 0xE0 encodes a character that is (noticeably) after characters encoded by 0xE1 and 0xE2. Unfortunately, Sphinx does not support that at the moment and will simply sort the strings bytewise.

    Note that the ordinals are by construction local to each index, and it's therefore impossible to merge ordinals while retaining the proper order. The processed strings are replaced by their sequential number in the index they occurred in, but different indexes have different sets of strings. For instance, if the 'main' index contains strings "aaa", "bbb", "ccc", and so on up to "zzz", they'll be assigned numbers 1, 2, 3, and so on up to 26, respectively. But then if 'delta' only contains "zzz", the assigned number will be 1. And after the merge, the order will be broken. Unfortunately, this is impossible to work around without storing the original strings (and once Sphinx supports storing the original strings, ordinals will not be necessary any more).

    Example:

    sql_attr_str2ordinal = author_name
    

    11.1.22. sql_attr_float

    Floating point attribute declaration. Multi-value (there might be multiple attributes declared), optional. Applies to SQL source types (mysql, pgsql, mssql) only.

    The values will be stored in single precision, 32-bit IEEE 754 format. Represented range is approximately from 1e-38 to 1e+38. The amount of decimal digits that can be stored precisely is approximately 7. One important usage of the float attributes is storing latitude and longitude values (in radians), for further usage in query-time geosphere distance calculations.

    Example:

    sql_attr_float = lat_radians
    sql_attr_float = long_radians
    

    11.1.23. sql_attr_multi

    Multi-valued attribute (MVA) declaration. Multi-value (ie. there may be more than one such attribute declared), optional. Applies to SQL source types (mysql, pgsql, mssql) only.

    Plain attributes only allow attaching a single value to each document. However, there are cases (such as tags or categories) when it is desired to attach multiple values of the same attribute and be able to apply filtering or grouping to value lists.

    The declaration format is as follows (backslashes are for clarity only; everything can be declared in a single line as well):

    sql_attr_multi = ATTR-TYPE ATTR-NAME 'from' SOURCE-TYPE \
    	[;QUERY] \
    	[;RANGE-QUERY]
    

    where

    • ATTR-TYPE is 'uint', 'bigint' or 'timestamp'

    • SOURCE-TYPE is 'field', 'query', or 'ranged-query'

    • QUERY is SQL query used to fetch all ( docid, attrvalue ) pairs

    • RANGE-QUERY is SQL query used to fetch min and max ID values, similar to 'sql_query_range'

    Example:

    sql_attr_multi = uint tag from query; SELECT id, tag FROM tags
    sql_attr_multi = bigint tag from ranged-query; \
    	SELECT id, tag FROM tags WHERE id>=$start AND id<=$end; \
    	SELECT MIN(id), MAX(id) FROM tags
    

    11.1.24. sql_attr_string

    String attribute declaration. Multi-value (ie. there may be more than one such attribute declared), optional. Applies to SQL source types (mysql, pgsql, mssql) only. Introduced in version 1.10-beta.

    String attributes can store arbitrary strings attached to every document. There's a fixed size limit of 4 MB per value. Also, searchd will currently cache all the values in RAM, which is an additional implicit limit.

    As of 1.10-beta, strings can only be used for storage and retrieval. They can not participate in expressions, be used for filtering, sorting, or grouping (ie. in WHERE, ORDER or GROUP clauses). Note that attributes declared using sql_attr_string will not be full-text indexed; you can use sql_field_string directive for that.

    Example:

    sql_attr_string = title # will be stored but will not be indexed
    

    11.1.25. sql_attr_str2wordcount

    Word-count attribute declaration. Multi-value (ie. there may be more than one such attribute declared), optional. Applies to SQL source types (mysql, pgsql, mssql) only. Introduced in version 1.10-beta.

    Word-count attribute takes a string column, tokenizes it according to index settings, and stores the resulting number of tokens in an attribute. This number of tokens ("word count") is a normal integer that can be later used, for instance, in custom ranking expressions (boost shorter titles, help identify exact field matches, etc).

    Example:

    sql_attr_str2wordcount = title_wc
    

    11.1.26. sql_column_buffers

    Per-column buffer sizes. Optional, default is empty (deduce the sizes automatically). Applies to odbc, mssql source types only. Introduced in version 2.0.1-beta.

    ODBC and MS SQL drivers sometimes can not return the maximum actual column size to be expected. For instance, NVARCHAR(MAX) columns always report their length as 2147483647 bytes to indexer even though the actually used length is likely considerably less. However, the receiving buffers still need to be allocated upfront, and their sizes have to be determined. When the driver does not report the column length at all, Sphinx allocates default 1 KB buffers for each non-char column, and 1 MB buffers for each char column. Driver-reported column length also gets clamped by an upper limit of 8 MB, so in case the driver reports (almost) a 2 GB column length, it will be clamped and an 8 MB buffer will be allocated instead for that column. These hard-coded limits can be overridden using the sql_column_buffers directive, either in order to save memory on actually shorter columns, or to overcome the 8 MB limit on actually longer columns. The directive value must be a comma-separated list of selected column names and sizes:

    sql_column_buffers = <colname>=<size>[K|M] [, ...]
    

    Example:

    sql_query = SELECT id, mytitle, mycontent FROM documents
    sql_column_buffers = mytitle=64K, mycontent=10M
    

    11.1.27. sql_field_string

    Combined string attribute and full-text field declaration. Multi-value (ie. there may be more than one such attribute declared), optional. Applies to SQL source types (mysql, pgsql, mssql) only. Introduced in version 1.10-beta.

    sql_attr_string only stores the column value but does not full-text index it. In some cases it might be desired to both full-text index the column and store it as attribute. sql_field_string lets you do exactly that. Both the field and the attribute will be named the same.

    Example:

    sql_field_string = title # will be both indexed and stored
    

    11.1.28. sql_field_str2wordcount

    Combined word-count attribute and full-text field declaration. Multi-value (ie. there may be more than one such attribute declared), optional. Applies to SQL source types (mysql, pgsql, mssql) only. Introduced in version 1.10-beta.

    sql_attr_str2wordcount only stores the column word count but does not full-text index it. In some cases it might be desired to both full-text index the column and also have the count. sql_field_str2wordcount lets you do exactly that. Both the field and the attribute will be named the same.

    Example:

    sql_field_str2wordcount = title # will be indexed, and counted/stored
    

    11.1.29. sql_file_field

    File based field declaration. Applies to SQL source types (mysql, pgsql, mssql) only. Introduced in version 1.10-beta.

    This directive makes indexer interpret field contents as a file name, and load and index the referred file. Files larger than max_file_field_buffer in size are skipped. Any errors during the file loading (IO errors, exceeded limits, etc) will be reported as indexing warnings and will not terminate indexing early. No content will be indexed for such files.

    Example:

    sql_file_field = my_file_path # load and index files referred to by my_file_path
    

    11.1.30. sql_query_post

    Post-fetch query. Optional, default value is empty. Applies to SQL source types (mysql, pgsql, mssql) only.

    This query is executed immediately after sql_query completes successfully. When the post-fetch query produces errors, they are reported as warnings, but indexing is not terminated. Its result set is ignored. Note that indexing is not yet completed at the point when this query gets executed, and further indexing still may fail. Therefore, any permanent updates should not be done from here. For instance, updates on a helper table that permanently change the last successfully indexed ID should not be run from the post-fetch query; they should be run from the post-index query instead.

    Example:

    sql_query_post = DROP TABLE my_tmp_table
    

    11.1.31. sql_query_post_index

    Post-index query. Optional, default value is empty. Applies to SQL source types (mysql, pgsql, mssql) only.

    This query is executed when indexing is fully and successfully completed. If this query produces errors, they are reported as warnings, but indexing is not terminated. Its result set is ignored. The $maxid macro can be used in its text; it will be expanded to the maximum document ID which was actually fetched from the database during indexing. If no documents were indexed, $maxid will be expanded to 0.

    Example:

    sql_query_post_index = REPLACE INTO counters ( id, val ) \
        VALUES ( 'max_indexed_id', $maxid )
    

    11.1.32. sql_ranged_throttle

    Ranged query throttling period, in milliseconds. Optional, default is 0 (no throttling). Applies to SQL source types (mysql, pgsql, mssql) only.

    Throttling can be useful when indexer imposes too much load on the database server. It causes the indexer to sleep for the given number of milliseconds once per ranged query step. This sleep is unconditional, and is performed before the fetch query.
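
    As an illustration, a hedged sketch of a complete ranged-query setup (the documents table and column names are arbitrary) where indexer pauses before fetching each step; see sql_query_range and sql_range_step for the ranged-query directives themselves:

    sql_query_range     = SELECT MIN(id), MAX(id) FROM documents
    sql_range_step      = 1000
    sql_ranged_throttle = 250 # sleep 250 ms before each 1000-row step
    sql_query           = SELECT id, title, body FROM documents \
        WHERE id BETWEEN $start AND $end
    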

    Example:

    sql_ranged_throttle = 1000 # sleep for 1 sec before each query step
    

    11.1.33. sql_query_info

    Document info query. Optional, default is empty. Applies to mysql source type only.

    Only used by CLI search to fetch and display document information, only works with MySQL at the moment, and only intended for debugging purposes. This query fetches the row that will be displayed by CLI search utility for each document ID. It is required to contain $id macro that expands to the queried document ID.

    Example:

    sql_query_info = SELECT * FROM documents WHERE id=$id
    

    11.1.34. xmlpipe_command

    Shell command that invokes xmlpipe stream producer. Mandatory. Applies to xmlpipe and xmlpipe2 source types only.

    Specifies a command that will be executed and whose output will be parsed for documents. Refer to Section 3.9, “xmlpipe data source” or Section 3.10, “xmlpipe2 data source” for specific format description.

    Example:

    xmlpipe_command = cat /home/sphinx/test.xml
    

    11.1.35. xmlpipe_field

    xmlpipe field declaration. Multi-value, optional. Applies to xmlpipe2 source type only. Refer to Section 3.10, “xmlpipe2 data source”.

    Example:

    xmlpipe_field = subject
    xmlpipe_field = content
    

    11.1.36. xmlpipe_field_string

    xmlpipe field and string attribute declaration. Multi-value, optional. Applies to xmlpipe2 source type only. Refer to Section 3.10, “xmlpipe2 data source”. Introduced in version 1.10-beta.

    Makes the specified XML element indexed as both a full-text field and a string attribute. Equivalent to <sphinx:field name="field" attr="string"/> declaration within the XML file.

    Example:

    xmlpipe_field_string = subject
    

    11.1.37. xmlpipe_field_wordcount

    xmlpipe field and word count attribute declaration. Multi-value, optional. Applies to xmlpipe2 source type only. Refer to Section 3.10, “xmlpipe2 data source”. Introduced in version 1.10-beta.

    Makes the specified XML element indexed as both a full-text field and a word count attribute. Equivalent to <sphinx:field name="field" attr="wordcount"/> declaration within the XML file.

    Example:

    xmlpipe_field_wordcount = subject
    

    11.1.38. xmlpipe_attr_uint

    xmlpipe integer attribute declaration. Multi-value, optional. Applies to xmlpipe2 source type only. Syntax fully matches that of sql_attr_uint.

    Example:

    xmlpipe_attr_uint = author_id
    

    11.1.39. xmlpipe_attr_bigint

    xmlpipe signed 64-bit integer attribute declaration. Multi-value, optional. Applies to xmlpipe2 source type only. Syntax fully matches that of sql_attr_bigint.

    Example:

    xmlpipe_attr_bigint = my_bigint_id
    

    11.1.40. xmlpipe_attr_bool

    xmlpipe boolean attribute declaration. Multi-value, optional. Applies to xmlpipe2 source type only. Syntax fully matches that of sql_attr_bool.

    Example:

    xmlpipe_attr_bool = is_deleted # will be packed to 1 bit
    

    11.1.41. xmlpipe_attr_timestamp

    xmlpipe UNIX timestamp attribute declaration. Multi-value, optional. Applies to xmlpipe2 source type only. Syntax fully matches that of sql_attr_timestamp.

    Example:

    xmlpipe_attr_timestamp = published
    

    11.1.42. xmlpipe_attr_str2ordinal

    xmlpipe string ordinal attribute declaration. Multi-value, optional. Applies to xmlpipe2 source type only. Syntax fully matches that of sql_attr_str2ordinal.

    Example:

    xmlpipe_attr_str2ordinal = author_sort
    

    11.1.43. xmlpipe_attr_float

    xmlpipe floating point attribute declaration. Multi-value, optional. Applies to xmlpipe2 source type only. Syntax fully matches that of sql_attr_float.

    Example:

    xmlpipe_attr_float = lat_radians
    xmlpipe_attr_float = long_radians
    

    11.1.44. xmlpipe_attr_multi

    xmlpipe MVA attribute declaration. Multi-value, optional. Applies to xmlpipe2 source type only.

    This setting declares an MVA attribute tag in xmlpipe2 stream. The contents of the specified tag will be parsed and a list of integers that will constitute the MVA will be extracted, similar to how sql_attr_multi parses SQL column contents when 'field' MVA source type is specified.

    Example:

    xmlpipe_attr_multi = taglist
    

    11.1.45. xmlpipe_attr_multi_64

    xmlpipe MVA attribute declaration. Declares the BIGINT (signed 64-bit integer) MVA attribute. Multi-value, optional. Applies to xmlpipe2 source type only.

    This setting declares an MVA attribute tag in xmlpipe2 stream. The contents of the specified tag will be parsed and a list of integers that will constitute the MVA will be extracted, similar to how sql_attr_multi parses SQL column contents when 'field' MVA source type is specified.

    Example:

    xmlpipe_attr_multi_64 = taglist
    

    11.1.46. xmlpipe_attr_string

    xmlpipe string declaration. Multi-value, optional. Applies to xmlpipe2 source type only. Introduced in version 1.10-beta.

    This setting declares a string attribute tag in xmlpipe2 stream. The contents of the specified tag will be parsed and stored as a string value.

    Example:

    xmlpipe_attr_string = subject
    

    11.1.47. xmlpipe_fixup_utf8

    Perform Sphinx-side UTF-8 validation and filtering to prevent XML parser from choking on non-UTF-8 documents. Optional, default is 0. Applies to xmlpipe2 source type only.

    On certain occasions it might be hard or even impossible to guarantee that the incoming XMLpipe2 document bodies are in perfectly valid and conforming UTF-8 encoding. For instance, documents with national single-byte encodings could sneak into the stream. The libexpat XML parser is fragile, meaning that it will stop processing in such cases. The UTF-8 fixup feature lets you avoid that. When fixup is enabled, Sphinx will preprocess the incoming stream before passing it to the XML parser and replace invalid UTF-8 sequences with spaces.

    Example:

    xmlpipe_fixup_utf8 = 1
    

    11.1.48. mssql_winauth

    MS SQL Windows authentication flag. Boolean, optional, default value is 0 (false). Applies to mssql source type only. Introduced in version 0.9.9-rc1.

    Whether to use currently logged in Windows account credentials for authentication when connecting to MS SQL Server. Note that when running searchd as a service, the account it runs under can differ from the account you used to install the service.

    Example:

    mssql_winauth = 1
    

    11.1.49. mssql_unicode

    MS SQL encoding type flag. Boolean, optional, default value is 0 (false). Applies to mssql source type only. Introduced in version 0.9.9-rc1.

    Whether to ask for Unicode or single-byte data when querying MS SQL Server. This flag must be in sync with charset_type directive; that is, to index Unicode data, you must set both charset_type in the index (to 'utf-8') and mssql_unicode in the source (to 1). For reference, MS SQL will actually return data in UCS-2 encoding instead of UTF-8, but Sphinx will automatically handle that.
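
    A hedged sketch of the two settings kept in sync (the source and index names are arbitrary):

    source src_mssql
    {
        type          = mssql
        mssql_unicode = 1 # request Unicode (UCS-2) data from MS SQL
        ...
    }
    
    index idx_mssql
    {
        source       = src_mssql
        charset_type = utf-8 # must match mssql_unicode = 1
        ...
    }
    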

    Example:

    mssql_unicode = 1
    

    11.1.50. unpack_zlib

    Columns to unpack using zlib (aka deflate, aka gunzip). Multi-value, optional, default value is empty list of columns. Applies to SQL source types (mysql, pgsql, mssql) only. Introduced in version 0.9.9-rc1.

    Columns specified using this directive will be unpacked by indexer using standard zlib algorithm (called deflate and also implemented by gunzip). When indexing on a different box than the database, this lets you offload the database, and save on network traffic. The feature is only available if zlib and zlib-devel were both available during build time.

    Example:

    unpack_zlib = col1
    unpack_zlib = col2
    

    11.1.51. unpack_mysqlcompress

    Columns to unpack using MySQL UNCOMPRESS() algorithm. Multi-value, optional, default value is empty list of columns. Applies to SQL source types (mysql, pgsql, mssql) only. Introduced in version 0.9.9-rc1.

    Columns specified using this directive will be unpacked by indexer using modified zlib algorithm used by MySQL COMPRESS() and UNCOMPRESS() functions. When indexing on a different box than the database, this lets you offload the database, and save on network traffic. The feature is only available if zlib and zlib-devel were both available during build time.

    Example:

    unpack_mysqlcompress = body_compressed
    unpack_mysqlcompress = description_compressed
    

    11.1.52. unpack_mysqlcompress_maxsize

    Buffer size for UNCOMPRESS()ed data. Optional, default value is 16M. Introduced in version 0.9.9-rc1.

    When using unpack_mysqlcompress, due to implementation intricacies it is not possible to deduce the required buffer size from the compressed data. So the buffer must be preallocated in advance, and unpacked data can not go over the buffer size. This option lets you control the buffer size, both to limit indexer memory use, and to enable unpacking of really long data fields if necessary.

    Example:

    unpack_mysqlcompress_maxsize = 1M
    

    11.2. Index configuration options

    11.2.1. type

    Index type. Known values are 'plain', 'distributed', and 'rt'. Optional, default is 'plain' (plain local index).

    Sphinx supports several different types of indexes. Versions 0.9.x supported two index types: plain local indexes that are stored and processed on the local machine; and distributed indexes, that involve not only local searching but querying remote searchd instances over the network as well (see Section 5.8, “Distributed searching”). Version 1.10-beta also adds support for so-called real-time indexes (or RT indexes for short) that are also stored and processed locally, but additionally allow for on-the-fly updates of the full-text index (see Chapter 4, Real-time indexes). Note that attributes can be updated on-the-fly using either plain local indexes or RT ones.

    Index type setting lets you choose the needed type. By default, plain local index type will be assumed.

    Example:

    type = distributed
    

    11.2.2. source

    Adds document source to local index. Multi-value, mandatory.

    Specifies document source to get documents from when the current index is indexed. There must be at least one source. There may be multiple sources, without any restrictions on the source types: ie. you can pull part of the data from MySQL server, part from PostgreSQL, part from the filesystem using xmlpipe2 wrapper.

    However, there are some restrictions on the source data. First, document IDs must be globally unique across all sources. If that condition is not met, you might get unexpected search results. Second, source schemas must be the same in order to be stored within the same index.

    No source ID is stored automatically. Therefore, in order to be able to tell what source the matched document came from, you will need to store some additional information yourself. Two typical approaches include:

    1. mangling document ID and encoding source ID in it:

      source src1
      {
      	sql_query = SELECT id*10+1, ... FROM table1
      	...
      }
      
      source src2
      {
      	sql_query = SELECT id*10+2, ... FROM table2
      	...
      }
      

    2. storing source ID simply as an attribute:

      source src1
      {
      	sql_query = SELECT id, 1 AS source_id FROM table1
      	sql_attr_uint = source_id
      	...
      }
      
      source src2
      {
      	sql_query = SELECT id, 2 AS source_id FROM table2
      	sql_attr_uint = source_id
      	...
      }
      

    Example:

    source = srcpart1
    source = srcpart2
    source = srcpart3
    

    11.2.3. path

    Index files path and file name (without extension). Mandatory.

    Path specifies both directory and file name, but without extension. indexer will append different extensions to this path when generating final names for both permanent and temporary index files. Permanent data files have several different extensions starting with '.sp'; temporary files' extensions start with '.tmp'. It's safe to remove .tmp* files if indexer fails to remove them automatically.

    For reference, different index files store the following data:

    • .spa stores document attributes (used in extern docinfo storage mode only);

    • .spd stores matching document ID lists for each word ID;

    • .sph stores index header information;

    • .spi stores word lists (word IDs and pointers to .spd file);

    • .spk stores kill-lists;

    • .spm stores MVA data;

    • .spp stores hit (aka posting, aka word occurrence) lists for each word ID;

    • .sps stores string attribute data.

    Example:

    path = /var/data/test1
    

    11.2.4. docinfo

    Document attribute values (docinfo) storage mode. Optional, default is 'extern'. Known values are 'none', 'extern' and 'inline'.

    Docinfo storage mode defines how exactly docinfo will be physically stored on disk and in RAM. "none" means that there will be no docinfo at all (ie. no attributes). Normally you do not need to set "none" explicitly, because Sphinx will automatically select "none" when there are no attributes configured. "inline" means that the docinfo will be stored in the .spd file, along with the document ID lists. "extern" means that the docinfo will be stored separately (externally) from document ID lists, in a special .spa file.

    Basically, externally stored docinfo must be kept in RAM when querying, for performance reasons. So in some cases "inline" might be the only option. However, such cases are infrequent, and docinfo defaults to "extern". Refer to Section 3.3, “Attributes” for in-depth discussion and RAM usage estimates.

    Example:

    docinfo = inline
    

    11.2.5. mlock

    Memory locking for cached data. Optional, default is 0 (do not call mlock()).

    For search performance, searchd preloads a copy of .spa and .spi files in RAM, and keeps that copy in RAM at all times. But if there are no searches on the index for some time, there are no accesses to that cached copy, and OS might decide to swap it out to disk. First queries to such "cooled down" index will cause swap-in and their latency will suffer.

    Setting mlock option to 1 makes Sphinx lock physical RAM used for that cached data using mlock(2) system call, and that prevents swapping (see man 2 mlock for details). mlock(2) is a privileged call, so it will require searchd to be either run from root account, or be granted enough privileges otherwise. If mlock() fails, a warning is emitted, but index continues working.

    Example:

    mlock = 1
    

    11.2.6. morphology

    A list of morphology preprocessors to apply. Optional, default is empty (do not apply any preprocessor).

    Morphology preprocessors can be applied to the words being indexed to replace different forms of the same word with the base, normalized form. For instance, English stemmer will normalize both "dogs" and "dog" to "dog", making search results for both searches the same.

    Built-in preprocessors include English stemmer, Russian stemmer (that supports UTF-8 and Windows-1251 encodings), Soundex, and Metaphone. The latter two replace the words with special phonetic codes that are equal if the words are phonetically close. Additional stemmers provided by the Snowball project's libstemmer library can be enabled at compile time using the --with-libstemmer configure option. Built-in English and Russian stemmers should be faster than their libstemmer counterparts, but can produce slightly different results, because they are based on an older version. The Metaphone implementation is based on the Double Metaphone algorithm and indexes the primary code.

    Built-in values that are available for use in morphology option are as follows:

    • none - do not perform any morphology processing;

    • stem_en - apply Porter's English stemmer;

    • stem_ru - apply Porter's Russian stemmer;

    • stem_enru - apply Porter's English and Russian stemmers;

    • stem_cz - apply Czech stemmer;

    • soundex - replace keywords with their SOUNDEX code;

    • metaphone - replace keywords with their METAPHONE code.

    Additional values provided by libstemmer are in 'libstemmer_XXX' format, where XXX is libstemmer algorithm codename (refer to libstemmer_c/libstemmer/modules.txt for a complete list).

    Several stemmers can be specified (comma-separated). They will be applied to incoming words in the order they are listed, and the processing will stop once one of the stemmers actually modifies the word. Also, when the wordforms feature is enabled, the word will be looked up in the word forms dictionary first, and if there is a matching entry in the dictionary, stemmers will not be applied at all. In other words, wordforms can be used to implement stemming exceptions.

    Example:

    morphology = stem_en, libstemmer_sv
    

    11.2.7. dict

    The keywords dictionary type. Known values are 'crc' and 'keywords'. Optional, default is 'crc'. Introduced in version 2.0.1-beta.

    CRC dictionary mode (dict=crc) is the default dictionary type in Sphinx, and the only one available until version 2.0.1-beta. Keywords dictionary mode (dict=keywords) was added in 2.0.1-beta, primarily to (greatly) reduce indexing impact and enable substring searches on huge collections. It also eliminates the chance of CRC32 collisions. In 2.0.1-beta, that mode was only supported for disk indexes. Starting with 2.0.2-beta, RT indexes are also supported.

    CRC dictionaries never store the original keyword text in the index. Instead, keywords are replaced with their control sum value (either CRC32 or FNV64, depending on whether Sphinx was built with --enable-id64) both when searching and indexing, and that value is used internally in the index.

    That approach has two drawbacks. First, in the CRC32 case there is a chance of control sum collision between several pairs of different keywords, growing quadratically with the number of unique keywords in the index. (The FNV64 case is unaffected in practice, as the chance of a single FNV64 collision in a dictionary of 1 billion entries is approximately 1:16, or 6.25 percent. And most dictionaries will be much more compact than a billion keywords, as a typical spoken human language has in the region of 1 to 10 million word forms.) Second, and more importantly, substring searches are not directly possible with control sums. Sphinx alleviates that by pre-indexing all the possible substrings as separate keywords (see Section 11.2.18, “min_prefix_len”, Section 11.2.19, “min_infix_len” directives). That actually has an added benefit of matching substrings in the quickest way possible. But at the same time pre-indexing all substrings grows the index size a lot (factors of 3-10x and even more would not be unusual) and impacts the indexing time respectively, rendering substring searches on big indexes rather impractical.

    Keywords dictionary, introduced in 2.0.1-beta, fixes both these drawbacks. It stores the keywords in the index and performs search-time wildcard expansion. For example, a search for a 'test*' prefix could internally expand to 'test|tests|testing' query based on the dictionary contents. That expansion is fully transparent to the application, except that the separate per-keyword statistics for all the actually matched keywords would now also be reported.

    Indexing with the keywords dictionary should be 1.1x to 1.3x slower compared to regular, non-substring indexing, but several times faster compared to substring indexing (either prefix or infix). Index size should only be slightly bigger than that of the regular non-substring index, with a total difference within 1..10%. Regular keyword searching time should be very close or identical across all three discussed index kinds (CRC non-substring, CRC substring, keywords). Substring searching time can vary greatly depending on how many actual keywords match the given substring (in other words, into how many keywords the search term expands). The maximum number of keywords matched is restricted by the expansion_limit directive.

    Essentially, keywords and CRC dictionaries represent two different trade-offs for substring searching. You can choose to either sacrifice indexing time and index size in favor of top-speed worst-case searches (CRC dictionary), or only slightly impact indexing time but sacrifice worst-case searching time when the prefix expands into very many keywords (keywords dictionary).
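
    To illustrate, a hedged sketch of an index set up for prefix searches with the keywords dictionary; the index and source names are arbitrary, and the sketch assumes that the usual min_prefix_len and enable_star settings still apply when dict=keywords is used (expansion then happens at search time instead of indexing time):

    index products
    {
        source         = products
        path           = /var/data/products
        dict           = keywords
        min_prefix_len = 3 # allow 'word*' prefix wildcards, expanded at search time
        enable_star    = 1
    }
    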

    Example:

    dict = keywords
    

    11.2.8. index_sp

    Whether to detect and index sentence and paragraph boundaries. Optional, default is 0 (do not detect and index). Introduced in version 2.0.1-beta.

    This directive enables sentence and paragraph boundary indexing. It's required for the SENTENCE and PARAGRAPH operators to work. Sentence boundary detection is based on plain text analysis, so you only need to set index_sp = 1 to enable it. Paragraph detection is however based on HTML markup, and happens in the HTML stripper. So to index paragraph locations you also need to enable the stripper by specifying html_strip = 1. Both types of boundaries are detected based on a few built-in rules enumerated just below.

    Sentence boundary detection rules are as follows.

    • Question and exclamation signs (? and !) are always a sentence boundary.

    • Trailing dot (.) is a sentence boundary, except:

      • When followed by a letter. That's considered a part of an abbreviation (as in "S.T.A.L.K.E.R" or "Goldman Sachs S.p.A.").

      • When followed by a comma. That's considered an abbreviation followed by a comma (as in "Telecom Italia S.p.A., founded in 1994").

      • When followed by a space and a small letter. That's considered an abbreviation within a sentence (as in "News Corp. announced in February").

      • When preceded by a space and a capital letter, and followed by a space. That's considered a middle initial (as in "John D. Doe").

    Paragraph boundaries are inserted at every block-level HTML tag. Namely, those are (as taken from HTML 4 standard) ADDRESS, BLOCKQUOTE, CAPTION, CENTER, DD, DIV, DL, DT, H1, H2, H3, H4, H5, LI, MENU, OL, P, PRE, TABLE, TBODY, TD, TFOOT, TH, THEAD, TR, and UL.

    Both sentences and paragraphs increment the keyword position counter by 1.
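
    For illustration, a hedged sketch that enables both boundary types and then uses the SENTENCE operator on the query side (the keywords are arbitrary, and extended match mode is assumed):

    # in sphinx.conf
    html_strip = 1 # required for paragraph boundary detection
    index_sp   = 1 # detect and index sentence and paragraph boundaries
    
    // in query: match both keywords only when they occur within one sentence
    $cl->SetMatchMode ( SPH_MATCH_EXTENDED2 );
    $cl->Query ( "canada SENTENCE tax" );
    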

    Example:

    index_sp = 1
    

    11.2.9. index_zones

    A list of in-field HTML/XML zones to index. Optional, default is empty (do not index zones). Introduced in version 2.0.1-beta.

    Zones can be formally defined as follows. Everything between an opening and a matching closing tag is called a span, and the aggregate of all spans sharing the same tag name is called a zone. For instance, everything between the occurrences of <H1> and </H1> in the document field belongs to the H1 zone.

    Zone indexing, enabled by index_zones directive, is an optional extension of the HTML stripper. So it will also require that the stripper is enabled (with html_strip = 1). The value of the index_zones should be a comma-separated list of those tag names and wildcards (ending with a star) that should be indexed as zones.

    Zones can nest and overlap arbitrarily. The only requirement is that every opening tag has a matching closing tag. You can also have an arbitrary number of both zones (as in unique zone names, such as H1) and spans (all the occurrences of those H1 tags) in a document. Once indexed, zones can then be used for matching with the ZONE operator, see Section 5.3, “Extended query syntax”.
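
    For illustration, a hedged sketch of zone indexing paired with a ZONE query (the tag list and keywords are arbitrary, and extended match mode is assumed):

    # in sphinx.conf
    html_strip  = 1
    index_zones = h*, th, title
    
    // in query: match the keywords inside H1 or H2 spans only
    $cl->SetMatchMode ( SPH_MATCH_EXTENDED2 );
    $cl->Query ( "ZONE:(h1,h2) hello world" );
    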

    Example:

    index_zones = h*, th, title
    

    11.2.10. min_stemming_len

    Minimum word length at which to enable stemming. Optional, default is 1 (stem everything). Introduced in version 0.9.9-rc1.

    Stemmers are not perfect, and might sometimes produce undesired results. For instance, running the "gps" keyword through the Porter stemmer for English results in "gp", which is not really the intent. The min_stemming_len feature lets you suppress stemming based on the source word length, ie. to avoid stemming too short words. Keywords that are shorter than the given threshold will not be stemmed. Note that keywords that are exactly as long as specified will be stemmed. So in order to avoid stemming 3-character keywords, you should specify 4 for the value. For more fine-grained control, refer to the wordforms feature.
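
    For instance, a minimal sketch of a wordforms-based stemming exception that keeps "gps" as-is instead of stemming it to "gp"; mapping a keyword to itself works because stemmers are not applied to words found in the forms list:

    gps > gps
    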

    Example:

    min_stemming_len = 4
    

    11.2.11. stopwords

    Stopword files list (space separated). Optional, default is empty.

    Stopwords are the words that will not be indexed. Typically you'd put most frequent words in the stopwords list because they do not add much value to search results but consume a lot of resources to process.

    You can specify several file names, separated by spaces. All the files will be loaded. Stopwords file format is simple plain text. The encoding must match index encoding specified in charset_type. File data will be tokenized with respect to charset_table settings, so you can use the same separators as in the indexed data. The stemmers will also be applied when parsing stopwords file.

    While stopwords are not indexed, they still do affect the keyword positions. For instance, assume that "the" is a stopword, that document 1 contains the line "in office", and that document 2 contains "in the office". Searching for "in office" as for exact phrase will only return the first document, as expected, even though "the" in the second one is stopped.

    Stopwords files can either be created manually, or semi-automatically. indexer provides a mode that creates a frequency dictionary of the index, sorted by the keyword frequency, see --buildstops and --buildfreqs switch in Section 6.1, “indexer command reference”. Top keywords from that dictionary can usually be used as stopwords.

    Example:

    stopwords = /usr/local/sphinx/data/stopwords.txt
    stopwords = stopwords-ru.txt stopwords-en.txt
    

    11.2.12. wordforms

    Word forms dictionary. Optional, default is empty.

    Word forms are applied after tokenizing the incoming text by charset_table rules. They essentially let you replace one word with another. Normally, that would be used to bring different word forms to a single normal form (eg. to normalize all the variants such as "walks", "walked", "walking" to the normal form "walk"). It can also be used to implement stemming exceptions, because stemming is not applied to words found in the forms list.

    Dictionaries are used to normalize incoming words both during indexing and searching. Therefore, to pick up changes in wordforms file it's required to reindex and restart searchd.

    Word forms support in Sphinx is designed to support big dictionaries well. They moderately affect indexing speed: for instance, a dictionary with 1 million entries slows down indexing about 1.5 times. Searching speed is not affected at all. Additional RAM impact is roughly equal to the dictionary file size, and dictionaries are shared across indexes: ie. if the very same 50 MB wordforms file is specified for 10 different indexes, additional searchd RAM usage will be about 50 MB.

    Dictionary file should be in a simple plain text format. Each line should contain source and destination word forms, in exactly the same encoding as specified in charset_type, separated by "greater" sign. Rules from the charset_table will be applied when the file is loaded. So basically it's as case sensitive as your other full-text indexed data, ie. typically case insensitive. Here's the file contents sample:

    walks > walk
    walked > walk
    walking > walk
    

    There is a bundled spelldump utility that helps you create a dictionary file in the format Sphinx can read from source .dict and .aff dictionary files in ispell or MySpell format (as bundled with OpenOffice).

    Starting with version 0.9.9-rc1, you can map several source words to a single destination word. Because the work happens on tokens, not the source text, differences in whitespace and markup are ignored.

    core 2 duo > c2d
    e6600 > c2d
    core 2duo > c2d
    

    Notice however that the destination wordforms are still always interpreted as a single keyword! Having a mapping like "St John > Saint John" will result in not matching "St John" when searching for "Saint" or "John", because the destination keyword will be "Saint John" with a space character in it (and it's barely possible to input a destination keyword with a space).

    Example:

    wordforms = /usr/local/sphinx/data/wordforms.txt
    

    11.2.13. exceptions

    Tokenizing exceptions file. Optional, default is empty.

    Exceptions let you map one or more tokens (including tokens with characters that would normally be excluded) to a single keyword. They are similar to wordforms in that they also perform mapping, but have a number of important differences.

    Short summary of the differences is as follows:

    • exceptions are case sensitive, wordforms are not;

    • exceptions can use special characters that are not in charset_table, wordforms fully obey charset_table;

    • exceptions can underperform on huge dictionaries, wordforms handle millions of entries well.

    The expected file format is also plain text, with one line per exception, and the line format is as follows:

    map-from-tokens => map-to-token
    

    Example file:

    AT & T => AT&T
    AT&T => AT&T
    Standarten   Fuehrer => standartenfuhrer
    Standarten Fuhrer => standartenfuhrer
    MS Windows => ms windows
    Microsoft Windows => ms windows
    C++ => cplusplus
    c++ => cplusplus
    C plus plus => cplusplus
    

    All tokens here are case sensitive: they will not be processed by charset_table rules. Thus, with the example exceptions file above, "At&t" text will be tokenized as two keywords "at" and "t", because of lowercase letters. On the other hand, "AT&T" will match exactly and produce single "AT&T" keyword.

    Note that this map-to keyword is a) always interpreted as a single word, and b) is both case and space sensitive! In our sample, "ms windows" query will not match the document with "MS Windows" text. The query will be interpreted as a query for two keywords, "ms" and "windows". And what "MS Windows" gets mapped to is a single keyword "ms windows", with a space in the middle. On the other hand, "standartenfuhrer" will retrieve documents with "Standarten Fuhrer" or "Standarten Fuehrer" contents (capitalized exactly like this), or any capitalization variant of the keyword itself, eg. "staNdarTenfUhreR". (It won't catch "standarten fuhrer", however: this text does not match any of the listed exceptions because of case sensitivity, and gets indexed as two separate keywords.)

    Whitespace in the map-from tokens list matters, but its amount does not. Any amount of whitespace in the map-from list will match any other amount of whitespace in the indexed document or query. For instance, the "AT & T" map-from token will match "AT    &  T" text, whatever the amount of space in both the map-from part and the indexed text. Such text will therefore be indexed as a special "AT&T" keyword, thanks to the very first entry from the sample.

    Exceptions also let you capture special characters (that are exceptions from general charset_table rules; hence the name). Assume that you generally do not want to treat '+' as a valid character, but still want to be able to search for some exceptions from this rule such as 'C++'. The sample above will do just that, totally independent of what characters are in the table and what are not.

    Exceptions are applied to raw incoming document and query data during indexing and searching respectively. Therefore, to pick up changes in the file it's required to reindex and restart searchd.

    Example:

    exceptions = /usr/local/sphinx/data/exceptions.txt
    

    11.2.14. min_word_len

    Minimum indexed word length. Optional, default is 1 (index everything).

    Only those words that are not shorter than this minimum will be indexed. For instance, if min_word_len is 4, then 'the' won't be indexed, but 'they' will be.

    Example:

    min_word_len = 4
    

    11.2.15. charset_type

    Character set encoding type. Optional, default is 'sbcs'. Known values are 'sbcs' and 'utf-8'.

    Different encodings have different methods for mapping their internal character codes into specific byte sequences. The two most common methods in use today are single-byte encoding and UTF-8. Their corresponding charset_type values are 'sbcs' (which stands for Single Byte Character Set) and 'utf-8'. The selected encoding type will be used everywhere the index is used: when indexing the data, when parsing the query against this index, when generating snippets, etc.

    Note that while 'utf-8' implies that the decoded values must be treated as Unicode codepoint numbers, there's a family of 'sbcs' encodings that may in turn treat different byte values differently, and that should be properly reflected in your charset_table settings. For example, the same byte value of 224 (0xE0 hex) maps to different Russian letters depending on whether koi-8r or windows-1251 encoding is used.

    Example:

    charset_type = utf-8
    

    11.2.16. charset_table

    Accepted characters table, with case folding rules. Optional, default value depends on charset_type value.

    charset_table is the main workhorse of the Sphinx tokenizing process, ie. the process of extracting keywords from document text or query text. It controls what characters are accepted as valid and what are not, and how the accepted characters should be transformed (eg. should the case be removed or not).

    You can think of charset_table as a big table that has a mapping for each and every one of the 100K+ characters in Unicode (or as a small 256-character table if you're using SBCS). By default, every character maps to 0, which means that it does not occur within keywords and should be treated as a separator. Once mentioned in the table, a character is mapped to some other character (most frequently, either to itself or to a lowercase letter), and is treated as a valid keyword part.

    The expected value format is a comma-separated list of mappings. The two simplest mappings simply declare a character as valid, and map a single character to another single character, respectively. But specifying the whole table in such form would result in bloated and barely manageable specifications. So there are several syntax shortcuts that let you map ranges of characters at once. The complete list is as follows:

    A->a

    Single char mapping, declares source char 'A' as allowed to occur within keywords and maps it to destination char 'a' (but does not declare 'a' as allowed).

    A..Z->a..z

    Range mapping, declares all chars in source range as allowed and maps them to the destination range. Does not declare destination range as allowed. Also checks ranges' lengths (the lengths must be equal).

    a

    Stray char mapping, declares a character as allowed and maps it to itself. Equivalent to a->a single char mapping.

    a..z

    Stray range mapping, declares all characters in range as allowed and maps them to themselves. Equivalent to a..z->a..z range mapping.

    A..Z/2

    Checkerboard range map. Maps every pair of chars to the second char. More formally, declares odd characters in range as allowed and maps them to the even ones; also declares even characters as allowed and maps them to themselves. For instance, A..Z/2 is equivalent to A->B, B->B, C->D, D->D, ..., Y->Z, Z->Z. This mapping shortcut is helpful for a number of Unicode blocks where uppercase and lowercase letters go in such interleaved order instead of contiguous chunks.

    Control characters with codes from 0 to 31 are always treated as separators. Characters with codes 32 to 127, ie. 7-bit ASCII characters, can be used in the mappings as is. To avoid configuration file encoding issues, 8-bit ASCII characters and Unicode characters must be specified in U+xxx form, where 'xxx' is hexadecimal codepoint number. This form can also be used for 7-bit ASCII characters to encode special ones: eg. use U+20 to encode space, U+2E to encode dot, U+2C to encode comma.
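
    For instance, a hypothetical extension of the English defaults that also accepts dot and dash inside keywords, using the U+xx form for both:

    charset_table = 0..9, A..Z->a..z, _, a..z, U+2E, U+2D
    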

    Example:

    # 'sbcs' defaults for English and Russian
    charset_table = 0..9, A..Z->a..z, _, a..z, \
    	U+A8->U+B8, U+B8, U+C0..U+DF->U+E0..U+FF, U+E0..U+FF
    
    # 'utf-8' defaults for English and Russian
    charset_table = 0..9, A..Z->a..z, _, a..z, \
    	U+410..U+42F->U+430..U+44F, U+430..U+44F
    

    11.2.17. ignore_chars

    Ignored characters list. Optional, default is empty.

    Useful in the cases when some characters, such as soft hyphenation mark (U+00AD), should be not just treated as separators but rather fully ignored. For example, if '-' is simply not in the charset_table, "abc-def" text will be indexed as "abc" and "def" keywords. On the contrary, if '-' is added to ignore_chars list, the same text will be indexed as a single "abcdef" keyword.

    The syntax is the same as for charset_table, but it's only allowed to declare characters, and not allowed to map them. Also, the ignored characters must not be present in charset_table.

    Example:

    ignore_chars = U+AD
    

    11.2.18. min_prefix_len

    Minimum word prefix length to index. Optional, default is 0 (do not index prefixes).

    Prefix indexing allows you to implement wildcard searching by 'wordstart*' wildcards (refer to the enable_star option for details on wildcard syntax). When the minimum prefix length is set to a positive number, indexer will index all the possible keyword prefixes (ie. word beginnings) in addition to the keywords themselves. Too short prefixes (below the minimum allowed length) will not be indexed.

    For instance, indexing a keyword "example" with min_prefix_len=3 will result in indexing "exa", "exam", "examp", "exampl" prefixes along with the word itself. Searches against such an index for "exam" will match documents that contain "example", even if they do not contain "exam" itself. However, indexing prefixes will make the index grow significantly (because of many more indexed keywords), and will degrade both indexing and searching times.

    There's no automatic way to rank perfect word matches higher in a prefix index, but there are a number of tricks to achieve that. First, you can set up two indexes, one with prefix indexing and one without it, search through both, and use the SetIndexWeights() call to combine weights (a sketch of this approach follows the query example below). Second, you can enable star-syntax and rewrite your extended-mode queries:

    # in sphinx.conf
    enable_star = 1
    
    // in query
    $cl->Query ( "( keyword | keyword* ) other keywords" );
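    

    And a hedged sketch of the first approach, searching an exact index and a prefix index together and weighting exact-index matches higher (the index names are hypothetical):

    // in query
    $cl->SetIndexWeights ( array ( "docs_exact" => 10, "docs_prefix" => 1 ) );
    $res = $cl->Query ( "keyword", "docs_exact docs_prefix" );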
    

    Example:

    min_prefix_len = 3
    

    11.2.19. min_infix_len

    Minimum infix prefix length to index. Optional, default is 0 (do not index infixes).

    Infix indexing allows you to implement wildcard searching by 'start*', '*end', and '*middle*' wildcards (refer to the enable_star option for details on wildcard syntax). When the minimum infix length is set to a positive number, indexer will index all the possible keyword infixes (ie. substrings) in addition to the keywords themselves. Too short infixes (below the minimum allowed length) will not be indexed. For instance, indexing a keyword "test" with min_infix_len=2 will result in indexing "te", "es", "st", "tes", "est" infixes along with the word itself. Searches against such an index for "es" will match documents that contain "test", even if they do not contain "es" itself. However, indexing infixes will make the index grow significantly (because of many more indexed keywords), and will degrade both indexing and searching times.

    There's no automatic way to rank perfect word matches higher in an infix index, but the same tricks as with prefix indexes can be applied.

    Example:

    min_infix_len = 3
    

    11.2.20. prefix_fields

    The list of full-text fields to limit prefix indexing to. Optional, default is empty (index all fields in prefix mode).

    Because prefix indexing impacts both indexing and searching performance, it might be desired to limit it to specific full-text fields only: for instance, to provide prefix searching through URLs, but not through page contents. prefix_fields specifies what fields will be prefix-indexed; all other fields will be indexed in normal mode. The value format is a comma-separated list of field names.

    Example:

    prefix_fields = url, domain
    

    11.2.21. infix_fields

    The list of full-text fields to limit infix indexing to. Optional, default is empty (index all fields in infix mode).

    Similar to prefix_fields, but lets you limit infix-indexing to given fields.

    Example:

    infix_fields = url, domain
    

    11.2.22. enable_star

    Enables star-syntax (or wildcard syntax) when searching through prefix/infix indexes. Optional, default is 0 (do not use wildcard syntax), for compatibility with 0.9.7. Known values are 0 and 1.

    This feature enables "star-syntax", or wildcard syntax, when searching through indexes which were created with prefix or infix indexing enabled. It only affects searching; so it can be changed without reindexing by simply restarting searchd.

    The default value is 0, which means that star-syntax is disabled and all keywords are treated as prefixes or infixes respectively, depending on the indexing-time min_prefix_len and min_infix_len settings. The value of 1 means that star ('*') can be used at the start and/or the end of the keyword. The star will match zero or more characters.

    For example, assume that the index was built with infixes and that enable_star is 1. Searching should work as follows:

    1. "abcdef" query will match only those documents that contain the exact "abcdef" word in them.

    2. "abc*" query will match those documents that contain any words starting with "abc" (including the documents which contain the exact "abc" word only);

    3. "*cde*" query will match those documents that contain any words which have "cde" characters in any part of the word (including the documents which contain the exact "cde" word only).

    4. "*def" query will match those documents that contain any words ending with "def" (including the documents that contain the exact "def" word only).

    Example:

    enable_star = 1
    

    11.2.23. ngram_len

    N-gram lengths for N-gram indexing. Optional, default is 0 (disable n-gram indexing). Known values are 0 and 1 (other lengths to be implemented).

    N-grams provide basic CJK (Chinese, Japanese, Korean) support for unsegmented texts. The issue with CJK searching is that there could be no clear separators between the words. Ideally, the texts would be filtered through a special program called segmenter that would insert separators in proper locations. However, segmenters are slow and error prone, and it's common to index contiguous groups of N characters, or n-grams, instead.

    When this feature is enabled, streams of CJK characters are indexed as N-grams. For example, if the incoming text is "ABCDEF" (where A to F represent some CJK characters) and the length is 1, it will be indexed as if it was "A B C D E F". (With length equal to 2, it would produce "AB BC CD DE EF"; but only 1 is supported at the moment.) Only those characters that are listed in the ngram_chars table will be split this way; other ones will not be affected.

    Note that if the search query is segmented, ie. there are separators between individual words, then wrapping the words in quotes and using extended mode will result in proper matches being found even if the text was not segmented. For instance, assume that the original query is BC DEF. After wrapping in quotes on the application side, it should look like "BC" "DEF" (with quotes). This query will be passed to Sphinx and internally split into 1-grams too, resulting in "B C" "D E F" query, still with quotes that are the phrase matching operator. And it will match the text even though there were no separators in the text.
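
    A hedged sketch of the application-side quoting described above (extended match mode is assumed):

    // application side: wrap each pre-segmented word in quotes
    $cl->SetMatchMode ( SPH_MATCH_EXTENDED2 );
    $cl->Query ( '"BC" "DEF"' );
    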

    Even if the search query is not segmented, Sphinx should still produce good results, thanks to phrase based ranking: it will pull closer phrase matches (which in case of N-gram CJK words can mean closer multi-character word matches) to the top.

    Example:

    ngram_len = 1
    

    11.2.24. ngram_chars

    N-gram characters list. Optional, default is empty.

    To be used in conjunction with ngram_len, this list defines characters, sequences of which are subject to N-gram extraction. Words comprised of other characters will not be affected by the N-gram indexing feature. The value format is identical to charset_table.

    Example:

    ngram_chars = U+3000..U+2FA1F
    

    11.2.25. phrase_boundary

    Phrase boundary characters list. Optional, default is empty.

    This list controls what characters will be treated as phrase boundaries, in order to adjust word positions and enable phrase-level search emulation through proximity search. The syntax is similar to charset_table. Mappings are not allowed and the boundary characters must not overlap with anything else.

    On phrase boundary, additional word position increment (specified by phrase_boundary_step) will be added to current word position. This enables phrase-level searching through proximity queries: words in different phrases will be guaranteed to be more than phrase_boundary_step distance away from each other; so proximity search within that distance will be equivalent to phrase-level search.
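
    For illustration, a hedged sketch of phrase-level search emulation through the proximity operator (the keywords are arbitrary; any proximity below the boundary step cannot span two phrases):

    # in sphinx.conf
    phrase_boundary      = ., ?, !, U+2026
    phrase_boundary_step = 100
    
    // in query: both keywords must occur within the same phrase
    $cl->SetMatchMode ( SPH_MATCH_EXTENDED2 );
    $cl->Query ( '"donald duck"~20' );
    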

    Phrase boundary condition will be raised if and only if such character is followed by a separator; this is to avoid abbreviations such as S.T.A.L.K.E.R or URLs being treated as several phrases.

    Example:

    phrase_boundary = ., ?, !, U+2026 # horizontal ellipsis
    

    11.2.26. phrase_boundary_step

    Phrase boundary word position increment. Optional, default is 0.

    On phrase boundary, current word position will be additionally incremented by this number. See phrase_boundary for details.

    Example:

    phrase_boundary_step = 100
    

    11.2.27. html_strip

    Whether to strip HTML markup from incoming full-text data. Optional, default is 0. Known values are 0 (disable stripping) and 1 (enable stripping).

    Both HTML tags and entities are considered markup and get processed.

    HTML tags are removed, their contents (i.e., everything between <P> and </P>) are left intact by default. You can choose to keep and index attributes of the tags (e.g., HREF attribute in an A tag, or ALT in an IMG one). Several well-known inline tags are completely removed, all other tags are treated as block level and replaced with whitespace. For example, 'te<B>st</B>' text will be indexed as a single keyword 'test', however, 'te<P>st</P>' will be indexed as two keywords 'te' and 'st'. Known inline tags are as follows: A, B, I, S, U, BASEFONT, BIG, EM, FONT, IMG, LABEL, SMALL, SPAN, STRIKE, STRONG, SUB, SUP, TT.

    HTML entities get decoded and replaced with corresponding UTF-8 characters. Stripper supports both numeric forms (such as &#239;) and text forms (such as &oacute; or &nbsp;). All entities as specified by HTML4 standard are supported.

    Stripping does not work with xmlpipe source type (it's suggested to upgrade to xmlpipe2 anyway). It should work with properly formed HTML and XHTML, but, just as most browsers, may produce unexpected results on malformed input (such as HTML with stray <'s or unclosed >'s).

    Only the tags themselves, and also HTML comments, are stripped. To strip the contents of the tags too (eg. to strip embedded scripts), see html_remove_elements option. There are no restrictions on tag names; ie. everything that looks like a valid tag start, or end, or a comment will be stripped.

    Example:

    html_strip = 1
    

    11.2.28. html_index_attrs

    A list of markup attributes to index when stripping HTML. Optional, default is empty (do not index markup attributes).

    Specifies HTML markup attributes whose contents should be retained and indexed even though other HTML markup is stripped. The format is per-tag enumeration of indexable attributes, as shown in the example below.

    Example:

    html_index_attrs = img=alt,title; a=title;
    

    11.2.29. html_remove_elements

    A list of HTML elements for which to strip contents along with the elements themselves. Optional, default is empty string (do not strip contents of any elements).

    This feature allows you to strip element contents, ie. everything that is between the opening and the closing tags. It is useful to remove embedded scripts, CSS, etc. Short tag form for empty elements (ie. <br />) is properly supported; ie. the text that follows such a tag will not be removed.

    The value is a comma-separated list of element (tag) names whose contents should be removed. Tag names are case insensitive.

    Example:

    html_remove_elements = style, script
    

    11.2.30. local

    Local index declaration in the distributed index. Multi-value, optional, default is empty.

    This setting is used to declare local indexes that will be searched when given distributed index is searched. Many local indexes can be declared per each distributed index. Any local index can also be mentioned several times in different distributed indexes.

    Note that by default all local indexes will be searched sequentially, utilizing only 1 CPU or core. To parallelize processing of the local parts in the distributed index, you should use dist_threads directive, see Section 11.4.29, “dist_threads”.

    Before dist_threads, there also was a legacy solution to configure searchd to query itself instead of using local indexes (refer to Section 11.2.31, “agent” for the details). However, that creates redundant CPU and network load, and dist_threads is now strongly suggested instead.

    Example:

    local = chunk1
    local = chunk2
    

    11.2.31. agent

    Remote agent declaration in the distributed index. Multi-value, optional, default is empty.

    This setting is used to declare remote agents that will be searched when given distributed index is searched. The agents can be thought of as network pointers that specify host, port, and index names. In the basic case agents would correspond to remote physical machines. More formally, that is not always correct: you can point several agents to the same remote machine; or you can even point agents to the very same single instance of searchd (in order to utilize many CPUs or cores).

    The value format is as follows:

    agent = specification:remote-indexes-list
    specification = hostname ":" port | path
    

    Where 'hostname' is remote host name; 'port' is remote TCP port; 'path' is Unix-domain socket path and 'remote-indexes-list' is a comma-separated list of remote index names.

    All agents will be searched in parallel. However, all indexes specified for a given agent will be searched sequentially in this agent. This lets you fine-tune the configuration to the hardware. For instance, if two remote indexes are stored on the same physical HDD, it's better to configure one agent with several sequentially searched indexes to avoid HDD stepping. If they are stored on different HDDs, having two agents will be advantageous, because the work will be fully parallelized. The same applies to CPUs; though CPU performance impact caused by two processes stepping on each other is somewhat smaller and frequently can be ignored altogether.

    On machines with many CPUs and/or HDDs, agents can be pointed to the same machine to utilize all of the hardware in parallel and reduce query latency. There is no need to set up several searchd instances for that; it's legal to configure the instance to contact itself. Here's an example setup, intended for a 4-CPU machine, that will use up to 4 CPUs in parallel to process each query:

    index dist
    {
    	type = distributed
    	local = chunk1
    	agent = localhost:9312:chunk2
    	agent = localhost:9312:chunk3
    	agent = localhost:9312:chunk4
    }
    

    Note how one of the chunks is searched locally and the same instance of searchd queries itself to launch searches through three other ones in parallel.

    Example:

    agent = localhost:9312:chunk2 # contact itself
    agent = /var/run/searchd.s:chunk2
    agent = searchbox2:9312:chunk3,chunk4 # search remote indexes
    

    11.2.32. agent_blackhole

    Remote blackhole agent declaration in the distributed index. Multi-value, optional, default is empty. Introduced in version 0.9.9-rc1.

    agent_blackhole lets you fire-and-forget queries to remote agents. That is useful for debugging (or just testing) production clusters: you can set up a separate debugging/testing searchd instance, and forward the requests to this instance from your production master (aggregator) instance without interfering with production work. Master searchd will attempt to connect and query the blackhole agent normally, but it will neither wait nor process any responses. Also, all network errors on blackhole agents will be ignored. The value format is completely identical to the regular agent directive.

    Example:

    agent_blackhole = testbox:9312:testindex1,testindex2
    

    11.2.33. agent_connect_timeout

    Remote agent connection timeout, in milliseconds. Optional, default is 1000 (ie. 1 second).

    When connecting to remote agents, searchd will wait at most this much time for the connect() call to complete successfully. If the timeout is reached but connect() does not complete, and retries are enabled, a retry will be initiated.

    Example:

    agent_connect_timeout = 300
    

    11.2.34. agent_query_timeout

    Remote agent query timeout, in milliseconds. Optional, default is 3000 (ie. 3 seconds).

    After connection, searchd will wait at most this much time for remote queries to complete. This timeout is fully separate from the connection timeout; so the maximum possible delay caused by a remote agent equals the sum of agent_connect_timeout and agent_query_timeout. Queries will not be retried if this timeout is reached; a warning will be produced instead.

    Example:

    agent_query_timeout = 10000 # our query can be long, allow up to 10 sec
    

    11.2.35. preopen

    Whether to pre-open all index files, or open them per each query. Optional, default is 0 (do not preopen).

    This option tells searchd that it should pre-open all index files on startup (or rotation) and keep them open while it runs. Currently, the default mode is not to pre-open the files (this may change in the future). Preopened indexes take a few (currently 2) file descriptors per index. However, they save on per-query open() calls; and also they are invulnerable to subtle race conditions that may happen during index rotation under high load. On the other hand, when serving many indexes (100s to 1000s), it still might be desirable to open them on a per-query basis in order to save file descriptors.

    This directive does not affect indexer in any way, it only affects searchd.

    Example:

    preopen = 1
    

    11.2.36. ondisk_dict

    Whether to keep the dictionary file (.spi) for this index on disk, or precache it in RAM. Optional, default is 0 (precache in RAM). Introduced in version 0.9.9-rc1.

    The dictionary (.spi) can be either kept in RAM or on disk. The default is to fully cache it in RAM. That improves performance, but might cause too much RAM pressure, especially if prefixes or infixes were used. Enabling ondisk_dict results in 1 additional disk IO per keyword per query, but reduces the memory footprint.

    This directive does not affect indexer in any way, it only affects searchd.

    Example:

    ondisk_dict = 1
    

    11.2.37. inplace_enable

    Whether to enable in-place index inversion. Optional, default is 0 (use separate temporary files). Introduced in version 0.9.9-rc1.

    inplace_enable greatly reduces indexing disk footprint, at a cost of slightly slower indexing (it uses around 2x less disk, but yields around 90-95% of the original performance).

    Indexing involves two major phases. The first phase collects, processes, and partially sorts documents by keyword, and writes the intermediate result to temporary files (.tmp*). The second phase fully sorts the documents, and creates the final index files. Thus, rebuilding a production index on the fly involves around 3x peak disk footprint: the 1st copy for the intermediate temporary files, the 2nd copy for the newly constructed index, and the 3rd copy for the old index that will be serving production queries in the meantime. (Intermediate data is comparable in size to the final index.) That might be too much disk footprint for big data collections, and inplace_enable allows you to reduce it. When enabled, it reuses the temporary files, outputs the final data back to them, and renames them on completion. However, this might require additional temporary data chunk relocation, which is where the performance impact comes from.

    This directive does not affect searchd in any way, it only affects indexer.

    Example:

    inplace_enable = 1
    

    11.2.38. inplace_hit_gap

    In-place inversion fine-tuning option. Controls preallocated hitlist gap size. Optional, default is 0. Introduced in version 0.9.9-rc1.

    This directive does not affect searchd in any way, it only affects indexer.

    Example:

    inplace_hit_gap = 1M
    

    11.2.39. inplace_docinfo_gap

    In-place inversion fine-tuning option. Controls preallocated docinfo gap size. Optional, default is 0. Introduced in version 0.9.9-rc1.

    This directive does not affect searchd in any way, it only affects indexer.

    Example:

    inplace_docinfo_gap = 1M
    

    11.2.40. inplace_reloc_factor

    In-place inversion fine-tuning option. Controls relocation buffer size within indexing memory arena. Optional, default is 0.1. Introduced in version 0.9.9-rc1.

    This directive does not affect searchd in any way, it only affects indexer.

    Example:

    inplace_reloc_factor = 0.1
    

    11.2.41. inplace_write_factor

    In-place inversion fine-tuning option. Controls in-place write buffer size within indexing memory arena. Optional, default is 0.1. Introduced in version 0.9.9-rc1.

    This directive does not affect searchd in any way, it only affects indexer.

    Example:

    inplace_write_factor = 0.1
    

    11.2.42. index_exact_words

    Whether to index the original keywords along with the stemmed/remapped versions. Optional, default is 0 (do not index). Introduced in version 0.9.9-rc1.

    When enabled, index_exact_words forces indexer to put the raw keywords in the index along with the stemmed versions. That, in turn, enables the exact form operator in the query language to work. This impacts the index size and the indexing time. However, searching performance is not impacted at all.

    Example:

    index_exact_words = 1
    

    11.2.43. overshort_step

    Position increment on overshort (less than min_word_len) keywords. Optional, allowed values are 0 and 1, default is 1. Introduced in version 0.9.9-rc1.

    This directive does not affect searchd in any way, it only affects indexer.

    Example:

    overshort_step = 1
    

    11.2.44. stopword_step

    Position increment on stopwords. Optional, allowed values are 0 and 1, default is 1. Introduced in version 0.9.9-rc1.

    This directive does not affect searchd in any way, it only affects indexer.

    Example:

    stopword_step = 1
    

    11.2.45. hitless_words

    Hitless words list. Optional, allowed values are 'all', or a list file name. Introduced in version 1.10-beta.

    By default, Sphinx full-text index stores not only a list of matching documents for every given keyword, but also a list of its in-document positions (aka hitlist). Hitlists enable phrase, proximity, strict order and other advanced types of searching, as well as phrase proximity ranking. However, hitlists for specific frequent keywords (that can not be stopped for some reason despite being frequent) can get huge and thus slow to process while querying. Also, in some cases we might only care about boolean keyword matching, and never need position-based searching operators (such as phrase matching) nor phrase ranking.

    hitless_words lets you create indexes that either do not have positional information (hitlists) at all, or skip it for specific keywords.

    Hitless index will generally use less space than the respective regular index (about 1.5x can be expected). Both indexing and searching should be faster, at a cost of missing positional query and ranking support. When searching, positional queries (eg. phrase queries) will be automatically converted to respective non-positional (document-level) or combined queries. For instance, if keywords "hello" and "world" are hitless, "hello world" phrase query will be converted to (hello & world) bag-of-words query, matching all documents that mention both of the keywords but not necessarily the exact phrase. And if, in addition, keywords "simon" and "says" are not hitless, "simon says hello world" will be converted to ("simon says" & hello & world) query, matching all documents that contain "hello" and "world" anywhere in the document, and also "simon says" as an exact phrase.

    Example:

    hitless_words = all
    

    11.2.46. expand_keywords

    Expand keywords with exact forms and/or stars when possible. Optional, default is 0 (do not expand keywords). Introduced in version 1.10-beta.

    Queries against indexes with the expand_keywords feature enabled are internally expanded as follows. If the index was built with prefix or infix indexing enabled, every keyword gets internally replaced with a disjunction of the keyword itself and the respective prefix or infix (keyword with stars). If the index was built with both stemming and index_exact_words enabled, the exact form is also added. Here's an example that shows how internal expansion works when all of the above (infixes, stemming, and exact words) are combined:

    running -> ( running | *running* | =running )
    

    Expanded queries naturally take longer to complete, but can possibly improve the search quality, as the documents with exact form matches should generally be ranked higher than documents with stemmed or infix matches.

    Note that the existing query syntax does not allow you to emulate this kind of expansion, because internal expansion works on keyword level and expands keywords within phrase or quorum operators too (which is not possible through the query syntax).

    This directive does not affect indexer in any way, it only affects searchd.

    Example:

    expand_keywords = 1
    

    11.2.47. blend_chars

    Blended characters list. Optional, default is empty. Introduced in version 1.10-beta.

    Blended characters are indexed both as separators and valid characters. For instance, assume that & is configured as blended and AT&T occurs in an indexed document. Three different keywords will get indexed, namely "at&t", treating blended characters as valid, plus "at" and "t", treating them as separators.

    Positions for tokens obtained by replacing blended characters with whitespace are assigned as usual, so regular keywords will be indexed just as if there was no blend_chars specified at all. An additional token that mixes blended and non-blended characters will be put at the starting position. For instance, if the field content "AT&T company" occurs at the very beginning of the text field, "at" will be given position 1, "t" position 2, "company" position 3, and "AT&T" will also be given position 1 ("blending" with the opening regular keyword). Thus, querying for either AT&T or just AT will match that document, and querying for "AT T" as a phrase will also match it. Last but not least, a phrase query for "AT&T company" will also match it, despite the position gap between "AT&T" (position 1) and "company" (position 3).

    Blended characters can overlap with special characters used in query syntax (think of T-Mobile or @twitter). Where possible, the query parser will automatically handle a blended character as blended. For instance, "hello @twitter" within quotes (a phrase operator) would handle the @-sign as blended, because the @-syntax for the field operator is not allowed within phrases. Otherwise, the character would be handled as an operator. So you might want to escape the keywords.

    Starting with version 2.0.1-beta, blended characters can be remapped, so that multiple different blended characters could be normalized into just one base form. This is useful when indexing multiple alternative Unicode codepoints with equivalent glyphs.

    Example:

    blend_chars = +, &, U+23
    blend_chars = +, &->+ # 2.0.1 and above
    

    11.2.48. blend_mode

    Blended tokens indexing mode. Optional, default is trim_none. Introduced in version 2.0.1-beta.

    By default, tokens that mix blended and non-blended characters get indexed in their entirety. For instance, when both the at-sign and the exclamation point are in blend_chars, "@dude!" will result in two tokens being indexed: "@dude!" (with all the blended characters) and "dude" (without any). Therefore a "@dude" query will not match it.

    blend_mode directive adds flexibility to this indexing behavior. It takes a comma-separated list of options.

    blend_mode = option [, option [, ...]]
    option = trim_none | trim_head | trim_tail | trim_both | skip_pure
    

    Options specify token indexing variants. If multiple options are specified, multiple variants of the same token will be indexed. Regular keywords (resulting from that token by replacing blended characters with whitespace) are always indexed.

    trim_none

    Index the entire token.

    trim_head

    Trim heading blended characters, and index the resulting token.

    trim_tail

    Trim trailing blended characters, and index the resulting token.

    trim_both

    Trim both heading and trailing blended characters, and index the resulting token.

    skip_pure

    Do not index the token if it's purely blended, that is, consists of blended characters only.

    Returning to the "@dude!" example above, setting blend_mode = trim_head, trim_tail will result in two tokens being indexed, "@dude" and "dude!". In this particular example, trim_both would have no effect, because trimming both blended characters results in "dude" which is already indexed as a regular keyword. Indexing "@U.S.A." with trim_both (and assuming that dot is blended two) would result in "U.S.A" being indexed. Last but not least, skip_pure enables you to fully ignore sequences of blended characters only. For example, "one @@@ two" would be indexed exactly as "one two", and match that as a phrase. That is not the case by default because a fully blended token gets indexed and offsets the second keyword position.

    Default behavior is to index the entire token, equivalent to blend_mode = trim_none.

    Example:

    blend_mode = trim_tail, skip_pure
    

    11.2.49. rt_mem_limit

    RAM chunk size limit. Optional, default is empty. Introduced in version 1.10-beta.

    RT index keeps some data in memory (so-called RAM chunk) and also maintains a number of on-disk indexes (so-called disk chunks). This directive lets you control the RAM chunk size. Once there's too much data to keep in RAM, RT index will flush it to disk, activate a newly created disk chunk, and reset the RAM chunk.

    The limit is pretty strict; RT index should never allocate more memory than it's limited to. The memory is not preallocated either, hence, specifying 512 MB limit and only inserting 3 MB of data should result in allocating 3 MB, not 512 MB.

    Example:

    rt_mem_limit = 512M
    

    11.2.50. rt_field

    Full-text field declaration. Multi-value, mandatory. Introduced in version 1.10-beta.

    Full-text fields to be indexed are declared using the rt_field directive. The names must be unique. The order is preserved, so field values in INSERT statements without an explicit column list will have to be in the same order as configured.

    Example:

    rt_field = author
    rt_field = title
    rt_field = content
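
    For illustration, here is a minimal sketch (not part of the original manual examples) of inserting into a hypothetical RT index named "rt1" over the MySQL protocol, assuming searchd has a listen = 9306:mysql41 line and that rt1 declares the three fields above plus an rt_attr_uint named gid. Python with the MySQLdb module is used purely as a convenient SphinxQL client; the implicit schema order is assumed to be id, then the fields, then the attributes:

    import MySQLdb  # any MySQL client library should work against the mysql41 listener

    conn = MySQLdb.connect(host='127.0.0.1', port=9306)
    cur = conn.cursor()

    # With an explicit column list the order is spelled out, so it need not
    # match the configured order.
    cur.execute("INSERT INTO rt1 (id, author, title, content, gid) "
                "VALUES (1, 'J. Doe', 'Hello', 'hello world body', 123)")

    # Without a column list, field values must follow the configured order.
    cur.execute("INSERT INTO rt1 VALUES (2, 'J. Doe', 'Again', 'more body text', 123)")
    conn.close()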
    

    11.2.51. rt_attr_uint

    Unsigned integer attribute declaration. Multi-value (an arbitrary number of attributes is allowed), optional. Declares an unsigned 32-bit attribute. Introduced in version 1.10-beta.

    Example:

    rt_attr_uint = gid
    

    11.2.52. rt_attr_bigint

    BIGINT attribute declaration. Multi-value (an arbitrary number of attributes is allowed), optional. Declares a signed 64-bit attribute. Introduced in version 1.10-beta.

    Example:

    rt_attr_bigint = guid
    

    11.2.53. rt_attr_float

    Floating point attribute declaration. Multi-value (an arbitrary number of attributes is allowed), optional. Declares a single precision, 32-bit IEEE 754 format float attribute. Introduced in version 1.10-beta.

    Example:

    rt_attr_float = gpa
    

    11.2.54. rt_attr_multi

    Multi-valued attribute (MVA) declaration. Declares the UNSIGNED INTEGER (unsigned 32-bit) MVA attribute. Multi-value (ie. there may be more than one such attribute declared), optional. Applies to RT indexes only.

    Example:

    rt_attr_multi = my_tags
    

    11.2.55. rt_attr_multi_64

    Multi-valued attribute (MVA) declaration. Declares the BIGINT (signed 64-bit) MVA attribute. Multi-value (ie. there may be more than one such attribute declared), optional. Applies to RT indexes only.

    Example:

    rt_attr_multi_64 = my_wide_tags
    

    11.2.56. rt_attr_timestamp

    Timestamp attribute declaration. Multi-value (an arbitrary number of attributes is allowed), optional. Introduced in version 1.10-beta.

    Example:

    rt_attr_timestamp = date_added
    

    11.2.57. rt_attr_string

    String attribute declaration. Multi-value (an arbitrary number of attributes is allowed), optional. Introduced in version 1.10-beta.

    Example:

    rt_attr_string = author
    

    11.3. indexer program configuration options

    11.3.1. mem_limit

    Indexing RAM usage limit. Optional, default is 32M.

    Enforced memory usage limit that the indexer will not go above. Can be specified in bytes, or kilobytes (using K postfix), or megabytes (using M postfix); see the example. This limit will be automatically raised if set to an extremely low value that would cause I/O buffers to be less than 8 KB; the exact lower bound for that depends on the indexed data size. If the buffers are less than 256 KB, a warning will be produced.

    Maximum possible limit is 2047M. Values that are too low can hurt indexing speed, but 256M to 1024M should be enough for most if not all datasets. Setting this value too high can cause SQL server timeouts. During the document collection phase, there will be periods when the memory buffer is partially sorted and no communication with the database is performed; and the database server can time out. You can resolve that either by raising timeouts on the SQL server side or by lowering mem_limit.

    Example:

    mem_limit = 256M
    # mem_limit = 262144K # same, but in KB
    # mem_limit = 268435456 # same, but in bytes
    

    11.3.2. max_iops

    Maximum I/O operations per second, for I/O throttling. Optional, default is 0 (unlimited).

    I/O throttling related option. It limits the maximum number of I/O operations (reads or writes) performed in any given second. A value of 0 means that no limit is imposed.

    indexer can cause bursts of intensive disk I/O during indexing, and it might be desirable to limit its disk activity (and keep something for other programs running on the same machine, such as searchd). I/O throttling helps to do that. It works by enforcing a minimum guaranteed delay between subsequent disk I/O operations performed by indexer; for instance, max_iops = 40 enforces a delay of at least 25 ms between operations. Modern SATA HDDs are able to perform up to 70-100+ I/O operations per second (that's mostly limited by disk head seek time). Limiting indexing I/O to a fraction of that can help reduce the search performance degradation caused by indexing.

    Example:

    max_iops = 40
    

    11.3.3. max_iosize

    Maximum allowed I/O operation size, in bytes, for I/O throttling. Optional, default is 0 (unlimited).

    I/O throttling related option. It limits the maximum file I/O operation (read or write) size for all operations performed by indexer. A value of 0 means that no limit is imposed. Reads or writes that are bigger than the limit will be split into several smaller operations, and counted as several operations by the max_iops setting. At the time of this writing, all I/O calls should be under 256 KB (the default internal buffer size) anyway, so max_iosize values higher than 256 KB should not affect anything.

    Example:

    max_iosize = 1048576
    

    11.3.4. max_xmlpipe2_field

    Maximum allowed field size for XMLpipe2 source type, bytes. Optional, default is 2 MB.

    Example:

    max_xmlpipe2_field = 8M
    

    11.3.5. write_buffer

    Write buffer size, bytes. Optional, default is 1 MB.

    Write buffers are used to write both temporary and final index files when indexing. Larger buffers reduce the number of required disk writes. Memory for the buffers is allocated in addition to mem_limit. Note that several (currently up to 4) buffers for different files will be allocated, proportionally increasing the RAM usage.

    Example:

    write_buffer = 4M
    

    11.3.6. max_file_field_buffer

    Maximum file field adaptive buffer size, bytes. Optional, default is 8 MB, minimum is 1 MB.

    File field buffer is used to load files referred to from sql_file_field columns. This buffer is adaptive, starting at 1 MB at first allocation, and growing in 2x steps until either the file contents can be loaded, or the maximum buffer size, specified by the max_file_field_buffer directive, is reached.

    Thus, if no file fields are specified, no buffer is allocated at all. If all files loaded during indexing are under (for example) 2 MB in size, but the max_file_field_buffer value is 128 MB, peak buffer usage would still be only 2 MB. However, files over 128 MB would be entirely skipped.

    Example:

    max_file_field_buffer = 128M
    

    11.3.7. on_file_field_error

    How to handle IO errors in file fields. Optional, default is ignore_field. Introduced in version 2.0.2-beta.

    When there is a problem indexing a file referenced by a file field (Section 11.1.29, “sql_file_field”), indexer can either index the document, assuming empty content in this particular field, or skip the document, or fail indexing entirely. on_file_field_error directive controls that behavior. The values it takes are:

    • ignore_field, index the current document without field;

    • skip_document, skip the current document but continue indexing;

    • fail_index, fail indexing with an error message.

    The problems that can arise are: open error, size error (file too big), and data read error. Warning messages on any problem will be given at all times, regardless of the phase and the on_file_field_error setting.

    Note that with on_file_field_error = skip_document documents will only be ignored if problems are detected during an early check phase, and not during the actual file parsing phase. indexer will open every referenced file and check its size before doing any work, and then open it again when doing actual parsing work. So in case a file goes away between these two open attempts, the document will still be indexed.

    Example:

    on_file_field_error = skip_document
    

    11.4. searchd program configuration options

    11.4.1. listen

    This setting lets you specify IP address and port, or Unix-domain socket path, that searchd will listen on. Introduced in version 0.9.9-rc1.

    The informal grammar for listen setting is:

    listen = ( address ":" port | port | path ) [ ":" protocol ]
    

    I.e. you can specify either an IP address (or hostname) and a port number, or just a port number, or a Unix socket path. If you specify a port number but not the address, searchd will listen on all network interfaces. A Unix path is identified by a leading slash.

    Starting with version 0.9.9-rc2, you can also specify a protocol handler (listener) to be used for connections on this socket. Supported protocol values are 'sphinx' (Sphinx 0.9.x API protocol) and 'mysql41' (MySQL protocol used since 4.1 up to at least 5.1). More details on MySQL protocol support can be found in Section 5.10, “MySQL protocol support and SphinxQL”.

    Examples:

    listen = localhost
    listen = localhost:5000
    listen = 192.168.0.1:5000
    listen = /var/run/sphinx.s
    listen = 9312
    listen = localhost:9306:mysql41
    

    There can be multiple listen directives, searchd will listen for client connections on all specified ports and sockets. If no listen directives are found then the server will listen on all available interfaces using the default SphinxAPI port 9312. Starting with 1.10-beta, it will also listen on default SphinxQL port 9306. Both port numbers are assigned by IANA (see http://www.iana.org/assignments/port-numbers for details) and should therefore be available.

    Unix-domain sockets are not supported on Windows.

    11.4.2. address

    Interface IP address to bind on. Optional, default is 0.0.0.0 (ie. listen on all interfaces). DEPRECATED, use listen instead.

    address setting lets you specify which network interface searchd will bind to, listen on, and accept incoming network connections on. The default value is 0.0.0.0 which means to listen on all interfaces. At this time, you can not specify multiple interfaces.

    Example:

    address = 192.168.0.1
    

    11.4.3. port

    searchd TCP port number. DEPRECATED, use listen instead. Used to be mandatory. Default port number is 9312.

    Example:

    port = 9312
    

    11.4.4. log

    Log file name. Optional, default is 'searchd.log'. All searchd run time events will be logged in this file.

    You can also use 'syslog' as the file name. In this case the events will be sent to the syslog daemon. To use the syslog option, Sphinx must be configured with --with-syslog at build time.

    Example:

    log = /var/log/searchd.log
    

    11.4.5. query_log

    Query log file name. Optional, default is empty (do not log queries). All search queries will be logged in this file. The format is described in Section 5.9, “searchd query log formats”.

    In case of the 'plain' format, you can use 'syslog' as the path to the log file. In this case all search queries will be sent to the syslog daemon with LOG_INFO priority, prefixed with '[query]' instead of a timestamp. To use the syslog option, Sphinx must be configured with --with-syslog at build time.

    Example:

    query_log = /var/log/query.log
    

    11.4.6. query_log_format

    Query log format. Optional, allowed values are 'plain' and 'sphinxql', default is 'plain'. Introduced in version 2.0.1-beta.

    Starting with version 2.0.1-beta, two different log formats are supported. The default one logs queries in a custom text format. The new one logs valid SphinxQL statements. This directive allows you to switch between the two formats on search daemon startup. The log format can also be altered on the fly, using the SET GLOBAL query_log_format=sphinxql syntax. Refer to Section 5.9, “searchd query log formats” for more discussion and format details.

    Example:

    query_log_format = sphinxql
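
    The on-the-fly switch mentioned above can be issued over SphinxQL; a minimal sketch, assuming searchd also has a mysql41 listener on port 9306 (Python with MySQLdb is used here just as a SphinxQL client):

    import MySQLdb

    conn = MySQLdb.connect(host='127.0.0.1', port=9306)
    cur = conn.cursor()
    # Change the query log format at runtime, without restarting searchd.
    cur.execute("SET GLOBAL query_log_format=sphinxql")
    conn.close()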
    

    11.4.7. read_timeout

    Network client request read timeout, in seconds. Optional, default is 5 seconds. searchd will forcibly close the client connections which fail to send a query within this timeout.

    Example:

    read_timeout = 1
    

    11.4.8. client_timeout

    Maximum time to wait between requests (in seconds) when using persistent connections. Optional, default is five minutes.

    Example:

    client_timeout = 3600
    

    11.4.9. max_children

    Maximum amount of children to fork (or in other words, concurrent searches to run in parallel). Optional, default is 0 (unlimited).

    Useful to control server load. There will be no more than this many concurrent searches running, at all times. When the limit is reached, additional incoming clients are dismissed with a temporary failure (SEARCHD_RETRY) status code and a message stating that the server is maxed out.

    Example:

    max_children = 10
    

    11.4.10. pid_file

    searchd process ID file name. Mandatory.

    PID file will be re-created (and locked) on startup. It will contain the head daemon process ID while the daemon is running, and it will be unlinked on daemon shutdown. It's mandatory because Sphinx uses it internally for a number of things: to check whether there already is a running instance of searchd; to stop searchd; to notify it that it should rotate the indexes. Can also be used for different external automation scripts.

    Example:

    pid_file = /var/run/searchd.pid
    

    11.4.11. max_matches

    Maximum amount of matches that the daemon keeps in RAM for each index and can return to the client. Optional, default is 1000.

    Introduced in order to control and limit RAM usage, the max_matches setting defines how many matches will be kept in RAM while searching each index. Every match found will still be processed; but only the best N of them will be kept in memory and returned to the client in the end. Assume that the index contains 2,000,000 matches for the query. You rarely (if ever) need to retrieve all of them. Rather, you need to scan all of them, but only choose the "best" at most, say, 500 by some criteria (ie. sorted by relevance, or price, or anything else), and display those 500 matches to the end user in pages of 20 to 100 matches. And tracking only the best 500 matches is much more RAM and CPU efficient than keeping all 2,000,000 matches, sorting them, and then discarding everything but the first 20 needed to display the search results page. max_matches controls N in that "best N" amount.

    This parameter noticeably affects per-query RAM and CPU usage. Values of 1,000 to 10,000 are generally fine, but higher limits must be used with care. Recklessly raising max_matches to 1,000,000 means that searchd will have to allocate and initialize 1-million-entry matches buffer for every query. That will obviously increase per-query RAM usage, and in some cases can also noticeably impact performance.

    CAVEAT EMPTOR! Note that there also is another place where this limit is enforced. max_matches can be decreased on the fly through the corresponding API call, and the default value in the API is also set to 1,000. So in order to retrieve more than 1,000 matches in your application, you will have to change the configuration file, restart searchd, and set a proper limit in the SetLimits() call. Also note that you can not set the value in the API higher than the value in the .conf file. This is prohibited in order to have some protection against malicious and/or malformed requests.

    Example:

    max_matches = 10000
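
    For illustration, a minimal sketch of the API side of this limit, assuming the bundled Python API module (sphinxapi.py), searchd listening on the default port 9312, and a made-up index name "myindex":

    import sphinxapi

    cl = sphinxapi.SphinxClient()
    cl.SetServer('localhost', 9312)

    # Show 20 matches per page, but have the daemon track up to 2,000 best
    # matches; this third argument may not exceed the max_matches value
    # configured above.
    cl.SetLimits(0, 20, 2000)

    res = cl.Query('test keywords', 'myindex')
    if res:
        print('%d matches found in total' % res['total_found'])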
    

    11.4.12. seamless_rotate

    Prevents searchd stalls while rotating indexes with huge amounts of data to precache. Optional, default is 1 (enable seamless rotation).

    Indexes may contain some data that needs to be precached in RAM. At the moment, .spa, .spi and .spm files are fully precached (they contain attribute data, MVA data, and keyword index, respectively.) Without seamless rotate, rotating an index tries to use as little RAM as possible and works as follows:

    1. new queries are temporarily rejected (with "retry" error code);

    2. searchd waits for all currently running queries to finish;

    3. old index is deallocated and its files are renamed;

    4. new index files are renamed and required RAM is allocated;

    5. new index attribute and dictionary data is preloaded to RAM;

    6. searchd resumes serving queries from new index.

    However, if there's a lot of attribute or dictionary data, then the preloading step could take noticeable time - up to several minutes in case of preloading 1-5+ GB files.

    With seamless rotate enabled, rotation works as follows:

    1. new index RAM storage is allocated;

    2. new index attribute and dictionary data is asynchronously preloaded to RAM;

    3. on success, old index is deallocated and both indexes' files are renamed;

    4. on failure, new index is deallocated;

    5. at any given moment, queries are served either from old or new index copy.

    Seamless rotate comes at the cost of higher peak memory usage during the rotation (because both old and new copies of .spa/.spi/.spm data need to be in RAM while preloading new copy). Average usage stays the same.

    Example:

    seamless_rotate = 1
    

    11.4.13. preopen_indexes

    Whether to forcibly preopen all indexes on startup. Optional, default is 1 (preopen everything).

    Starting with 2.0.1-beta, the default value for this option is now 1 (forcibly preopen all indexes). In prior versions, it used to be 0 (use per-index settings).

    When set to 1, this directive overrides and enforces preopen on all indexes. They will be preopened, no matter what is the per-index preopen setting. When set to 0, per-index settings can take effect. (And they default to 0.)

    Pre-opened indexes avoid races between search queries and rotations that can cause queries to fail occasionally. They also make searchd use more file handles. In most scenarios it's therefore preferred and recommended to preopen indexes.

    Example:

    preopen_indexes = 1
    

    11.4.14. unlink_old

    Whether to unlink .old index copies on successful rotation. Optional, default is 1 (do unlink).

    Example:

    unlink_old = 0
    

    11.4.15. attr_flush_period

    When calling UpdateAttributes() to update document attributes in real-time, changes are first written to the in-memory copy of attributes (docinfo must be set to extern). Then, once searchd shuts down normally (via SIGTERM being sent), the changes are written to disk. Introduced in version 0.9.9-rc1.

    Starting with 0.9.9-rc1, it is possible to tell searchd to periodically write these changes back to disk, to avoid them being lost. The time between those intervals is set with attr_flush_period, in seconds.

    It defaults to 0, which disables the periodic flushing, but flushing will still occur at normal shut-down.

    Example:

    attr_flush_period = 900 # persist updates to disk every 15 minutes
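
    A minimal sketch of the update path this directive protects, assuming the bundled Python API module (sphinxapi.py) and made-up index and attribute names:

    import sphinxapi

    cl = sphinxapi.SphinxClient()
    cl.SetServer('localhost', 9312)

    # The change lands in the in-memory attribute copy immediately; with
    # attr_flush_period = 900 as above, searchd will also persist it to disk
    # within 15 minutes, even without a clean shutdown.
    updated = cl.UpdateAttributes('products', ['price'], {123: [1999], 124: [2499]})
    print('%d documents updated' % updated)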
    

    11.4.16. ondisk_dict_default

    Instance-wide default for the ondisk_dict directive. Optional, default is 0 (precache dictionaries in RAM). Introduced in version 0.9.9-rc1.

    This directive lets you specify the default value of ondisk_dict for all the indexes served by this copy of searchd. The per-index directive takes precedence, and will override this instance-wide default value, allowing for fine-grained control.

    Example:

    ondisk_dict_default = 1 # keep all dictionaries on disk
    

    11.4.17. max_packet_size

    Maximum allowed network packet size. Limits both query packets from clients, and response packets from remote agents in distributed environment. Only used for internal sanity checks, does not directly affect RAM use or performance. Optional, default is 8M. Introduced in version 0.9.9-rc1.

    Example:

    max_packet_size = 32M
    

    11.4.18. mva_updates_pool

    Shared pool size for in-memory MVA updates storage. Optional, default size is 1M. Introduced in version 0.9.9-rc1.

    This setting controls the size of the shared storage pool for updated MVA values. Specifying 0 for the size disables MVA updates altogether. Once the pool size limit is hit, MVA update attempts will result in an error. However, updates on regular (scalar) attributes will still work. Due to internal technical difficulties, currently it is not possible to store (flush) any updates on indexes where MVA were updated; though this might be implemented in the future. In the meantime, MVA updates are intended to be used as a measure to quickly catch up with the latest changes in the database until the next index rebuild; not as a persistent storage mechanism.

    Example:

    mva_updates_pool = 16M
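
    The updates that draw from this pool are MVA updates issued through the UpdateAttributes() call with the MVA flag set; a minimal sketch, assuming the bundled Python API module (sphinxapi.py) and made-up index and attribute names:

    import sphinxapi

    cl = sphinxapi.SphinxClient()
    cl.SetServer('localhost', 9312)

    # Each updated value list is kept in the shared pool; once the pool size
    # configured above is exhausted, further MVA updates return an error.
    cl.UpdateAttributes('products', ['tags'], {123: [[4, 5, 6]]}, True)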
    

    11.4.19. crash_log_path

    Deprecated debugging setting, path (formerly prefix) for crash log files. Introduced in version 0.9.9-rc1. Deprecated in version 2.0.1-beta, as crash debugging information now gets logged into searchd.log in text form, and separate binary crash logs are no longer needed.

    11.4.20. max_filters

    Maximum allowed per-query filter count. Only used for internal sanity checks, does not directly affect RAM use or performance. Optional, default is 256. Introduced in version 0.9.9-rc1.

    Example:

    max_filters = 1024
    

    11.4.21. max_filter_values

    Maximum allowed per-filter values count. Only used for internal sanity checks, does not directly affect RAM use or performance. Optional, default is 4096. Introduced in version 0.9.9-rc1.

    Example:

    max_filter_values = 16384
    

    11.4.22. listen_backlog

    TCP listen backlog. Optional, default is 5.

    Windows builds currently (as of 0.9.9) can only process the requests one by one. Concurrent requests will be enqueued by the TCP stack on OS level, and requests that can not be enqueued will immediately fail with a "connection refused" message. listen_backlog directive controls the length of the connection queue. Non-Windows builds should work fine with the default value.

    Example:

    listen_backlog = 20
    

    11.4.23. read_buffer

    Per-keyword read buffer size. Optional, default is 256K.

    For every keyword occurrence in every search query, there are two associated read buffers (one for document list and one for hit list). This setting lets you control their sizes, increasing per-query RAM use, but possibly decreasing IO time.

    Example:

    read_buffer = 1M
    

    11.4.24. read_unhinted

    Unhinted read size. Optional, default is 32K.

    When querying, some reads know in advance exactly how much data is there to be read, but some currently do not. Most prominently, hit list size is not currently known in advance. This setting lets you control how much data to read in such cases. It will impact hit list IO time, reducing it for lists larger than the unhinted read size, but raising it for smaller lists. It will not affect RAM use because the read buffer will be already allocated. So it should not be greater than read_buffer.

    Example:

    read_unhinted = 32K
    

    11.4.25. max_batch_queries

    Limits the amount of queries per batch. Optional, default is 32.

    Makes searchd perform a sanity check on the number of queries submitted in a single batch when using multi-queries. Set it to 0 to skip the check.

    Example:

    max_batch_queries = 256
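
    A minimal sketch of what counts as a single batch here, assuming the bundled Python API module (sphinxapi.py) and a made-up index name:

    import sphinxapi

    cl = sphinxapi.SphinxClient()
    cl.SetServer('localhost', 9312)

    # Three queries queued into one batch; this batch size is what the
    # max_batch_queries limit configured above is checked against.
    for keyword in ('one', 'two', 'three'):
        cl.AddQuery(keyword, 'myindex')

    results = cl.RunQueries()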
    

    11.4.26. subtree_docs_cache

    Max common subtree document cache size, per-query. Optional, default is 0 (disabled).

    Limits RAM usage of a common subtree optimizer (see Section 5.11, “Multi-queries”). At most this much RAM will be spent to cache document entries per each query. Setting the limit to 0 disables the optimizer.

    Example:

    subtree_docs_cache = 8M
    

    11.4.27. subtree_hits_cache

    Max common subtree hit cache size, per-query. Optional, default is 0 (disabled).

    Limits RAM usage of a common subtree optimizer (see Section 5.11, “Multi-queries”). At most this much RAM will be spent to cache keyword occurrences (hits) per each query. Setting the limit to 0 disables the optimizer.

    Example:

    subtree_hits_cache = 16M
    

    11.4.28. workers

    Multi-processing mode (MPM). Optional; allowed values are none, fork, prefork, and threads. Default is fork on Unix based systems, and threads on Windows. Introduced in version 1.10-beta.

    Lets you choose how searchd processes multiple concurrent requests. The possible values are:

    none

    All requests will be handled serially, one-by-one. Prior to 1.x, this was the only mode available on Windows.

    fork

    A new child process will be forked to handle every incoming request. Historically, this is the default mode.

    prefork

    On startup, searchd will pre-fork a number of worker processes, and pass the incoming requests to one of those children.

    threads

    A new thread will be created to handle every incoming request. This is the only mode compatible with RT indexing backend.

    Historically, searchd used a fork-based model, which generally performs OK but spends a noticeable amount of CPU in the fork() system call when there's a high number of (tiny) requests per second. Prefork mode was implemented to alleviate that; with prefork, worker processes are basically only created on startup and re-created on index rotation, somewhat reducing fork() call pressure.

    Threads mode was implemented along with RT backend and is required to use RT indexes. (Regular disk-based indexes work in all the available modes.)

    Example:

    workers = threads
    

    11.4.29. dist_threads

    Max local worker threads to use for parallelizable requests (searching a distributed index; building a batch of snippets). Optional, default is 0, which means to disable in-request parallelism. Introduced in version 1.10-beta.

    Distributed index can include several local indexes. dist_threads lets you easily utilize multiple CPUs/cores for that (previously existing alternative was to specify the indexes as remote agents, pointing searchd to itself and paying some network overheads).

    When set to a value N greater than 1, this directive will create up to N threads for every query, and schedule the specific searches within these threads. For example, if there are 7 local indexes to search and dist_threads is set to 2, then 2 parallel threads would be created: one that sequentially searches 4 indexes, and another one that searches the other 3 indexes.

    In case of CPU bound workload, setting dist_threads to 1x the number of cores is advised (creating more threads than cores will not improve query time). In case of mixed CPU/disk bound workload it might sometimes make sense to use more (so that all cores could be utilized even when there are threads that wait for I/O completion).

    Note that dist_threads does not require threads MPM. You can perfectly use it with fork or prefork MPMs too.

    Starting with version 2.0.1-beta, building a batch of snippets with the load_files flag enabled can also be parallelized. Up to dist_threads threads are created to process those files. That speeds up snippet extraction when the total amount of document data to process is significant (hundreds of megabytes).

    Example:

    index dist_test
    {
    	type = distributed
    	local = chunk1
    	local = chunk2
    	local = chunk3
    	local = chunk4
    }
    
    # ...
    
    dist_threads = 4
    

    11.4.30. binlog_path

    Binary log (aka transaction log) files path. Optional, default is build-time configured data directory. Introduced in version 1.10-beta.

    Binary logs are used for crash recovery of RT index data, and also of attribute updates to plain disk indexes that would otherwise only be stored in RAM until flushed. When logging is enabled, every transaction COMMIT-ted into an RT index gets written into a log file. Logs are then automatically replayed on startup after an unclean shutdown, recovering the logged changes.

    binlog_path directive specifies the binary log files location. It should contain just the path; searchd will create and unlink multiple binlog.* files in that path as necessary (binlog data, metadata, and lock files, etc).

    Empty value disables binary logging. That improves performance, but puts RT index data at risk.

    WARNING! It is strongly recommended to always explicitly define the binlog_path option in your config. Otherwise, the default path, which in most cases is the same as the working folder, may point to a folder with no write access (for example, /usr/local/var/data). In this case, searchd will not start at all.

    Example:

    binlog_path = # disable logging
    binlog_path = /var/data # /var/data/binlog.001 etc will be created
    

    11.4.31. binlog_flush

    Binary log transaction flush/sync mode. Optional, default is 2 (flush every transaction, sync every second). Introduced in version 1.10-beta.

    This directive controls how frequently will binary log be flushed to OS and synced to disk. Three modes are supported:

    • 0, flush and sync every second. Best performance, but up to 1 second worth of committed transactions can be lost on either a daemon crash or an OS/hardware crash.

    • 1, flush and sync every transaction. Worst performance, but every committed transaction data is guaranteed to be saved.

    • 2, flush every transaction, sync every second. Good performance, and every committed transaction is guaranteed to be saved in case of daemon crash. However, in case of OS/hardware crash up to 1 second worth of committed transactions can be lost.

    For those familiar with MySQL and InnoDB, this directive is entirely similar to innodb_flush_log_at_trx_commit. In most cases, the default hybrid mode 2 provides a nice balance of speed and safety, with full RT index data protection against daemon crashes, and some protection against hardware ones.

    Example:

    binlog_flush = 1 # ultimate safety, low speed
    

    11.4.32. binlog_max_log_size

    Maximum binary log file size. Optional, default is 0 (do not reopen binlog file based on size). Introduced in version 1.10-beta.

    A new binlog file will be forcibly opened once the current binlog file reaches this limit. This achieves a finer granularity of logs and can yield more efficient binlog disk usage under certain borderline workloads.

    Example:

    binlog_max_log_size = 16M
    

    11.4.33. collation_server

    Default server collation. Optional, default is libc_ci. Introduced in version 2.0.1-beta.

    Specifies the default collation used for incoming requests. The collation can be overridden on a per-query basis. Refer to Section 5.12, “Collations” section for the list of available collations and other details.

    Example:

    collation_server = utf8_ci
    

    11.4.34. collation_libc_locale

    Server libc locale. Optional, default is C. Introduced in version 2.0.1-beta.

    Specifies the libc locale, affecting the libc-based collations. Refer to Section 5.12, “Collations” section for the details.

    Example:

    collation_libc_locale = fr_FR
    

    11.4.35. plugin_dir

    Trusted location for the dynamic libraries (UDFs). Optional, default is empty (no location). Introduced in version 2.0.1-beta.

    Specifies the trusted directory from which the UDF libraries can be loaded. Requires workers = threads to take effect.

    Example:

    workers = threads
    plugin_dir = /usr/local/sphinx/lib
    

    11.4.36. mysql_version_string

    A server version string to return via MySQL protocol. Optional, default is empty (return Sphinx version). Introduced in version 2.0.1-beta.

    Several picky MySQL client libraries depend on a particular version number format used by MySQL, and moreover, sometimes choose a different execution path based on the reported version number (rather than the indicated capabilities flags). For instance, Python MySQLdb 1.2.2 throws an exception when the version number is not in X.Y.ZZ format; MySQL .NET connector 6.3.x fails internally on version numbers 1.x along with a certain combination of flags, etc. To work around that, you can use the mysql_version_string directive and have searchd report a different version to clients connecting over MySQL protocol. (By default, it reports its own version.)

    Example:

    mysql_version_string = 5.0.37
    

    11.4.37. rt_flush_period

    RT indexes RAM chunk flush check period, in seconds. Optional, default is 0 (do not flush). Introduced in version 2.0.1-beta.

    Actively updated RT indexes that nevertheless fit fully in their RAM chunks can result in ever-growing binlogs, impacting disk use and crash recovery time. With this directive, the search daemon performs periodic flush checks, and eligible RAM chunks can get saved, enabling subsequent binlog cleanup. See Section 4.4, “Binary logging” for more details.

    Example:

    rt_flush_period = 3600
    

    11.4.38. thread_stack

    Per-thread stack size. Optional, default is 64K. Introduced in version 2.0.1-beta.

    In the workers = threads mode, every request is processed with a separate thread that needs its own stack space. By default, 64K per thread are allocated for stack. However, extremely complex search requests might eventually exhaust the default stack and require more. For instance, a query that matches a few thousand keywords (either directly or through term expansion) can eventually run out of stack. Previously, that resulted in crashes. Starting with 2.0.1-beta, searchd attempts to estimate the expected stack use, and blocks the potentially dangerous queries. To process such queries, you can either increase the thread stack size using the thread_stack directive, or switch to a different workers setting if that is possible.

    A query with N levels of nesting is estimated to require approximately 30+0.12*N KB of stack, meaning that the default 64K is enough for queries with up to 300 levels, 150K for up to 1000 levels, etc. If the required stack size exceeds the configured limit, searchd fails the query and reports the required stack size in the error message.
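
    A small back-of-the-envelope helper (not part of Sphinx itself) for the estimate quoted above:

    # Rough stack estimate from the formula above: about 30 + 0.12*N KB for a
    # query with N levels of nesting.
    def estimated_stack_kb(nesting_levels):
        return 30 + 0.12 * nesting_levels

    for n in (500, 1000, 2000):
        print('%5d levels -> about %d KB of stack' % (n, int(estimated_stack_kb(n))))
    # 1000 levels come out to roughly the 150K figure mentioned above.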

    Example:

    thread_stack = 256K
    

    11.4.39. expansion_limit

    The maximum number of expanded keywords for a single wildcard. Optional, default is 0 (no limit). Introduced in version 2.0.1-beta.

    When doing substring searches against indexes built with dict = keywords enabled, a single wildcard may potentially result in thousands and even millions of matched keywords (think of matching 'a*' against the entire Oxford dictionary). This directive lets you limit the impact of such expansions. Setting expansion_limit = N restricts expansions to no more than N of the most frequent matching keywords (per each wildcard in the query).

    Example:

    expansion_limit = 16
    

    11.4.40. compat_sphinxql_magics

    Legacy SphinxQL quirks compatibility mode. Optional, default is 1 (keep compatibility). Introduced in version 2.0.1-beta.

    Starting with version 2.0.1-beta, we're bringing SphinxQL in closer compliance with standard SQL. However, existing applications should not break, and compat_sphinxql_magics lets you upgrade safely. It defaults to 1, which enables the compatibility mode. However, SphinxQL compatibility mode is now deprecated and will be removed once we complete bringing SphinxQL in line with standard SQL syntax. So it's advised to update the applications utilizing SphinxQL and then switch the daemon to the new, more SQL compliant mode by setting compat_sphinxql_magics = 0. Please refer to Section 7.24, “SphinxQL upgrade notes, version 2.0.1-beta” for the details and update instructions.

    Example:

    compat_sphinxql_magics = 0 # the future is now
    

    11.4.41. watchdog

    Threaded server watchdog. Optional, default is 1 (watchdog enabled). Introduced in version 2.0.1-beta.

    A crashed query in threads multi-processing mode (workers = threads) can take down the entire server. With watchdog feature enabled, searchd additionally keeps a separate lightweight process that monitors the main server process, and automatically restarts the latter in case of abnormal termination. Watchdog is enabled by default.

    Example:

    watchdog = 0 # disable watchdog
    

    11.4.42. prefork_rotation_throttle

    Delay between restarting preforked children on index rotation, in milliseconds. Optional, default is 0 (no delay). Introduced in version 2.0.2-beta.

    When running in workers = prefork mode, every index rotation needs to restart all children to propagate the newly loaded index data changes. Restarting all of them at once might put excessive strain on CPU and/or network connections. (For instance, when the application keeps a bunch of open persistent connections to different children, and all those children restart.) Those bursts can be throttled down with prefork_rotation_throttle directive. Note that the children will be restarted sequentially, and thus "old" results might persist for a few more seconds. For instance, if prefork_rotation_throttle is set to 50 (milliseconds), and there are 30 children, then the last one would only be actually restarted 1.5 seconds (50*30=1500 milliseconds) after the "rotation finished" message in the searchd event log.

    Example:

    prefork_rotation_throttle = 50 # throttle children restarts by 50 msec each
    

    Appendix A. Sphinx revision history

    A.1. Version 2.0.4-release, 02 mar 2012

    Bug fixes

    • fixed #605, pack vs mysql compress

    • fixed #783, #862, #917, #985, #990, #1032 documentation bugs

    • fixed #885, bitwise AND/OR were not available via API

    • fixed #984, crash on indexing data with MAGIC_CODE_ZONE symbol

    • fixed #1004, RT index loses words from dictionary on segments merging with id64 enabled

    • fixed #1035, daemon doesn't properly handle FDs in case of socket overflow FD_SETSIZE ( *nix, preopen_indexes=0, worker=threads )

    • fixed #1038, quoted string for API select

    • fixed #1046, head SPZ overflow, snippet generation at non fast with SPZ

    • fixed #1048, distributed index can't sort \ filter because of missed attributes

    • fixed #1050, expression ranker vs agents

    • fixed #1051, added MVA64 support to UDFs

    • fixed #1054, max_query_time not handled properly on searching at RT index

    • fixed #1055, expansion_limit on searching at RT disk chunks

    • fixed #1057, daemon crashes on generating snippet with 0 documents provided

    • fixed #1060, load_files_scattered don't work

    • fixed #1065, libsphinxclient vs distribute index (agents)

    • fixed #1067, modifiers were not escaped in legacy query emulation

    • fixed #1071, master - agent communication got slower for a large query

    • fixed #1076, #1077, (redundant copying, and a possible mutex leak with uservars)

    • fixed #1078, blended vs FIELD_END

    • fixed #1084 crash \ index corruption on loading persist MVA

    • fixed #1091, RT attach of plain index with string \ MVA attributes prior regular attributes

    • fixed #1092, update got binloged with wrong TID

    • fixed #1098, crash on creating large expression

    • fixed #1099, cleaning up temporary files on fail of indexing

    • fixed #1100, missing xmlpipe_attr_bigint config directive

    • fixed #1101, now ignoring dashes within keywords when dash is not in charset_table

    • fixed #1103, ZONE operator incorrectly works on more than one keywords in a simple zone

    • fixed #1106, optimized WHERE id=value, WHERE id IN (values_list) clauses used in SELECT, UPDATE statements

    • fixed #1112, Sphinx doesn't work out-of-the-box because the collision of binlog_path option

    • fixed #1116, crash on FLUSH RTINDEX unknown-index-name

    • fixed #1117, occasional RT headers corruption (leading to crashes and/or missing results)

    • fixed #1119, missing expression ranker support in SphinxSE

    • fixed #1120, negative total_found, docs and hits counter on huge indexes

    A.2. Version 2.0.3-release, 23 dec 2011

    Bug fixes

    • fixed #1031, SphinxQL parsing syntax for MVA at insert \ replace statements

    • fixed #1027, stalls on attribute update in high-concurrency load

    • fixed #1026, daemon crash on malformed API command

    • fixed #1021, max_children option has been ignored with worker=threads

    • fixed #1020, crash on large attribute files loading

    • fixed #1014, crash on rotation when index has been removed from config file (worker=threads, *nix box)

    • fixed #1001, broken MVA files in RT index while saving disk chunk

    • fixed #995, crash on empty MVA updates

    • fixed #994, crash on daemon shutdown with seamless_rotate=0 and workers=threads

    • fixed #993, #998, crash on replay DELETE statement vs RT index with dict=keywords, fixed sequential INSERT into dict=keywords index right after INSERT into dict=crc index

    • fixed #991, crash on indexing mssql source with mssql_unicode enabled

    • fixed #983, #950, crash on host name lookup (SphinxSE with MySQL 5.5)

    • fixed #981, snippet inconsistency with allow_empty=0

    • fixed #980, broken index produced by index merge in rare cases

    • fixed #971, absent error message at master on agent "maxed out"

    • fixed #695, #815, #835, #866, malformed warnings in SphinxQL

    • fixed build of SphinxSE with MySQL 5.1

    • fixed crash log for 'fork' and 'prefork' workers

    A.3. Version 2.0.2-beta, 15 nov 2011

    Major new features

    New features

    • added support for up to 256 searchable fields (was up to 32 before)

    • added FIBONACCI() function to expressions

    • added load_files_scattered option to snippets

    • added implicit attribute type promotions in multi-index result sets (#939)

    • added index names to indexer progress message on merge (#928)

    • added --replay-flags switch to searchd

    • added string attribute support and a few previously missing snippets options to SphinxSE

    • added previously missing Status(), SetConnectTimeout() API calls to Python API

    • added ORDER BY RAND() support to SELECT statement

    • added Sphinx version to Windows crash log

    • added RT index support to indextool --check (checks disk chunks only) (#877)

    • added prefork_rotation_throttle directive (preforked children restart delay, in milliseconds) (#873)

    • added on_file_field_error directive (different sql_file_field handling modes)

    • added manpages for all the programs

    • added syslog logging support

    • added sentence, paragraph, and zone support in html_strip_mode=retain mode to snippets

    • optimized search performance with many ZONE operators

    • improved suggestion tool (added Levenshtein limit, removed extra DB fetch)

    • improved sentence extraction (handles salutations, starting initials better now)

    • changed max_filter_values sanity check to 10M values

    New SphinxQL features

    • added FLUSH RTINDEX statement

    • added dist_threads directive (parallel processing), load_files, load_files_scattered, batch syntax (multiple documents) support to CALL SNIPPETS statement

    • added OPTION comment='...' support to SELECT statement (#944)

    • added SHOW VARIABLES statement

    • added dummy handlers for SET TRANSACTION, SET NAMES, SELECT @@sysvar statements, and for sql_auto_is_null, sql_mode, and @@-style variables (like @@tx_isolation) in SET statement (better MySQL frameworks/connectors support)

    • added complete SphinxQL error logging (all errors are logged now, not just SELECTs)

    • improved SELECT statement syntax, made expressions aliases optional

    Bug fixes

    • fixed #982, empty binlogs prevented upgraded daemon from starting up

    • fixed #978, libsphinxclient build failed on sparc/sparc64 solaris

    • fixed #977, eliminated (most) compiler warnings

    • fixed #969, broken expression MVA/string argument type check prevented IF(IN(mva..)) and other valid expressions from working

    • fixed #966, NOT IN @global_var syntax was not supported

    • fixed #958, mem_limit over INT_MAX was not clamped

    • fixed #954, UTF-8 snippets could crash on malformed data

    • fixed #951, UTF-8 snippets could hang on malformed data

    • fixed #947, bad float column type was reported via SphinxQL, breaking some clients

    • fixed #940, group-by with a small enough max_matches limit could occasionally crash and/or sort wrongly

    • fixed #932, sending huge queries to agents occasionally failed (mainly on Windows)

    • fixed #926, snippets did not highlight wildcard matches with morphology enabled

    • fixed #918, crash logger did not report a proper query in dist_threads case

    • fixed #916, watchdog caused (endless) respawns if there was a crash during shutdown

    • fixed #904, attribute names were not forcibly case-folded in some API calls (eg. SetGroupDistinct)

    • fixed #902, query parser did not support stopword_step=0

    • fixed #897, network sockets dangled (open but unattended) while replaying binlog

    • fixed #855, allow_empty option in snippets did not always work correctly

    • fixed #854, indexing with many bigint attributes and docinfo=inline crashed

    • fixed #838, RT MVA insertion did not sort MVA values, caused matching issues

    • fixed #833, duplicate MVA values were not eliminated on update

    • fixed #832, certain (overshort/incorrect) documents crashed indexing MS SQL Unicode columns

    • fixed #829, query parser did not properly handle numerics with blend_chars

    • fixed #814, group-by string attributes in RT indexes did not always work correctly

    • fixed #812, utf8 stemming produced unexpected stems on words with single-byte chars

    • fixed #808, huge queries crashed logging with query_log_format=sphinxql

    • fixed #806, stray single-star keyword crashed on querying

    • fixed #798, snippets ignored index_exact_words in query_mode

    • fixed #797, RT klist loader had an occasional off-by-one crash

    • fixed #791, preopen_indexes erroneously defaulted to 0 on Windows

    • fixed #790, huge dictionaries (over 4 GB) did not work

    • fixed #786, inplace_enable could occasionally corrupt the indexes

    • fixed #775, doc had a typo (soundex vs metaphone)

    • fixed #772, snippets duplicated blended chars on a SPZ boundary

    • fixed #762, query parser truncated digit-only keywords over 15 digits

    • fixed #736, query parser did not properly handle blended/special char sequence

    • fixed #726, rotation of an index with a changed attribute count crashed

    • fixed #687, querying multiple indexes with index weights and sort-by expression produced incorrect (unadjusted) weights

    • fixed #585, (unsupported) string ordinals were silently zeroed out with docinfo=inline (instead of failing)

    • fixed #583, certain keywords could occasionally crash multiforms

    • fixed that concurrent MVA updates could crash

    • fixed that query parser did not ignore a pure blended token with a leading modifier

    • fixed that query parser did not properly handle a modifier followed by a dash

    • fixed that substring indexing with dict=crc did not support index_exact_words and zones

    • fixed that in a rare edge case common subtree cache could crash

    • fixed that empty result set returned the full schema (rather than SELECT-ed columns)

    • fixed that SphinxQL did not have a sanity check for (currently unsupported) result set schemas over 250 attributes

    • fixed that updates on regular indexes were not binlogged

    • fixed that multi-query optimization check for expressions did not handle multi-index case

    • fixed that SphinxSE did not build vs MySQL 5.5 release

    • fixed that proximity_bm25 ranker could yield incorrect weight on duplicated keywords

    • fixed that prefix expansion with dict=keyword occasionally crashed

    • fixed that strip_path did not work on RT disk chunks

    • fixed that exclude filters were not properly logged in query_log_format=sphinxql mode

    • fixed that plain string attribute check in indextool --check was broken

    • fixed that Java API did not allow specifying a connection timeout

    • fixed that ordinal and wordcount attributes could not be fetched via SphinxQL

    • fixed that in a rare edge case OR/ORDER would not match properly

    • fixed that sending (huge) query response did not handle EINTR properly

    • fixed that SPH04 ranker could yield incorrectly high weight in some cases

    • fixed that C API did not allow zeroing out the cutoff and max_matches settings

    • fixed that on a persistent connection there were occasionally issues handling signals while doing network reads/waits

    • fixed that in a rare edge case (field start modifier in a certain complex query) querying crashed

    • fixed that snippets did not support dist_threads with load_files=0

    • fixed that in some extremely rare edge cases tiny parts of an index could end up corrupted with dict=keywords

    • fixed that field/zone conditions were not propagated to expanded keywords with dict=keywords

    A.4. Version 2.0.1-beta, 22 apr 2011

    New general features

    New SphinxQL features

    New command-line switches

    • added --print-queries switch to indexer that dumps SQL queries it runs

    • added --sighup-each switch to indexer that rotates indexes one by one

    • added --strip-path switch to searchd that skips file paths embedded in the index(-es)

    • added --dumpconfig switch to indextool that dumps an index header in sphinx.conf format

    Major changes and optimizations

    • changed default preopen_indexes value to 1

    • optimized English stemmer (results in 1.3x faster snippets and indexing with morphology=stem_en)

    • optimized snippets, 1.6x general speedup

    • optimized const-list parsing in SphinxQL

    • optimized full-document highlighting CPU/RAM use

    • optimized binlog replay (improved performance on K-list update)

    Bug fixes

    • fixed #767, joined fields vs ODBC sources

    • fixed #757, wordforms shared by indexes with different settings

    • fixed #733, loading of indexes in formats prior to v.14

    • fixed #763, occasional snippets failures

    • fixed #648, occasionally missed rotations on multiple SIGHUPs

    • fixed #750, an RT segment merge leading to false positives and/or crashes in some cases

    • fixed #755, zones in snippets output

    • fixed #754, stopwords counting at snippet passage generation

    • fixed #723, fork/prefork index rotation in children processes

    • fixed #696, freeze on zero threshold in quorum operator

    • fixed #732, query escaping in SphinxSE

    • fixed #739, occasional crashes in MT mode on result set send

    • fixed #746, crash with a named list in SphinxQL option

    • fixed #674, AVG vs group order

    • fixed #734, occasional crashes attempting to report NULL errors

    • fixed #829, tail hits within field position modifier

    • fixed #712, missing query_mode, force_all_words snippet option defaults in Java API

    • fixed #721, added dupe removal on RT batch INSERT/REPLACE

    • fixed #720, potential extraneous highlighting after a blended keyword

    • fixed #702, exceptions vs star search

    • fixed #666, ext2 query grouping vs exceptions

    • fixed #688, WITHIN GROUP ORDER BY related crash

    • fixed #660, multi-queue batches vs dist_threads

    • fixed #678, crash on dict=keywords vs xmlpipe vs min_prefix_len

    • fixed #596, ECHILD vs scripted configs

    • fixed #653, dependency in expression, sorting, grouping

    • fixed #661, concurrent distributed searches vs workers=threads

    • fixed #646, crash on status query via UNIX socket

    • fixed #589, libexpat.dll missing from some Win32 build types

    • fixed #574, quorum match order

    • fixed multiple documentation issues (#372, #483, #495, #601, #623, #632, #654)

    • fixed that ondisk_dict did not affect RT indexes

    • fixed that string attributes check in indextool --check was erroneously sensitive to string data order

    • fixed a rare crash when using BEFORE operator

    • fixed an issue with multiforms vs BuildKeywords()

    • fixed an edge case in OR operator (emitted wrong hits order sometimes)

    • fixed aliasing in docinfo accessors that led to very rare crashes and/or missing results

    • fixed a syntax error on a short token at the end of a query

    • fixed id64 filtering and performance degradation with range filters

    • fixed missing rankers in libsphinxclient

    • fixed missing SPH04 ranker in SphinxSE

    • fixed column names in sql_attr_multi sample (works with example.sql now)

    • fixed an issue with distributed local+remote setup vs aggregate functions

    • fixed case-sensitive column names in RT indexes

    • fixed a crash vs strings from multiple indexes in result set

    • fixed blended keywords vs snippets

    • fixed secure_connection vs MySQL protocol vs MySQL.NET connector

    • fixed that Python API did not work with Python 2.3

    • fixed overshort_step vs snippets

    • fixed keyword statistics vs dist_threads searching

    • fixed multiforms vs query parsing (vs quorum)

    • fixed missed quorum words vs RT segments

    • fixed blended keywords occasionally skipping extra character when querying (eg "abc[]")

    • fixed Python API to handle int32 values

    • fixed prefix and infix indexing of joined fields

    • fixed MVA ranged query

    • fixed missing blended state reset on document boundary

    • fixed a crash on missing index while replaying binlog

    • fixed an error message on filter values overrun

    • fixed passage duplication in snippets in weight_order mode

    • fixed select clauses over 1K vs remote agents

    • fixed overshort accounting vs soft-whitespace tokens

    • fixed rotation vs workers=threads

    • fixed schema issues vs distributed indexes

    • fixed blended-escaped sequence parsing issue

    • fixed MySQL IN clause (values order etc)

    • fixed that post_index did not execute when 0 documents were successfully indexed

    • fixed field position limit vs many hits

    • fixed that joined fields missed an end marker at field end

    • fixed that xxx_step settings were missing from .sph index header

    • fixed libsphinxclient missing request cleanup in sphinx_query() (eg after network errors)

    • fixed that index_weights were ignored when grouping

    • fixed multi wordforms vs blend_chars

    • fixed broken MVA output in SphinxQL

    • fixed a few RT leaks

    • fixed an issue with RT string storage going missing

    • fixed an issue with repeated queries vs dist_threads

    • fixed an issue with string attributes vs buffer overrun in SphinxQL

    • fixed unexpected character data warnings within ignored xmlpipe tags

    • fixed a crash in snippets with NEAR syntax query

    • fixed passage duplication in snippets

    • fixed libsphinxclient SIGPIPE handling

    • fixed libsphinxclient vs VS2003 compiler bug

    A.5. Version 1.10-beta, 19 jul 2010

    • added RT indexes support (Chapter 4, Real-time indexes)

    • added prefork and threads support (workers directives)

    • added multi-threaded local searches in distributed indexes (dist_threads directive)

    • added common subquery cache (subtree_docs_cache, subtree_hits_cache directives)

    • added string attributes support (sql_attr_string, sql_field_string, xml_attr_string, xml_field_string directives)

    • added indexing-time word counter (sql_attr_str2wordcount, sql_field_str2wordcount directives)

    • added CALL SNIPPETS(), CALL KEYWORDS() SphinxQL statements

    • added field_weights, index_weights options to SphinxQL SELECT statement

    • added insert-only SphinxQL-talking tables to SphinxSE (connection='sphinxql://host[:port]/index')

    • added select option to SphinxSE queries

    • added backtrace on crash to searchd

    • added SQL+FS indexing, aka loading files by names fetched from SQL (sql_file_field directive)

    • added a watchdog in threads mode to searchd

    • added automatic row phantoms elimination to index merge

    • added hitless indexing support (hitless_words directive)

    • added --check, --strip-path, --htmlstrip, --dumphitlist ... --wordid switches to indextool

    • added --stopwait, --logdebug switches to searchd

    • added --dump-rows, --verbose switches to indexer

    • added "blended" characters indexing support (blend_chars directive)

    • added joined/payload field indexing (sql_joined_field directive)

    • added FlushAttributes() API call

    • added query_mode, force_all_words, limit_passages, limit_words, start_passage_id, load_files, html_strip_mode, allow_empty options, and %PASSAGE_ID% macro in before_match, after_match options to BuildExcerpts() API call (a usage sketch appears at the end of this section)

    • added @groupby/@count/@distinct columns support to SELECT (but not to expressions)

    • added query-time keyword expansion support (expand_keywords directive, SPH_RANK_SPH04 ranker)

    • added query batch size limit option (max_batch_queries directive; was hardcoded)

    • added SINT() function to expressions

    • improved SphinxQL syntax error reporting

    • improved expression optimizer (better constant handling)

    • improved dash handling within keywords (no longer treated as an operator)

    • improved snippets (better passage selection/trimming, around option now a hard limit)

    • optimized index format that yields ~20-30% smaller indexes

    • optimized sorting code (indexing time 1-5% faster on average; 100x faster in worst case)

    • optimized searchd startup time (moved .spa preindexing to indexer), added a progress bar

    • optimized queries against indexes with many attributes (eliminated redundant copying)

    • optimized 1-keyword queries (performance regression introduced in 0.9.9)

    • optimized SphinxQL protocol overheads, and performance on bigger result sets

    • optimized unbuffered attributes writes on index merge

    • changed attribute handling, duplicate names are strictly forbidden now

    • fixed that SphinxQL sessions could stall shutdown

    • fixed consts with leading minus in SphinxQL

    • fixed AND/OR precedence in expressions

    • fixed #334, AVG() on integers was not computed in floats

    • fixed #371, attribute flush vs 2+ GB files

    • fixed #373, segfault on distributed queries vs certain libc versions

    • fixed #398, stopwords not stopped in prefix/infix indexes

    • fixed #404, erroneous MVA failures in indextool --check

    • fixed #408, segfault on certain query batches (regular scan, plus a scan with MVA groupby)

    • fixed #431, occasional shutdown hangs in preforked workers

    • fixed #436, trunk checkout builds vs Solaris sh

    • fixed #440, escaping vs parentheses declared as valid in charset_table

    • fixed #442, occasional non-aligned free in MVA indexing

    • fixed #447, occasional crashes in MVA indexing

    • fixed #449, pconn busyloop on aborted clients on certain arches

    • fixed #465, build issue on Alpha

    • fixed #468, build issue in libsphinxclient

    • fixed #472, multiple stopword files failing to load

    • fixed #489, buffer overflow in query logging

    • fixed #493, Python API assertion after error returned from Query()

    • fixed #500, malformed MySQL packet when sending MVAs

    • fixed #504, SIGPIPE in libsphinxclient

    • fixed #506, better MySQL protocol commands support in SphinxQL (PING etc)

    • fixed #509, indexing ranged results from stored procedures
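
    As a usage illustration for the extended BuildExcerpts() call listed above (query_mode, allow_empty, limit_passages options, and the %PASSAGE_ID% macro), here is a hedged PHP sketch; the index name, document text, and keywords are made up for the example:

        require_once 'sphinxapi.php';

        $cl = new SphinxClient();
        $cl->SetServer('localhost', 9312);

        $docs = array('This is a sample document body that we want to highlight.');
        $opts = array(
            'query_mode'     => true,  // treat the keywords as an extended query, not a bag of words
            'allow_empty'    => true,  // return an empty snippet instead of the document start on no match
            'limit_passages' => 2,     // at most two passages per snippet
            'before_match'   => '<span class="match%PASSAGE_ID%">',  // %PASSAGE_ID% expands per passage
            'after_match'    => '</span>',
        );
        $res = $cl->BuildExcerpts($docs, 'myindex', 'sample document', $opts);
        if ($res === false)
            die($cl->GetLastError());
        print $res[0];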

    A.6. Version 0.9.9-release, 02 dec 2009

    • added Open, Close, Status calls to libsphinxclient (C API)

    • added automatic persistent connection reopening to PHP, Python APIs

    • added 64-bit value/range filters, fullscan mode support to SphinxSE

    • MAJOR CHANGE, our IANA assigned ports are now 9312 for the API and 9306 for the MySQL protocol (goodbye, trusty 3312)

    • MAJOR CHANGE, erroneous filters now fail with an error (were silently ignored before)

    • optimized unbuffered .spa writes on merge

    • optimized 1-keyword queries ranking in extended2 mode

    • fixed #441 (IO race in case of highly concurrent load on a preopened index)

    • fixed #434 (distributed indexes were not searchable via MySQL protocol)

    • fixed #317 (indexer MVA progress counter)

    • fixed #398 (stopwords not removed from search query)

    • fixed #328 (broken cutoff)

    • fixed #250 (now quoting paths w/spaces when installing Windows service)

    • fixed #348 (K-list was not updated on merge)

    • fixed #357 (destination index was not K-list-filtered on merge)

    • fixed #369 (precaching .spi files over 2 GBs)

    • fixed #438 (missing boundary proximity matches)

    • fixed #371 (.spa flush in case of files over 2 GBs)

    • fixed #373 (crashes on distributed queries via mysql proto)

    • fixed critical bugs in hit merging code

    • fixed #424 (ordinals could be misplaced during indexing in case of bitfields etc)

    • fixed #426 (failing SE build on Solaris; thanks to Ben Beecher)

    • fixed #423 (typo in SE caused crash on SHOW STATUS)

    • fixed #363 (handling of read_timeout over 2147 seconds)

    • fixed #376 (minor error message mismatch)

    • fixed #413 (minus in SphinxQL)

    • fixed #417 (floats w/o leading digit in SphinxQL)

    • fixed #403 (typo in SetFieldWeights name in Java API)

    • fixed index rotation vs persistent connections

    • fixed backslash handling in SphinxQL parser

    • fixed uint unpacking vs. PHP 5.2.9 (possibly other versions)

    • fixed #325 (filter settings sent from SphinxSE)

    • fixed #352 (removed mysql wrapper around close() in SphinxSE)

    • fixed #389 (display error messages through SphinxSE status variable)

    • fixed linking with port-installed iconv on OS X

    • fixed negative 64-bit unpacking in PHP API

    • fixed #349 (escaping backslash in query emulation mode)

    • fixed #320 (disabled multi-query route when select items differ)

    • fixed #353 (better quorum counts check)

    • fixed #341 (merging of trailing hits; maybe other ranking issues too)

    • fixed #368 (partially; @field "" caused crashes; now resets field limit)

    • fixed #365 (field mask was leaking on field-limited terms)

    • fixed #339 (updated debug query dumper)

    • fixed #361 (added SetConnectTimeout() to Java API)

    • fixed #338 (added missing fullscan to mode check in Java API)

    • fixed #323 (added floats support to SphinxQL)

    • fixed #340 (support listen=port:proto syntax too)

    • fixed #332 (\r is legal SphinxQL space now)

    • fixed xmlpipe2 K-lists

    • fixed #322 (safety gaps in mysql protocol row buffer)

    • fixed #313 (return keyword stats for empty indexes too)

    • fixed #344 (invalid checkpoints after merge)

    • fixed #326 (missing CLOCK_xxx on FreeBSD)

    A.7. Version 0.9.9-rc2, 08 apr 2009

    • added IsConnectError(), Open(), Close() calls to Java API (bug #240)

    • added read_buffer, read_unhinted directives

    • added checks for build options returned by mysql_config (builds on Solaris now)

    • added fixed-RAM index merge (bug #169)

    • added logging chained queries count in case of (optimized) multi-queries

    • added GEODIST() function (a usage sketch appears at the end of this section)

    • added --status switch to searchd

    • added MySpell (OpenOffice) affix file support (bug #281)

    • added ODBC support (both Windows and UnixODBC)

    • added support for @id in IN() (bug #292)

    • added support for aggregate functions in GROUP BY (namely AVG, MAX, MIN, SUM)

    • added MySQL UDF that builds snippets using searchd

    • added write_buffer directive (defaults to 1M)

    • added xmlpipe_fixup_utf8 directive

    • added suggestions sample

    • added microsecond precision int64 timer (bug #282)

    • added listen_backlog directive

    • added max_xmlpipe2_field directive

    • added initial SphinxQL support to mysql41 handler, SELECT .../SHOW WARNINGS/STATUS/META are handled

    • added support for different network protocols, and mysql41 protocol

    • added fieldmask ranker, updated SphinxSE list of rankers

    • added mysql_ssl_xxx directives

    • added --cpustats (requires clock_gettime()) and --status switches to searchd

    • added performance counters, Status() API call

    • added overshort_step and stopword_step directives

    • added strict order operator (aka operator before, eg. "one << two << three")

    • added indextool utility, moved --dumpheader there, added --debugdocids, --dumphitlist options

    • added own RNG, reseeded on @random sort query (bug #183)

    • added field-start and field-end modifiers support (syntax is "^hello world$"; field-end requires reindex)

    • added MVA attribute support to IN() function

    • added AND, OR, and NOT support to expressions

    • improved logging of (optimized) multi-queries (now logging chained query count)

    • improved handshake error handling, fixed protocol version byte order (omg)

    • updated SphinxSE to protocol 1.22

    • allowed phrase_boundary_step=-1 (trick to emulate keyword expansion)

    • removed SPH_MAX_QUERY_WORDS limit

    • fixed CLI search vs documents missing from DB (bug #257)

    • fixed libsphinxclient results leak on subsequent sphinx_run_queries call (bug #256)

    • fixed libsphinxclient handling of zero max_matches and cutoff (bug #208)

    • fixed over-64K string reads (eg. big snippets) in Java API (bug #181)

    • fixed Java API 2nd Query() after network error in 1st Query() call (bug #308)

    • fixed typo-class bugs in SetFilterFloatRange (bug #259), SetSortMode (bug #248)

    • fixed missing @@relaxed support (bug #276), fixed missing error on @nosuchfield queries, documented @@relaxed

    • fixed UNIX socket permissions to 0777 (bug #288)

    • fixed xmlpipe2 crash on schemas with no fields, added better document structure checks

    • fixed (and optimized) expr parser vs IN() with huge (10K+) args count

    • fixed double EarlyCalc() in fullscan mode (minor performance impact)

    • fixed phrase boundary handling in some cases (on buffer end, on trailing whitespace)

    • fixes in snippets (aka excerpts) generation

    • fixed inline attrs vs id64 index corruption

    • fixed head searchd crash on config re-parse failure

    • fixed handling of numeric keywords with leading zeroes such as "007" (bug #251)

    • fixed junk in SphinxSE status variables (bug #304)

    • fixed wordlist checkpoints serialization (bug #236)

    • fixed unaligned docinfo id access (bug #230)

    • fixed GetRawBytes() vs oversized blocks (headers with over 32K charset_table should now work, bug #300)

    • fixed buffer overflow caused by too long dest wordform, updated tests

    • fixed IF() return type (was always int, is deduced now)

    • fixed legacy queries vs. special chars vs. multiple indexes

    • fixed write-write-read socket access pattern vs Nagle vs delays vs FreeBSD (oh wow)

    • fixed exceptions vs query-parser issue

    • fixed late calc vs @weight in expressions (bug #285)

    • fixed early lookup/calc vs filters (bug #284)

    • fixed emulated MATCH_ANY queries (empty proximity and phrase queries are allowed now)

    • fixed MATCH_ANY ranker vs fields with no matches

    • fixed index file size vs inplace_enable (bug #245)

    • fixed that old logs were not closed on USR1 (bug #221)

    • fixed handling of '!' alias to NOT operator (bug #237)

    • fixed error handling vs query steps (step failure was not reported)

    • fixed querying vs inline attributes

    • fixed stupid bug in escaping code, fixed EscapeString() and made it static

    • fixed parser vs @field -keyword, foo|@field bar, "" queries (bug #310)
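
    To illustrate a couple of the expression additions above (GEODIST(), and aggregate functions with GROUP BY), here is a hedged PHP sketch; the index name, attribute names (lat_rad, lon_rad, price, category_id), and the anchor coordinates are hypothetical, and GEODIST() expects latitude/longitude in radians:

        require_once 'sphinxapi.php';

        $cl = new SphinxClient();
        $cl->SetServer('localhost', 9312);

        // per-document distance (in meters) from a fixed anchor point,
        // plus a per-group average computed when grouping
        $cl->SetSelect('*, GEODIST(lat_rad, lon_rad, 0.9594, -2.1256) AS dist, AVG(price) AS avgprice');
        $cl->SetGroupBy('category_id', SPH_GROUPBY_ATTR, '@count desc');

        // strict order operator added in this release
        $cl->SetMatchMode(SPH_MATCH_EXTENDED2);
        $res = $cl->Query('one << two << three', 'myindex');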

    A.8. Version 0.9.9-rc1, 17 nov 2008

    • added min_stemming_len directive

    • added IsConnectError() API call (helps distinguish API vs remote errors)

    • added duplicate log messages filter to searchd

    • added --nodetach debugging switch to searchd

    • added blackhole agents support for debugging/testing (agent_blackhole directive)

    • added max_filters, max_filter_values directives (were hardcoded before)

    • added int64 expression evaluation path, automatic inference, and BIGINT() enforcer function

    • added crash handler for debugging (crash_log_path directive)

    • added MS SQL (aka SQL Server) source support (Windows only, mssql_winauth and mssql_unicode directives)

    • added indexer-side column unpacking feature (unpack_zlib, unpack_mysqlcompress directives)

    • added nested brackets and NOTs support to query language, rewritten query parser

    • added persistent connections support (Open() and Close() API calls)

    • added index_exact_words feature, and exact form operator to query language ("hello =world")

    • added status variables support to SphinxSE (SHOW STATUS LIKE 'sphinx_%')

    • added max_packet_size directive (was hardcoded at 8M before)

    • added UNIX socket support, and multi-interface support (listen directive)

    • added star-syntax support to BuildExcerpts() API call

    • added inplace inversion of .spa and .spp (inplace_enable directive, 1.5-2x less disk space for indexing)

    • added builtin Czech stemmer (morphology=stem_cz)

    • added IDIV(), NOW(), INTERVAL(), IN() functions to expressions

    • added index-level early-reject based on filters

    • added MVA updates feature (mva_updates_pool directive)

    • added select-list feature with computed expressions support (see SetSelect() API call, test.php --select switch), protocol 1.22; a usage sketch appears at the end of this section

    • added integer expressions support (2x faster than float)

    • added multiforms support (multiple source words in wordforms file)

    • added legacy rankers (MATCH_ALL/MATCH_ANY/etc), removed legacy matching code (everything runs on V2 engine now)

    • added field position limit modifier to field operator (syntax: @title[50] hello world)

    • added killlist support (sql_query_killlist directive, --merge-killlists switch)

    • added on-disk SPI support (ondisk_dict directive)

    • added indexer IO stats

    • added periodic .spa flush (attr_flush_period directive)

    • added config reload on SIGHUP

    • added per-query attribute overrides feature (see SetOverride() API call); protocol 1.21

    • added signed 64bit attrs support (sql_attr_bigint directive)

    • improved HTML stripper to also skip PIs (<? ... ?>, such as <?php ... ?>)

    • improved excerpts speed (up to 50x faster on big documents)

    • fixed a short window of searchd inaccessibility on startup (started listen()ing too early before)

    • fixed .spa loading on systems where read() is 2GB capped

    • fixed infixes vs morphology issues

    • fixed backslash escaping, added backslash to EscapeString()

    • fixed handling of over-2GB dictionary files (.spi)
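
    A hedged PHP sketch pulling together several of the additions above (persistent connections, the select-list feature, and the new query syntax); the index name, attribute name (price), and sample values are hypothetical, and the exact form operator assumes index_exact_words is enabled for the index:

        require_once 'sphinxapi.php';

        $cl = new SphinxClient();
        $cl->SetServer('localhost', 9312);

        // persistent connection (Open()/Close() API calls)
        $cl->Open();

        // select-list with computed expressions, using functions added in this release
        $cl->SetSelect('*, INTERVAL(price, 100, 500, 1000) AS price_band, IDIV(price, 10) AS price_bucket');

        // exact form operator ("=world") plus field position limit modifier ("@title[50]");
        // the exact form operator requires index_exact_words=1 in the index settings
        $cl->SetMatchMode(SPH_MATCH_EXTENDED2);
        $res = $cl->Query('@title[50] hello =world', 'myindex');

        $cl->Close();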

    A.9. Version 0.9.8.1, 30 oct 2008

    • added configure script to libsphinxclient

    • changed proximity/quorum operator syntax to require whitespace after length

    • fixed potential head process crash on SIGPIPE during "maxed out" message

    • fixed handling of incomplete remote replies (caused over-degraded distributed results, in rare cases)

    • fixed sending of big remote requests (caused distributed requests to fail, in rare cases)

    • fixed FD_SET() overflow (caused searchd to crash on startup, in rare cases)

    • fixed MVA vs distributed indexes (caused loss of 1st MVA value in result set)

    • fixed tokenizing of exceptions terminated by specials (eg. "GPS AT&T" in extended mode)

    • fixed buffer overrun in stemmer on overlong tokens occasionally emitted by proximity/quorum operator parser (caused crashes on certain proximity/quorum queries)

    • fixed wordcount ranker (could be dropping hits)

    • fixed --merge feature (numerous different fixes, caused broken indexes)

    • fixed --merge-dst-range performance

    • fixed prefix/infix generation for stopwords

    • fixed ignore_chars vs specials

    • fixed misplaced F_SETLKW check (caused certain build types, eg. RPM build on FC8, to fail)

    • fixed dictionary-defined charsets support in spelldump, added \x-style wordchars support

    • fixed Java API to properly send long strings (over 64K; eg. long document bodies for excerpts)

    • fixed Python API to accept offset/limit of 'long' type

    • fixed default ID range (that filtered out all 64-bit values) in Java and Python APIs

    A.10. Version 0.9.8, 14 jul 2008

    Indexing

    • added support for 64-bit document and keyword IDs, --enable-id64 switch to configure

    • added support for floating point attributes

    • added support for bitfields in attributes, sql_attr_bool directive and bit-widths part in sql_attr_uint directive

    • added support for multi-valued attributes (MVA)

    • added metaphone preprocessor

    • added libstemmer library support, provides stemmers for a number of additional languages

    • added xmlpipe2 source type, that supports arbitrary fields and attributes

    • added word form dictionaries, wordforms directive (and spelldump utility)

    • added tokenizing exceptions, exceptions directive

    • added an option to fully remove element contents to HTML stripper, html_remove_elements directive

    • added HTML entities decoder (with full XHTML1 set support) to HTML stripper

    • added per-index HTML stripping settings, html_strip, html_index_attrs, and html_remove_elements directives

    • added IO load throttling, max_iops and max_iosize directives

    • added SQL load throttling, sql_ranged_throttle directive

    • added an option to index prefixes/infixes for given fields only, prefix_fields and infix_fields directives

    • added an option to ignore certain characters (instead of just treating them as whitespace), ignore_chars directive

    • added an option to increment word position on phrase boundary characters, phrase_boundary and phrase_boundary_step directives

    • added --merge-dst-range switch (and filters) to index merging feature (--merge switch)

    • added mysql_connect_flags directive (eg. to reduce MySQL network traffic and/or time spent indexing)

    • improved ordinals sorting; now runs in fixed RAM

    • improved handling of documents with zero/NULL ids, now skipping them instead of aborting

    Search daemon

    • added an option to unlink old index on successful rotation, unlink_old directive

    • added an option to keep index files open at all times (fixes subtle races on rotation), preopen and preopen_indexes directives

    • added an option to profile searchd disk I/O, --iostats command-line option

    • added an option to rotate index seamlessly (fully avoids query stalls), seamless_rotate directive

    • added HTML stripping support to excerpts (uses per-index settings)

    • added 'exact_phrase', 'single_passage', 'use_boundaries', 'weight_order' options to BuildExcerpts() API call

    • added distributed attribute updates propagation

    • added distributed retries on master node side

    • added log reopen on SIGUSR1

    • added --stop switch (sends SIGTERM to running instance)

    • added Windows service mode, and --servicename switch

    • added Windows --rotate support

    • improved log timestamping, now with millisecond precision

    Querying

    • added extended engine V2 (faster, cleaner, better; SPH_MATCH_EXTENDED2 mode)

    • added ranking modes support (V2 engine only; SetRankingMode() API call)

    • added quorum searching support to query language (V2 engine only; example: "any three of all these words"/3)

    • added query escaping support to query language, and EscapeString() API call

    • added multi-field syntax support to query language (example: "@(field1,field2) something"), and @@relaxed field checks option

    • added optional star-syntax ('word*') support in keywords, enable_star directive (for prefix/infix indexes only)

    • added full-scan support (query must be fully empty; can perform block-reject optimization)

    • added COUNT(DISTINCT(attr)) calculation support, SetGroupDistinct() API call

    • added group-by on MVA support, SetArrayResult() PHP API call

    • added per-index weights feature, SetIndexWeights() API call

    • added geodistance support, SetGeoAnchor() API call

    • added result set sorting by arbitrary expressions in run time (eg. "@weight+log(price)*2.5"), SPH_SORT_EXPR mode

    • added result set sorting by @custom compile-time sorting function (see src/sphinxcustomsort.inl)

    • added result set sorting by @random value

    • added result set merging for indexes with different schemas

    • added query comments support (3rd arg to Query()/AddQuery() API calls, copied verbatim to query log)

    • added keyword extraction support, BuildKeywords() API call

    • added binding field weights by name, SetFieldWeights() API call

    • added optional limit on query time, SetMaxQueryTime() API call

    • added optional limit on found matches count (4th arg to SetLimits() API call, so-called 'cutoff'); a usage sketch follows this list
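
    A hedged PHP sketch of several of the query-time features above; the index names, attribute names, and weight values are hypothetical, and 3312 is used as the default searchd port of the 0.9.8 era:

        require_once 'sphinxapi.php';

        $cl = new SphinxClient();
        $cl->SetServer('localhost', 3312);

        // per-field and per-index weights
        $cl->SetFieldWeights(array('title' => 10, 'content' => 1));
        $cl->SetIndexWeights(array('posts_recent' => 2, 'posts_archive' => 1));

        // sort by an arbitrary run-time expression (SPH_SORT_EXPR mode)
        $cl->SetSortMode(SPH_SORT_EXPR, '@weight + log(price)*2.5');

        // geodistance anchor; 'lat'/'lon' must be attributes storing radians
        $cl->SetGeoAnchor('lat', 'lon', 0.93144, -2.10173);

        // return 20 matches from offset 0, track up to 1000, stop early after 5000 found ('cutoff')
        $cl->SetLimits(0, 20, 1000, 5000);

        $res = $cl->Query('hello world', 'posts_recent posts_archive');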

    APIs and SphinxSE

    • added pure C API (libsphinxclient)

    • added Ruby API (thanks to Dmytro Shteflyuk)

    • added Java API

    • added SphinxSE support for MVAs (use varchar), floats (use float), 64bit docids (use bigint)

    • added SphinxSE options "floatrange", "geoanchor", "fieldweights", "indexweights", "maxquerytime", "comment", "host" and "port"; and support for "expr:CLAUSE"

    • improved SphinxSE max query size (using MySQL condition pushdown), up to 256K now

    General

    • added scripting (shebang syntax) support to config files (example: #!/usr/bin/php in the first line)

    • added unified config handling and validation to all programs

    • added unified documentation

    • added .spec file for RPM builds

    • added automated testing suite

    • improved index locking, now fcntl()-based instead of buggy file-existence-based

    • fixed unaligned RAM accesses, now works on SPARC and ARM

    Changes and fixes since 0.9.8-rc2

    • added pure C API (libsphinxclient)

    • added Ruby API

    • added SetConnectTimeout() PHP API call

    • added allowed type check to UpdateAttributes() handler (bug #174)

    • added defensive MVA checks on index preload (protection against broken indexes, bug #168)

    • added sphinx-min.conf sample file

    • added --without-iconv switch to configure

    • removed redundant -lz dependency in searchd

    • removed erroneous "xmlpipe2 deprecated" warning

    • fixed EINTR handling in piped read (bug #166)

    • fixed up query time before logging and sending to client (bug #153)

    • fixed attribute updates vs full-scan early-reject index (bug #149)

    • fixed gcc warnings (bug #160)

    • fixed mysql connection attempt vs pgsql source type (bug #165)

    • fixed 32-bit wraparound when preloading over 2 GB files

    • fixed "out of memory" message vs over 2 GB allocs (bug #116)

    • fixed unaligned RAM access detection on ARM (where unaligned reads do not crash but produce wrong results)

    • fixed missing full scan results in some cases

    • fixed several bugs in --merge, --merge-dst-range

    • fixed @geodist vs MultiQuery and filters, @expr vs MultiQuery

    • fixed GetTokenEnd() vs 1-grams (was causing crash in excerpts)

    • fixed sql_query_range to handle empty strings in addition to NULL strings (Postgres specific)

    • fixed morphology=none vs infixes

    • fixed case-sensitive attribute names in UpdateAttributes()

    • fixed ext2 ranking vs. stopwords (now using atompos from query parser)

    • fixed EscapeString() call

    • fixed escaped specials (now handled as whitespace if not in charset)

    • fixed schema minimizer (now handles type/size mismatches)

    • fixed word stats in extended2; stemmed form is now returned

    • fixed spelldump case folding vs dictionary-defined character sets

    • fixed Postgres BOOLEAN handling

    • fixed enforced "inline" docinfo on empty indexes (normally ok, but index merge was really confused)

    • fixed rare count(distinct) out-of-bounds issue (it occasionally caused too high @distinct values)

    • fixed hangups on documents with id=DOCID_MAX in some cases

    • fixed rare crash in tokenizer (prefixed synonym vs. input stream eof)

    • fixed query parser vs "aaa (bbb ccc)|ddd" queries

    • fixed BuildExcerpts() request in Java API

    • fixed Postgres specific memory leak

    • fixed handling of overshort keywords (less than min_word_len)

    • fixed HTML stripper (now emits space after indexed attributes)

    • fixed 32-field case in query parser

    • fixed rare count(distinct) vs. querying multiple local indexes vs. reusable sorter issue

    • fixed sorting of negative floats in SPH_SORT_EXTENDED mode

    A.11. Version 0.9.7, 02 apr 2007

    • added support for sql_str2ordinal_column

    • added support for up to 5 sort-by attrs (in extended sorting mode)

    • added support for separate groups sorting clause (in group-by mode)

    • added support for on-the-fly attribute updates (PRE-ALPHA; will change heavily; use for preliminary testing ONLY)

    • added support for zero/NULL attributes

    • added support for 0.9.7 features to SphinxSE

    • added support for n-grams (alpha, 1-grams only for now)

    • added support for warnings reported to client

    • added support for exclude-filters

    • added support for prefix and infix indexing (see max_prefix_len, max_infix_len)

    • added @* syntax to reset current field to query language

    • added removal of duplicate entries in query index order

    • added PHP API workarounds for PHP signed/unsigned braindamage

    • added locks to avoid two concurrent indexers working on same index

    • added check for existing attributes vs. docinfo=none case

    • improved groupby code a lot (better precision, and up to 25x faster in extreme cases)

    • improved error handling and reporting

    • improved handling of broken indexes (reports error instead of hanging/crashing)

    • improved mmap() limits for attributes and wordlists (now able to map over 4 GB on x64 and over 2 GB on x32 where possible)

    • improved malloc() pressure in head daemon (search time should not degrade with time any more)

    • improved test.php command line options

    • improved error reporting (distributed query, broken index etc issues now reported to client)

    • changed default network packet size to be 8M, added extra checks

    • fixed division by zero in BM25 on 1-document collections (in extended matching mode)

    • fixed .spl files getting unlinked

    • fixed crash in schema compatibility test

    • fixed UTF-8 Russian stemmer

    • fixed requested matches count when querying distributed agents

    • fixed signed vs. unsigned issues everywhere (ranged queries, CLI search output, and obtaining docid)

    • fixed potential crashes vs. negative query offsets

    • fixed 0-match docs vs. extended mode vs. stats

    • fixed group/timestamp filters being ignored if querying from older clients

    • fixed docs to mention pgsql source type

    • fixed issues with explicit '&' in extended matching mode

    • fixed wrong assertion in SBCS encoder

    • fixed crashes with no-attribute indexes after rotate

    A.12. Version 0.9.7-rc2, 15 dec 2006

    • added support for extended matching mode (query language)

    • added support for extended sorting mode (sorting clauses)

    • added support for SBCS excerpts

    • added mmap()ing for attributes and wordlist (improves search time, speeds up fork() greatly)

    • fixed attribute name handling to be case insensitive

    • fixed default compiler options to simplify post-mortem debugging (added -g, removed -fomit-frame-pointer)

    • fixed rare memory leak

    • fixed "hello hello" queries in "match phrase" mode

    • fixed issue with excerpts, texts and overlong queries

    • fixed logging of multiple index names (no longer tokenized)

    • fixed trailing stopword not flushed from tokenizer

    • fixed boolean evaluation

    • fixed pidfile being wrongly unlink()ed on bind() failure

    • fixed --with-mysql-includes/libs (they conflicted with well-known paths)

    • fixes for 64-bit platforms

    A.13. Version 0.9.7-rc1, 26 oct 2006

    • added alpha index merging code

    • added an option to decrease max_matches per-query

    • added an option to specify IP address for searchd to listen on

    • added support for unlimited amount of configured sources and indexes

    • added support for group-by queries

    • added support for /2 range modifier in charset_table

    • added support for arbitrary amount of document attributes

    • added logging filter count and index name

    • added --with-debug option to configure to compile in debug mode

    • added -DNDEBUG when compiling in default mode

    • improved search time (added doclist size hints, in-memory wordlist cache, and used VLB coding everywhere)

    • improved (refactored) SQL driver code (adding new drivers should be very easy now)

    • improved excerpts generation

    • fixed issue with empty sources and ranged queries

    • fixed querying purely remote distributed indexes

    • fixed suffix length check in English stemmer in some cases

    • fixed UTF-8 decoder for codes over U+20000 (for CJK)

    • fixed UTF-8 encoder for 3-byte sequences (for CJK)

    • fixed overshort (less than min_word_len) words prepended to next field

    • fixed source connection order (indexer does not connect to all sources at once now)

    • fixed line numbering in config parser

    • fixed some issues with index rotation

    A.14. Version 0.9.6, 24 jul 2006

    • added support for empty indexes

    • added support for multiple sql_query_pre/post/post_index

    • fixed timestamp ranges filter in "match any" mode

    • fixed configure issues with --without-mysql and --with-pgsql options

    • fixed building on Solaris 9

    A.15. Version 0.9.6-rc1, 26 jun 2006

    • added boolean queries support (experimental, beta version)

    • added simple file-based query cache (experimental, beta version)

    • added storage engine for MySQL 5.0 and 5.1 (experimental, beta version)

    • added GNU style configure script

    • added new searchd protocol (all binary, and should be backwards compatible)

    • added distributed searching support to searchd

    • added PostgreSQL driver

    • added excerpts generation

    • added min_word_len option to index

    • added max_matches option to searchd, removed hardcoded MAX_MATCHES limit

    • added initial documentation, and a working example.sql

    • added support for multiple sources per index

    • added soundex support

    • added group ID ranges support

    • added --stdin command-line option to search utility

    • added --noprogress option to indexer

    • added --index option to search

    • fixed UTF-8 decoder (3-byte codepoints did not work)

    • fixed PHP API to handle big result sets faster

    • fixed config parser to handle empty values properly

    • fixed redundant time(NULL) calls in time-segments mode

    sphinx-2.0.4-release/doc/sphinx.xml0000644000176700017710000206673211724063016016563 0ustar deogardeogar ]> Sphinx 2.0.4-release reference manual Free open-source SQL full-text search engine 2001-2012 Andrew Aksyonoff 2008-2012 Sphinx Technologies Inc, http://sphinxsearch.com Introduction About Sphinx is a full-text search engine, publicly distributed under GPL version 2. Commercial licensing (eg. for embedded use) is available upon request. Technically, Sphinx is a standalone software package provides fast and relevant full-text search functionality to client applications. It was specially designed to integrate well with SQL databases storing the data, and to be easily accessed scripting languages. However, Sphinx does not depend on nor require any specific database to function. Applications can access Sphinx search daemon (searchd) using any of the three different access methods: a) via native search API (SphinxAPI), b) via Sphinx own implementation of MySQL network protocol (using a small SQL subset called SphinxQL), or c) via MySQL server with a pluggable storage engine (SphinxSE). Official native SphinxAPI implementations for PHP, Perl, Ruby, and Java are included within the distribution package. API is very lightweight so porting it to a new language is known to take a few hours or days. Third party API ports and plugins exist for Perl, C#, Haskell, Ruby-on-Rails, and possibly other languages and frameworks. Starting version 1.10-beta, Sphinx supports two different indexing backends: "disk" index backend, and "realtime" (RT) index backend. Disk indexes support online full-text index rebuilds, but online updates can only be done on non-text (attribute) data. RT indexes additionally allow for online full-text index updates. Previous versions only supported disk indexes. Data can be loaded into disk indexes using a so-called data source. Built-in sources can fetch data directly from MySQL, PostgreSQL, ODBC compliant database (MS SQL, Oracle, etc), or a pipe in a custom XML format. Adding new data sources drivers (eg. to natively support other DBMSes) is designed to be as easy as possible. RT indexes, as of 1.10-beta, can only be populated using SphinxQL. As for the name, Sphinx is an acronym which is officially decoded as SQL Phrase Index. Yes, I know about CMU's Sphinx project. Sphinx features Key Sphinx features are: high indexing and searching performance; advanced indexing and querying tools (flexible and feature-rich text tokenizer, querying language, several different ranking modes, etc); advanced result set post-processing (SELECT with expressions, WHERE, ORDER BY, GROUP BY etc over text search results); proven scalability up to billions of documents, terabytes of data, and thousands of queries per second; easy integration with SQL and XML data sources, and SphinxAPI, SphinxQL, or SphinxSE search interfaces; easy scaling with distributed searches. 
To expand a bit, Sphinx: has high indexing speed (upto 10-15 MB/sec per core on an internal benchmark); has high search speed (upto 150-250 queries/sec per core against 1,000,000 documents, 1.2 GB of data on an internal benchmark); has high scalability (biggest known cluster indexes over 3,000,000,000 documents, and busiest one peaks over 50,000,000 queries/day); provides good relevance ranking through combination of phrase proximity ranking and statistical (BM25) ranking; provides distributed searching capabilities; provides document excerpts (snippets) generation; provides searching from within application with SphinxAPI or SphinxQL interfaces, and from within MySQL with pluggable SphinxSE storage engine; supports boolean, phrase, word proximity and other types of queries; supports multiple full-text fields per document (upto 32 by default); supports multiple additional attributes per document (ie. groups, timestamps, etc); supports stopwords; supports morphological word forms dictionaries; supports tokenizing exceptions; supports both single-byte encodings and UTF-8; supports stemming (stemmers for English, Russian and Czech are built-in; and stemmers for French, Spanish, Portuguese, Italian, Romanian, German, Dutch, Swedish, Norwegian, Danish, Finnish, Hungarian, are available by building third party libstemmer library); supports MySQL natively (all types of tables, including MyISAM, InnoDB, NDB, Archive, etc are supported); supports PostgreSQL natively; supports ODBC compliant databases (MS SQL, Oracle, etc) natively; ...has 50+ other features not listed here, refer to API and configuration manual! Where to get Sphinx Sphinx is available through its official Web site at http://sphinxsearch.com/. Currently, Sphinx distribution tarball includes the following software: indexer: an utility which creates fulltext indexes; search: a simple command-line (CLI) test utility which searches through fulltext indexes; searchd: a daemon which enables external software (eg. Web applications) to search through fulltext indexes; sphinxapi: a set of searchd client API libraries for popular Web scripting languages (PHP, Python, Perl, Ruby). spelldump: a simple command-line tool to extract the items from an ispell or MySpell (as bundled with OpenOffice) format dictionary to help customize your index, for use with wordforms. indextool: an utility to dump miscellaneous debug information about the index, added in version 0.9.9-rc2. License This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. See COPYING file for details. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA Non-GPL licensing (for OEM/ISV embedded use) can also be arranged, please contact us to discuss commercial licensing possibilities. 
Credits Author Sphinx initial author (and a benevolent dictator ever since): Andrew Aksyonoff, http://shodan.ru Team Past and present employees of Sphinx Technologies Inc who should be noted on their work on Sphinx (in alphabetical order): Alexander Klimenko Alexey Dvoichenkov Alexey Vinogradov Ilya Kuznetsov Stanislav Klinov Contributors People who contributed to Sphinx and their contributions (in no particular order): Robert "coredev" Bengtsson (Sweden), initial version of PostgreSQL data source Len Kranendonk, Perl API Dmytro Shteflyuk, Ruby API Many other people have contributed ideas, bug reports, fixes, etc. Thank you! History Sphinx development was started back in 2001, because I didn't manage to find an acceptable search solution (for a database driven Web site) which would meet my requirements. Actually, each and every important aspect was a problem: search quality (ie. good relevance) statistical ranking methods performed rather bad, especially on large collections of small documents (forums, blogs, etc) search speed especially if searching for phrases which contain stopwords, as in "to be or not to be" moderate disk and CPU requirements when indexing important in shared hosting enivronment, not to mention the indexing speed. Despite the amount of time passed and numerous improvements made in the other solutions, there's still no solution which I personally would be eager to migrate to. Considering that and a lot of positive feedback received from Sphinx users during last years, the obvious decision is to continue developing Sphinx (and, eventually, to take over the world). Installation Supported systems Most modern UNIX systems with a C++ compiler should be able to compile and run Sphinx without any modifications. Currently known systems Sphinx has been successfully running on are: Linux 2.4.x, 2.6.x (many various distributions) Windows 2000, XP, 7 FreeBSD 4.x, 5.x, 6.x, 7.x, 8.x NetBSD 1.6, 3.0 Solaris 9, 11 Mac OS X CPU architectures known to work include i386 (aka x86), amd64 (aka x86_64), SPARC64, and ARM. Chances are good that Sphinx should work on other Unix platforms and/or CPU architectures just as well. Please report any other platforms that worked for you! All platforms are production quality. There are no principal functional limitations on any platform. Required tools On UNIX, you will need the following tools to build and install Sphinx: a working C++ compiler. GNU gcc is known to work. a good make program. GNU make is known to work. On Windows, you will need Microsoft Visual C/C++ Studio .NET 2005 or above. Other compilers/environments will probably work as well, but for the time being, you will have to build makefile (or other environment specific project files) manually. Installing Sphinx on Linux Extract everything from the distribution tarball (haven't you already?) and go to the sphinx subdirectory. (We are using version 2.0.1-beta here for the sake of example only; be sure to change this to a specific version you're using.) $ tar xzvf sphinx-2.0.1-beta.tar.gz $ cd sphinx Run the configuration program: $ ./configure There's a number of options to configure. The complete listing may be obtained by using switch. The most important ones are: , which specifies where to install Sphinx; such as (all of the examples use this prefix) , which specifies where to look for MySQL include and library files, if auto-detection fails; , which specifies where to look for PostgreSQL include and library files. 
Build the binaries: $ make Install the binaries in the directory of your choice: (defaults to /usr/local/bin/ on *nix systems, but is overridden with ) $ make install Installing Sphinx on Windows Installing Sphinx on a Windows server is often easier than installing on a Linux environment; unless you are preparing code patches, you can use the pre-compiled binary files from the Downloads area on the website. Extract everything from the .zip file you have downloaded - sphinx-2.0.1-beta-win32.zip, or sphinx-2.0.1-beta-win32-pgsql.zip if you need PostgresSQL support as well. (We are using version 2.0.1-beta here for the sake of example only; be sure to change this to a specific version you're using.) You can use Windows Explorer in Windows XP and up to extract the files, or a freeware package like 7Zip to open the archive. For the remainder of this guide, we will assume that the folders are unzipped into C:\Sphinx, such that searchd.exe can be found in C:\Sphinx\bin\searchd.exe. If you decide to use any different location for the folders or configuration file, please change it accordingly. Edit the contents of sphinx.conf.in - specifically entries relating to @CONFDIR@ - to paths suitable for your system. Install the searchd system as a Windows service: C:\Sphinx\bin> C:\Sphinx\bin\searchd --install --config C:\Sphinx\sphinx.conf.in --servicename SphinxSearch The searchd service will now be listed in the Services panel within the Management Console, available from Administrative Tools. It will not have been started, as you will need to configure it and build your indexes with indexer before starting the service. A guide to do this can be found under Quick tour. During the next steps of the install (which involve running indexer pretty much as you would on Linux) you may find that you get an error relating to libmysql.dll not being found. If you have MySQL installed, you should find a copy of this library in your Windows directory, or sometimes in Windows\System32, or failing that in the MySQL core directories. If you do receive an error please copy libmysql.dll into the bin directory. Known installation issues If configure fails to locate MySQL headers and/or libraries, try checking for and installing mysql-devel package. On some systems, it is not installed by default. If make fails with a message which look like /bin/sh: g++: command not found make[1]: *** [libsphinx_a-sphinx.o] Error 127 try checking for and installing gcc-c++ package. If you are getting compile-time errors which look like sphinx.cpp:67: error: invalid application of `sizeof' to incomplete type `Private::SizeError<false>' this means that some compile-time type size check failed. The most probable reason is that off_t type is less than 64-bit on your system. As a quick hack, you can edit sphinx.h and replace off_t with DWORD in a typedef for SphOffset_t, but note that this will prohibit you from using full-text indexes larger than 2 GB. Even if the hack helps, please report such issues, providing the exact error message and compiler/OS details, so I could properly fix them in next releases. If you keep getting any other error, or the suggestions above do not seem to help you, please don't hesitate to contact me. Quick Sphinx usage tour All the example commands below assume that you installed Sphinx in /usr/local/sphinx, so searchd can be found in /usr/local/sphinx/bin/searchd. To use Sphinx, you will need to: Create a configuration file. Default configuration file name is sphinx.conf. 
All Sphinx programs look for this file in current working directory by default. Sample configuration file, sphinx.conf.dist, which has all the options documented, is created by configure. Copy and edit that sample file to make your own configuration: (assuming Sphinx is installed into /usr/local/sphinx/) $ cd /usr/local/sphinx/etc $ cp sphinx.conf.dist sphinx.conf $ vi sphinx.conf Sample configuration file is setup to index documents table from MySQL database test; so there's example.sql sample data file to populate that table with a few documents for testing purposes: $ mysql -u test < /usr/local/sphinx/etc/example.sql Run the indexer to create full-text index from your data: $ cd /usr/local/sphinx/etc $ /usr/local/sphinx/bin/indexer --all Query your newly created index! To query the index from command line, use search utility: $ cd /usr/local/sphinx/etc $ /usr/local/sphinx/bin/search test To query the index from your PHP scripts, you need to: Run the search daemon which your script will talk to: $ cd /usr/local/sphinx/etc $ /usr/local/sphinx/bin/searchd Run the attached PHP API test script (to ensure that the daemon was succesfully started and is ready to serve the queries): $ cd sphinx/api $ php test.php test Include the API (it's located in api/sphinxapi.php) into your own scripts and use it. Happy searching! Indexing Data sources The data to be indexed can generally come from very different sources: SQL databases, plain text files, HTML files, mailboxes, and so on. From Sphinx point of view, the data it indexes is a set of structured documents, each of which has the same set of fields and attributes. This is similar to SQL, where each row would correspond to a document, and each column to either a field or an attribute. Depending on what source Sphinx should get the data from, different code is required to fetch the data and prepare it for indexing. This code is called data source driver (or simply driver or data source for brevity). At the time of this writing, there are built-in drivers for MySQL, PostgreSQL, MS SQL (on Windows), and ODBC. There is also a generic driver called xmlpipe, which runs a specified command and reads the data from its stdout. See section for the format description. There can be as many sources per index as necessary. They will be sequentially processed in the very same order which was specifed in index definition. All the documents coming from those sources will be merged as if they were coming from a single source. Full-text fields Full-text fields (or just fields for brevity) are the textual document contents that get indexed by Sphinx, and can be (quickly) searched for keywords. Fields are named, and you can limit your searches to a single field (eg. search through "title" only) or a subset of fields (eg. to "title" and "abstract" only). Sphinx index format generally supports up to 256 fields. However, up to version 2.0.1-beta indexes were forcibly limited by 32 fields, because of certain complications in the matching engine. Full support for up to 256 fields was added in version 2.0.2-beta. Note that the original contents of the fields are not stored in the Sphinx index. The text that you send to Sphinx gets processed, and a full-text index (a special data structure that enables quick searches for a keyword) gets built from that text. But the original text contents are then simply discarded. Sphinx assumes that you store those contents elsewhere anyway. 
Moreover, it is impossible to fully reconstruct the original text, because the specific whitespace, capitalization, punctuation, etc will all be lost during indexing. It is theoretically possible to partially reconstruct a given document from the Sphinx full-text index, but that would be a slow process (especially if the CRC dictionary is used, which does not even store the original keywords and works with their hashes instead). Attributes Attributes are additional values associated with each document that can be used to perform additional filtering and sorting during search. It is often desired to additionally process full-text search results based not only on matching document ID and its rank, but on a number of other per-document values as well. For instance, one might need to sort news search results by date and then relevance, or search through products within specified price range, or limit blog search to posts made by selected users, or group results by month. To do that efficiently, Sphinx allows to attach a number of additional attributes to each document, and store their values in the full-text index. It's then possible to use stored values to filter, sort, or group full-text matches. Attributes, unlike the fields, are not full-text indexed. They are stored in the index, but it is not possible to search them as full-text, and attempting to do so results in an error. For example, it is impossible to use the extended matching mode expression to match documents where column is 1, if column is an attribute, and this is still true even if the numeric digits are normally indexed. Attributes can be used for filtering, though, to restrict returned rows, as well as sorting or result grouping; it is entirely possible to sort results purely based on attributes, and ignore the search relevance tools. Additionally, attributes are returned from the search daemon, while the indexed text is not. A good example for attributes would be a forum posts table. Assume that only title and content fields need to be full-text searchable - but that sometimes it is also required to limit search to a certain author or a sub-forum (ie. search only those rows that have some specific values of author_id or forum_id columns in the SQL table); or to sort matches by post_date column; or to group matching posts by month of the post_date and calculate per-group match counts. This can be achieved by specifying all the mentioned columns (excluding title and content, that are full-text fields) as attributes, indexing them, and then using API calls to setup filtering, sorting, and grouping. Here as an example. Example sphinx.conf part: ... sql_query = SELECT id, title, content, \ author_id, forum_id, post_date FROM my_forum_posts sql_attr_uint = author_id sql_attr_uint = forum_id sql_attr_timestamp = post_date ... Example application code (in PHP): // only search posts by author whose ID is 123 $cl->SetFilter ( "author_id", array ( 123 ) ); // only search posts in sub-forums 1, 3 and 7 $cl->SetFilter ( "forum_id", array ( 1,3,7 ) ); // sort found posts by posting date in descending order $cl->SetSortMode ( SPH_SORT_ATTR_DESC, "post_date" ); Attributes are named. Attribute names are case insensitive. Attributes are not full-text indexed; they are stored in the index as is. 
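The grouping scenario mentioned above (per-group match counts by month of post_date) is not shown in the snippet, so here is a minimal sketch of what it might look like with the PHP API; the "forum_posts" index name is an assumption:

// group matching posts by month of post_date, most populated groups first
$cl->SetGroupBy ( "post_date", SPH_GROUPBY_MONTH, "@count desc" );
$result = $cl->Query ( "some keywords", "forum_posts" );
// each returned row then carries the @groupby (month) and @count values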
Currently supported attribute types are: unsigned integers (1-bit to 32-bit wide); UNIX timestamps; floating point values (32-bit, IEEE 754 single precision); string ordinals (specially computed integers); strings (since 1.10-beta); MVA, multi-value attributes (variable-length lists of 32-bit unsigned integers). The complete set of per-document attribute values is sometimes referred to as docinfo. Docinfos can either be stored separately from the main full-text index data ("extern" storage, in .spa file), or attached to each occurence of document ID in full-text index data ("inline" storage, in .spd file). When using extern storage, a copy of .spa file (with all the attribute values for all the documents) is kept in RAM by searchd at all times. This is for performance reasons; random disk I/O would be too slow. On the contrary, inline storage does not require any additional RAM at all, but that comes at the cost of greatly inflating the index size: remember that it copies all attribute value every time when the document ID is mentioned, and that is exactly as many times as there are different keywords in the document. Inline may be the only viable option if you have only a few attributes and need to work with big datasets in limited RAM. However, in most cases extern storage makes both indexing and searching much more efficient. Search-time memory requirements for extern storage are (1+number_of_attrs)*number_of_docs*4 bytes, ie. 10 million docs with 2 groups and 1 timestamp will take (1+2+1)*10M*4 = 160 MB of RAM. This is PER DAEMON, not per query. searchd will allocate 160 MB on startup, read the data and keep it shared between queries. The children will NOT allocate any additional copies of this data. MVA (multi-valued attributes) MVAs, or multi-valued attributes, are an important special type of per-document attributes in Sphinx. MVAs let you attach sets of numeric values to every document. That is useful to implement article tags, product categories, etc. Filtering and group-by (but not sorting) on MVA attributes is supported. As of version 2.0.2-beta, MVA values can either be unsigned 32-bit integers (UNSIGNED INTEGER) or signed 64-bit integers (BIGINT). Up to version 2.0.1-beta, only the unsigned 32-bit values were supported. The set size is not limited, you can have an arbitrary number of values attached to each document as long as RAM permits (.spm file that contains the MVA values will be precached in RAM by searchd). The source data can be taken either from a separate query, or from a document field; see source type in sql_attr_multi. In the first case the query will have to return pairs of document ID and MVA values, in the second one the field will be parsed for integer values. There are absolutely no requirements as to incoming data order; the values will be automatically grouped by document ID (and internally sorted within the same ID) during indexing anyway. When filtering, a document will match the filter on MVA attribute if any of the values satisfy the filtering condition. (Therefore, documents that pass through exclude filters will not contain any of the forbidden values.) When grouping by MVA attribute, a document will contribute to as many groups as there are different MVA values associated with that document. For instance, if the collection contains exactly 1 document having a 'tag' MVA with values 5, 7, and 11, grouping on 'tag' will produce 3 groups with '@count' equal to 1 and '@groupby' key values of 5, 7, and 11 respectively. 
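For reference, the separate-query flavor of sql_attr_multi mentioned above might be configured roughly as follows; the table and column names are assumptions:

source forum_posts
{
    # ... sql_query and the other settings ...

    # MVA populated from a separate query returning (document ID, value) pairs
    sql_attr_multi = uint tag from query; SELECT post_id, tag_id FROM post_tags

    # alternatively, parse integer values out of a document field:
    # sql_attr_multi = uint tag from field
}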
Also note that grouping by MVA might lead to duplicate documents in the result set: because each document can participate in many groups, it can be chosen as the best one in more than one group, leading to duplicate IDs. The PHP API historically uses an ordered hash keyed by document ID for the resulting rows, so you'll also need to use SetArrayResult() in order to employ group-by on MVA with the PHP API.

Indexes

To be able to answer full-text search queries fast, Sphinx needs to build a special data structure optimized for such queries from your text data. This structure is called an index, and the process of building an index from text is called indexing. Different index types are well suited for different tasks. For example, a disk-based, tree-based index would be easy to update (ie. insert new documents into an existing index), but rather slow to search. Therefore, the Sphinx architecture allows for different index types to be implemented easily. The only index type which is implemented in Sphinx at the moment is designed for maximum indexing and searching speed. This comes at the cost of updates being really slow; theoretically, it might be slower to update this type of index than to reindex it from scratch. However, this can very frequently be worked around with multiple indexes, see for details. It is planned to implement more index types, including a type which would be updatable in real time.

There can be as many indexes per configuration file as necessary. The indexer utility can reindex either all of them (if the --all option is specified), or a certain explicitly specified subset. The searchd utility will serve all the specified indexes, and the clients can specify what indexes to search at run time.

Restrictions on the source data

There are a few different restrictions imposed on the source data which is going to be indexed by Sphinx, of which the single most important one is: ALL DOCUMENT IDS MUST BE UNIQUE UNSIGNED NON-ZERO INTEGER NUMBERS (32-BIT OR 64-BIT, DEPENDING ON BUILD TIME SETTINGS). If this requirement is not met, different bad things can happen. For instance, Sphinx can crash with an internal assertion while indexing, or produce strange results when searching due to conflicting IDs. Also, a 1000-pound gorilla might eventually come out of your display and start throwing barrels at you. You've been warned.

Charsets, case folding, and translation tables

When indexing, Sphinx fetches documents from the specified sources, splits the text into words, and does case folding so that "Abc", "ABC" and "abc" would be treated as the same word (or, to be pedantic, term). To do that properly, Sphinx needs to know what encoding the source text is in, what characters are letters and what are not, and what letters should be folded to what other letters. This should be configured on a per-index basis using the charset_type and charset_table options. charset_type specifies whether the document encoding is single-byte (SBCS) or UTF-8. charset_table specifies the table that maps letter characters to their case folded versions. The characters that are not in the table are considered to be non-letters and will be treated as word separators when indexing or searching through this index. Note that while the default tables do not include the space character (ASCII code 0x20, Unicode U+0020) as a letter, it's in fact perfectly legal to do so. This can be useful, for instance, for indexing tag clouds, so that space-separated word sets would index as a single search query term. The default tables currently include English and Russian characters. Please do submit your tables for other languages!
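A minimal per-index sketch of these two settings (the index name and the trimmed-down letter set are only examples; the default tables are far more complete):

index products
{
    # ... source, path, and the other settings ...

    # documents are UTF-8 encoded
    charset_type = utf-8

    # digits, underscore, and Latin letters are letters; A..Z folds to a..z,
    # everything else is treated as a word separator
    charset_table = 0..9, A..Z->a..z, _, a..z
}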
SQL data sources (MySQL, PostgreSQL) With all the SQL drivers, indexing generally works as follows. connection to the database is established; pre-query (see ) is executed to perform any necessary initial setup, such as setting per-connection encoding with MySQL; main query (see ) is executed and the rows it returns are indexed; post-query (see ) is executed to perform any necessary cleanup; connection to the database is closed; indexer does the sorting phase (to be pedantic, index-type specific post-processing); connection to the database is established again; post-index query (see ) is executed to perform any necessary final cleanup; connection to the database is closed again. Most options, such as database user/host/password, are straightforward. However, there are a few subtle things, which are discussed in more detail here. Ranged queries Main query, which needs to fetch all the documents, can impose a read lock on the whole table and stall the concurrent queries (eg. INSERTs to MyISAM table), waste a lot of memory for result set, etc. To avoid this, Sphinx supports so-called ranged queries. With ranged queries, Sphinx first fetches min and max document IDs from the table, and then substitutes different ID intervals into main query text and runs the modified query to fetch another chunk of documents. Here's an example. Ranged query usage example # in sphinx.conf sql_query_range = SELECT MIN(id),MAX(id) FROM documents sql_range_step = 1000 sql_query = SELECT * FROM documents WHERE id>=$start AND id<=$end If the table contains document IDs from 1 to, say, 2345, then sql_query would be run three times: with replaced with 1 and replaced with 1000; with replaced with 1001 and replaced with 2000; with replaced with 2000 and replaced with 2345. Obviously, that's not much of a difference for 2000-row table, but when it comes to indexing 10-million-row MyISAM table, ranged queries might be of some help. vs. The difference between post-query and post-index query is in that post-query is run immediately when Sphinx received all the documents, but further indexing may still fail for some other reason. On the contrary, by the time the post-index query gets executed, it is guaranteed that the indexing was succesful. Database connection is dropped and re-established because sorting phase can be very lengthy and would just timeout otherwise. xmlpipe data source xmlpipe data source was designed to enable users to plug data into Sphinx without having to implement new data sources drivers themselves. It is limited to 2 fixed fields and 2 fixed attributes, and is deprecated in favor of now. For new streams, use xmlpipe2. To use xmlpipe, configure the data source in your configuration file as follows: source example_xmlpipe_source { type = xmlpipe xmlpipe_command = perl /www/mysite.com/bin/sphinxpipe.pl } The indexer will run the command specified in , and then read, parse and index the data it prints to stdout. More formally, it opens a pipe to given command and then reads from that pipe. indexer will expect one or more documents in custom XML format. 
Here's the example document stream, consisting of two documents: XMLpipe document stream <document> <id>123</id> <group>45</group> <timestamp>1132223498</timestamp> <title>test title</title> <body> this is my document body </body> </document> <document> <id>124</id> <group>46</group> <timestamp>1132223498</timestamp> <title>another test</title> <body> this is another document </body> </document> Legacy xmlpipe legacy driver uses a builtin parser which is pretty fast but really strict and does not actually fully support XML. It requires that all the fields must be present, formatted exactly as in this example, and occur exactly in the same order. The only optional field is ; it defaults to 1. xmlpipe2 data source xmlpipe2 lets you pass arbitrary full-text and attribute data to Sphinx in yet another custom XML format. It also allows to specify the schema (ie. the set of fields and attributes) either in the XML stream itself, or in the source settings. When indexing xmlpipe2 source, indexer runs the given command, opens a pipe to its stdout, and expects well-formed XML stream. Here's sample stream data: xmlpipe2 document stream <?xml version="1.0" encoding="utf-8"?> <sphinx:docset> <sphinx:schema> <sphinx:field name="subject"/> <sphinx:field name="content"/> <sphinx:attr name="published" type="timestamp"/> <sphinx:attr name="author_id" type="int" bits="16" default="1"/> </sphinx:schema> <sphinx:document id="1234"> <content>this is the main content <![CDATA[[and this <cdata> entry must be handled properly by xml parser lib]]></content> <published>1012325463</published> <subject>note how field/attr tags can be in <b class="red">randomized</b> order</subject> <misc>some undeclared element</misc> </sphinx:document> <sphinx:document id="1235"> <subject>another subject</subject> <content>here comes another document, and i am given to understand, that in-document field order must not matter, sir</content> <published>1012325467</published> </sphinx:document> <!-- ... even more sphinx:document entries here ... --> <sphinx:killlist> <id>1234</id> <id>4567</id> </sphinx:killlist> </sphinx:docset> Arbitrary fields and attributes are allowed. They also can occur in the stream in arbitrary order within each document; the order is ignored. There is a restriction on maximum field length; fields longer than 2 MB will be truncated to 2 MB (this limit can be changed in the source). The schema, ie. complete fields and attributes list, must be declared before any document could be parsed. This can be done either in the configuration file using and settings, or right in the stream using <sphinx:schema> element. <sphinx:schema> is optional. It is only allowed to occur as the very first sub-element in <sphinx:docset>. If there is no in-stream schema definition, settings from the configuration file will be used. Otherwise, stream settings take precedence. Unknown tags (which were not declared neither as fields nor as attributes) will be ignored with a warning. In the example above, <misc> will be ignored. All embedded tags and their attributes (such as <b> in <subject> in the example above) will be silently ignored. Support for incoming stream encodings depends on whether iconv is installed on the system. xmlpipe2 is parsed using libexpat parser that understands US-ASCII, ISO-8859-1, UTF-8 and a few UTF-16 variants natively. Sphinx configure script will also check for libiconv presence, and utilize it to handle other encodings. 
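For reference, declaring the same schema in the source settings rather than in the stream might look roughly like this sketch; the command is an assumption, and the field and attribute names follow the sample stream above:

source example_xmlpipe2
{
    type = xmlpipe2
    xmlpipe_command = cat /path/to/documents.xml

    # used when the stream does not carry its own <sphinx:schema>
    xmlpipe_field = subject
    xmlpipe_field = content
    xmlpipe_attr_timestamp = published
    xmlpipe_attr_uint = author_id
}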
libexpat also enforces the requirement to use the UTF-8 charset on the Sphinx side, because the parsed data it returns is always in UTF-8.

XML elements (tags) recognized by xmlpipe2 (and their attributes where applicable) are:

sphinx:docset
Mandatory top-level element; denotes and contains the xmlpipe2 document set.

sphinx:schema
Optional element; must either occur as the very first child of sphinx:docset, or never occur at all. Declares the document schema. Contains field and attribute declarations. If present, overrides per-source settings from the configuration file.

sphinx:field
Optional element, child of sphinx:schema. Declares a full-text field. Known attributes are:
"name", specifies the XML element name that will be treated as a full-text field in the subsequent documents.
"attr", specifies whether to also index this field as a string or word count attribute. Possible values are "string" and "wordcount". Introduced in version 1.10-beta.

sphinx:attr
Optional element, child of sphinx:schema. Declares an attribute. Known attributes are:
"name", specifies the element name that should be treated as an attribute in the subsequent documents.
"type", specifies the attribute type. Possible values are "int", "timestamp", "str2ordinal", "bool", "float" and "multi".
"bits", specifies the bit size for the "int" attribute type. Valid values are 1 to 32.
"default", specifies the default value for this attribute that should be used if the attribute's element is not present in the document.

sphinx:document
Mandatory element, must be a child of sphinx:docset. Contains arbitrary other elements with field and attribute values to be indexed, as declared either using sphinx:field and sphinx:attr elements or in the configuration file. The only known attribute is "id", which must contain the unique integer document ID.

sphinx:killlist
Optional element, child of sphinx:docset. Contains a number of "id" elements whose contents are document IDs to be put into a kill-list for this index.

Live index updates

There are two major approaches to keeping the full-text index contents up to date. Note, however, that both of these approaches deal with the task of full-text data updates, and not attribute updates. Instant attribute updates are supported since version 0.9.8. Refer to the UpdateAttributes() API call description for details.

First, you can use disk-based indexes, partition them manually, and only rebuild the smaller partitions (so-called "deltas") frequently. By minimizing the rebuild size, you can reduce the average indexing lag to something as low as 30-60 seconds. This approach was the only one available in versions 0.9.x. On huge collections it actually might be the most efficient one. Refer to for details.

Second, versions 1.x (starting with 1.10-beta) add support for so-called real-time indexes (RT indexes for short) that support on-the-fly updates of the full-text data. Updates on an RT index can appear in the search results in 1-2 milliseconds, ie. 0.001-0.002 seconds. However, RT indexes are less efficient for bulk indexing huge amounts of data. Refer to for details.

Delta index updates

There's a frequent situation when the total dataset is too big to be reindexed from scratch often, but the amount of new records is rather small. Example: a forum with 1,000,000 archived posts, but only 1,000 new posts per day. In this case, "live" (almost real time) index updates could be implemented using the so-called "main+delta" scheme.
The idea is to set up two sources and two indexes, with one "main" index for the data which changes rarely (if ever), and one "delta" index for the new documents. In the example above, the 1,000,000 archived posts would go to the main index, and the newly inserted 1,000 posts per day would go to the delta index. The delta index could then be reindexed very frequently, and the documents made available to search within a matter of minutes. Specifying which documents should go to which index, and reindexing the main index, could also be made fully automatic. One option would be to make a counter table which tracks the ID that splits the documents, and update it whenever the main index is reindexed.

Fully automated live updates

# in MySQL
CREATE TABLE sph_counter
(
    counter_id INTEGER PRIMARY KEY NOT NULL,
    max_doc_id INTEGER NOT NULL
);

# in sphinx.conf
source main
{
    # ...
    sql_query_pre = SET NAMES utf8
    sql_query_pre = REPLACE INTO sph_counter SELECT 1, MAX(id) FROM documents
    sql_query = SELECT id, title, body FROM documents \
        WHERE id<=( SELECT max_doc_id FROM sph_counter WHERE counter_id=1 )
}

source delta : main
{
    sql_query_pre = SET NAMES utf8
    sql_query = SELECT id, title, body FROM documents \
        WHERE id>( SELECT max_doc_id FROM sph_counter WHERE counter_id=1 )
}

index main
{
    source = main
    path = /path/to/main
    # ... all the other settings
}

# note how all other settings are copied from main,
# but source and path are overridden (they MUST be)
index delta : main
{
    source = delta
    path = /path/to/delta
}

Note how we're overriding sql_query_pre in the delta source. We need to have that override explicitly; otherwise the REPLACE query would be run when indexing the delta source too, effectively nullifying it. However, when we override sql_query_pre in the inherited source for the first time, the override removes all inherited values for that directive, so the encoding setup is also lost. Therefore, sql_query_pre in the delta source can not just be empty; we need to issue the encoding setup query explicitly once again.

Index merging

Merging two existing indexes can be more efficient than indexing the data from scratch, and is desirable in some cases (such as merging the 'main' and 'delta' indexes instead of simply reindexing 'main' in the 'main+delta' partitioning scheme). So indexer has an option to do that. Merging indexes is normally faster than reindexing, but still not instant on huge indexes. Basically, it will need to read the contents of both indexes once and write the result once. Merging a 100 GB and a 1 GB index, for example, will result in 202 GB of IO (but that's still likely less than indexing from scratch requires).

The basic command syntax is as follows:

indexer --merge DSTINDEX SRCINDEX [--rotate]

Only the DSTINDEX index will be affected: the contents of SRCINDEX will be merged into it. The --rotate switch will be required if DSTINDEX is already being served by searchd. The initially devised usage pattern is to merge a smaller update from SRCINDEX into DSTINDEX. Thus, when merging the attributes, values from SRCINDEX will win if duplicate document IDs are encountered. Note, however, that the "old" keywords will not be automatically removed in such cases. For example, if there's a keyword "old" associated with document 123 in DSTINDEX, and a keyword "new" associated with it in SRCINDEX, document 123 will be found by both keywords after the merge.
You can supply an explicit condition to remove documents from DSTINDEX to mitigate that; the relevant switch is : indexer --merge main delta --merge-dst-range deleted 0 0 This switch lets you apply filters to the destination index along with merging. There can be several filters; all of their conditions must be met in order to include the document in the resulting mergid index. In the example above, the filter passes only those records where 'deleted' is 0, eliminating all records that were flagged as deleted (for instance, using UpdateAttributes() call). Real-time indexes Real-time indexes (or RT indexes for brevity) are a new backend that lets you insert, update, or delete documents (rows) on the fly. RT indexes were added in version 1.10-beta. While querying of RT indexes is possible using any of the SphinxAPI, SphinxQL, or SphinxSE, updating them is only possible via SphinxQL at the moment. Full SphinxQL reference is available in . RT indexes overview RT indexes should be declared in sphinx.conf, just as every other index type. Notable differences from the regular, disk-based indexes are that a) data sources are not required and ignored, and b) you should explicitly enumerate all the text fields, not just attributes. Here's an example: RT index declaration index rt { type = rt path = /usr/local/sphinx/data/rt rt_field = title rt_field = content rt_attr_uint = gid } As of 2.0.1-beta and above, RT indexes are production quality, despite a few missing features. RT index can be accessed using MySQL protocol. INSERT, REPLACE, DELETE, and SELECT statements against RT index are supported. For instance, this is an example session with the sample index above: $ mysql -h 127.0.0.1 -P 9306 Welcome to the MySQL monitor. Commands end with ; or \g. Your MySQL connection id is 1 Server version: 1.10-dev (r2153) Type 'help;' or '\h' for help. Type '\c' to clear the buffer. mysql> INSERT INTO rt VALUES ( 1, 'first record', 'test one', 123 ); Query OK, 1 row affected (0.05 sec) mysql> INSERT INTO rt VALUES ( 2, 'second record', 'test two', 234 ); Query OK, 1 row affected (0.00 sec) mysql> SELECT * FROM rt; +------+--------+------+ | id | weight | gid | +------+--------+------+ | 1 | 1 | 123 | | 2 | 1 | 234 | +------+--------+------+ 2 rows in set (0.02 sec) mysql> SELECT * FROM rt WHERE MATCH('test'); +------+--------+------+ | id | weight | gid | +------+--------+------+ | 1 | 1643 | 123 | | 2 | 1643 | 234 | +------+--------+------+ 2 rows in set (0.01 sec) mysql> SELECT * FROM rt WHERE MATCH('@title test'); Empty set (0.00 sec) Both partial and batch INSERT syntaxes are supported, ie. you can specify a subset of columns, and insert several rows at a time. Deletions are also possible using DELETE statement; the only currently supported syntax is DELETE FROM <index> WHERE id=<id>. REPLACE is also supported, enabling you to implement updates. 
mysql> INSERT INTO rt ( id, title ) VALUES ( 3, 'third row' ), ( 4, 'fourth entry' ); Query OK, 2 rows affected (0.01 sec) mysql> SELECT * FROM rt; +------+--------+------+ | id | weight | gid | +------+--------+------+ | 1 | 1 | 123 | | 2 | 1 | 234 | | 3 | 1 | 0 | | 4 | 1 | 0 | +------+--------+------+ 4 rows in set (0.00 sec) mysql> DELETE FROM rt WHERE id=2; Query OK, 0 rows affected (0.00 sec) mysql> SELECT * FROM rt WHERE MATCH('test'); +------+--------+------+ | id | weight | gid | +------+--------+------+ | 1 | 1500 | 123 | +------+--------+------+ 1 row in set (0.00 sec) mysql> INSERT INTO rt VALUES ( 1, 'first record on steroids', 'test one', 123 ); ERROR 1064 (42000): duplicate id '1' mysql> REPLACE INTO rt VALUES ( 1, 'first record on steroids', 'test one', 123 ); Query OK, 1 row affected (0.01 sec) mysql> SELECT * FROM rt WHERE MATCH('steroids'); +------+--------+------+ | id | weight | gid | +------+--------+------+ | 1 | 1500 | 123 | +------+--------+------+ 1 row in set (0.01 sec) Data stored in RT index should survive clean shutdown. When binary logging is enabled, it should also survive crash and/or dirty shutdown, and recover on subsequent startup. Known caveats with RT indexes As of 1.10-beta, RT indexes are a beta quality feature: while no major, showstopper-class issues are known, there still are a few known usage quirks. Those quirks are listed in this section. Prefix and infix indexing are not supported yet. MVAs are not supported yet. Disk chunks optimization routine is not implemented yet. On initial index creation, attributes are reordered by type, in the following order: uint, bigint, float, timestamp, string. So when using INSERT without an explicit column names list, specify all uint column values first, then bigint, etc. Default conservative RAM chunk limit () of 32M can lead to poor performance on bigger indexes, you should raise it to 256..1024M if you're planning to index gigabytes. High DELETE/REPLACE rate can lead to kill-list fragmentation and impact searching performance. No transaction size limits are currently imposed; too many concurrent INSERT/REPLACE transactions might therefore consume a lot of RAM. In case of a damaged binlog, recovery will stop on the first damaged transaction, even though it's technically possible to keep looking further for subsequent undamaged transactions, and recover those. This mid-file damage case (due to flaky HDD/CDD/tape?) is supposed to be extremely rare, though. Multiple INSERTs grouped in a single transaction perform better than equivalent single-row transactions and are recommended for batch loading of data. RT index internals RT index is internally chunked. It keeps a so-called RAM chunk that stores all the most recent changes. RAM chunk memory usage is rather strictly limited with per-index rt_mem_limit directive. Once RAM chunk grows over this limit, a new disk chunk is created from its data, and RAM chunk is reset. Thus, while most changes on the RT index will be performed in RAM only and complete instantly (in milliseconds), those changes that overflow the RAM chunk will stall for the duration of disk chunk creation (a few seconds). Disk chunks are, in fact, just regular disk-based indexes. But they're a part of an RT index and automatically managed by it, so you need not configure nor manage them manually. Because a new disk chunk is created every time RT chunk overflows the limit, and because in-memory chunk format is close to on-disk format, the disk chunks will be approximately bytes in size each. 
Generally, it is better to set the limit bigger, to minimize both the frequency of flushes, and the index fragmentation (number of disk chunks). For instance, on a dedicated search server that handles a big RT index, it can be advised to set to 1-2 GB. A global limit on all indexes is also planned, but not yet implemented yet as of 1.10-beta. Disk chunk full-text index data can not be actually modified, so the full-text field changes (ie. row deletions and updates) suppress a previous row version from a disk chunk using a kill-list, but do not actually physically purge the data. Therefore, on workloads with high full-text updates ratio index might eventually get polluted by these previous row versions, and searching performance would degrade. Physical index purging that would improve the performance is planned, but not yet implemented as of 1.10-beta. Data in RAM chunk gets saved to disk on clean daemon shutdown, and then loaded back on startup. However, on daemon or server crash, updates from RAM chunk might be lost. To prevent that, binary logging of transactions can be used; see for details. Full-text changes in RT index are transactional. They are stored in a per-thread accumulator until COMMIT, then applied at once. Bigger batches per single COMMIT should result in faster indexing. Binary logging Binary logs are essentially a recovery mechanism. With binary logs enabled, searchd writes every given transaction to the binlog file, and uses that for recovery after an unclean shutdown. On clean shutdown, RAM chunks are saved to disk, and then all the binlog files are unlinked. During normal operation, a new binlog file will be opened every time when limit is reached. Older, already closed binlog files are kept until all of the transactions stored in them (from all indexes) are flushed as a disk chunk. Setting the limit to 0 pretty much prevents binlog from being unlinked at all while searchd is running; however, it will still be unlinked on clean shutdown. (This is the default case as of 2.0.3-release, defaults to 0.) There are 3 different binlog flushing strategies, controlled by binlog_flush directive which takes the values of 0, 1, or 2. 0 means to flush the log to OS and sync it to disk every second; 1 means flush and sync every transaction; and 2 (the default mode) means flush every transaction but sync every second. Sync is relatively slow because it has to perform physical disk writes, so mode 1 is the safest (every committed transaction is guaranteed to be written on disk) but the slowest. Flushing log to OS prevents from data loss on searchd crashes but not system crashes. Mode 2 is the default. On recovery after an unclean shutdown, binlogs are replayed and all logged transactions since the last good on-disk state are restored. Transactions are checksummed so in case of binlog file corruption garbage data will not be replayed; such a broken transaction will be detected and, currently, will stop replay. Transactions also start with a magic marker and timestamped, so in case of binlog damage in the middle of the file, it's technically possible to skip broken transactions and keep replaying from the next good one, and/or it's possible to replay transactions until a given timestamp (point-in-time recovery), but none of that is implemented yet as of 1.10-beta. One unwanted side effect of binlogs is that actively updating a small RT index that fully fits into a RAM chunk part will lead to an ever-growing binlog that can never be unlinked until clean shutdown. 
Binlogs are essentially append-only deltas against the last known good saved state on disk, and unless RAM chunk gets saved, they can not be unlinked. An ever-growing binlog is not very good for disk use and crash recovery time. Starting with 2.0.1-beta you can configure searchd to perform a periodic RAM chunk flush to fix that problem using a rt_flush_period directive. With periodic flushes enabled, searchd will keep a separate thread, checking whether RT indexes RAM chunks need to be written back to disk. Once that happens, the respective binlogs can be (and are) safely unlinked. Note that rt_flush_period only controls the frequency at which the checks happen. There are no guarantees that the particular RAM chunk will get saved. For instance, it does not make sense to regularly re-save a huge RAM chunk that only gets a few rows worh of updates. The search daemon determine whether to actually perform the flush with a few heuristics. Searching Matching modes So-called matching modes are a legacy feature that used to provide (very) limited query syntax and ranking support. Currently, they are deprecated in favor of full-text query language and so-called rankers. Starting with version 0.9.9-release, it is thus strongly recommended to use SPH_MATCH_EXTENDED and proper query syntax rather than any other legacy mode. All those other modes are actually internally converted to extended syntax anyway. SphinxAPI still defaults to SPH_MATCH_ALL but that is for compatibility reasons only. There are the following matching modes available: SPH_MATCH_ALL, matches all query words (default mode); SPH_MATCH_ANY, matches any of the query words; SPH_MATCH_PHRASE, matches query as a phrase, requiring perfect match; SPH_MATCH_BOOLEAN, matches query as a boolean expression (see ); SPH_MATCH_EXTENDED, matches query as an expression in Sphinx internal query language (see ); SPH_MATCH_EXTENDED2, an alias for SPH_MATCH_EXTENDED; SPH_MATCH_FULLSCAN, matches query, forcibly using the "full scan" mode as below. NB, any query terms will be ignored, such that filters, filter-ranges and grouping will still be applied, but no text-matching. SPH_MATCH_EXTENDED2 was used during 0.9.8 and 0.9.9 development cycle, when the internal matching engine was being rewritten (for the sake of additional functionality and better performance). By 0.9.9-release, the older version was removed, and SPH_MATCH_EXTENDED and SPH_MATCH_EXTENDED2 are now just aliases. The SPH_MATCH_FULLSCAN mode will be automatically activated in place of the specified matching mode when the following conditions are met: The query string is empty (ie. its length is zero). docinfo storage is set to extern. In full scan mode, all the indexed documents will be considered as matching. Such queries will still apply filters, sorting, and group by, but will not perform any full-text searching. This can be useful to unify full-text and non-full-text searching code, or to offload SQL server (there are cases when Sphinx scans will perform better than analogous MySQL queries). An example of using the full scan mode might be to find posts in a forum. By selecting the forum's user ID via SetFilter() but not actually providing any search text, Sphinx will match every document (i.e. every post) where SetFilter() would match - in this case providing every post from that user. By default this will be ordered by relevancy, followed by Sphinx document ID in ascending order (earliest first). 
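A short PHP sketch of the forum scenario just described (the index and attribute names are assumptions); with an empty query string and extern docinfo, full scan mode kicks in automatically:

// match every post by author 123, newest first, with no text matching at all
$cl->SetFilter ( "author_id", array ( 123 ) );
$cl->SetSortMode ( SPH_SORT_ATTR_DESC, "post_date" );
$result = $cl->Query ( "", "forum_posts" );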
Boolean query syntax Boolean queries allow the following special operators to be used: explicit operator AND: hello & world operator OR: hello | world operator NOT: hello -world hello !world grouping: ( hello world ) Here's an example query which uses all these operators: Boolean query example ( cat -dog ) | ( cat -mouse) There always is implicit AND operator, so "hello world" query actually means "hello & world". OR operator precedence is higher than AND, so "looking for cat | dog | mouse" means "looking for ( cat | dog | mouse )" and not "(looking for cat) | dog | mouse". Queries like "-dog", which implicitly include all documents from the collection, can not be evaluated. This is both for technical and performance reasons. Technically, Sphinx does not always keep a list of all IDs. Performance-wise, when the collection is huge (ie. 10-100M documents), evaluating such queries could take very long. Extended query syntax The following special operators and modifiers can be used when using the extended matching mode: operator OR: hello | world operator NOT: hello -world hello !world field search operator: @title hello @body world field position limit modifier (introduced in version 0.9.9-rc1): @body[50] hello multiple-field search operator: @(title,body) hello world all-field search operator: @* hello phrase search operator: "hello world" proximity search operator: "hello world"~10 quorum matching operator: "the world is a wonderful place"/3 strict order operator (aka operator "before"): aaa << bbb << ccc exact form modifier (introduced in version 0.9.9-rc1): raining =cats and =dogs field-start and field-end modifier (introduced in version 0.9.9-rc2): ^hello world$ NEAR, generalized proximity operator (introduced in version 2.0.1-beta): hello NEAR/3 world NEAR/4 "my test" SENTENCE operator (introduced in version 2.0.1-beta): all SENTENCE words SENTENCE "in one sentence" PARAGRAPH operator (introduced in version 2.0.1-beta): "Bill Gates" PARAGRAPH "Steve Jobs" zone limit operator: ZONE:(h3,h4) only in these titles Here's an example query that uses some of these operators: Extended matching mode: query example "hello world" @title "example program"~5 @body python -(php|perl) @* code The full meaning of this search is: Find the words 'hello' and 'world' adjacently in any field in a document; Additionally, the same document must also contain the words 'example' and 'program' in the title field, with up to, but not including, 5 words between the words in question; (E.g. "example PHP program" would be matched however "example script to introduce outside data into the correct context for your program" would not because two terms have 5 or more words between them) Additionally, the same document must contain the word 'python' in the body field, but not contain either 'php' or 'perl'; Additionally, the same document must contain the word 'code' in any field. There always is implicit AND operator, so "hello world" means that both "hello" and "world" must be present in matching document. OR operator precedence is higher than AND, so "looking for cat | dog | mouse" means "looking for ( cat | dog | mouse )" and not "(looking for cat) | dog | mouse". Field limit operator limits subsequent searching to a given field. Normally, query will fail with an error message if given field name does not exist in the searched index. 
However, that can be suppressed by specifying "@@relaxed" option at the very beginning of the query: @@relaxed @nosuchfield my query This can be helpful when searching through heterogeneous indexes with different schemas. Field position limit, introduced in version 0.9.9-rc1, additionaly restricts the searching to first N position within given field (or fields). For example, "@body[50] hello" will not match the documents where the keyword 'hello' occurs at position 51 and below in the body. Proximity distance is specified in words, adjusted for word count, and applies to all words within quotes. For instance, "cat dog mouse"~5 query means that there must be less than 8-word span which contains all 3 words, ie. "CAT aaa bbb ccc DOG eee fff MOUSE" document will not match this query, because this span is exactly 8 words long. Quorum matching operator introduces a kind of fuzzy matching. It will only match those documents that pass a given threshold of given words. The example above ("the world is a wonderful place"/3) will match all documents that have at least 3 of the 6 specified words. Strict order operator (aka operator "before"), introduced in version 0.9.9-rc2, will match the document only if its argument keywords occur in the document exactly in the query order. For instance, "black << cat" query (without quotes) will match the document "black and white cat" but not the "that cat was black" document. Order operator has the lowest priority. It can be applied both to just keywords and more complex expressions, ie. this is a valid query: (bag of words) << "exact phrase" << red|green|blue Exact form keyword modifier, introduced in version 0.9.9-rc1, will match the document only if the keyword occurred in exactly the specified form. The default behaviour is to match the document if the stemmed keyword matches. For instance, "runs" query will match both the document that contains "runs" and the document that contains "running", because both forms stem to just "run" - while "=runs" query will only match the first document. Exact form operator requires index_exact_words option to be enabled. This is a modifier that affects the keyword and thus can be used within operators such as phrase, proximity, and quorum operators. Field-start and field-end keyword modifiers, introduced in version 0.9.9-rc2, will make the keyword match only if it occurred at the very start or the very end of a fulltext field, respectively. For instance, the query "^hello world$" (with quotes and thus combining phrase operator and start/end modifiers) will only match documents that contain at least one field that has exactly these two keywords. Starting with 0.9.9-rc1, arbitrarily nested brackets and negations are allowed. However, the query must be possible to compute without involving an implicit list of all documents: // correct query aaa -(bbb -(ccc ddd)) // queries that are non-computable -aaa aaa | -bbb NEAR operator, added in 2.0.1-beta, is a generalized version of a proximity operator. The syntax is NEAR/N, it is case-sensitive, and no spaces are allowed beetwen the NEAR keyword, the slash sign, and the distance value. The original proximity operator only worked on sets of keywords. NEAR is more generic and can accept arbitrary subexpressions as its two arguments, matching the document when both subexpressions are found within N words of each other, no matter in which order. NEAR is left associative and has the same (lowest) precedence as BEFORE. 
You should also note how a (one NEAR/7 two NEAR/7 three) query using NEAR is not really equivalent to a ("one two three"~7) one using keyword proximity operator. The difference here is that the proximity operator allows for up to 6 non-matching words between all the 3 matching words, but the version with NEAR is less restrictive: it would allow for up to 6 words between 'one' and 'two' and then for up to 6 more between that two-word matching and a 'three' keyword. SENTENCE and PARAGRAPH operators, added in 2.0.1-beta, matches the document when both its arguments are within the same sentence or the same paragraph of text, respectively. The arguments can be either keywords, or phrases, or the instances of the same operator. Here are a few examples: one SENTENCE two one SENTENCE "two three" one SENTENCE "two three" SENTENCE four The order of the arguments within the sentence or paragraph does not matter. These operators only work on indexes built with index_sp (sentence and paragraph indexing feature) enabled, and revert to a mere AND otherwise. Refer to the index_sp directive documentation for the notes on what's considered a sentence and a paragraph. ZONE limit operator, added in 2.0.1-beta, is quite similar to field limit operator, but restricts matching to a given in-field zone or a list of zones. Note that the subsequent subexpressions are not required to match in a single contiguous span of a given zone, and may match in multiple spans. For instance, (ZONE:th hello world) query will match this example document: <th>Table 1. Local awareness of Hello Kitty brand.</th> .. some table data goes here .. <th>Table 2. World-wide brand awareness.</th> ZONE operator affects the query until the next field or ZONE limit operator, or the closing parenthesis. It only works on the indexes built with zones support (see ) and will be ignored otherwise. Search results ranking Ranking overview Ranking (aka weighting) of the search results can be defined as a process of computing a so-called relevance (aka weight) for every given matched document with regards to a given query that matched it. So relevance is in the end just a number attached to every document that estimates how relevant the document is to the query. Search results can then be sorted based on this number and/or some additional parameters, so that the most sought after results would come up higher on the results page. There is no single standard one-size-fits-all way to rank any document in any scenario. Moreover, there can not ever be such a way, because relevance is subjective. As in, what seems relevant to you might not seem relevant to me. Hence, in general case it's not just hard to compute, it's theoretically impossible. So ranking in Sphinx is configurable. It has a notion of a so-called ranker. A ranker can formally be defined as a function that takes document and query as its input and produces a relevance value as output. In layman's terms, a ranker controls exactly how (using which specific algorithm) will Sphinx assign weights to the document. Previously, this ranking function was rigidly bound to the matching mode. So in the legacy matching modes (that is, SPH_MATCH_ALL, SPH_MATCH_ANY, SPH_MATCH_PHRASE, and SPH_MATCH_BOOLEAN) you can not choose the ranker. You can only do that in the SPH_MATCH_EXTENDED mode. (Which is the only mode in SphinxQL and the suggested mode in SphinxAPI anyway.) 
To choose a non-default ranker you can either use SetRankingMode() with SphinxAPI, or OPTION ranker clause in SELECT statement when using SphinxQL. As a sidenote, legacy matching modes are internally implemented via the unified syntax anyway. When you use one of those modes, Sphinx just internally adjusts the query and sets the associated ranker, then executes the query using the very same unified code path. Available rankers Sphinx ships with a number of built-in rankers suited for different purposes. A number of them uses two factors, phrase proximity (aka LCS) and BM25. Phrase proximity works on the keyword positions, while BM25 works on the keyword frequencies. Basically, the better the degree of the phrase match between the document body and the query, the higher is the phrase proximity (it maxes out when the document contains the entire query as a verbatim quote). And BM25 is higher when the document contains more rare words. We'll save the detailed discussion for later. Currently implemented rankers are: SPH_RANK_PROXIMITY_BM25, the default ranking mode that uses and combines both phrase proximity and BM25 ranking. SPH_RANK_BM25, statistical ranking mode which uses BM25 ranking only (similar to most other full-text engines). This mode is faster but may result in worse quality on queries which contain more than 1 keyword. SPH_RANK_NONE, no ranking mode. This mode is obviously the fastest. A weight of 1 is assigned to all matches. This is sometimes called boolean searching that just matches the documents but does not rank them. SPH_RANK_WORDCOUNT, ranking by the keyword occurrences count. This ranker computes the per-field keyword occurrence counts, then multiplies them by field weights, and sums the resulting values. SPH_RANK_PROXIMITY, added in version 0.9.9-rc1, returns raw phrase proximity value as a result. This mode is internally used to emulate SPH_MATCH_ALL queries. SPH_RANK_MATCHANY, added in version 0.9.9-rc1, returns rank as it was computed in SPH_MATCH_ANY mode ealier, and is internally used to emulate SPH_MATCH_ANY queries. SPH_RANK_FIELDMASK, added in version 0.9.9-rc2, returns a 32-bit mask with N-th bit corresponding to N-th fulltext field, numbering from 0. The bit will only be set when the respective field has any keyword occurences satisfiying the query. SPH_RANK_SPH04, added in version 1.10-beta, is generally based on the default SPH_RANK_PROXIMITY_BM25 ranker, but additionally boosts the matches when they occur in the very beginning or the very end of a text field. Thus, if a field equals the exact query, SPH04 should rank it higher than a field that contains the exact query but is not equal to it. (For instance, when the query is "Hyde Park", a document entitled "Hyde Park" should be ranked higher than a one entitled "Hyde Park, London" or "The Hyde Park Cafe".) SPH_RANK_EXPR, added in version 2.0.2-beta, lets you specify the ranking formula in run time. It exposes a number of internal text factors and lets you define how the final weight should be computed from those factors. You can find more details about its syntax and a reference available factors in a subsection below. You should specify the SPH_RANK_ prefix and use capital letters only when using the SetRankingMode() call from the SphinxAPI. The API ports expose these as global constants. Using SphinxQL syntax, the prefix should be omitted and the ranker name is case insensitive. Example: // SphinxAPI $client->SetRankingMode ( SPH_RANK_SPH04 ); // SphinxQL mysql_query ( "SELECT ... 
OPTION ranker=sph04" ); Legacy matching modes rankers Legacy matching modes automatically select a ranker as follows: SPH_MATCH_ALL uses SPH_RANK_PROXIMITY ranker; SPH_MATCH_ANY uses SPH_RANK_MATCHANY ranker; SPH_MATCH_PHRASE uses SPH_RANK_PROXIMITY ranker; SPH_MATCH_BOOLEAN uses SPH_RANK_NONE ranker. Expression based ranker (SPH_RANK_EXPR) Expression ranker, added in version 2.0.2-beta, lets you change the ranking formula on the fly, on a per-query basis. For a quick kickoff, this is how you emulate PROXIMITY_BM25 ranker using the expression based one: SELECT *, WEIGHT() FROM myindex WHERE MATCH('hello world') OPTION ranker=expr('sum(lcs*user_weight)*1000+bm25') The output of this query must not change if you omit the OPTION clause, because the default ranker (PROXIMITY_BM25) behaves exactly like specified in the ranker formula above. But the expression ranker is somewhat more flexible than just that and provides access to many more factors. The ranking formula is an arbitrary arithmetic expression that can use constants, document attributes, built-in functions and operators (described in ), and also a few ranking-specific things that are only accessible in a ranking formula. Namely, those are field aggregation functions, field-level, and document-level ranking factors. A document-level factor is a numeric value computed by the ranking engine for every matched document with regards to the current query. (So it differs from a plain document attribute in that the attribute do not depend on the full text query, while factors might.) Those factors can be used anywhere in the ranking expression. Currently implemented document-level factors are: bm25 (integer), a document-level BM25 estimate (computed without keyword occurrence filtering). max_lcs (integer), a query-level maximum possible value that the sum(lcs*user_weight) expression can ever take. This can be useful for weight boost scaling. For instance, MATCHANY ranker formula uses this to guarantee that a full phrase match in any field rankes higher than any combination of partial matches in all fields. field_mask (integer), a document-level 32-bit mask of matched fields. query_word_count (integer), the number of unique keywords in a query, adjusted for a number of excluded keywords. For instance, both (one one one one) and (one !two) queries should assign a value of 1 to this factor, because there is just one unique non-excluded keyword. doc_word_count (integer), the number of unique keywords matched in the entire document. A field-level factor is a numeric value computed by the ranking engine for every matched in-document text field with regards to the current query. As more than one field can be matched by a query, but the final weight needs to be a single integer value, these values need to be folded into a single one. To achieve that, field-level factors can only be used within a field aggregation function, they can not be used anywhere in the expression. For example, you can not use (lcs+bm25) as your ranking expression, as lcs takes multiple values (one in every matched field). You should use (sum(lcs)+bm25) instead, that expression sums lcs over all matching fields, and then adds bm25 to that per-field sum. Currently implemented field-level factors are: lcs (integer), the length of a maximum verbatim match between the document and the query, coutned in words. LCS stands for Longest Common Subsequence (or Subset). 
It takes a minimum value of 1 when only stray keywords were matched in a field, and a maximum value of the query keyword count when the entire query was matched in a field verbatim (in the exact query keyword order). For example, if the query is 'hello world' and the field contains these two words quoted from the query (that is, adjacent to each other, and exactly in the query order), lcs will be 2. For example, if the query is 'hello world program' and the field contains 'hello world', lcs will be 2. Note that any subset of the query keywords works, not just a subset of adjacent keywords. For example, if the query is 'hello world program' and the field contains 'hello (test program)', lcs will be 2 just as well, because both 'hello' and 'program' matched in the same respective positions as they were in the query. Finally, if the query is 'hello world program' and the field contains 'hello world program', lcs will be 3. (Hopefully that is unsurprising at this point.)

user_weight (integer), the user-specified per-field weight (refer to SetFieldWeights() in SphinxAPI and OPTION field_weights in SphinxQL respectively). The weights default to 1 if not specified explicitly.

hit_count (integer), the number of keyword occurrences that matched in the field. Note that a single keyword may occur multiple times. For example, if 'hello' occurs 3 times in a field and 'world' occurs 5 times, hit_count will be 8.

word_count (integer), the number of unique keywords matched in the field. For example, if 'hello' and 'world' occur anywhere in a field, word_count will be 2, regardless of how many times either keyword occurs.

tf_idf (float), the sum of TF*IDF over all the keywords matched in the field. IDF is the Inverse Document Frequency, a floating point value between 0 and 1 that describes how frequent a keyword is (basically, 0 for a keyword that occurs in every indexed document, and 1 for a unique keyword that occurs in just a single document). TF is the Term Frequency, the number of matched keyword occurrences in the field. As a side note, tf_idf is actually computed by summing IDF over all matched occurrences; by construction, that is equivalent to summing TF*IDF over all matched keywords.

min_hit_pos (integer), the position of the first matched keyword occurrence, counted in words. Indexing begins from position 1.

min_best_span_pos (integer), the position of the first maximum-LCS span. For example, assume that our query was 'hello world program' and the 'hello world' subphrase was matched twice in the field, at positions 13 and 21. Assume that 'hello' and 'world' additionally occurred elsewhere in the field, but never next to each other and thus never as a subphrase match. In that case, min_best_span_pos will be 13. Note that for single-keyword queries min_best_span_pos will always equal min_hit_pos.

exact_hit (boolean), whether the query was an exact match of the entire current field. Used in the SPH04 ranker.

A field aggregation function is a single-argument function that takes an expression with field-level factors, iterates it over all the matched fields, and computes the final result. Currently implemented field aggregation functions are:

sum, sums the argument expression over all matched fields. For instance, sum(1) should return the number of matched fields.

Expressions for the built-in rankers

Most of the other rankers can actually be emulated with the expression based ranker. You just need to pass a proper expression.
Such emulation is, of course, going to be slower than using the built-in, compiled ranker but still might be of interest if you want to fine-tune your ranking formula starting with one of the existing ones. Also, the formulas define the nitty gritty ranker details in a nicely readable fashion. SPH_RANK_PROXIMITY_BM25 = sum(lcs*user_weight)*1000+bm25 SPH_RANK_BM25 = bm25 SPH_RANK_NONE = 1 SPH_RANK_WORDCOUNT = sum(hit_count*user_weight) SPH_RANK_PROXIMITY = sum(lcs*user_weight) SPH_RANK_MATCHANY = sum((word_count+(lcs-1)*max_lcs)*user_weight) SPH_RANK_FIELDMASK = field_mask SPH_RANK_SPH04 = sum((4*lcs+2*(min_hit_pos==1)+exact_hit)*user_weight)*1000+bm25 Expressions, functions, and operators Sphinx lets you use arbitrary arithmetic expressions both via SphinxQL and SphinxAPI, involving attribute values, internal attributes (document ID and relevance weight), arithmetic operations, a number of built-in functions, and user-defined functions. This section documents the supported operators and functions. Here's the complete reference list for quick access. Arithmetic operators: +, -, *, /, %, DIV, MOD Comparison operators: <, > <=, >=, =, <> Boolean operators: AND, OR, NOT Bitwise operators: &, | ABS() BIGINT() CEIL() COS() CRC32() DAY() EXP() FIBONACCI() FLOOR() GEODIST() IDIV() IF() IN() INTERVAL() LN() LOG10() LOG2() MAX() MIN() MONTH() NOW() POW() SIN() SINT() SQRT() YEAR() YEARMONTH() YEARMONTHDAY() Operators Arithmetic operators: +, -, *, /, %, DIV, MOD The standard arithmetic operators. Arithmetic calculations involving those can be performed in three different modes: (a) using single-precision, 32-bit IEEE 754 floating point values (the default), (b) using signed 32-bit integers, (c) using 64-bit signed integers. The expression parser will automatically switch to integer mode if there are no operations the result in a floating point value. Otherwise, it will use the default floating point mode. For instance, a+b will be computed using 32-bit integers if both arguments are 32-bit integers; or using 64-bit integers if both arguments are integers but one of them is 64-bit; or in floats otherwise. However, a/b or sqrt(a) will always be computed in floats, because these operations return a result of non-integer type. To avoid the first, you can either use IDIV(a,b) or a DIV b form. Also, a*b will not be automatically promoted to 64-bit when the arguments are 32-bit. To enforce 64-bit results, you can use BIGINT(). (But note that if there are non-integer operations, BIGINT() will simply be ignored.) Comparison operators: <, > <=, >=, =, <> Comparison operators (eg. = or <=) return 1.0 when the condition is true and 0.0 otherwise. For instance, (a=b)+3 will evaluate to 4 when attribute 'a' is equal to attribute 'b', and to 3 when 'a' is not. Unlike MySQL, the equality comparisons (ie. = and <> operators) introduce a small equality threshold (1e-6 by default). If the difference between compared values is within the threshold, they will be considered equal. Boolean operators: AND, OR, NOT Boolean operators (AND, OR, NOT) were introduced in 0.9.9-rc2 and behave as usual. They are left-associative and have the least priority compared to other operators. NOT has more priority than AND and OR but nevertheless less than any other operator. AND and OR have the same priority so brackets use is recommended to avoid confusion in complex expressions. Bitwise operators: &, | These operators perform bitwise AND and OR respectively. The operands must be of an integer types. Introduced in version 1.10-beta. 
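To illustrate the operators described above, here is a hedged SphinxQL sketch; the products index and the price and cost attributes are hypothetical. Every computed expression is aliased, integer division is requested explicitly with DIV and IDIV(), a 64-bit product is forced with BIGINT(), and the equality comparison evaluates to 1.0 or 0.0:

SELECT *, (price=cost)+3 AS cmp_demo, price DIV 3 AS quot1, IDIV(price,3) AS quot2,
    BIGINT(price)*price AS wide_product
FROM products WHERE MATCH('ipod') LIMIT 0,10

Without DIV or IDIV(), price/3 would be computed in floats; without BIGINT(), price*price would stay in 32-bit integer mode and could overflow for large values.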
Numeric functions ABS() Returns the absolute value of the argument. CEIL() Returns the smallest integer value greater or equal to the argument. COS() Returns the cosine of the argument. EXP() Returns the exponent of the argument (e=2.718... to the power of the argument). FIBONACCI() Returns the N-th Fibonacci number, where N is the integer argument. That is, arguments of 0 and up will generate the values 0, 1, 1, 2, 3, 5, 8, 13 and so on. Note that the computations are done using 32-bit integer math and thus numbers 48th and up will be returned modulo 2^32. FLOOR() Returns the largest integer value lesser or equal to the argument. IDIV() Returns the result of an integer division of the first argument by the second argument. Both arguments must be of an integer type. LN() Returns the natural logarithm of the argument (with the base of e=2.718...). LOG10() Returns the common logarithm of the argument (with the base of 10). LOG2() Returns the binary logarithm of the argument (with the base of 2). MAX() Returns the bigger of two arguments. MIN() Returns the smaller of two arguments. POW() Returns the first argument raised to the power of the second argument. SIN() Returns the sine of the argument. SQRT() Returns the square root of the argument. Date and time functions DAY() Returns the integer day of month (in 1..31 range) from a timestamp argument, according to the current timezone. Introduced in version 2.0.1-beta. MONTH() Returns the integer month (in 1..12 range) from a timestamp argument, according to the current timezone. Introduced in version 2.0.1-beta. NOW() Returns the current timestamp as an INTEGER. Introduced in version 0.9.9-rc1. YEAR() Returns the integer year (in 1969..2038 range) from a timestamp argument, according to the current timezone. Introduced in version 2.0.1-beta. YEARMONTH() Returns the integer year and month code (in 196912..203801 range) from a timestamp argument, according to the current timezone. Introduced in version 2.0.1-beta. YEARMONTHDAY() Returns the integer year, month, and date code (in 19691231..20380119 range) from a timestamp argument, according to the current timezone. Introduced in version 2.0.1-beta. Type conversion functions BIGINT() Forcibly promotes the integer argument to 64-bit type, and does nothing on floating point argument. It's intended to help enforce evaluation of certain expressions (such as a*b) in 64-bit mode even though all the arguments are 32-bit. Introduced in version 0.9.9-rc1. SINT() Forcibly reinterprets its 32-bit unsigned integer argument as signed, and also expands it to 64-bit type (because 32-bit type is unsigned). It's easily illustrated by the following example: 1-2 normally evaluates to 4294967295, but SINT(1-2) evaluates to -1. Introduced in version 1.10-beta. Comparison functions IF() IF() behavior is slightly different that that of its MySQL counterpart. It takes 3 arguments, check whether the 1st argument is equal to 0.0, returns the 2nd argument if it is not zero, or the 3rd one when it is. Note that unlike comparison operators, IF() does not use a threshold! Therefore, it's safe to use comparison results as its 1st argument, but arithmetic operators might produce unexpected results. For instance, the following two calls will produce different results even though they are logically equivalent: IF ( sqrt(3)*sqrt(3)-3<>0, a, b ) IF ( sqrt(3)*sqrt(3)-3, a, b ) In the first case, the comparison operator <> will return 0.0 (false) because of a threshold, and IF() will always return 'b' as a result. 
In the second one, the same sqrt(3)*sqrt(3)-3 expression will be compared with zero without threshold by the IF() function itself. But its value will be slightly different from zero because of limited floating point calculations precision. Because of that, the comparison with 0.0 done by IF() will not pass, and the second variant will return 'a' as a result. IN() IN(expr,val1,val2,...), introduced in version 0.9.9-rc1, takes 2 or more arguments, and returns 1 if 1st argument (expr) is equal to any of the other arguments (val1..valN), or 0 otherwise. Currently, all the checked values (but not the expression itself!) are required to be constant. (Its technically possible to implement arbitrary expressions too, and that might be implemented in the future.) Constants are pre-sorted and then binary search is used, so IN() even against a big arbitrary list of constants will be very quick. Starting with 0.9.9-rc2, first argument can also be a MVA attribute. In that case, IN() will return 1 if any of the MVA values is equal to any of the other arguments. Starting with 2.0.1-beta, IN() also supports IN(expr,@uservar) syntax to check whether the value belongs to the list in the given global user variable. INTERVAL() INTERVAL(expr,point1,point2,point3,...), introduced in version 0.9.9-rc1, takes 2 or more arguments, and returns the index of the argument that is less than the first argument: it returns 0 if expr<point1, 1 if point1<=expr<point2, and so on. It is required that point1<point2<...<pointN for this function to work correctly. Miscellaneous functions CRC32() Returns the CRC32 value of a string argument. Introduced in version 2.0.1-beta. GEODIST() GEODIST(lat1,long1,lat2,long2) function, introduced in version 0.9.9-rc2, computes geosphere distance between two given points specified by their coordinates. Note that both latitudes and longitudes must be in radians and the result will be in meters. You can use arbitrary expression as any of the four coordinates. An optimized path will be selected when one pair of the arguments refers directly to a pair attributes and the other one is constant. Sorting modes There are the following result sorting modes available: SPH_SORT_RELEVANCE mode, that sorts by relevance in descending order (best matches first); SPH_SORT_ATTR_DESC mode, that sorts by an attribute in descending order (bigger attribute values first); SPH_SORT_ATTR_ASC mode, that sorts by an attribute in ascending order (smaller attribute values first); SPH_SORT_TIME_SEGMENTS mode, that sorts by time segments (last hour/day/week/month) in descending order, and then by relevance in descending order; SPH_SORT_EXTENDED mode, that sorts by SQL-like combination of columns in ASC/DESC order; SPH_SORT_EXPR mode, that sorts by an arithmetic expression. SPH_SORT_RELEVANCE ignores any additional parameters and always sorts matches by relevance rank. All other modes require an additional sorting clause, with the syntax depending on specific mode. SPH_SORT_ATTR_ASC, SPH_SORT_ATTR_DESC and SPH_SORT_TIME_SEGMENTS modes require simply an attribute name. SPH_SORT_RELEVANCE is equivalent to sorting by "@weight DESC, @id ASC" in extended sorting mode, SPH_SORT_ATTR_ASC is equivalent to "attribute ASC, @weight DESC, @id ASC", and SPH_SORT_ATTR_DESC to "attribute DESC, @weight DESC, @id ASC" respectively. SPH_SORT_TIME_SEGMENTS mode In SPH_SORT_TIME_SEGMENTS mode, attribute values are split into so-called time segments, and then sorted by time segment first, and by relevance second. 
The segments are calculated according to the current timestamp at the time when the search is performed, so the results would change over time. The segments are as follows: last hour, last day, last week, last month, last 3 months, everything else. These segments are hardcoded, but it is trivial to change them if necessary. This mode was added to support searching through blogs, news headlines, etc. When using time segments, recent records would be ranked higher because of their segment, but within the same segment, more relevant records would be ranked higher - unlike sorting by just the timestamp attribute, which would not take relevance into account at all.

SPH_SORT_EXTENDED mode

In SPH_SORT_EXTENDED mode, you can specify an SQL-like sort expression with up to 5 attributes (including internal attributes), eg:

@relevance DESC, price ASC, @id DESC

Both internal attributes (that are computed by the engine on the fly) and user attributes that were configured for this index are allowed. Internal attribute names must start with the magic @-symbol; user attribute names can be used as is. In the example above, @relevance and @id are internal attributes and price is user-specified. Known internal attributes are:

@id (match ID)
@weight (match weight)
@rank (match weight)
@relevance (match weight)
@random (return results in random order)

@rank and @relevance are just additional aliases to @weight.

SPH_SORT_EXPR mode

Expression sorting mode lets you sort the matches by an arbitrary arithmetic expression, involving attribute values, internal attributes (@id and @weight), arithmetic operations, and a number of built-in functions. Here's an example:

$cl->SetSortMode ( SPH_SORT_EXPR, "@weight + ( user_karma + ln(pageviews) )*0.1" );

The operators and functions supported in the expressions are discussed in a separate section above.

Grouping (clustering) search results

Sometimes it could be useful to group (or in other terms, cluster) search results and/or count per-group match counts - for instance, to draw a nice graph of how many matching blog posts there were per month; or to group Web search results by site; or to group matching forum posts by author; etc.

In theory, this could be performed by doing only the full-text search in Sphinx and then using the found IDs to group on the SQL server side. However, in practice doing this with a big result set (10K-10M matches) would typically kill performance. To avoid that, Sphinx offers a so-called grouping mode. It is enabled with the SetGroupBy() API call. When grouping, all matches are assigned to different groups based on the group-by value. This value is computed from the specified attribute using one of the following built-in functions:

SPH_GROUPBY_DAY, extracts year, month and day in YYYYMMDD format from timestamp;
SPH_GROUPBY_WEEK, extracts year and first day of the week number (counting from year start) in YYYYNNN format from timestamp;
SPH_GROUPBY_MONTH, extracts month in YYYYMM format from timestamp;
SPH_GROUPBY_YEAR, extracts year in YYYY format from timestamp;
SPH_GROUPBY_ATTR, uses attribute value itself for grouping.

The final search result set then contains one best match per group. Grouping function value and per-group match count are returned as "virtual" attributes named @group and @count respectively. The result set is sorted by the group-by sorting clause, with a syntax similar to the sorting clause syntax described above. In addition to the attributes allowed in the regular sorting clause, the group-by sorting clause may also include: @group (groupby function value), @count (amount of matches in group). The default mode is to sort by the groupby value in descending order, ie. by @group desc. (A SphinxQL sketch of an equivalent grouped query is shown just below.)
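As a hedged SphinxQL counterpart to the API description above: the SELECT syntax documented later in this manual allows grouping by an expression alias, so SPH_GROUPBY_DAY-style grouping can be sketched as follows (myindex and the published timestamp attribute are placeholders, and the exact set of supported constructs may vary by version):

SELECT *, YEAR(published)*10000+MONTH(published)*100+DAY(published) AS day_key
FROM myindex WHERE MATCH('test')
GROUP BY day_key
WITHIN GROUP ORDER BY @weight DESC
ORDER BY day_key DESC
LIMIT 0,20

Each returned row is then the most relevant match of its day, with day_key carrying the same YYYYMMDD value that SPH_GROUPBY_DAY would compute.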
On completion, the total-found counter of the result set will contain the total amount of matching groups over the whole index.

WARNING: grouping is done in fixed memory and thus its results are only approximate; so there might be more groups reported in the total-found counter than are actually present, and the per-group match counts might also be underestimated. To reduce the inaccuracy, one should raise max_matches. If max_matches allows storing all the found groups, results will be 100% correct.

For example, if sorting by relevance and grouping by the "published" attribute with the SPH_GROUPBY_DAY function, then the result set will contain one most relevant match for each day when there were any matches published, with the day number and per-day match count attached, sorted by day number in descending order (ie. recent days first).

Starting with version 0.9.9-rc2, aggregate functions (AVG(), MIN(), MAX(), SUM()) are supported through the SetSelect() API call when using GROUP BY.

Distributed searching

To scale well, Sphinx has distributed searching capabilities. Distributed searching is useful to improve query latency (ie. search time) and throughput (ie. max queries/sec) in multi-server, multi-CPU or multi-core environments. This is essential for applications which need to search through huge amounts of data (ie. billions of records and terabytes of text).

The key idea is to horizontally partition (HP) the searched data across search nodes and then process it in parallel. Partitioning is done manually. You should set up several instances of the Sphinx programs (indexer and searchd) on different servers; make the instances index (and search) different parts of the data; configure a special distributed index on some of the searchd instances; and query this index. This index only contains references to other local and remote indexes - so it cannot be directly reindexed, and you should reindex those indexes which it references instead.

When searchd receives a query against a distributed index, it does the following:

connects to the configured remote agents;
issues the query;
sequentially searches the configured local indexes (while the remote agents are searching);
retrieves the remote agents' search results;
merges all the results together, removing the duplicates;
sends the merged results to the client.

From the application's point of view, there are no differences between searching through a regular index or a distributed index at all. That is, distributed indexes are fully transparent to the application, and actually there's no way to tell whether the index you queried was distributed or local. (Even though as of 0.9.9 Sphinx does not allow combining searches through distributed indexes with anything else, this constraint will be lifted in the future.)

Any searchd instance could serve both as a master (which aggregates the results) and a slave (which only does local searching) at the same time. This has a number of uses: every machine in a cluster could serve as a master which searches the whole cluster, and search requests could be balanced between masters to achieve a kind of HA (high availability) in case any of the nodes fails; if running within a single multi-CPU or multi-core machine, there would be only 1 searchd instance querying itself as an agent and thus utilizing all CPUs/cores.

Better HA support is scheduled for implementation; it would allow specifying which agents mirror each other, performing health checks, keeping track of alive agents, load-balancing requests, etc.

searchd query log formats

In version 2.0.1-beta and above, two query log formats are supported. Previous versions only supported a custom plain text format.
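As a quick, hedged illustration before the details below: the format can be switched at runtime over SphinxQL with the statements shown here (the same setting is controlled permanently by the query_log_format configuration directive; the exact value syntax may vary by version):

SET GLOBAL query_log_format = sphinxql;
SET GLOBAL query_log_format = plain;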
That plain text format is still the default one. However, while it might be more convenient for manual monitoring and review, it only logs search queries but not the other types of requests, does not always contain the complete search query data, and is also harder (and sometimes impossible) to replay for benchmarking purposes. The new sphinxql format alleviates that. It aims to be complete and automatable, even though at the cost of brevity and readability.

Plain log format

By default, searchd logs all successfully executed search queries into a query log file. Here's an example:

[Fri Jun 29 21:17:58 2007] 0.004 sec [all/0/rel 35254 (0,20)] [lj] test
[Fri Jun 29 21:20:34 2007] 0.024 sec [all/0/rel 19886 (0,20) @channel_id] [lj] test

This log format is as follows:

[query-date] query-time [match-mode/filters-count/sort-mode total-matches (offset,limit) @groupby-attr] [index-name] query

Match mode can take one of the following values:

"all" for SPH_MATCH_ALL mode;
"any" for SPH_MATCH_ANY mode;
"phr" for SPH_MATCH_PHRASE mode;
"bool" for SPH_MATCH_BOOLEAN mode;
"ext" for SPH_MATCH_EXTENDED mode;
"ext2" for SPH_MATCH_EXTENDED2 mode;
"scan" if the full scan mode was used, either by being specified with SPH_MATCH_FULLSCAN, or if the query was empty (as documented under Matching Modes).

Sort mode can take one of the following values:

"rel" for SPH_SORT_RELEVANCE mode;
"attr-" for SPH_SORT_ATTR_DESC mode;
"attr+" for SPH_SORT_ATTR_ASC mode;
"tsegs" for SPH_SORT_TIME_SEGMENTS mode;
"ext" for SPH_SORT_EXTENDED mode.

Additionally, if searchd was started with --iostats, there will be a block of data after the part where the index(es) searched are listed. A query log entry might take the form of:

[Fri Jun 29 21:17:58 2007] 0.004 sec [all/0/rel 35254 (0,20)] [lj] [ios=6 kb=111.1 ms=0.5] test

This additional block is information regarding I/O operations in performing the search: the number of file I/O operations carried out, the amount of data in kilobytes read from the index files, and the time spent on I/O operations (although there is a background processing component, the bulk of this time is the I/O operation time).

SphinxQL log format

This is a new log format introduced in 2.0.1-beta, with the goals being to log everything and then some, and in a format that is easy to automate (for instance, automatically replay). The new format can either be enabled via the query_log_format directive in the configuration file, or switched back and forth on the fly with the SET GLOBAL query_log_format=... statement via SphinxQL. In the new format, the example from the previous section would look as follows. (Wrapped below for readability, but with just one query per line in the actual log.)

/* Fri Jun 29 21:17:58.609 2007 2011 conn 2 wall 0.004 found 35254 */
SELECT * FROM lj WHERE MATCH('test') OPTION ranker=proximity;

/* Fri Jun 29 21:20:34.555 2007 conn 3 wall 0.024 found 19886 */
SELECT * FROM lj WHERE MATCH('test') GROUP BY channel_id
OPTION ranker=proximity;

Note that all requests would be logged in this format, including those sent via SphinxAPI and SphinxSE, not just those sent via SphinxQL. Also note that this kind of logging works only with plain log files and will not work if you use 'syslog' for logging.

The features of the SphinxQL log format compared to the default text one are as follows.

All request types should be logged. (This is still work in progress.)
Full statement data will be logged where possible.
Errors and warnings are logged.
The log should be automatically replayable via SphinxQL. Additional performance counters (currently, per-agent distributed query times) are logged. Every request (including both SphinxAPI and SphinxQL) request must result in exactly one log line. All request types, including INSERT, CALL SNIPPETS, etc will eventually get logged, though as of time of this writing, that is a work in progress). Every log line must be a valid SphinxQL statement that reconstructs the full request, except if the logged request is too big and needs shortening for performance reasons. Additional messages, counters, etc can be logged in the comments section after the request. MySQL protocol support and SphinxQL Starting with version 0.9.9-rc2, Sphinx searchd daemon supports MySQL binary network protocol and can be accessed with regular MySQL API. For instance, 'mysql' CLI client program works well. Here's an example of querying Sphinx using MySQL client: $ mysql -P 9306 Welcome to the MySQL monitor. Commands end with ; or \g. Your MySQL connection id is 1 Server version: 0.9.9-dev (r1734) Type 'help;' or '\h' for help. Type '\c' to clear the buffer. mysql> SELECT * FROM test1 WHERE MATCH('test') -> ORDER BY group_id ASC OPTION ranker=bm25; +------+--------+----------+------------+ | id | weight | group_id | date_added | +------+--------+----------+------------+ | 4 | 1442 | 2 | 1231721236 | | 2 | 2421 | 123 | 1231721236 | | 1 | 2421 | 456 | 1231721236 | +------+--------+----------+------------+ 3 rows in set (0.00 sec) Note that mysqld was not even running on the test machine. Everything was handled by searchd itself. The new access method is supported in addition to native APIs which all still work perfectly well. In fact, both access methods can be used at the same time. Also, native API is still the default access method. MySQL protocol support needs to be additionally configured. This is a matter of 1-line config change, adding a new listener with mysql41 specified as a protocol: listen = localhost:9306:mysql41 Just supporting the protocol and not the SQL syntax would be useless so Sphinx now also supports a subset of SQL that we dubbed SphinxQL. It supports the standard querying all the index types with SELECT, modifying RT indexes with INSERT, REPLACE, and DELETE, and much more. Full SphinxQL reference is available in . Multi-queries Multi-queries, or query batches, let you send multiple queries to Sphinx in one go (more formally, one network request). Two API methods that implement multi-query mechanism are AddQuery() and RunQueries(). You can also run multiple queries with SphinxQL, see . (In fact, regular Query() call is internally implemented as a single AddQuery() call immediately followed by RunQueries() call.) AddQuery() captures the current state of all the query settings set by previous API calls, and memorizes the query. RunQueries() actually sends all the memorized queries, and returns multiple result sets. There are no restrictions on the queries at all, except just a sanity check on a number of queries in a single batch (see ). Why use multi-queries? Generally, it all boils down to performance. First, by sending requests to searchd in a batch instead of one by one, you always save a bit by doing less network roundtrips. Second, and somewhat more important, sending queries in a batch enables searchd to perform certain internal optimizations. 
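For instance, over SphinxQL a batch of this kind can be expressed by sending several statements in one request, assuming your client library supports multi-statement queries. A hedged sketch (the myindex, price, and vendor_id names are placeholders; it mirrors the "ipod nano" scenario discussed just below):

SELECT * FROM myindex WHERE MATCH('ipod nano') ORDER BY price ASC LIMIT 0,10;
SELECT * FROM myindex WHERE MATCH('ipod nano') GROUP BY vendor_id ORDER BY @weight DESC LIMIT 0,5;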
As new types of optimizations are being added over time, it generally makes sense to pack all the queries into batches where possible, so that simply upgrading Sphinx to a new version would automatically enable new optimizations. In the case when there aren't any possible batch optimizations to apply, queries will be processed one by one internally.

Why (or rather when) not use multi-queries? Multi-queries require all the queries in a batch to be independent, and sometimes they aren't. That is, sometimes query B is based on query A results, and so can only be set up after executing query A. For instance, you might want to display results from a secondary index if and only if there were no results found in a primary index. Or maybe just specify an offset into the 2nd result set based on the amount of matches in the 1st result set. In those cases, you will have to use separate queries (or separate batches).

As of 0.9.10, there are two major optimizations to be aware of: common query optimization (available since 0.9.8); and common subtree optimization (available since 0.9.10).

Common query optimization means that searchd will identify all those queries in a batch where only the sorting and group-by settings differ, and only perform searching once. For instance, if a batch consists of 3 queries, all of them are for "ipod nano", but the 1st query requests the top-10 results sorted by price, the 2nd query groups by vendor ID and requests the top-5 vendors sorted by rating, and the 3rd query requests the max price, the full-text search for "ipod nano" will only be performed once, and its results will be reused to build 3 different result sets.

So-called faceted searching is a particularly important case that benefits from this optimization. Indeed, faceted searching can be implemented by running a number of queries, one to retrieve the search results themselves, and a few other ones with the same full-text query but different group-by settings to retrieve all the required groups of results (top-3 authors, top-5 vendors, etc). And as long as the full-text query and filtering settings stay the same, common query optimization will trigger, and greatly improve performance.

Common subtree optimization is even more interesting. It lets searchd exploit similarities between batched full-text queries. It identifies common full-text query parts (subtrees) in all queries, and caches them between queries. For instance, look at the following query batch:

barack obama president
barack obama john mccain
barack obama speech

There's a common two-word part ("barack obama") that can be computed only once, then cached and shared across the queries. And common subtree optimization does just that. Per-query cache size is strictly controlled by the subtree_docs_cache and subtree_hits_cache directives (so that caching all sixteen gazillion documents that match "i am" does not exhaust the RAM and instantly kill your server).

Here's a code sample (in PHP) that fires the same query in 3 different sorting modes:

require ( "sphinxapi.php" );

$cl = new SphinxClient ();
$cl->SetMatchMode ( SPH_MATCH_EXTENDED );

$cl->SetSortMode ( SPH_SORT_RELEVANCE );
$cl->AddQuery ( "the", "lj" );
$cl->SetSortMode ( SPH_SORT_EXTENDED, "published desc" );
$cl->AddQuery ( "the", "lj" );
$cl->SetSortMode ( SPH_SORT_EXTENDED, "published asc" );
$cl->AddQuery ( "the", "lj" );

$res = $cl->RunQueries();

How to tell whether the queries in the batch were actually optimized?
If they were, the respective query log will have a "multiplier" field that specifies how many queries were processed together:

[Sun Jul 12 15:18:17.000 2009] 0.040 sec x3 [ext/0/rel 747541 (0,20)] [lj] the
[Sun Jul 12 15:18:17.000 2009] 0.040 sec x3 [ext/0/ext 747541 (0,20)] [lj] the
[Sun Jul 12 15:18:17.000 2009] 0.040 sec x3 [ext/0/ext 747541 (0,20)] [lj] the

Note the "x3" field. It means that this query was optimized and processed in a sub-batch of 3 queries. For reference, this is what the regular log would look like if the queries were not batched:

[Sun Jul 12 15:18:17.062 2009] 0.059 sec [ext/0/rel 747541 (0,20)] [lj] the
[Sun Jul 12 15:18:17.156 2009] 0.091 sec [ext/0/ext 747541 (0,20)] [lj] the
[Sun Jul 12 15:18:17.250 2009] 0.092 sec [ext/0/ext 747541 (0,20)] [lj] the

Note how the per-query time in the multi-query case was improved by a factor of 1.5x to 2.3x, depending on the particular sorting mode. In fact, for both common query and common subtree optimizations, there were reports of 3x and even greater improvements, and that's from production instances, not just synthetic tests.

Collations

Introduced to Sphinx in version 2.0.1-beta to supplement string sorting, collations essentially affect the string attribute comparisons. They specify both the character set encoding and the strategy that Sphinx uses to compare strings when doing ORDER BY or GROUP BY with a string attribute involved.

String attributes are stored as is when indexing, and no character set or language information is attached to them. That's okay as long as Sphinx only needs to store and return the strings to the calling application verbatim. But when you ask Sphinx to sort by a string value, that request immediately becomes quite ambiguous.

First, single-byte (ASCII, or ISO-8859-1, or Windows-1251) strings need to be processed differently than the UTF-8 ones that may encode every character with a variable number of bytes. So we need to know the character set type in order to interpret the raw bytes as meaningful characters properly.

Second, we additionally need to know the language-specific string sorting rules. For instance, when sorting according to US rules in the en_US locale, the accented character 'ï' (small letter i with diaeresis) should be placed somewhere after 'z'. However, when sorting with French rules and the fr_FR locale in mind, it should be placed between 'i' and 'j'. And some other set of rules might choose to ignore accents altogether, allowing 'ï' and 'i' to be mixed arbitrarily.

Third, but not least, we might need case-sensitive sorting in some scenarios and case-insensitive sorting in others.

Collations combine all of the above: the character set, the language rules, and the case sensitivity. Sphinx currently provides the following four collations.

The first two collations rely on several standard C library (libc) calls and can thus support any locale that is installed on your system. They provide case-insensitive (_ci) and case-sensitive (_cs) comparisons respectively. By default they will use the C locale, effectively resorting to bytewise comparisons. To change that, you need to specify a different available locale using the collation_libc_locale directive. The list of locales available on your system can usually be obtained with the locale command:

$ locale -a
C
en_AG
en_AU.utf8
en_BW.utf8
en_CA.utf8
en_DK.utf8
en_GB.utf8
en_HK.utf8
en_IE.utf8
en_IN
en_NG
en_NZ.utf8
en_PH.utf8
en_SG.utf8
en_US.utf8
en_ZA.utf8
en_ZW.utf8
es_ES
fr_FR
POSIX
ru_RU.utf8
ru_UA.utf8

The specific list of the system locales may vary. (A short SphinxQL sketch of switching collations at runtime is shown just below.)
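As a hedged example (utf8_general_ci is assumed here to be one of the built-in collation names, and the myindex and title names are placeholders), the collation for the current SphinxQL session can be switched before sorting on a string attribute:

SET collation_connection = utf8_general_ci;
SELECT * FROM myindex WHERE MATCH('test') ORDER BY title ASC LIMIT 0,20;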
Consult your OS documentation to install additional needed locales. and locales are built-in into Sphinx. The first one is a generic collation for UTF-8 data (without any so-called language tailoring); it should behave similar to collation in MySQL. The second one is a simple bytewise comparison. Collation can be overriden via SphinxQL on a per-session basis using SET collation_connection statement. All subsequent SphinxQL queries will use this collation. SphinxAPI and SphinxSE queries will use the server default collation, as specified in collation_server configuration directive. Sphinx currently defaults to collation. Collations should affect all string attribute comparisons, including those within ORDER BY and GROUP BY, so differently ordered or grouped results can be returned depending on the collation chosen. User-defined functions (UDF) Starting with 2.0.1-beta, Sphinx supports User-Defined Functions, or UDF for short. They can be loaded and unloaded dynamically into searchd without having to restart the daemon, and used in expressions when searching. UDF features at a glance are as follows. Functions can take integer (both 32-bit and 64-bit), float, string, or MVA arguments. Functions can return integer or float values. Functions can check the argument number, types, and names and raise errors. Only simple functions (that is, non-aggregate ones) are currently supported. User-defined functions need your OS to support dynamically loadable libraries (aka shared objects). Most of the modern OSes are eligible, including Linux, Windows, MacOS, Solaris, BSD and others. (The internal testing has been done on Linux and Windows.) The UDF libraries must reside in a directory specified by plugin_dir directive, and the server must be configured to use mode. Relative paths to the library files are not allowed. Once the library is succesfully built and copied to the trusted location, you can then dynamically install and deinstall the functions using CREATE FUNCTION and DROP FUNCTION statements respectively. A single library can contain multiple functions. A library gets loaded when you first install a function from it, and unloaded when you deinstall all the functions from that library. The library functions that will implement a UDF visible to SQL statements need to follow C calling convention, and a simple naming convention. Sphinx source distribution provides a sample file, src/udfexample.c, that defines a few simple functions showing how to work with integer, string, and MVA arguments; you can use that one as a foundation for your new functions. It includes the UDF interface header file, src/sphinxudf.h, that defines the required types and structures. sphinxudf.h header is standalone, that is, does not require any other parts of Sphinx source to compile. Every function that you intend to use in your SELECT statements requires at least two corresponding C/C++ functions: the initialization call, and the function call itself. You can also optionally define the deinitialization call if your function requires any post-query cleanup. (For instance, if you were allocating any memory in either the initialization call or the function calls.) Function names in SQL are case insensitive, C function names are not. They need to be all lower-case. Mistakes in function name prevent UDFs from loading. You also have to pay special attention to the calling convention used when compiling, the list and the types of arguments, and the return type of the main function call. 
Mistakes in either are likely to crash the server, or result in unexpected results in the best case. Last but not least, all functions need to be thread-safe. Let's assume for the sake of example that your UDF name in SphinxQL will be MYFUNC. The initialization, main, and deinitialization functions would then need to be named as follows and take the following arguments: /// initialization function /// called once during query initialization /// returns 0 on success /// returns non-zero and fills error_message buffer on failure int myfunc_init ( SPH_UDF_INIT * init, SPH_UDF_ARGS * args, char * error_message ); /// main call function /// returns the computed value /// writes non-zero value into error_flag to indicate errors RETURN_TYPE myfunc ( SPH_UDF_INIT * init, SPH_UDF_ARGS * args, char * error_flag ); /// optional deinitialization function /// called once to cleanup once query processing is done void myfunc_deinit ( SPH_UDF_INIT * init ); The two mentioned structures, SPH_UDF_INIT and SPH_UDF_ARGS, are defined in the src/sphinxudf.h interface header and documented there. RETURN_TYPE of the main function must be one of the following: int for the functions that return INT. sphinx_int64_t for the functions that return BIGINT. float for the functions that return FLOAT. The calling sequence is as follows. myfunc_init() is called once when initializing the query. It can return a non-zero code to indicate a failure; in that case query is not executed, and the error message from the error_message buffer is returned. Otherwise, myfunc() is be called for every row, and a myfunc_deinit() is then called when the query ends. myfunc() can indicate an error by writing a non-zero byte value to error_flag, in that case, it will no more be called for subsequent rows, and a default value of 0 will be substituted. Sphinx might or might not choose to terminate such queries early, neither behavior is currently guaranteed. Command line tools reference As mentioned elsewhere, Sphinx is not a single program called 'sphinx', but a collection of 4 separate programs which collectively form Sphinx. This section covers these tools and how to use them. <filename>indexer</filename> command reference indexer is the first of the two principle tools as part of Sphinx. Invoked from either the command line directly, or as part of a larger script, indexer is solely responsible for gathering the data that will be searchable. The calling syntax for indexer is as follows: indexer [OPTIONS] [indexname1 [indexname2 [...]]] Essentially you would list the different possible indexes (that you would later make available to search) in sphinx.conf, so when calling indexer, as a minimum you need to be telling it what index (or indexes) you want to index. If sphinx.conf contained details on 2 indexes, mybigindex and mysmallindex, you could do the following: $ indexer mybigindex $ indexer mysmallindex mybigindex As part of the configuration file, sphinx.conf, you specify one or more indexes for your data. You might call indexer to reindex one of them, ad-hoc, or you can tell it to process all indexes - you are not limited to calling just one, or all at once, you can always pick some combination of the available indexes. The majority of the options for indexer are given in the configuration file, however there are some options you might need to specify on the command line as well, as they can affect how the indexing operation is performed. These options are: ( for short) tells indexer to use the given file as its configuration. 
Normally, it will look for sphinx.conf in the installation directory (e.g. /usr/local/sphinx/etc/sphinx.conf if installed into /usr/local/sphinx), followed by the current directory you are in when calling indexer from the shell. This is most of use in shared environments where the binary files are installed somewhere like /usr/local/sphinx/ but you want to provide users with the ability to make their own custom Sphinx set-ups, or if you want to run multiple instances on a single server. In cases like those you could allow them to create their own sphinx.conf files and pass them to indexer with this option. For example: $ indexer --config /home/myuser/sphinx.conf myindex tells indexer to update every index listed in sphinx.conf, instead of listing individual indexes. This would be useful in small configurations, or cron-type or maintenance jobs where the entire index set will get rebuilt each day, or week, or whatever period is best. Example usage: $ indexer --config /home/myuser/sphinx.conf --all is used for rotating indexes. Unless you have the situation where you can take the search function offline without troubling users, you will almost certainly need to keep search running whilst indexing new documents. creates a second index, parallel to the first (in the same place, simply including .new in the filenames). Once complete, indexer notifies searchd via sending the signal, and searchd will attempt to rename the indexes (renaming the existing ones to include .old and renaming the .new to replace them), and then start serving from the newer files. Depending on the setting of seamless_rotate, there may be a slight delay in being able to search the newer indexes. Example usage: $ indexer --rotate --all tells indexer not to output anything, unless there is an error. Again, most used for cron-type, or other script jobs where the output is irrelevant or unnecessary, except in the event of some kind of error. Example usage: $ indexer --rotate --all --quiet does not display progress details as they occur; instead, the final status details (such as documents indexed, speed of indexing and so on are only reported at completion of indexing. In instances where the script is not being run on a console (or 'tty'), this will be on by default. Example usage: $ indexer --rotate --all --noprogress reviews the index source, as if it were indexing the data, and produces a list of the terms that are being indexed. In other words, it produces a list of all the searchable terms that are becoming part of the index. Note; it does not update the index in question, it simply processes the data 'as if' it were indexing, including running queries defined with or . outputfile.txt will contain the list of words, one per line, sorted by frequency with most frequent first, and N specifies the maximum number of words that will be listed; if sufficiently large to encompass every word in the index, only that many words will be returned. Such a dictionary list could be used for client application features around "Did you mean..." functionality, usually in conjunction with , below. Example: $ indexer myindex --buildstops word_freq.txt 1000 This would produce a document in the current directory, word_freq.txt with the 1,000 most common words in 'myindex', ordered by most common first. Note that the file will pertain to the last index indexed when specified with multiple indexes or (i.e. the last one listed in the configuration file) works with (and is ignored if is not specified). 
As --buildstops provides the list of words used within the index, --buildfreqs adds the quantity present in the index, which would be useful in establishing whether certain words should be considered stopwords if they are too prevalent. It will also help with developing "Did you mean..." features, where you can see how much more common a given word is compared to another, similar one. Example:

$ indexer myindex --buildstops word_freq.txt 1000 --buildfreqs

This would produce the word_freq.txt as above; however, after each word would be the number of times it occurred in the index in question.

--merge <dst-index> <src-index> is used for physically merging indexes together, for example if you have a main+delta scheme, where the main index rarely changes, but the delta index is rebuilt frequently, and --merge would be used to combine the two. The operation moves from right to left - the contents of src-index get examined and physically combined with the contents of dst-index, and the result is left in dst-index. In pseudo-code, it might be expressed as:

dst-index += src-index

An example:

$ indexer --merge main delta --rotate

In the above example, where main is the master, rarely modified index, and delta is the more frequently modified one, you might use the above to call indexer to combine the contents of the delta into the main index and rotate the indexes.

--merge-dst-range <attr> <min> <max> runs the given filter range upon merging. Specifically, as the merge is applied to the destination index (as part of --merge, and it is ignored if --merge is not specified), indexer will also filter the documents ending up in the destination index, and only documents that pass the given filter will end up in the final index. This could be used, for example, in an index where there is a 'deleted' attribute, where 0 means 'not deleted'. Such an index could be merged with:

$ indexer --merge main delta --merge-dst-range deleted 0 0

Any documents marked as deleted (value 1) would be removed from the newly-merged destination index. The option can be added several times to the command line, to add successive filters to the merge, all of which must be met in order for a document to become part of the final index.

--dump-rows <file> dumps rows fetched by SQL source(s) into the specified file, in a MySQL compatible syntax. Resulting dumps are the exact representation of data as received by indexer and help to reproduce indexing-time issues.

--verbose guarantees that every row that caused problems indexing (duplicate, zero, or missing document ID; or file field IO issues; etc) will be reported. By default, this option is off, and problem summaries may be reported instead.

--sighup-each is useful when you are rebuilding many big indexes, and want each one rotated into searchd as soon as possible. With --sighup-each, indexer will send a SIGHUP signal to searchd after successfully completing the work on each index. (The default behavior is to send a single SIGHUP after all the indexes were built.)

--print-queries prints out the SQL queries that indexer sends to the database, along with SQL connection and disconnection events. That is useful to diagnose and fix problems with SQL sources.

searchd command reference

searchd is the second of the two principal tools of the Sphinx package. searchd is the part of the system which actually handles searches; it functions as a server and is responsible for receiving queries, processing them and returning a dataset back to the different APIs for client applications.
Unlike indexer, searchd is not designed to be run either from a regular script or command-line calling, but instead either as a daemon to be called from init.d (on Unix/Linux type systems) or to be called as a service (on Windows-type systems), so not all of the command line options will always apply, and so will be build-dependent. Calling searchd is simply a case of: $ searchd [OPTIONS] The options available to searchd on all builds are: ( for short) lists all of the parameters that can be called in your particular build of searchd. ( for short) tells searchd to use the given file as its configuration, just as with indexer above. is used to asynchronously stop searchd, using the details of the PID file as specified in the sphinx.conf file, so you may also need to confirm to searchd which configuration file to use with the option. NB, calling will also make sure any changes applied to the indexes with UpdateAttributes() will be applied to the index files themselves. Example: $ searchd --config /home/myuser/sphinx.conf --stop is used to synchronously stop searchd. essentially tells the running instance to exit (by sending it a SIGTERM) and then immediately returns. will also attempt to wait until the running searchd instance actually finishes the shutdown (eg. saves all the pending attribute changes) and exits. Example: $ searchd --config /home/myuser/sphinx.conf --stopwait Possible exit codes are as follows: 0 on success; 1 if connection to running searchd daemon failed; 2 if daemon reported an error during shutdown; 3 if daemon crashed during shutdown. command is used to query running searchd instance status, using the connection details from the (optionally) provided configuration file. It will try to connect to the running instance using the first configured UNIX socket or TCP port. On success, it will query for a number of status and performance counter values and print them. You can use Status() API call to access the very same counters from your application. Examples: $ searchd --status $ searchd --config /home/myuser/sphinx.conf --status is used to explicitly state a PID file, where the process information is stored regarding searchd, used for inter-process communications (for example, indexer will need to know the PID to contact searchd for rotating indexes). Normally, searchd would use a PID if running in regular mode (i.e. not with ), but it is possible that you will be running it in console mode whilst the index is being updated and rotated, for which a PID file will be needed. $ searchd --config /home/myuser/sphinx.conf --pidfile /home/myuser/sphinx.pid is used to force searchd into console mode; typically it will be running as a conventional server application, and will aim to dump information into the log files (as specified in sphinx.conf). Sometimes though, when debugging issues in the configuration or the daemon itself, or trying to diagnose hard-to-track-down problems, it may be easier to force it to dump information directly to the console/command line from which it is being called. Running in console mode also means that the process will not be forked (so searches are done in sequence) and logs will not be written to. (It should be noted that console mode is not the intended method for running searchd.) You can invoke it as such: $ searchd --config /home/myuser/sphinx.conf --console enables additional debug output in the daemon log. Should only be needed rarely, to assist with debugging issues that could not be easily reproduced on request. 
is used in conjuction with the logging options (the will need to have been activated in sphinx.conf) to provide more detailed information on a per-query basis as to the input/output operations carried out in the course of that query, with a slight performance hit and of course bigger logs. Further details are available under the query log format section. You might start searchd thus: $ searchd --config /home/myuser/sphinx.conf --iostats is used to provide actual CPU time report (in addition to wall time) in both query log file (for every given query) and status report (aggregated). It depends on clock_gettime() system call and might therefore be unavailable on certain systems. You might start searchd thus: $ searchd --config /home/myuser/sphinx.conf --cpustats ( for short) is used to specify the port that searchd should listen on, usually for debugging purposes. This will usually default to 9312, but sometimes you need to run it on a different port. Specifying it on the command line will override anything specified in the configuration file. The valid range is 0 to 65535, but ports numbered 1024 and below usually require a privileged account in order to run. An example of usage: $ searchd --port 9313 (or for short) Works as , but allow you to specify not only the port, but full path, as IP address and port, or Unix-domain socket path, that searchd will listen on. Otherwords, you can specify either an IP address (or hostname) and port number, or just a port number, or Unix socket path. If you specify port number but not the address, searchd will listen on all network interfaces. Unix path is identified by a leading slash. As the last param you can also specify a protocol handler (listener) to be used for connections on this socket. Supported protocol values are 'sphinx' (Sphinx 0.9.x API protocol) and 'mysql41' (MySQL protocol used since 4.1 upto at least 5.1). (or for short) forces this instance of searchd only to serve the specified index. Like , above, this is usually for debugging purposes; more long-term changes would generally be applied to the configuration file itself. Example usage: $ searchd --index myindex strips the path names from all the file names referenced from the index (stopwords, wordforms, exceptions, etc). This is useful for picking up indexes built on another machine with possibly different path layouts. switch, added in version 2.0.2-beta, can be used to specify a list of extra binary log replay options. The supported options are: , ignore descending transaction timestamps and replay such transactions anyway (the default behavior is to exit with an error). Example: $ searchd --replay-flags=accept-desc-timestamp There are some options for searchd that are specific to Windows platforms, concerning handling as a service, are only be available on Windows binaries. Note that on Windows searchd will default to mode, unless you install it as a service. installs searchd as a service into the Microsoft Management Console (Control Panel / Administrative Tools / Services). Any other parameters specified on the command line, where is specified will also become part of the command line on future starts of the service. For example, as part of calling searchd, you will likely also need to specify the configuration file with , and you would do that as well as specifying . Once called, the usual start/stop facilities will become available via the management console, so any methods you could use for starting, stopping and restarting services would also apply to searchd. 
Example: C:\WINDOWS\system32> C:\Sphinx\bin\searchd.exe --install --config C:\Sphinx\sphinx.conf If you wanted to have the I/O stats every time you started searchd, you would specify its option on the same line as the command thus: C:\WINDOWS\system32> C:\Sphinx\bin\searchd.exe --install --config C:\Sphinx\sphinx.conf --iostats removes the service from the Microsoft Management Console and other places where services are registered, after previously installed with . Note, this does not uninstall the software or delete the indexes. It means the service will not be called from the services systems, and will not be started on the machine's next start. If currently running as a service, the current instance will not be terminated (until the next reboot, or searchd is called with ). If the service was installed with a custom name (with ), the same name will need to be specified with when calling to uninstall. Example: C:\WINDOWS\system32> C:\Sphinx\bin\searchd.exe --delete applies the given name to searchd when installing or deleting the service, as would appear in the Management Console; this will default to searchd, but if being deployed on servers where multiple administrators may log into the system, or a system with multiple searchd instances, a more descriptive name may be applicable. Note that unless combined with or , this option does not do anything. Example: C:\WINDOWS\system32> C:\Sphinx\bin\searchd.exe --install --config C:\Sphinx\sphinx.conf --servicename SphinxSearch is the option that is passed by the Management Console to searchd to invoke it as a service on Windows platforms. It would not normally be necessary to call this directly; this would normally be called by Windows when the service would be started, although if you wanted to call this as a regular service from the command-line (as the complement to ) you could do so in theory. Last but not least, as every other daemon, searchd supports a number of signals. SIGTERM Initiates a clean shutdown. New queries will not be handled; but queries that are already started will not be forcibly interrupted. SIGHUP Initiates index rotation. Depending on the value of seamless_rotate setting, new queries might be shortly stalled; clients will receive temporary errors. SIGUSR1 Forces reopen of searchd log and query log files, letting you implement log file rotation. <filename>search</filename> command reference search is one of the helper tools within the Sphinx package. Whereas searchd is responsible for searches in a server-type environment, search is aimed at testing the index from the command line, and testing the index quickly without building a framework to make the connection to the server and process its response. Note: search is not intended to be deployed as part of a client application; it is strongly recommended you do not write an interface to search instead of searchd, and none of the bundled client APIs support this method. (In any event, search will reload files each time, whereas searchd will cache them in memory for performance.) That said, many types of query that you could build in the APIs could also be made with search, however for very complex searches it may be easier to construct them using a small script and the corresponding API. Additionally, some newer features may be available in the searchd system that have not yet been brought into search. 
The calling syntax for search is as follows: search [OPTIONS] word1 [word2 [word3 [...]]] When calling search, it is not necessary to have searchd running; simply make sure that the account running the search program has read access to the configuration file and the index files. The default behaviour is to apply a search for word1 (AND word2 AND word3... as specified) to all fields in all indexes as given in the configuration file. If constructing the equivalent in the API, this would be the equivalent to passing to SetMatchMode, and specifying as the indexes to query as part of Query. There are many options available to search. Firstly, the general options: ( for short) tells search to use the given file as its configuration, just as with indexer above. ( for short) tells search to limit searching to the specified index only; normally it would attempt to search all of the physical indexes listed in sphinx.conf, not any distributed ones. tells search to accept the query from the standard input, rather than the command line. This can be useful for testing purposes whereby you could feed input via pipes and from scripts. Options for setting matches: ( for short) changes the matching mode to match any of the words as part of the query (word1 OR word2 OR word3). In the API this would be equivalent to passing to SetMatchMode. ( for short) changes the matching mode to match all of the words as part of the query, and do so in the phrase given (not including punctuation). In the API this would be equivalent to passing to SetMatchMode. ( for short) changes the matching mode to Boolean matching. Note if using Boolean syntax matching on the command line, you may need to escape the symbols (with a backslash) to avoid the shell/command line processor applying them, such as ampersands being escaped on a Unix/Linux system to avoid it forking to the search process, although this can be resolved by using , as below. In the API this would be equivalent to passing to SetMatchMode. ( for short) changes the matching mode to extended matching which provides various text querying operators. In the API this would be equivalent to passing to SetMatchMode. ( for short) filters the results such that only documents where the attribute given (attr) matches the value given (v). For example, only matches documents with an attribute called 'deleted' where its value is 0. You can also add multiple filters on the command line, by specifying multiple multiple times, however if you apply a second filter to an attribute it will override the first defined filter. Options for handling the results: ( for short) limits the total number of matches back to the number given. If a 'group' is specified, this will be the number of grouped results. This defaults to 20 results if not specified (as do the APIs) ( for short) offsets the result list by the number of places set by the count; this would be used for pagination through results, where if you have 20 results per 'page', the second page would begin at offset 20, the third page at offset 40, etc. ( for short) specifies that results should be grouped together based on the attribute specified. Like the GROUP BY clause in SQL, it will combine all results where the attribute given matches, and returns a set of results where each returned result is the best from each group. Unless otherwise specified, this will be the best match on relevance. ( for short) instructs that when results are grouped with , the expression given in <expr> shall determine the order of the groups. 
Note, this does not specify which is the best item within the group, only the order in which the groups themselves shall be returned.

( for short) specifies that results should be sorted in the order listed in <clause>. This allows you to specify the order you wish results to be presented in, ordering by different columns. For example, you could sort entries first by weight (or relevance) and, where two or more entries have the same weight, then by time, with the newest (highest timestamp) first. You will usually need to put the items in quotes () or use commas () to avoid the items being treated separately. Additionally, like the regular sorting modes, if (grouping) is being used, this will state how to establish the best match within each group.

( for short) specifies that the search results should be presented in an order determined by an arithmetic expression, stated in expr. For example: (again noting that this will have to be quoted to avoid the shell dealing with the asterisk). Extended sort mode is discussed in more detail under the corresponding entry in the Sorting modes section of the manual.

specifies that the results should be sorted by descending (i.e. most recent first) date. This requires that there is an attribute in the index that is set as a timestamp. specifies that the results should be sorted by ascending (i.e. oldest first) date. This requires that there is an attribute in the index that is set as a timestamp. specifies that the results should be sorted by timestamp in groups; it will return all of the documents whose timestamp is within the last hour, sorted within that bracket by relevance. After that, it will return the documents from the last day, sorted by relevance, then the last week and then the last month. It is discussed in more detail under the corresponding entry in the Sorting modes section of the manual.

Other options: ( for short) instructs search not to look up data in your SQL database. Specifically, for debugging with MySQL and search, you can provide it with a query to look up the full article based on the returned document ID. It is explained in more detail under the sql_query_info directive.

spelldump command reference

spelldump is one of the helper tools within the Sphinx package. It is used to extract the contents of a dictionary file that uses ispell or MySpell format, which can help build word lists for wordforms - all of the possible forms are pre-built for you. Its general usage is: spelldump [options] <dictionary> <affix> [result] [locale-name] The two main parameters are the dictionary's main file and its affix file; usually these are named as [language-prefix].dict and [language-prefix].aff and will be available with most common Linux distributions, as well as various places online. The optional result parameter specifies where the dictionary data should be output to, and locale-name specifies the locale details you wish to use. There is an additional option which specifies a file for case conversion details. Examples of its usage are: spelldump en.dict en.aff spelldump ru.dict ru.aff ru.txt ru_RU.CP1251 spelldump ru.dict ru.aff ru.txt .1251 The results file will contain a list of all the words in the dictionary in alphabetical order, output in the format of a wordforms file, which you can use to customise for your specific circumstances.
An example of the result file: zone > zone zoned > zoned zoning > zoning

indextool command reference

indextool is one of the helper tools within the Sphinx package, introduced in version 0.9.9-rc2. It is used to dump miscellaneous debug information about the physical index. (Additional functionality such as index verification is planned in the future, hence the indextool name rather than just indexdump.) Its general usage is: indextool <command> [options] The only currently available option applies to all commands and lets you specify the configuration file: --config <file> (-c <file> for short) overrides the built-in config file names. The commands are as follows: --dumpheader FILENAME.sph quickly dumps the provided index header file without touching any other index files or even the configuration file. The report provides a breakdown of all the index settings, in particular the entire attribute and field list. Prior to 0.9.9-rc2, this command was present in the CLI search utility. --dumpconfig FILENAME.sph dumps the index definition from the given index header file in (almost) compliant sphinx.conf file format. Added in version 2.0.1-beta. --dumpheader INDEXNAME dumps index header by index name, looking up the header path in the configuration file. --dumpdocids INDEXNAME dumps document IDs by index name. It takes the data from the attribute (.spa) file and therefore requires docinfo=extern to work. --dumphitlist INDEXNAME KEYWORD dumps all the hits (occurrences) of a given keyword in a given index, with the keyword specified as text. --dumphitlist INDEXNAME --wordid ID dumps all the hits (occurrences) of a given keyword in a given index, with the keyword specified as an internal numeric ID. --htmlstrip INDEXNAME filters stdin using the HTML stripper settings for a given index, and prints the filtering results to stdout. Note that the settings will be taken from sphinx.conf, and not the index header. --check INDEXNAME checks the index data files for consistency errors that might be introduced either by bugs in indexer or by hardware faults. Starting with version 2.0.2-beta, it also works on RT indexes, but checks disk chunks only. --strip-path strips the path names from all the file names referenced from the index (stopwords, wordforms, exceptions, etc). This is useful for checking indexes built on another machine with possibly different path layouts.

SphinxQL reference

SphinxQL is our SQL dialect that exposes all of the search daemon functionality using a standard SQL syntax with a few Sphinx-specific extensions. Everything available via the SphinxAPI is also available via SphinxQL, but not vice versa; for instance, writes into RT indexes are only available via SphinxQL. This chapter documents the supported SphinxQL statement syntax.

SELECT syntax SELECT select_expr [, select_expr ...] FROM index [, index2 ...] [WHERE where_condition] [GROUP BY {col_name | expr_alias}] [ORDER BY {col_name | expr_alias} {ASC | DESC} [, ...]] [WITHIN GROUP ORDER BY {col_name | expr_alias} {ASC | DESC}] [LIMIT offset, row_count] [OPTION opt_name = opt_value [, ...]] SELECT statement was introduced in version 0.9.9-rc2. Its syntax is based upon regular SQL but adds several Sphinx-specific extensions and has a few omissions (such as (currently) missing support for JOINs). Specifically: Column list clause. Column names, arbitrary expressions, and star ('*') are all allowed (i.e. SELECT @id, group_id*123+456 AS expr1 FROM test1 will work). Unlike in regular SQL, all computed expressions must be aliased with a valid identifier. Starting with version 2.0.1-beta, AS is optional. Special names such as @id and @weight should currently be used with a leading at-sign. This at-sign requirement will be lifted in the future. FROM clause.
FROM clause should contain the list of indexes to search through. Unlike in regular SQL, comma means enumeration of full-text indexes as in the Query() API call rather than JOIN.

WHERE clause. This clause will map both to fulltext query and filters. Comparison operators (=, !=, <, >, <=, >=), IN, AND, NOT, and BETWEEN are all supported and map directly to filters. OR is not supported yet but will be in the future. MATCH('query') is supported and maps to fulltext query. The query will be interpreted according to full-text query language rules. There must be at most one MATCH() in the clause. Starting with version 2.0.1-beta, {col_name | expr_alias} [NOT] IN @uservar condition syntax is supported. (Refer to for a discussion of global user variables.)

GROUP BY clause. Currently only supports grouping by a single column. The column however can be a computed expression: SELECT *, group_id*1000+article_type AS gkey FROM example GROUP BY gkey Aggregate functions (AVG(), MIN(), MAX(), SUM()) in the column list clause are supported. Arguments to aggregate functions can be either plain attributes or arbitrary expressions. COUNT(*) is implicitly supported, as using GROUP BY will add an @count column to the result set. Explicit support might be added in the future. COUNT(DISTINCT attr) is supported. Currently there can be at most one COUNT(DISTINCT) per query and the argument needs to be an attribute. Both current restrictions on COUNT(DISTINCT) might be lifted in the future. SELECT *, AVG(price) AS avgprice, COUNT(DISTINCT storeid) FROM products WHERE MATCH('ipod') GROUP BY vendorid Starting with 2.0.1-beta, GROUP BY on a string attribute is supported, with respect to the current collation (see ).

WITHIN GROUP ORDER BY clause. This is a Sphinx-specific extension that lets you control how the best row within a group will be selected. The syntax matches that of the regular ORDER BY clause: SELECT *, INTERVAL(posted,NOW()-7*86400,NOW()-86400) AS timeseg FROM example WHERE MATCH('my search query') GROUP BY siteid WITHIN GROUP ORDER BY @weight DESC ORDER BY timeseg DESC, @weight DESC Starting with 2.0.1-beta, WITHIN GROUP ORDER BY on a string attribute is supported, with respect to the current collation (see ).

ORDER BY clause. Unlike in regular SQL, only column names (not expressions) are allowed and explicit ASC and DESC are required. The columns however can be computed expressions: SELECT *, @weight*10+docboost AS skey FROM example ORDER BY skey Starting with 2.0.1-beta, ORDER BY on a string attribute is supported, with respect to the current collation (see ). Starting with 2.0.2-beta, ORDER BY RAND() syntax is supported. Note that this syntax is actually going to randomize the weight values and then order matches by those randomized weights.

LIMIT clause. Both LIMIT N and LIMIT M,N forms are supported. Unlike in regular SQL (but like in the Sphinx API), an implicit LIMIT 0,20 is present by default.

OPTION clause. This is a Sphinx-specific extension that lets you control a number of per-query options. The syntax is: OPTION <optionname>=<value> [ , ...
] Supported options and respectively allowed values are: 'ranker' - any of 'proximity_bm25', 'bm25', 'none', 'wordcount', 'proximity', 'matchany', or 'fieldmask' 'max_matches' - integer (per-query max matches value) 'cutoff' - integer (max found matches threshold) 'max_query_time' - integer (max search time threshold, msec) 'retry_count' - integer (distributed retries count) 'retry_delay' - integer (distributed retry delay, msec) 'field_weights' - a named integer list (per-field user weights for ranking) 'index_weights' - a named integer list (per-index user weights for ranking) 'reverse_scan' - 0 or 1, lets you control the order in which full-scan query processes the rows 'comment' - string, user comment that gets copied to a query log file Example: SELECT * FROM test WHERE MATCH('@title hello @body world') OPTION ranker=bm25, max_matches=3000, field_weights=(title=10, body=3) SHOW META syntax SHOW META SHOW META shows additional meta-information about the latest query such as query time and keyword statistics: mysql> SELECT * FROM test1 WHERE MATCH('test|one|two'); +------+--------+----------+------------+ | id | weight | group_id | date_added | +------+--------+----------+------------+ | 1 | 3563 | 456 | 1231721236 | | 2 | 2563 | 123 | 1231721236 | | 4 | 1480 | 2 | 1231721236 | +------+--------+----------+------------+ 3 rows in set (0.01 sec) mysql> SHOW META; +---------------+-------+ | Variable_name | Value | +---------------+-------+ | total | 3 | | total_found | 3 | | time | 0.005 | | keyword[0] | test | | docs[0] | 3 | | hits[0] | 5 | | keyword[1] | one | | docs[1] | 1 | | hits[1] | 2 | | keyword[2] | two | | docs[2] | 1 | | hits[2] | 2 | +---------------+-------+ 12 rows in set (0.00 sec) SHOW WARNINGS syntax SHOW WARNINGS SHOW WARNINGS statement, introduced in version 0.9.9-rc2, can be used to retrieve the warning produced by the latest query. The error message will be returned along with the query itself: mysql> SELECT * FROM test1 WHERE MATCH('@@title hello') \G ERROR 1064 (42000): index test1: syntax error, unexpected TOK_FIELDLIMIT near '@title hello' mysql> SELECT * FROM test1 WHERE MATCH('@title -hello') \G ERROR 1064 (42000): index test1: query is non-computable (single NOT operator) mysql> SELECT * FROM test1 WHERE MATCH('"test doc"/3') \G *************************** 1. row *************************** id: 4 weight: 2500 group_id: 2 date_added: 1231721236 1 row in set, 1 warning (0.00 sec) mysql> SHOW WARNINGS \G *************************** 1. row *************************** Level: warning Code: 1000 Message: quorum threshold too high (words=2, thresh=3); replacing quorum operator with AND operator 1 row in set (0.00 sec) SHOW STATUS syntax SHOW STATUS, introduced in version 0.9.9-rc2, displays a number of useful performance counters. IO and CPU counters will only be available if searchd was started with --iostats and --cpustats switches respectively. 
mysql> SHOW STATUS; +--------------------+-------+ | Variable_name | Value | +--------------------+-------+ | uptime | 216 | | connections | 3 | | maxed_out | 0 | | command_search | 0 | | command_excerpt | 0 | | command_update | 0 | | command_keywords | 0 | | command_persist | 0 | | command_status | 0 | | agent_connect | 0 | | agent_retry | 0 | | queries | 10 | | dist_queries | 0 | | query_wall | 0.075 | | query_cpu | OFF | | dist_wall | 0.000 | | dist_local | 0.000 | | dist_wait | 0.000 | | query_reads | OFF | | query_readkb | OFF | | query_readtime | OFF | | avg_query_wall | 0.007 | | avg_query_cpu | OFF | | avg_dist_wall | 0.000 | | avg_dist_local | 0.000 | | avg_dist_wait | 0.000 | | avg_query_reads | OFF | | avg_query_readkb | OFF | | avg_query_readtime | OFF | +--------------------+-------+ 29 rows in set (0.00 sec) INSERT and REPLACE syntax {INSERT | REPLACE} INTO index [(column, ...)] VALUES (value, ...) [, (...)] INSERT statement, introduced in version 1.10-beta, is only supported for RT indexes. It inserts new rows (documents) into an existing index, with the provided column values. ID column must be present in all cases. Rows with duplicate IDs will not be overwritten by INSERT; use REPLACE to do that. is the name of RT index into which the new row(s) should be inserted. The optional column names list lets you only explicitly specify values for some of the columns present in the index. All the other columns will be filled with their default values (0 for scalar types, empty string for text types). Expressions are not currently supported in INSERT and values should be explicitly specified. Multiple rows can be inserted using a single INSERT statement by providing several comma-separated, parens-enclosed lists of rows values. DELETE syntax DELETE FROM index WHERE {id = value | id IN (val1 [, val2 [, ...]])} DELETE statement, introduced in version 1.10-beta, is only supported for RT indexes. It deletes existing rows (documents) from an existing index based on ID. is the name of RT index from which the row should be deleted. is the row ID to be deleted. Support for batch id IN (2,3,5) syntax was added in version 2.0.1-beta. Additional types of WHERE conditions (such as conditions on attributes, etc) are planned, but not supported yet as of 1.10-beta. SET syntax SET [GLOBAL] server_variable_name = value SET GLOBAL @user_variable_name = (int_val1 [, int_val2, ...]) SET NAMES value SET @@dummy_variable = ignored_value SET statement, introduced in version 1.10-beta, modifies a variable value. The variable names are case-insensitive. No variable value changes survive server restart. SET NAMES statement and SET @@variable_name syntax, both introduced in version 2.0.2-beta, do nothing. They were implemented to maintain compatibility with 3rd party MySQL client libraries, connectors, and frameworks that may need to run this statement when connecting. There are the following classes of the variables: per-session server variable (1.10-beta and above) global server variable (2.0.1-beta and above) global user variable (2.0.1-beta and above) Global user variables are shared between concurrent sessions. Currently, the only supported value type is the list of BIGINTs, and these variables can only be used along with IN() for filtering purpose. The intended usage scenario is uploading huge lists of values to searchd (once) and reusing them (many times) later, saving on network overheads. 
Example: // in session 1 mysql> SET GLOBAL @myfilter=(2,3,5,7,11,13); Query OK, 0 rows affected (0.00 sec) // later in session 2 mysql> SELECT * FROM test1 WHERE group_id IN @myfilter; +------+--------+----------+------------+-----------------+------+ | id | weight | group_id | date_added | title | tag | +------+--------+----------+------------+-----------------+------+ | 3 | 1 | 2 | 1299338153 | another doc | 15 | | 4 | 1 | 2 | 1299338153 | doc number four | 7,40 | +------+--------+----------+------------+-----------------+------+ 2 rows in set (0.02 sec)

Per-session and global server variables affect certain server settings in the respective scope. Known per-session server variables are: AUTOCOMMIT = {0 | 1} Whether any data modification statement should be implicitly wrapped by BEGIN and COMMIT. Introduced in version 1.10-beta. COLLATION_CONNECTION = collation_name Selects the collation to be used for ORDER BY or GROUP BY on string values in the subsequent queries. Refer to for a list of known collation names. Introduced in version 2.0.1-beta. CHARACTER_SET_RESULTS = charset_name Does nothing; a placeholder to support frameworks, clients, and connectors that attempt to automatically enforce a charset when connecting to a Sphinx server. Introduced in version 2.0.1-beta. SQL_AUTO_IS_NULL = value Does nothing; a placeholder to support frameworks, clients, and connectors that attempt to automatically enforce a charset when connecting to a Sphinx server. Introduced in version 2.0.2-beta. SQL_MODE = value Does nothing; a placeholder to support frameworks, clients, and connectors that attempt to automatically enforce a charset when connecting to a Sphinx server. Introduced in version 2.0.2-beta.

Known global server variables are: QUERY_LOG_FORMAT = {plain | sphinxql} Changes the current log format. Introduced in version 2.0.1-beta. LOG_LEVEL = {info | debug | debugv | debugvv} Changes the current log verbosity level. Introduced in version 2.0.1-beta. Examples: mysql> SET autocommit=0; Query OK, 0 rows affected (0.00 sec) mysql> SET GLOBAL query_log_format=sphinxql; Query OK, 0 rows affected (0.00 sec)

SET TRANSACTION syntax SET TRANSACTION ISOLATION LEVEL { READ UNCOMMITTED | READ COMMITTED | REPEATABLE READ | SERIALIZABLE } SET TRANSACTION statement, introduced in version 2.0.2-beta, does nothing. It was implemented to maintain compatibility with 3rd party MySQL client libraries, connectors, and frameworks that may need to run this statement when connecting. Example: mysql> SET TRANSACTION ISOLATION LEVEL READ UNCOMMITTED; Query OK, 0 rows affected (0.00 sec)

BEGIN, COMMIT, and ROLLBACK syntax START TRANSACTION | BEGIN COMMIT ROLLBACK SET AUTOCOMMIT = {0 | 1} BEGIN, COMMIT, and ROLLBACK statements were introduced in version 1.10-beta. BEGIN statement (or its START TRANSACTION alias) forcibly commits the pending transaction, if any, and begins a new one. COMMIT statement commits the current transaction, making all its changes permanent. ROLLBACK statement rolls back the current transaction, canceling all its changes. SET AUTOCOMMIT controls the autocommit mode in the active session. AUTOCOMMIT is set to 1 by default, meaning that every statement that performs any changes on any index is implicitly wrapped in BEGIN and COMMIT. Transactions are limited to a single RT index, and also limited in size. They are atomic, consistent, overly isolated, and durable. Overly isolated means that the changes are not only invisible to the concurrent transactions but even to the current session itself.
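For illustration, here is a minimal transaction session (a sketch only; it assumes an RT index named rt with full-text fields title and content and a uint attribute gid, as in the DESCRIBE example later in this chapter, so adjust the names to your configuration):

mysql> BEGIN;
Query OK, 0 rows affected (0.00 sec)

mysql> INSERT INTO rt (id, title, content, gid) VALUES (101, 'first doc', 'hello world', 1);
Query OK, 1 row affected (0.00 sec)

mysql> INSERT INTO rt (id, title, content, gid) VALUES (102, 'second doc', 'hello again', 1);
Query OK, 1 row affected (0.00 sec)

mysql> COMMIT;
Query OK, 0 rows affected (0.00 sec)

Until the COMMIT, neither inserted row is visible to other sessions (nor, because of the overly isolated behavior just described, to the current session itself).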
CALL SNIPPETS syntax CALL SNIPPETS(data, index, query[, opt_value AS opt_name[, ...]]) CALL SNIPPETS statement, introduced in version 1.10-beta, builds a snippet from provided data and query, using specified index settings. is the source data to extract a snippet from. It could be a single string, or the list of the strings enclosed in curly brackets. is the name of the index from which to take the text processing settings. is the full-text query to build snippets for. Additional options are documented in . Usage example: CALL SNIPPETS('this is my document text', 'test1', 'hello world', 5 AS around, 200 AS limit); CALL SNIPPETS(('this is my document text','this is my another text'), 'test1', 'hello world', 5 AS around, 200 AS limit); CALL SNIPPETS(('data/doc1.txt','data/doc2.txt','/home/sphinx/doc3.txt'), 'test1', 'hello world', 5 AS around, 200 AS limit, 1 AS load_files); CALL KEYWORDS syntax CALL KEYWORDS(text, index, [hits]) CALL KEYWORDS statement, introduced in version 1.10-beta, splits text into particular keywords. It returns tokenized and normalized forms of the keywords, and, optionally, keyword statistics. is the text to break down to keywords. is the name of the index from which to take the text processing settings. is an optional boolean parameter that specifies whether to return document and hit occurrence statistics. SHOW TABLES syntax SHOW TABLES SHOW TABLES statement, introduced in version 2.0.1-beta, enumerates all currently active indexes along with their types. As of 2.0.1-beta, existing index types are , , and respectively. Example: mysql> SHOW TABLES; +-------+-------------+ | Index | Type | +-------+-------------+ | dist1 | distributed | | rt | rt | | test1 | local | | test2 | local | +-------+-------------+ 4 rows in set (0.00 sec) DESCRIBE syntax {DESC | DESCRIBE} index DESCRIBE statement, introduced in version 2.0.1-beta, lists index columns and their associated types. Columns are document ID, full-text fields, and attributes. The order matches that in which fields and attributes are expected by INSERT and REPLACE statements. As of 2.0.1-beta, column types are , , , , , , , , and . ID column will be typed either or based on whether the binaries were built with 32-bit or 64-bit document ID support. Example: mysql> DESC rt; +---------+---------+ | Field | Type | +---------+---------+ | id | integer | | title | field | | content | field | | gid | integer | +---------+---------+ 4 rows in set (0.00 sec) CREATE FUNCTION syntax CREATE FUNCTION udf_name RETURNS {INT | BIGINT | FLOAT} SONAME 'udf_lib_file' CREATE FUNCTION statement, introduced in version 2.0.1-beta, installs a user-defined function (UDF) with the given name and type from the given library file. The library file must reside in a trusted plugin_dir directory. On success, the function is available for use in all subsequent queries that the server receives. Example: mysql> CREATE FUNCTION avgmva RETURNS INT SONAME 'udfexample.dll'; Query OK, 0 rows affected (0.03 sec) mysql> SELECT *, AVGMVA(tag) AS q from test1; +------+--------+---------+-----------+ | id | weight | tag | q | +------+--------+---------+-----------+ | 1 | 1 | 1,3,5,7 | 4.000000 | | 2 | 1 | 2,4,6 | 4.000000 | | 3 | 1 | 15 | 15.000000 | | 4 | 1 | 7,40 | 23.500000 | +------+--------+---------+-----------+ DROP FUNCTION syntax DROP FUNCTION udf_name DROP FUNCTION statement, introduced in version 2.0.1-beta, deinstalls a user-defined function (UDF) with the given name. On success, the function is no longer available for use in subsequent queries. 
Pending concurrent queries will not be affected and the library unload, if necessary, will be postponed until those queries complete. Example: mysql> DROP FUNCTION avgmva; Query OK, 0 rows affected (0.00 sec)

SHOW VARIABLES syntax SHOW [{GLOBAL | SESSION}] VARIABLES SHOW VARIABLES statement was added in version 2.0.1-beta to improve compatibility with 3rd party MySQL connectors and frameworks that automatically execute this statement. In version 2.0.1-beta, it did nothing. Starting from version 2.0.2-beta, it returns the current values of a few server-wide variables. Also, support for GLOBAL and SESSION clauses was added. mysql> SHOW GLOBAL VARIABLES; +----------------------+----------+ | Variable_name | Value | +----------------------+----------+ | autocommit | 1 | | collation_connection | libc_ci | | query_log_format | sphinxql | | log_level | info | +----------------------+----------+ 4 rows in set (0.00 sec)

SHOW COLLATION syntax SHOW COLLATION Added in version 2.0.1-beta, this is currently a placeholder query that does nothing and reports success. That is in order to keep compatibility with frameworks and connectors that automatically execute this statement. mysql> SHOW COLLATION; Query OK, 0 rows affected (0.00 sec)

UPDATE syntax UPDATE index SET col1 = newval1 [, ...] WHERE where_condition UPDATE statement was added in version 2.0.1-beta. Multiple attributes and values can be specified in a single statement. Both RT and disk indexes are supported. As of version 2.0.2-beta, all attribute types (int, bigint, float, MVA) except for strings can be updated. Previously, some of the types were not supported. where_condition (also added in 2.0.2-beta) has the same syntax as in the SELECT statement (see for details). When assigning out-of-range values to 32-bit attributes, they will be trimmed to their lower 32 bits without a prompt. For example, if you try to update a 32-bit unsigned int with a value of 4294967297, the value of 1 will actually be stored, because the lower 32 bits of 4294967297 (0x100000001 in hex) amount to 1 (0x00000001 in hex). MVA value sets for updating (and also for INSERT or REPLACE, refer to ) must be specified as comma-separated lists in parentheses. To erase the MVA value, just assign () to it. mysql> UPDATE myindex SET enabled=0 WHERE id=123; Query OK, 1 rows affected (0.00 sec) mysql> UPDATE myindex SET bigattr=-100000000000, fattr=3465.23, mvattr1=(3,6,4), mvattr2=() WHERE MATCH('hehe') AND enabled=1; Query OK, 148 rows affected (0.01 sec)

ATTACH INDEX syntax ATTACH INDEX diskindex TO RTINDEX rtindex ATTACH INDEX statement, added in version 2.0.2-beta, lets you move data from a regular disk index to an RT index. After a successful ATTACH, the data originally stored in the source disk index becomes a part of the target RT index, and the source disk index becomes unavailable (until the next rebuild). ATTACH does not result in any index data changes. Basically, it just renames the files (making the source index a new disk chunk of the target RT index), and updates the metadata. So it is generally a quick operation, which might (frequently) complete in under a second. Note that when an index is attached to an empty RT index, the fields, attributes, and text processing settings (tokenizer, wordforms, etc) from the source index are copied over and take effect. The respective parts of the RT index definition from the configuration file will be ignored. As of 2.0.2-beta, ATTACH INDEX comes with a number of restrictions.
Most notably, the target RT index is currently required to be empty, making ATTACH INDEX a one-time conversion operation only. Those restrictions may be lifted in future releases, as we add the needed functionality to the RT indexes. The complete list is as follows. Target RT index needs to be empty. Source disk index needs to have index_sp=0, boundary_step=0, stopword_step=1, dict=crc settings. Source disk index needs to have an empty index_zones setting. mysql> DESC rt; +-----------+---------+ | Field | Type | +-----------+---------+ | id | integer | | testfield | field | | testattr | uint | +-----------+---------+ 3 rows in set (0.00 sec) mysql> SELECT * FROM rt; Empty set (0.00 sec) mysql> SELECT * FROM disk WHERE MATCH('test'); +------+--------+----------+------------+ | id | weight | group_id | date_added | +------+--------+----------+------------+ | 1 | 1304 | 1 | 1313643256 | | 2 | 1304 | 1 | 1313643256 | | 3 | 1304 | 1 | 1313643256 | | 4 | 1304 | 1 | 1313643256 | +------+--------+----------+------------+ 4 rows in set (0.00 sec) mysql> ATTACH INDEX disk TO RTINDEX rt; Query OK, 0 rows affected (0.00 sec) mysql> DESC rt; +------------+-----------+ | Field | Type | +------------+-----------+ | id | integer | | title | field | | content | field | | group_id | uint | | date_added | timestamp | +------------+-----------+ 5 rows in set (0.00 sec) mysql> SELECT * FROM rt WHERE MATCH('test'); +------+--------+----------+------------+ | id | weight | group_id | date_added | +------+--------+----------+------------+ | 1 | 1304 | 1 | 1313643256 | | 2 | 1304 | 1 | 1313643256 | | 3 | 1304 | 1 | 1313643256 | | 4 | 1304 | 1 | 1313643256 | +------+--------+----------+------------+ 4 rows in set (0.00 sec) mysql> SELECT * FROM disk WHERE MATCH('test'); ERROR 1064 (42000): no enabled local indexes to search FLUSH RTINDEX syntax FLUSH RTINDEX rtindex FLUSH RTINDEX statement, added in version 2.0.2-beta, forcibly flushes RT index RAM chunk contents to disk. Backing up a RT index is as simple as copying over its data files, followed by the binary log. However, recovering from that backup means that all the transactions in the log since the last successful RAM chunk write would need to be replayed. Those writes normally happen either on a clean shutdown, or periodically with a (big enough!) interval between writes specified in rt_flush_period directive. So such a backup made at an arbitrary point in time just might end up with way too much binary log data to replay. FLUSH RTINDEX forcibly writes the RAM chunk contents to disk, and also causes the subsequent cleanup of (now-redundant) binary log files. Thus, recovering from a backup made just after FLUSH RTINDEX should be almost instant. mysql> FLUSH RTINDEX rt; Query OK, 0 rows affected (0.05 sec) Multi-statement queries Starting version 2.0.1-beta, SphinxQL supports multi-statement queries, or batches. Possible inter-statement optimizations described in do apply to SphinxQL just as well. The batched queries should be separated by a semicolon. Your MySQL client library needs to support MySQL multi-query mechanism and multiple result set. For instance, mysqli interface in PHP and DBI/DBD libraries in Perl are known to work. Here's a PHP sample showing how to utilize mysqli interface with Sphinx. Its output with the sample test1 index included with Sphinx is as follows. $ php test_multi.php id=1 id=2 id=3 id=4 ------ id=3 id=4 id=1 id=2 The following statements can currently be used in a batch: SELECT, SHOW WARNINGS, SHOW STATUS, and SHOW META. 
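For reference, here is a rough PHP sketch in the spirit of the sample mentioned above (not the bundled test_multi.php itself); it assumes searchd has a SphinxQL listener on localhost port 9306 (i.e. a listen = 9306:mysql41 line in sphinx.conf) and the test1 index from the example configuration:

<?php
// Sketch of a SphinxQL multi-statement batch issued through mysqli.
// Assumes a SphinxQL listener on 127.0.0.1:9306 and the sample test1 index.
$link = mysqli_connect ( "127.0.0.1", "", "", "", 9306 );
if ( !$link )
	die ( "connection failed: " . mysqli_connect_error() . "\n" );

// two SELECTs and a SHOW META, separated by semicolons, sent as one batch
$batch = "SELECT id FROM test1 WHERE MATCH('test') ORDER BY id ASC;";
$batch .= "SELECT id FROM test1 WHERE MATCH('test') ORDER BY id DESC;";
$batch .= "SHOW META";

if ( mysqli_multi_query ( $link, $batch ) )
{
	do
	{
		// print every row of every result set in the batch
		if ( $result = mysqli_store_result($link) )
		{
			while ( $row = mysqli_fetch_row($result) )
				print implode ( " ", $row ) . "\n";
			mysqli_free_result ( $result );
		}
		print "------\n";
	} while ( mysqli_more_results($link) && mysqli_next_result($link) );
}
mysqli_close ( $link );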
Arbitrary sequences of these statements are allowed. The result sets returned should match those that would be returned if the batched queries were sent one by one.

Comment syntax Since version 2.0.1-beta, SphinxQL supports C-style comment syntax. Everything from an opening /* sequence to a closing */ sequence is ignored. Comments can span multiple lines, cannot nest, and should not get logged. MySQL specific /*! ... */ comments are also currently ignored. (Comment support was added primarily for better compatibility with dumps produced by mysqldump, rather than to improve general query interoperability between Sphinx and MySQL.) SELECT /*! SQL_CALC_FOUND_ROWS */ col1 FROM table1 WHERE ...

List of SphinxQL reserved keywords A complete alphabetical list of keywords that are currently reserved in SphinxQL syntax (and therefore cannot be used as identifiers): AND AS ASC AVG BEGIN BETWEEN BY CALL COLLATION COMMIT COUNT DELETE DESC DESCRIBE DISTINCT FALSE FROM GLOBAL GROUP ID IN INSERT INTO LIMIT MATCH MAX META MIN NOT NULL OPTION OR ORDER REPLACE ROLLBACK SELECT SET SHOW START STATUS SUM TABLES TRANSACTION TRUE UPDATE VALUES VARIABLES WARNINGS WEIGHT WHERE WITHIN

SphinxQL upgrade notes, version 2.0.1-beta This section only applies to existing applications that use SphinxQL versions prior to 2.0.1-beta. In previous versions, SphinxQL just wrapped around SphinxAPI and inherited its magic columns and column set quirks. Essentially, SphinxQL queries could return (slightly) different columns and in a (slightly) different order than was explicitly requested in the query. Namely, the weight magic column (which is not a real column in any index) was added at all times, and the GROUP BY related @count, @group, and @distinct magic columns were conditionally added when grouping. Also, the order of columns (attributes) in the result set was actually taken from the index rather than the query. (So if you asked for columns C, B, A in your query but they were in the A, B, C order in the index, they would have been returned in the A, B, C order.) In version 2.0.1-beta, we fixed that. SphinxQL is now more SQL compliant (and will be brought into as much compliance with standard SQL syntax as possible). That is not yet a breaking change, because searchd now supports the compat_sphinxql_magics directive that flips between the old "compatibility" mode and the new "compliance" mode. However, the compatibility mode support is going to be removed in the future, so it's strongly advised to update SphinxQL applications and switch to the compliance mode. The important changes are as follows: @ID magic name is deprecated in favor of ID. Document ID is considered an attribute. WEIGHT is no longer implicitly returned, because it is not actually a column (an index attribute), but rather an internal function computed per each row (a match). You have to explicitly ask for it, using the WEIGHT() function. (The requirement to alias the result will be lifted in the next release.) SELECT id, WEIGHT() w FROM myindex WHERE MATCH('test') You can now use quoted reserved keywords as aliases. The quote character is backtick ("`", ASCII code 96 decimal, 60 hex). One particularly useful example would be returning the weight column like in the old mode: SELECT id, WEIGHT() `weight` FROM myindex WHERE MATCH('test') The column order is now different and should now match the one explicitly defined in the query.
So if you are accessing columns based on their position in the result set rather than the name (for instance, by using mysql_fetch_row() rather than mysql_fetch_assoc() in PHP), check and fix the order of columns in your queries. SELECT * returns the columns in index order, as it used to, including the ID column. However, SELECT * does not automatically return WEIGHT(). To update such queries in case you access columns by names, simply add it to the query: SELECT *, WEIGHT() `weight` FROM myindex WHERE MATCH('test') Otherwise, i.e., in case you rely on column order, select ID, weight, and then other columns: SELECT id, *, WEIGHT() `weight` FROM myindex WHERE MATCH('test') Magic @count and @distinct attributes are no longer implicitly returned. You now have to explicitly ask for them when using GROUP BY. (Also note that you currently have to alias them; that requirement will be lifted in the future.) SELECT gid, COUNT(*) q FROM myindex WHERE MATCH('test') GROUP BY gid ORDER BY q DESC

API reference

There are a number of native searchd client API implementations for Sphinx. As of the time of this writing, we officially support our own PHP, Python, and Java implementations. There are also free, open-source third-party API implementations for Perl, Ruby, and C++. The reference API implementation is in PHP, because (we believe) Sphinx is more widely used with PHP than with any other language. This reference documentation is in turn based on the reference PHP API, and all code samples in this section will be given in PHP. However, all other APIs provide the same methods and implement the very same network protocol. Therefore the documentation does apply to them as well. There might be minor differences as to the method naming conventions or specific data structures used. But the provided functionality must not differ across languages.

General API functions

GetLastError Prototype: function GetLastError() Returns the last error message, as a string, in human readable format. If there were no errors during the previous API call, an empty string is returned. You should call it when any other function (such as Query()) fails (typically, the failing function returns false). The returned string will contain the error description. The error message is not reset by this call; so you can safely call it several times if needed.

GetLastWarning Prototype: function GetLastWarning () Returns the last warning message, as a string, in human readable format. If there were no warnings during the previous API call, an empty string is returned. You should call it to verify whether your request (such as Query()) was completed but with warnings. For instance, a search query against a distributed index might complete successfully even if several remote agents timed out. In that case, a warning message would be produced. The warning message is not reset by this call; so you can safely call it several times if needed.

SetServer Prototype: function SetServer ( $host, $port ) Sets searchd host name and TCP port. All subsequent requests will use the new host and port settings. Default host and port are 'localhost' and 9312, respectively.

SetRetries Prototype: function SetRetries ( $count, $delay=0 ) Sets distributed retry count and delay. On temporary failures searchd will attempt up to $count retries per agent. $delay is the delay between the retries, in milliseconds. Retries are disabled by default. Note that this call will not make the API itself retry on temporary failure; it only tells searchd to do so.
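To illustrate the calls described so far, here is a hedged connection-and-error-handling sketch (host, port, retry values, and the query text are placeholders; sphinxapi.php is the PHP API file shipped with Sphinx):

require ( "sphinxapi.php" );

$cl = new SphinxClient ();
$cl->SetServer ( "localhost", 9312 );	// searchd host and API port (defaults shown)
$cl->SetRetries ( 3, 500 );		// up to 3 distributed retries per agent, 500 ms apart

$res = $cl->Query ( "test" );		// an index name could be passed as the 2nd argument
if ( $res===false )
	print "query failed: " . $cl->GetLastError() . "\n";
else if ( $cl->GetLastWarning() )
	print "warning: " . $cl->GetLastWarning() . "\n";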
Currently, the list of temporary failures includes all kinds of connect() failures and maxed out (too busy) remote agents.

SetConnectTimeout Prototype: function SetConnectTimeout ( $timeout ) Sets the time allowed to spend connecting to the server before giving up. Under some circumstances, the server can be delayed in responding, either due to network delays, or a query backlog. In either instance, this allows the client application programmer some degree of control over how their program interacts with searchd when it is not available, and can ensure that the client application does not fail due to exceeding the script execution limits (especially in PHP). In the event of a failure to connect, an appropriate error code should be returned back to the application in order for application-level error handling to advise the user.

SetArrayResult Prototype: function SetArrayResult ( $arrayresult ) PHP specific. Controls matches format in the search results set (whether matches should be returned as an array or a hash). $arrayresult argument must be boolean. If $arrayresult is false (the default mode), matches will be returned in PHP hash format with document IDs as keys, and other information (weight, attributes) as values. If $arrayresult is true, matches will be returned as a plain array with complete per-match information including document ID. Introduced along with GROUP BY support on MVA attributes. Group-by-MVA result sets may contain duplicate document IDs. Thus they need to be returned as plain arrays, because hashes will only keep one entry per document ID.

IsConnectError Prototype: function IsConnectError () Checks whether the last error was a network error on the API side, or a remote error reported by searchd. Returns true if the last connection attempt to searchd failed on the API side, false otherwise (if the error was remote, or there were no connection attempts at all). Introduced in version 0.9.9-rc1.

General query settings

SetLimits Prototype: function SetLimits ( $offset, $limit, $max_matches=0, $cutoff=0 ) Sets offset into server-side result set ($offset) and amount of matches to return to client starting from that offset ($limit). Can additionally control maximum server-side result set size for current query ($max_matches) and the threshold amount of matches to stop searching at ($cutoff). All parameters must be non-negative integers. First two parameters to SetLimits() are identical in behavior to MySQL LIMIT clause. They instruct searchd to return at most $limit matches starting from match number $offset. The default offset and limit settings are 0 and 20, that is, to return the first 20 matches. max_matches setting controls how many matches searchd will keep in RAM while searching. All matching documents will be normally processed, ranked, filtered, and sorted even if max_matches is set to 1. But only the best N documents are stored in memory at any given moment for performance and RAM usage reasons, and this setting controls that N. Note that there are two places where the max_matches limit is enforced. The per-query limit is controlled by this API call, but there also is a per-server limit controlled by the max_matches setting in the config file. To prevent RAM usage abuse, the server will not allow setting the per-query limit higher than the per-server limit. You can't retrieve more than max_matches matches to the client application. The default limit is set to 1000. Normally, you should not need to go over this limit. One thousand records is enough to present to the end user.
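To illustrate the offset and limit parameters, here is a hedged pagination sketch (index name and query text are placeholders):

require ( "sphinxapi.php" );

$cl = new SphinxClient ();
$page = 3;				// 1-based page number requested by the user
$perpage = 20;				// matches per page

// return $perpage matches starting at the offset for that page (here: offset 40, limit 20)
$cl->SetLimits ( ($page-1)*$perpage, $perpage );
$res = $cl->Query ( "test query", "myindex" );

With the default max_matches of 1000, a scheme like this can page through at most the first 50 pages of 20 results.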
And if you're thinking about pulling the results to the application for further sorting or filtering, that would be much more efficient if performed on the Sphinx side. $cutoff setting is intended for advanced performance control. It tells searchd to forcibly stop the search query once $cutoff matches have been found and processed.

SetMaxQueryTime Prototype: function SetMaxQueryTime ( $max_query_time ) Sets maximum search query time, in milliseconds. Parameter must be a non-negative integer. Default value is 0 which means "do not limit". Similar to the $cutoff setting from SetLimits(), but limits elapsed query time instead of processed matches count. Local search queries will be stopped once that much time has elapsed. Note that if you're performing a search which queries several local indexes, this limit applies to each index separately.

SetOverride Prototype: function SetOverride ( $attrname, $attrtype, $values ) Sets temporary (per-query) per-document attribute value overrides. Only supports scalar attributes. $values must be a hash that maps document IDs to overridden attribute values. Introduced in version 0.9.9-rc1. The override feature lets you temporarily update attribute values for some documents within a single query, leaving all other queries unaffected. This might be useful for personalized data. For example, assume you're implementing a personalized search function that wants to boost the posts that the user's friends recommend. Such data is not just dynamic, but also personal; so you can't simply put it in the index because you don't want everyone's searches affected. Overrides, on the other hand, are local to a single query and invisible to everyone else. So you can, say, set up a "friends_weight" value for every document, defaulting to 0, then temporarily override it with 1 for documents 123, 456 and 789 (recommended by exactly the friends of the current user), and use that value when ranking.

SetSelect Prototype: function SetSelect ( $clause ) Sets the select clause, listing specific attributes to fetch, and expressions to compute and fetch. Clause syntax mimics SQL. Introduced in version 0.9.9-rc1. SetSelect() is very similar to the part of a typical SQL query between SELECT and FROM. It lets you choose what attributes (columns) to fetch, and also what expressions over the columns to compute and fetch. A certain difference from SQL is that expressions must always be aliased to a correct identifier (consisting of letters and digits) using the 'AS' keyword. SQL also lets you do that but does not require it. Sphinx enforces aliases so that the computation results can always be returned under a "normal" name in the result set, used in other clauses, etc. Everything else is basically identical to SQL. Star ('*') is supported. Functions are supported. An arbitrary number of expressions is supported. Computed expressions can be used for sorting, filtering, and grouping, just as the regular attributes. Starting with version 0.9.9-rc2, aggregate functions (AVG(), MIN(), MAX(), SUM()) are supported when using GROUP BY. Expression sorting () and geodistance functions () are now internally implemented using this computed expressions mechanism, using the magic names '@expr' and '@geodist' respectively.
Example: $cl->SetSelect ( "*, @weight+(user_karma+ln(pageviews))*0.1 AS myweight" ); $cl->SetSelect ( "exp_years, salary_gbp*{$gbp_usd_rate} AS salary_usd, IF(age>40,1,0) AS over40" ); $cl->SetSelect ( "*, AVG(price) AS avgprice" );

Full-text search query settings

SetMatchMode Prototype: function SetMatchMode ( $mode ) Sets full-text query matching mode, as described in . Parameter must be a constant specifying one of the known modes. WARNING: (PHP specific) you must not put the matching mode constant name in quotes; that syntax specifies a string and is incorrect: $cl->SetMatchMode ( "SPH_MATCH_ANY" ); // INCORRECT! will not work as expected $cl->SetMatchMode ( SPH_MATCH_ANY ); // correct, works OK

SetRankingMode Prototype: function SetRankingMode ( $ranker, $rankexpr="" ) Sets ranking mode (aka ranker). Only available in SPH_MATCH_EXTENDED matching mode. Parameter must be a constant specifying one of the known rankers. By default, in the EXTENDED matching mode Sphinx computes two factors which contribute to the final match weight. The major part is a phrase proximity value between the document text and the query. The minor part is the so-called BM25 statistical function, which varies from 0 to 1 depending on the keyword frequency within the document (more occurrences yield higher weight) and within the whole index (rarer keywords yield higher weight). However, in some cases you'd want to compute weight differently - or maybe avoid computing it at all for performance reasons because you're sorting the result set by something else anyway. This can be accomplished by setting the appropriate ranking mode. The list of the modes is available in . The $rankexpr argument was added in version 2.0.2-beta. It lets you specify a ranking formula to use with the expression based ranker, that is, when $ranker is set to SPH_RANK_EXPR. In all other cases, $rankexpr is ignored.

SetSortMode Prototype: function SetSortMode ( $mode, $sortby="" ) Sets matches sorting mode, as described in . Parameter must be a constant specifying one of the known modes. WARNING: (PHP specific) you must not put the sort mode constant name in quotes; that syntax specifies a string and is incorrect: $cl->SetSortMode ( "SPH_SORT_ATTR_DESC" ); // INCORRECT! will not work as expected $cl->SetSortMode ( SPH_SORT_ATTR_ASC ); // correct, works OK

SetWeights Prototype: function SetWeights ( $weights ) Binds per-field weights in the order of appearance in the index. DEPRECATED, use SetFieldWeights() instead.

SetFieldWeights Prototype: function SetFieldWeights ( $weights ) Binds per-field weights by name. Parameter must be a hash (associative array) mapping string field names to integer weights. Match ranking can be affected by per-field weights. For instance, see for an explanation of how phrase proximity ranking is affected. This call lets you specify what non-default weights to assign to different full-text fields. The weights must be positive 32-bit integers. The final weight will be a 32-bit integer too. Default weight value is 1. Unknown field names will be silently ignored. There is no enforced limit on the maximum weight value at the moment. However, beware that if you set it too high you can start hitting 32-bit wraparound issues. For instance, if you set a weight of 10,000,000 and search in extended mode, then the maximum possible weight will be equal to 10 million (your weight) multiplied by 1 thousand (internal BM25 scaling factor, see ) multiplied by 1 or more (phrase proximity rank).
The result is at least 10 billion, which does not fit in 32 bits and will be wrapped around, producing unexpected results.

SetIndexWeights Prototype: function SetIndexWeights ( $weights ) Sets per-index weights, and enables weighted summing of match weights across different indexes. Parameter must be a hash (associative array) mapping string index names to integer weights. Default is an empty array, which means to disable weighted summing. When a match with the same document ID is found in several different local indexes, by default Sphinx simply chooses the match from the index specified last in the query. This is to support searching through partially overlapping index partitions. However in some cases the indexes are not just partitions, and you might want to sum the weights across the indexes instead of picking one. SetIndexWeights() lets you do that. With summing enabled, final match weight in the result set will be computed as a sum of the match weight coming from the given index multiplied by the respective per-index weight specified in this call. I.e. if document 123 is found in index A with the weight of 2, and also in index B with the weight of 3, and you called SetIndexWeights ( array ( "A"=>100, "B"=>10 ) ), the final weight returned to the client will be 2*100+3*10 = 230.

Result set filtering settings

SetIDRange Prototype: function SetIDRange ( $min, $max ) Sets an accepted range of document IDs. Parameters must be integers. Defaults are 0 and 0; that combination means to not limit by range. After this call, only those records that have document ID between $min and $max (including IDs exactly equal to $min or $max) will be matched.

SetFilter Prototype: function SetFilter ( $attribute, $values, $exclude=false ) Adds a new integer values set filter. On this call, an additional new filter is added to the existing list of filters. $attribute must be a string with the attribute name. $values must be a plain array containing integer values. $exclude must be a boolean value; it controls whether to accept the matching documents (default mode, when $exclude is false) or reject them. Only those documents where the $attribute column value stored in the index matches any of the values from the $values array will be matched (or rejected, if $exclude is true).

SetFilterRange Prototype: function SetFilterRange ( $attribute, $min, $max, $exclude=false ) Adds a new integer range filter. On this call, an additional new filter is added to the existing list of filters. $attribute must be a string with the attribute name. $min and $max must be integers that define the acceptable attribute values range (including the boundaries). $exclude must be a boolean value; it controls whether to accept the matching documents (default mode, when $exclude is false) or reject them. Only those documents where the $attribute column value stored in the index is between $min and $max (including values that are exactly equal to $min or $max) will be matched (or rejected, if $exclude is true).

SetFilterFloatRange Prototype: function SetFilterFloatRange ( $attribute, $min, $max, $exclude=false ) Adds a new float range filter. On this call, an additional new filter is added to the existing list of filters. $attribute must be a string with the attribute name. $min and $max must be floats that define the acceptable attribute values range (including the boundaries). $exclude must be a boolean value; it controls whether to accept the matching documents (default mode, when $exclude is false) or reject them.
Only those documents where the $attribute column value stored in the index is between $min and $max (including values that are exactly equal to $min or $max) will be matched (or rejected, if $exclude is true).

SetGeoAnchor Prototype: function SetGeoAnchor ( $attrlat, $attrlong, $lat, $long ) Sets the anchor point for geosphere distance (geodistance) calculations, and enables them. $attrlat and $attrlong must be strings that contain the names of latitude and longitude attributes, respectively. $lat and $long are floats that specify anchor point latitude and longitude, in radians. Once an anchor point is set, you can use the magic "@geodist" attribute name in your filters and/or sorting expressions. Sphinx will compute the geosphere distance between the given anchor point and a point specified by the latitude and longitude attributes from each full-text match, and attach this value to the resulting match. The latitude and longitude values both in SetGeoAnchor and the index attribute data are expected to be in radians. The result will be returned in meters, so a geodistance value of 1000.0 means 1 km. 1 mile is approximately 1609.344 meters.

GROUP BY settings

SetGroupBy Prototype: function SetGroupBy ( $attribute, $func, $groupsort="@group desc" ) Sets grouping attribute, function, and groups sorting mode; and enables grouping (as described in ). $attribute is a string that contains the group-by attribute name. $func is a constant that chooses a function applied to the attribute value in order to compute the group-by key. $groupsort is a clause that controls how the groups will be sorted. Its syntax is similar to that described in . The grouping feature is very similar in nature to the GROUP BY clause from SQL. Results produced by this function call are going to be the same as produced by the following pseudo code: SELECT ... GROUP BY $func($attribute) ORDER BY $groupsort Note that it's $groupsort that affects the order of matches in the final result set. The sorting mode (see ) affects the ordering of matches within a group, i.e. which match will be selected as the best one from the group. So you can for instance order the groups by matches count and select the most relevant match within each group at the same time. Starting with version 0.9.9-rc2, aggregate functions (AVG(), MIN(), MAX(), SUM()) are supported through the SetSelect() API call when using GROUP BY. Starting with version 2.0.1-beta, grouping on string attributes is supported, with respect to the current collation.

SetGroupDistinct Prototype: function SetGroupDistinct ( $attribute ) Sets the attribute name for per-group distinct values count calculations. Only available for grouping queries. $attribute is a string that contains the attribute name. For each group, all values of this attribute will be stored (as RAM limits permit), then the amount of distinct values will be calculated and returned to the client. This feature is similar to the COUNT(DISTINCT) clause in standard SQL; so these Sphinx calls: $cl->SetGroupBy ( "category", SPH_GROUPBY_ATTR, "@count desc" ); $cl->SetGroupDistinct ( "vendor" ); can be expressed using the following SQL clauses: SELECT id, weight, all-attributes, COUNT(DISTINCT vendor) AS @distinct, COUNT(*) AS @count FROM products GROUP BY category ORDER BY @count DESC In the sample pseudo code shown just above, the SetGroupDistinct() call corresponds to the COUNT(DISTINCT vendor) clause only. GROUP BY, ORDER BY, and COUNT(*) clauses are all an equivalent of the SetGroupBy() settings. Both queries will return one matching row for each category.
In addition to indexed attributes, matches will also contain the total per-category matches count, and the count of distinct vendor IDs within each category.

Querying

Query Prototype: function Query ( $query, $index="*", $comment="" ) Connects to searchd server, runs the given search query with current settings, obtains and returns the result set. $query is a query string. $index is an index name (or names) string. Returns false and sets GetLastError() message on general error. Returns search result set on success. Additionally, the contents of $comment are sent to the query log, marked in square brackets, just before the search terms, which can be very useful for debugging. Currently, the comment is limited to 128 characters. Default value for $index is "*" which means to query all local indexes. Characters allowed in index names include Latin letters (a-z), numbers (0-9), minus sign (-), and underscore (_); everything else is considered a separator. Therefore, all of the following sample calls are valid and will search the same two indexes: $cl->Query ( "test query", "main delta" ); $cl->Query ( "test query", "main;delta" ); $cl->Query ( "test query", "main, delta" ); Index specification order matters. If documents with identical IDs are found in two or more indexes, weight and attribute values from the very last matching index will be used for sorting and returning to client (unless explicitly overridden with SetIndexWeights()). Therefore, in the example above, matches from "delta" index will always win over matches from "main". On success, Query() returns a result set that contains some of the found matches (as requested by SetLimits()) and additional general per-query statistics. The result set is a hash (PHP specific; other languages might utilize other structures instead of hash) with the following keys and values: "matches": Hash which maps found document IDs to another small hash containing document weight and attribute values (or an array of the similar small hashes if SetArrayResult() was enabled). "total": Total amount of matches retrieved on server (i.e. to the server-side result set) by this query. You can retrieve up to this amount of matches from server for this query text with current query settings. "total_found": Total amount of matching documents in index (that were found and processed on the server). "words": Hash which maps query keywords (case-folded, stemmed, and otherwise processed) to a small hash with per-keyword statistics ("docs", "hits"). "error": Query error message reported by searchd (string, human readable). Empty if there were no errors. "warning": Query warning message reported by searchd (string, human readable). Empty if there were no warnings. It should be noted that Query() carries out the same actions as AddQuery() and RunQueries() without the intermediate steps; it is analogous to a single AddQuery() call, followed by a corresponding RunQueries(), then returning the first array element of matches (from the first, and only, query.)

AddQuery Prototype: function AddQuery ( $query, $index="*", $comment="" ) Adds an additional query with current settings to the multi-query batch. $query is a query string. $index is an index name (or names) string. Additionally if provided, the contents of $comment are sent to the query log, marked in square brackets, just before the search terms, which can be very useful for debugging. Currently, this is limited to 128 characters. Returns index to results array returned from RunQueries().
Batch queries (or multi-queries) enable searchd to perform internal optimizations if possible. They also reduce network connection overheads and search process creation overheads in all cases. They do not result in any additional overheads compared to simple queries. Thus, if you run several different queries from your web page, you should always consider using multi-queries. For instance, running the same full-text query but with different sorting or group-by settings will enable searchd to perform the expensive full-text search and ranking operation only once, but compute multiple group-by results from its output. This can be a big saver when you need to display not just plain search results but also some per-category counts, such as the amount of products grouped by vendor. Without multi-query, you would have to run several queries which perform essentially the same search and retrieve the same matches, but create result sets differently. With multi-query, you simply pass all these queries in a single batch and Sphinx optimizes the redundant full-text search internally. AddQuery() internally saves the full current settings state along with the query, and you can safely change them afterwards for subsequent AddQuery() calls. Already added queries will not be affected; there's actually no way to change them at all. Here's an example: $cl->SetSortMode ( SPH_SORT_RELEVANCE ); $cl->AddQuery ( "hello world", "documents" ); $cl->SetSortMode ( SPH_SORT_ATTR_DESC, "price" ); $cl->AddQuery ( "ipod", "products" ); $cl->AddQuery ( "harry potter", "books" ); $results = $cl->RunQueries (); With the code above, the 1st query will search for "hello world" in the "documents" index and sort results by relevance, the 2nd query will search for "ipod" in the "products" index and sort results by price, and the 3rd query will search for "harry potter" in the "books" index while still sorting by price. Note that the 2nd SetSortMode() call does not affect the first query (because it's already added) but affects both subsequent queries. Additionally, any filters set up before an AddQuery() will fall through to subsequent queries. So, if SetFilter() is called before the first query, the same filter will be in place for the second (and subsequent) queries batched through AddQuery() unless you call ResetFilters() first. Alternatively, you can add additional filters as well. This is also true for grouping options and sorting options; no current sorting, filtering, and grouping settings are affected by this call, so subsequent queries will reuse the current query settings. AddQuery() returns an index into the array of results that will be returned from the RunQueries() call. It is simply a sequentially increasing 0-based integer, ie. the first call will return 0, the second will return 1, and so on. It is just a small helper so you won't have to track the indexes manually if you need them. RunQueries Prototype: function RunQueries () Connects to searchd, runs a batch of all queries added using AddQuery(), obtains and returns the result sets. Returns false and sets GetLastError() message on general error (such as network I/O failure). Returns a plain array of result sets on success. Each result set in the returned array is exactly the same as the result set returned from Query(). Note that the batch query request itself almost always succeeds - unless there's a network error, blocking index rotation in progress, or another general failure which prevents the whole request from being processed. However, individual queries within the batch might very well fail.
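Because individual queries can fail even when the batch itself succeeds, it is good practice to check each returned result set; a minimal sketch, continuing the batch example above:

$results = $cl->RunQueries ();
if ( $results===false )
{
	print "Batch failed: " . $cl->GetLastError() . "\n";
} else
{
	foreach ( $results as $i => $res )
	{
		if ( $res["error"] )
			print "Query $i failed: " . $res["error"] . "\n";
		else
			print "Query $i returned " . $res["total_found"] . " matches\n";
	}
}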
A failed query's result set will contain a non-empty "error" message, but no matches or query statistics. In the extreme case all queries within the batch could fail. There still will be no general error reported, because the API was able to successfully connect to searchd, submit the batch, and receive the results - but every result set will have a specific error message. ResetFilters Prototype: function ResetFilters () Clears all currently set filters. This call is normally only required when using multi-queries. You might want to set different filters for different queries in the batch. To do that, you should call ResetFilters() and add new filters using the respective calls. ResetGroupBy Prototype: function ResetGroupBy () Clears all currently set group-by settings, and disables group-by. This call is normally only required when using multi-queries. You can change individual group-by settings using SetGroupBy() and SetGroupDistinct() calls, but you can not disable group-by using those calls. ResetGroupBy() fully resets previous group-by settings and disables group-by mode in the current state, so that subsequent AddQuery() calls can perform non-grouping searches. Additional functionality BuildExcerpts Prototype: function BuildExcerpts ( $docs, $index, $words, $opts=array() ) Excerpts (snippets) builder function. Connects to searchd, asks it to generate excerpts (snippets) from the given documents, and returns the results. $docs is a plain array of strings that carry the documents' contents. $index is an index name string. Different settings (such as charset, morphology, wordforms) from the given index will be used. $words is a string that contains the keywords to highlight. They will be processed with respect to index settings. For instance, if English stemming is enabled in the index, "shoes" will be highlighted even if the keyword is "shoe". Starting with version 0.9.9-rc1, keywords can contain wildcards, which work similarly to the star-syntax available in queries. $opts is a hash which contains additional optional highlighting parameters: "before_match": A string to insert before a keyword match. Starting with version 1.10-beta, a %PASSAGE_ID% macro can be used in this string. The macro is replaced with an incrementing passage number within the current snippet. Numbering starts at 1 by default but can be overridden with the "start_passage_id" option. In a multi-document call, %PASSAGE_ID% restarts at every given document. Default is "<b>". "after_match": A string to insert after a keyword match. Starting with version 1.10-beta, a %PASSAGE_ID% macro can be used in this string. Default is "</b>". "chunk_separator": A string to insert between snippet chunks (passages). Default is " ... ". "limit": Maximum snippet size, in symbols (codepoints). Integer, default is 256. "around": How many words to pick around each matching keyword block. Integer, default is 5. "exact_phrase": Whether to highlight exact query phrase matches only instead of individual keywords. Boolean, default is false. "single_passage": Whether to extract the single best passage only. Boolean, default is false. "use_boundaries": Whether to additionally break passages by phrase boundary characters, as configured in index settings with the phrase_boundary directive. Boolean, default is false. "weight_order": Whether to sort the extracted passages in order of relevance (decreasing weight), or in order of appearance in the document (increasing position). Boolean, default is false. "query_mode": Added in version 1.10-beta.
Whether to handle $words as a query in extended syntax, or as a bag of words (default behavior). For instance, in query mode ("one two" | "three four") will only highlight and include occurrences of "one two" or "three four" when the two words from each pair are adjacent to each other. In default mode, any single occurrence of "one", "two", "three", or "four" would be highlighted. Boolean, default is false. "force_all_words": Added in version 1.10-beta. Ignores the snippet length limit until it includes all the keywords. Boolean, default is false. "limit_passages": Added in version 1.10-beta. Limits the maximum number of passages that can be included into the snippet. Integer, default is 0 (no limit). "limit_words": Added in version 1.10-beta. Limits the maximum number of keywords that can be included into the snippet. Integer, default is 0 (no limit). "start_passage_id": Added in version 1.10-beta. Specifies the starting value of the %PASSAGE_ID% macro (that gets detected and expanded in the "before_match" and "after_match" strings). Integer, default is 1. "load_files": Added in version 1.10-beta. Whether to handle $docs as data to extract snippets from (default behavior), or to treat it as file names, and load data from the specified files on the server side. Starting with version 2.0.1-beta, up to dist_threads worker threads per request will be created to parallelize the work when this flag is enabled. Boolean, default is false. Starting with version 2.0.2-beta, building of the snippets can be parallelized between remote agents. Just set the 'dist_threads' param in the config to a value greater than 1, and then invoke the snippets generation over a distributed index, which must contain one (and only one) local agent listed first, and several remote agents. "load_files_scattered": Added in version 2.0.2-beta. It works only with distributed snippets generation with remote agents. The source files for snippets could be distributed among different agents, and the main daemon will merge together all non-erroneous results. So, if one agent of the distributed index has 'file1.txt', another has 'file2.txt' and you call for the snippets with both these files, Sphinx will merge results from the agents together, so you will get the snippets from both 'file1.txt' and 'file2.txt'. Boolean, default is false. If "load_files" is also set, the request will return an error if any of the files is not available anywhere. Otherwise (if "load_files" is not set) it will just return empty strings for all absent files. The master instance resets this flag when distributing the snippets among agents. So, for the agents, the absence of a file is not a critical error, but for the master it might be. If you want to be sure that all snippets are actually created, set both "load_files_scattered" and "load_files". If the absence of some snippets caused by some agents is not critical for you, set just "load_files_scattered" and leave "load_files" unset. "html_strip_mode": Added in version 1.10-beta. HTML stripping mode setting. Defaults to "index", which means that index settings will be used. The other values are "none" and "strip", which forcibly skip or apply stripping regardless of index settings; and "retain", which retains HTML markup and protects it from highlighting. The "retain" mode can only be used when highlighting full documents and thus requires that no snippet size limits are set. String, allowed values are "none", "strip", "index", and "retain". "allow_empty": Added in version 1.10-beta.
Allows an empty string to be returned as the highlighting result when a snippet could not be generated (no keywords match, or no passages fit the limit). By default, the beginning of the original text would be returned instead of an empty string. Boolean, default is false. "passage_boundary": Added in version 2.0.1-beta. Ensures that passages do not cross a sentence, paragraph, or zone boundary (when used with an index that has the respective indexing settings enabled). String, allowed values are "sentence", "paragraph", and "zone". "emit_zones": Added in version 2.0.1-beta. Emits an HTML tag with the enclosing zone name before each passage. Boolean, default is false. The snippets extraction algorithm currently favors better passages (with closer phrase matches), and then passages with keywords not yet in the snippet. Generally, it will try to highlight the best match with the query, and it will also try to highlight all the query keywords, as far as the limits allow. In case the document does not match the query, the beginning of the document trimmed down according to the limits will be returned by default. Starting with 1.10-beta, you can also return an empty snippet instead in that case by setting the "allow_empty" option to true. Returns false on failure. Returns a plain array of strings with excerpts (snippets) on success. (A usage sketch is shown after the BuildKeywords() prototype below.) UpdateAttributes Prototype: function UpdateAttributes ( $index, $attrs, $values ) Instantly updates given attribute values in given documents. Returns the number of actually updated documents (0 or more) on success, or -1 on failure. $index is the name of the index (or indexes) to be updated. $attrs is a plain array with string attribute names, listing the attributes that are updated. $values is a hash where the key is a document ID, and the value is a plain array of new attribute values. $index can be either a single index name or a list, like in Query(). Unlike Query(), a wildcard is not allowed and all the indexes to update must be specified explicitly. The list of indexes can include distributed index names. Updates on distributed indexes will be pushed to all agents. The updates only work with the docinfo=extern storage strategy. They are very fast because they work fully in RAM, but they can also be made persistent: updates are saved on disk on clean searchd shutdown initiated by a SIGTERM signal. With additional restrictions, updates are also possible on MVA attributes; refer to the mva_updates_pool directive for details. Usage example: $cl->UpdateAttributes ( "test1", array("group_id"), array(1=>array(456)) ); $cl->UpdateAttributes ( "products", array ( "price", "amount_in_stock" ), array ( 1001=>array(123,5), 1002=>array(37,11), 1003=>array(25,129) ) ); The first sample statement will update document 1 in index "test1", setting "group_id" to 456. The second one will update documents 1001, 1002 and 1003 in index "products". For document 1001, the new price will be set to 123 and the new amount in stock to 5; for document 1002, the new price will be 37 and the new amount will be 11; etc. BuildKeywords Prototype: function BuildKeywords ( $query, $index, $hits ) Extracts keywords from the query using tokenizer settings for the given index, optionally with per-keyword occurrence statistics. Returns an array of hashes with per-keyword information. $query is a query to extract keywords from. $index is the name of the index to get tokenizing settings and keyword occurrence statistics from. $hits is a boolean flag that indicates whether keyword occurrence statistics are required.
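Returning to BuildExcerpts(), here is a minimal usage sketch; the document strings, the "test1" index name, and the option values are illustrative:

$docs = array ( "this is my test text to be highlighted", "another test text with more words" );
$opts = array ( "before_match" => "<b>", "after_match" => "</b>", "around" => 3 );
$snippets = $cl->BuildExcerpts ( $docs, "test1", "test text", $opts );
if ( $snippets===false )
	print "ERROR: " . $cl->GetLastError() . "\n";
else
	foreach ( $snippets as $snippet )
		print "$snippet\n";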
A BuildKeywords() usage example: $keywords = $cl->BuildKeywords ( "this.is.my query", "test1", false ); EscapeString Prototype: function EscapeString ( $string ) Escapes characters that are treated as special operators by the query language parser. Returns an escaped string. $string is a string to escape. This function might seem redundant because it's trivial to implement in any calling application. However, as the set of special characters might change over time, it makes sense to have an API call that is guaranteed to escape all such characters at all times. Usage example: $escaped = $cl->EscapeString ( "escaping-sample@query/string" ); Status Prototype: function Status () Queries searchd status, and returns an array of status variable name and value pairs. Usage example: $status = $cl->Status (); foreach ( $status as $row ) print join ( ": ", $row ) . "\n"; FlushAttributes Prototype: function FlushAttributes () Forces searchd to flush pending attribute updates to disk, and blocks until completion. Returns a non-negative internal "flush tag" on success. Returns -1 and sets an error message on error. Introduced in version 1.10-beta. Attribute values updated using the UpdateAttributes() API call are only kept in RAM until a so-called flush (which writes the current, possibly updated attribute values back to disk). The FlushAttributes() call lets you enforce a flush. The call will block until searchd finishes writing the data to disk, which might take seconds or even minutes depending on the total data size (.spa file size). All the currently updated indexes will be flushed. The flush tag should be treated as an ever-growing magic number that does not mean anything by itself. It's guaranteed to be non-negative. It is guaranteed to grow over time, though not necessarily in a sequential fashion; for instance, two calls that return 10 and then 1000 respectively are a valid situation. If two calls to FlushAttributes() return the same tag, it means that there were no actual attribute updates in between them, and therefore the current flushed state remained the same (for all indexes). Usage example: $status = $cl->FlushAttributes (); if ( $status<0 ) print "ERROR: " . $cl->GetLastError(); Persistent connections Persistent connections allow using a single network connection to run multiple commands that would otherwise require reconnects. Open Prototype: function Open () Opens a persistent connection to the server. Close Prototype: function Close () Closes a previously opened persistent connection. MySQL storage engine (SphinxSE) SphinxSE overview SphinxSE is a MySQL storage engine which can be compiled into MySQL server 5.x using its pluggable architecture. It is not available for the MySQL 4.x series. It also requires MySQL 5.0.22 or higher in the 5.0.x series, or MySQL 5.1.12 or higher in the 5.1.x series. Despite the name, SphinxSE does not actually store any data itself. It is actually a built-in client which allows MySQL server to talk to searchd, run search queries, and obtain search results. All indexing and searching happen outside MySQL. Obvious SphinxSE applications include: easier porting of MySQL FTS applications to Sphinx; allowing Sphinx use with programming languages for which native APIs are not available yet; optimizations when additional Sphinx result set processing on the MySQL side is required (eg. JOINs with original document tables, additional MySQL-side filtering, etc). Installing SphinxSE You will need to obtain a copy of MySQL sources, prepare those, and then recompile the MySQL binary.
MySQL sources (mysql-5.x.yy.tar.gz) could be obtained from dev.mysql.com Web site. For some MySQL versions, there are delta tarballs with already prepared source versions available from Sphinx Web site. After unzipping those over original sources MySQL would be ready to be configured and built with Sphinx support. If such tarball is not available, or does not work for you for any reason, you would have to prepare sources manually. You will need to GNU Autotools framework (autoconf, automake and libtool) installed to do that. Compiling MySQL 5.0.x with SphinxSE copy sphinx.5.0.yy.diff patch file into MySQL sources directory and run patch -p1 < sphinx.5.0.yy.diff If there's no .diff file exactly for the specific version you need to build, try applying .diff with closest version numbers. It is important that the patch should apply with no rejects. in MySQL sources directory, run sh BUILD/autorun.sh in MySQL sources directory, create sql/sphinx directory in and copy all files in mysqlse directory from Sphinx sources there. Example: cp -R /root/builds/sphinx-0.9.7/mysqlse /root/builds/mysql-5.0.24/sql/sphinx configure MySQL and enable Sphinx engine: ./configure --with-sphinx-storage-engine build and install MySQL: make make install Compiling MySQL 5.1.x with SphinxSE in MySQL sources directory, create storage/sphinx directory in and copy all files in mysqlse directory from Sphinx sources there. Example: cp -R /root/builds/sphinx-0.9.7/mysqlse /root/builds/mysql-5.1.14/storage/sphinx in MySQL sources directory, run sh BUILD/autorun.sh configure MySQL and enable Sphinx engine: ./configure --with-plugins=sphinx build and install MySQL: make make install Checking SphinxSE installation To check whether SphinxSE has been succesfully compiled into MySQL, launch newly built servers, run mysql client and issue SHOW ENGINES query. You should see a list of all available engines. Sphinx should be present and "Support" column should contain "YES": mysql> show engines; +------------+----------+-------------------------------------------------------------+ | Engine | Support | Comment | +------------+----------+-------------------------------------------------------------+ | MyISAM | DEFAULT | Default engine as of MySQL 3.23 with great performance | ... | SPHINX | YES | Sphinx storage engine | ... +------------+----------+-------------------------------------------------------------+ 13 rows in set (0.00 sec) Using SphinxSE To search via SphinxSE, you would need to create special ENGINE=SPHINX "search table", and then SELECT from it with full text query put into WHERE clause for query column. Let's begin with an example create statement and search query: CREATE TABLE t1 ( id INTEGER UNSIGNED NOT NULL, weight INTEGER NOT NULL, query VARCHAR(3072) NOT NULL, group_id INTEGER, INDEX(query) ) ENGINE=SPHINX CONNECTION="sphinx://localhost:9312/test"; SELECT * FROM t1 WHERE query='test it;mode=any'; First 3 columns of search table must have a types of INTEGER UNSINGED or BIGINT for the 1st column (document id), INTEGER or BIGINT for the 2nd column (match weight), and VARCHAR or TEXT for the 3rd column (your query), respectively. This mapping is fixed; you can not omit any of these three required columns, or move them around, or change types. Also, query column must be indexed; all the others must be kept unindexed. Columns' names are ignored so you can use arbitrary ones. Additional columns must be either INTEGER, TIMESTAMP, BIGINT, VARCHAR, or FLOAT. 
They will be bound to attributes provided in Sphinx result set by name, so their names must match attribute names specified in sphinx.conf. If there's no such attribute name in Sphinx search results, column will have NULL values. Special "virtual" attributes names can also be bound to SphinxSE columns. _sph_ needs to be used instead of @ for that. For instance, to obtain the values of @groupby, @count, or @distinct virtual attributes, use _sph_groupby, _sph_count or _sph_distinct column names, respectively. CONNECTION string parameter can be used to specify default searchd host, port and indexes for queries issued using this table. If no connection string is specified in CREATE TABLE, index name "*" (ie. search all indexes) and localhost:9312 are assumed. Connection string syntax is as follows: CONNECTION="sphinx://HOST:PORT/INDEXNAME" You can change the default connection string later: ALTER TABLE t1 CONNECTION="sphinx://NEWHOST:NEWPORT/NEWINDEXNAME"; You can also override all these parameters per-query. As seen in example, both query text and search options should be put into WHERE clause on search query column (ie. 3rd column); the options are separated by semicolons; and their names from values by equality sign. Any number of options can be specified. Available options are: query - query text; mode - matching mode. Must be one of "all", "any", "phrase", "boolean", or "extended". Default is "all"; sort - match sorting mode. Must be one of "relevance", "attr_desc", "attr_asc", "time_segments", or "extended". In all modes besides "relevance" attribute name (or sorting clause for "extended") is also required after a colon: ... WHERE query='test;sort=attr_asc:group_id'; ... WHERE query='test;sort=extended:@weight desc, group_id asc'; offset - offset into result set, default is 0; limit - amount of matches to retrieve from result set, default is 20; index - names of the indexes to search: ... WHERE query='test;index=test1;'; ... WHERE query='test;index=test1,test2,test3;'; minid, maxid - min and max document ID to match; weights - comma-separated list of weights to be assigned to Sphinx full-text fields: ... WHERE query='test;weights=1,2,3;'; filter, !filter - comma-separated attribute name and a set of values to match: # only include groups 1, 5 and 19 ... WHERE query='test;filter=group_id,1,5,19;'; # exclude groups 3 and 11 ... WHERE query='test;!filter=group_id,3,11;'; range, !range - comma-separated attribute name, min and max value to match: # include groups from 3 to 7, inclusive ... WHERE query='test;range=group_id,3,7;'; # exclude groups from 5 to 25 ... WHERE query='test;!range=group_id,5,25;'; maxmatches - per-query max matches value, as in max_matches parameter to SetLimits() API call: ... WHERE query='test;maxmatches=2000;'; cutoff - maximum allowed matches, as in cutoff parameter to SetLimits() API call: ... WHERE query='test;cutoff=10000;'; maxquerytme - maximum allowed query time (in milliseconds), as in SetMaxQueryTime() API call: ... WHERE query='test;maxquerytime=1000;'; groupby - group-by function and attribute, corresponding to SetGroupBy() API call: ... WHERE query='test;groupby=day:published_ts;'; ... WHERE query='test;groupby=attr:group_id;'; groupsort - group-by sorting clause: ... WHERE query='test;groupsort=@count desc;'; distinct - an attribute to compute COUNT(DISTINCT) for when doing group-by, as in SetGroupDistinct() API call: ... 
WHERE query='test;groupby=attr:country_id;distinct=site_id'; indexweights - comma-separated list of index names and weights to use when searching through several indexes: ... WHERE query='test;indexweights=idx_exact,2,idx_stemmed,1;'; comment - a string to mark this query in query log (mapping to $comment parameter in Query() API call): ... WHERE query='test;comment=marker001;'; select - a string with expressions to compute (mapping to SetSelect() API call): ... WHERE query='test;select=2*a+3*b as myexpr;'; host, port - remote searchd host name and TCP port, respectively: ... WHERE query='test;host=sphinx-test.loc;port=7312;'; ranker - a ranking function to use with "extended" matching mode, as in SetRankingMode() API call (the only mode that supports full query syntax). Known values are "proximity_bm25", "bm25", "none", "wordcount", "proximity", "matchany", "fieldmask"; and, starting with 2.0.4-release, "expr:EXPRESSION" syntax to support expression-based ranker (where EXPRESSION should be replaced with your specific ranking formula): ... WHERE query='test;mode=extended;ranker=bm25;'; ... WHERE query='test;mode=extended;ranker=expr:sum(lcs);'; geoanchor - geodistance anchor, as in SetGeoAnchor() API call. Takes 4 parameters which are latitude and longiture attribute names, and anchor point coordinates respectively: ... WHERE query='test;geoanchor=latattr,lonattr,0.123,0.456'; One very important note that it is much more efficient to allow Sphinx to perform sorting, filtering and slicing the result set than to raise max matches count and use WHERE, ORDER BY and LIMIT clauses on MySQL side. This is for two reasons. First, Sphinx does a number of optimizations and performs better than MySQL on these tasks. Second, less data would need to be packed by searchd, transferred and unpacked by SphinxSE. Starting with version 0.9.9-rc1, additional query info besides result set could be retrieved with SHOW ENGINE SPHINX STATUS statement: mysql> SHOW ENGINE SPHINX STATUS; +--------+-------+-------------------------------------------------+ | Type | Name | Status | +--------+-------+-------------------------------------------------+ | SPHINX | stats | total: 25, total found: 25, time: 126, words: 2 | | SPHINX | words | sphinx:591:1256 soft:11076:15945 | +--------+-------+-------------------------------------------------+ 2 rows in set (0.00 sec) This information can also be accessed through status variables. Note that this method does not require super-user privileges. mysql> SHOW STATUS LIKE 'sphinx_%'; +--------------------+----------------------------------+ | Variable_name | Value | +--------------------+----------------------------------+ | sphinx_total | 25 | | sphinx_total_found | 25 | | sphinx_time | 126 | | sphinx_word_count | 2 | | sphinx_words | sphinx:591:1256 soft:11076:15945 | +--------------------+----------------------------------+ 5 rows in set (0.00 sec) You could perform JOINs on SphinxSE search table and tables using other engines. 
Here's an example with "documents" from example.sql: mysql> SELECT content, date_added FROM test.documents docs -> JOIN t1 ON (docs.id=t1.id) -> WHERE query="one document;mode=any"; +-------------------------------------+---------------------+ | content | docdate | +-------------------------------------+---------------------+ | this is my test document number two | 2006-06-17 14:04:28 | | this is my test document number one | 2006-06-17 14:04:28 | +-------------------------------------+---------------------+ 2 rows in set (0.00 sec) mysql> SHOW ENGINE SPHINX STATUS; +--------+-------+---------------------------------------------+ | Type | Name | Status | +--------+-------+---------------------------------------------+ | SPHINX | stats | total: 2, total found: 2, time: 0, words: 2 | | SPHINX | words | one:1:2 document:2:2 | +--------+-------+---------------------------------------------+ 2 rows in set (0.00 sec) Building snippets (excerpts) via MySQL Starting with version 0.9.9-rc2, SphinxSE also includes a UDF function that lets you create snippets through MySQL. The functionality is fully similar to BuildExcerprts API call but accesible through MySQL+SphinxSE. The binary that provides the UDF is named sphinx.so and should be automatically built and installed to proper location along with SphinxSE itself. If it does not get installed automatically for some reason, look for sphinx.so in the build directory and copy it to the plugins directory of your MySQL instance. After that, register the UDF using the following statement: CREATE FUNCTION sphinx_snippets RETURNS STRING SONAME 'sphinx.so'; Function name must be sphinx_snippets, you can not use an arbitrary name. Function arguments are as follows: Prototype: function sphinx_snippets ( document, index, words, [options] ); Document and words arguments can be either strings or table columns. Options must be specified like this: 'value' AS option_name. For a list of supported options, refer to BuildExcerprts() API call. The only UDF-specific additional option is named 'sphinx' and lets you specify searchd location (host and port). Usage examples: SELECT sphinx_snippets('hello world doc', 'main', 'world', 'sphinx://192.168.1.1/' AS sphinx, true AS exact_phrase, '[b]' AS before_match, '[/b]' AS after_match) FROM documents; SELECT title, sphinx_snippets(text, 'index', 'mysql php') AS text FROM sphinx, documents WHERE query='mysql php' AND sphinx.id=documents.id; Reporting bugs Unfortunately, Sphinx is not yet 100% bug free (even though I'm working hard towards that), so you might occasionally run into some issues. Reporting as much as possible about each bug is very important - because to fix it, I need to be able either to reproduce and debug the bug, or to deduce what's causing it from the information that you provide. So here are some instructions on how to do that. Build-time issues If Sphinx fails to build for some reason, please do the following: check that headers and libraries for your DBMS are properly installed (for instance, check that mysql-devel package is present); report Sphinx version and config file (be sure to remove the passwords!), MySQL (or PostgreSQL) configuration info, gcc version, OS version and CPU type (ie. x86, x86-64, PowerPC, etc): mysql_config gcc --version uname -a report the error message which is produced by configure or gcc (it should be to include error message itself only, not the whole build log). 
Run-time issues If Sphinx builds and runs, but there are any problems running it, please do the following: describe the bug (ie. both the expected behavior and actual behavior) and all the steps necessary to reproduce it; include Sphinx version and config file (be sure to remove the passwords!), MySQL (or PostgreSQL) version, gcc version, OS version and CPU type (ie. x86, x86-64, PowerPC, etc): mysql --version gcc --version uname -a build, install and run debug versions of all Sphinx programs (this is to enable a lot of additional internal checks, so-called assertions): make distclean ./configure --with-debug make install killall -TERM searchd reindex to check if any assertions are triggered (in this case, it's likely that the index is corrupted and causing problems); if the bug does not reproduce with debug versions, revert to non-debug and mention it in your report; if the bug could be easily reproduced with a small (1-100 record) part of your database, please provide a gzipped dump of that part; if the problem is related to searchd, include relevant entries from searchd.log and query.log in your bug report; if the problem is related to searchd, try running it in console mode and check if it dies with an assertion: ./searchd --console if any program dies with an assertion, provide the assertion message. Debugging assertions, crashes and hangups If any program dies with an assertion, crashes without an assertion or hangs up, you would additionally need to generate a core dump and examine it. enable core dumps. On most Linux systems, this is done using ulimit: ulimit -c 32768 run the program and try to reproduce the bug; if the program crashes (either with or without an assertion), find the core file in current directory (it should typically print out "Segmentation fault (core dumped)" message); if the program hangs, use kill -SEGV from another console to force it to exit and dump core: kill -SEGV HANGED-PROCESS-ID use gdb to examine the core file and obtain a backtrace: gdb ./CRASHED-PROGRAM-FILE-NAME CORE-DUMP-FILE-NAME (gdb) bt (gdb) quit Note that HANGED-PROCESS-ID, CRASHED-PROGRAM-FILE-NAME and CORE-DUMP-FILE-NAME must all be replaced with specific numbers and file names. For example, hanged searchd debugging session would look like: # kill -SEGV 12345 # ls *core* core.12345 # gdb ./searchd core.12345 (gdb) bt ... (gdb) quit Note that ulimit is not server-wide and only affects current shell session. This means that you will not have to restore any server-wide limits - but if you relogin, you will have to set ulimit again. Core dumps should be placed in current working directory (and Sphinx programs do not change it), so this is where you would look for them. Please do not immediately remove the core file because there could be additional helpful information which could be retrieved from it. You do not need to send me this file (as the debug info there is closely tied to your system) but I might need to ask you a few additional questions about it. <filename>sphinx.conf</filename> options reference Data source configuration options type Data source type. Mandatory, no default value. Known types are , , , and , and . All other per-source options depend on source type selected by this option. Names of the options used for SQL sources (ie. MySQL, PostgreSQL, MS SQL) start with "sql_"; names of the ones used for xmlpipe and xmlpipe2 start with "xmlpipe_". 
All source types except are conditional; they might or might not be supported depending on your build settings, installed client libraries, etc. type is currently only available on Windows. type is available both on Windows natively and on Linux through UnixODBC library. Example: type = mysql sql_host SQL server host to connect to. Mandatory, no default value. Applies to SQL source types (, , ) only. In the simplest case when Sphinx resides on the same host with your MySQL or PostgreSQL installation, you would simply specify "localhost". Note that MySQL client library chooses whether to connect over TCP/IP or over UNIX socket based on the host name. Specifically "localhost" will force it to use UNIX socket (this is the default and generally recommended mode) and "127.0.0.1" will force TCP/IP usage. Refer to MySQL manual for more details. Example: sql_host = localhost sql_port SQL server IP port to connect to. Optional, default is 3306 for source type and 5432 for type. Applies to SQL source types (, , ) only. Note that it depends on sql_host setting whether this value will actually be used. Example: sql_port = 3306 sql_user SQL user to use when connecting to sql_host. Mandatory, no default value. Applies to SQL source types (, , ) only. Example: sql_user = test sql_pass SQL user password to use when connecting to sql_host. Mandatory, no default value. Applies to SQL source types (, , ) only. Example: sql_pass = mysecretpassword sql_db SQL database (in MySQL terms) to use after the connection and perform further queries within. Mandatory, no default value. Applies to SQL source types (, , ) only. Example: sql_db = test sql_sock UNIX socket name to connect to for local SQL servers. Optional, default value is empty (use client library default settings). Applies to SQL source types (, , ) only. On Linux, it would typically be /var/lib/mysql/mysql.sock. On FreeBSD, it would typically be /tmp/mysql.sock. Note that it depends on sql_host setting whether this value will actually be used. Example: sql_sock = /tmp/mysql.sock mysql_connect_flags MySQL client connection flags. Optional, default value is 0 (do not set any flags). Applies to source type only. This option must contain an integer value with the sum of the flags. The value will be passed to mysql_real_connect() verbatim. The flags are enumerated in mysql_com.h include file. Flags that are especially interesting in regard to indexing, with their respective values, are as follows: CLIENT_COMPRESS = 32; can use compression protocol CLIENT_SSL = 2048; switch to SSL after handshake CLIENT_SECURE_CONNECTION = 32768; new 4.1 authentication For instance, you can specify 2080 (2048+32) to use both compression and SSL, or 32768 to use new authentication only. Initially, this option was introduced to be able to use compression when the indexer and mysqld are on different hosts. Compression on 1 Gbps links is most likely to hurt indexing time though it reduces network traffic, both in theory and in practice. However, enabling compression on 100 Mbps links may improve indexing time significantly (upto 20-30% of the total indexing time improvement was reported). Your mileage may vary. Example: mysql_connect_flags = 32 # enable compression mysql_ssl_cert, mysql_ssl_key, mysql_ssl_ca SSL certificate settings to use for connecting to MySQL server. Optional, default values are empty strings (do not use SSL). Applies to source type only. These directives let you set up secure SSL connection between indexer and MySQL. 
The details on creating the certificates and setting up MySQL server can be found in MySQL documentation. Example: mysql_ssl_cert = /etc/ssl/client-cert.pem mysql_ssl_key = /etc/ssl/client-key.pem mysql_ssl_ca = /etc/ssl/cacert.pem odbc_dsn ODBC DSN to connect to. Mandatory, no default value. Applies to source type only. ODBC DSN (Data Source Name) specifies the credentials (host, user, password, etc) to use when connecting to ODBC data source. The format depends on specific ODBC driver used. Example: odbc_dsn = Driver={Oracle ODBC Driver};Dbq=myDBName;Uid=myUsername;Pwd=myPassword sql_query_pre Pre-fetch query, or pre-query. Multi-value, optional, default is empty list of queries. Applies to SQL source types (, , ) only. Multi-value means that you can specify several pre-queries. They are executed before the main fetch query, and they will be exectued exactly in order of appeareance in the configuration file. Pre-query results are ignored. Pre-queries are useful in a lot of ways. They are used to setup encoding, mark records that are going to be indexed, update internal counters, set various per-connection SQL server options and variables, and so on. Perhaps the most frequent pre-query usage is to specify the encoding that the server will use for the rows it returnes. It must match the encoding that Sphinx expects (as specified by charset_type and charset_table options). Two MySQL specific examples of setting the encoding are: sql_query_pre = SET CHARACTER_SET_RESULTS=cp1251 sql_query_pre = SET NAMES utf8 Also specific to MySQL sources, it is useful to disable query cache (for indexer connection only) in pre-query, because indexing queries are not going to be re-run frequently anyway, and there's no sense in caching their results. That could be achieved with: sql_query_pre = SET SESSION query_cache_type=OFF Example: sql_query_pre = SET NAMES utf8 sql_query_pre = SET SESSION query_cache_type=OFF sql_query Main document fetch query. Mandatory, no default value. Applies to SQL source types (, , ) only. There can be only one main query. This is the query which is used to retrieve documents from SQL server. You can specify up to 32 full-text fields (formally, upto SPH_MAX_FIELDS from sphinx.h), and an arbitrary amount of attributes. All of the columns that are neither document ID (the first one) nor attributes will be full-text indexed. Document ID MUST be the very first field, and it MUST BE UNIQUE UNSIGNED POSITIVE (NON-ZERO, NON-NEGATIVE) INTEGER NUMBER. It can be either 32-bit or 64-bit, depending on how you built Sphinx; by default it builds with 32-bit IDs support but option to configure allows to build with 64-bit document and word IDs support. Example: sql_query = \ SELECT id, group_id, UNIX_TIMESTAMP(date_added) AS date_added, \ title, content \ FROM documents sql_joined_field Joined/payload field fetch query. Multi-value, optional, default is empty list of queries. Applies to SQL source types (, , ) only. lets you use two different features: joined fields, and payloads (payload fields). It's syntax is as follows: sql_joined_field = FIELD-NAME 'from' ( 'query' | 'payload-query' ); \ QUERY [ ; RANGE-QUERY ] where FIELD-NAME is a joined/payload field name; QUERY is an SQL query that must fetch values to index. RANGE-QUERY is an optional SQL query that fetches a range of values to index. (Added in version 2.0.1-beta.) Joined fields let you avoid JOIN and/or GROUP_CONCAT statements in the main document fetch query (sql_query). 
This can be useful when a SQL-side JOIN is slow, or needs to be offloaded on the Sphinx side, or simply to emulate MySQL-specific GROUP_CONCAT functionality in case your database server does not support it. The query must return exactly 2 columns: document ID, and text to append to a joined field. Document IDs can be duplicate, but they must be in ascending order. All the text rows fetched for a given ID will be concatenated together, and the concatenation result will be indexed as the entire contents of a joined field. Rows will be concatenated in the order returned from the query, and separating whitespace will be inserted between them. For instance, if the joined field query returns the following rows: ( 1, 'red' ) ( 1, 'right' ) ( 1, 'hand' ) ( 2, 'mysql' ) ( 2, 'sphinx' ) then the indexing results would be equivalent to that of adding a new text field with a value of 'red right hand' to document 1 and 'mysql sphinx' to document 2. Joined fields are only indexed differently. There are no other differences between joined fields and regular text fields. Starting with 2.0.1-beta, ranged queries can be used when a single query is not efficient enough or does not work because of database driver limitations. It works similarly to the ranged queries in the main indexing loop (see sql_query_range below). The range will be queried for and fetched upfront once, then multiple queries with different $start and $end substitutions will be run to fetch the actual data. Payloads let you create a special field in which, instead of keyword positions, so-called user payloads are stored. Payloads are custom integer values attached to every keyword. They can then be used at search time to affect the ranking. The payload query must return exactly 3 columns: document ID; keyword; and integer payload value. Document IDs can be duplicate, but they must be in ascending order. Payloads must be unsigned integers within 24-bit range, ie. from 0 to 16777215. For reference, payloads are currently internally stored as in-field keyword positions, but that is not guaranteed and might change in the future. Currently, the only method to account for payloads is to use the SPH_RANK_PROXIMITY_BM25 ranker. On indexes with payload fields, it will automatically switch to a variant that matches keywords in those fields, computes a sum of matched payloads multiplied by field weights, and adds that sum to the final rank. Example: sql_joined_field = \ tagstext from query; \ SELECT docid, CONCAT('tag',tagid) FROM tags ORDER BY docid ASC sql_query_range Range query setup. Optional, default is empty. Applies to SQL source types (mysql, pgsql, mssql) only. Setting this option enables ranged document fetch queries. Ranged queries are useful to avoid notorious MyISAM table locks when indexing lots of data. (They also help with other less notorious issues, such as reduced performance caused by big result sets, or additional resources consumed by InnoDB to serialize big read transactions.) The query specified in this option must fetch the min and max document IDs that will be used as range boundaries. It must return exactly two integer fields, min ID first and max ID second; the field names are ignored. When ranged queries are enabled, sql_query will be required to contain the $start and $end macros (because it obviously would be a mistake to index the whole table many times over). Note that the intervals specified by $start..$end will not overlap, so you should not remove document IDs that are exactly equal to $start or $end from your query.
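A minimal combined sketch of a ranged fetch setup follows; the "documents" table and its columns are illustrative:

sql_query_range = SELECT MIN(id), MAX(id) FROM documents
sql_range_step  = 1000
sql_query = \
	SELECT id, title, content FROM documents \
	WHERE id>=$start AND id<=$end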
The sketch above illustrates this; note how the main query uses greater-or-equal and less-or-equal comparisons. Example: sql_query_range = SELECT MIN(id),MAX(id) FROM documents sql_range_step Range query step. Optional, default is 1024. Applies to SQL source types (mysql, pgsql, mssql) only. Only used when ranged queries are enabled. The full document ID interval fetched by sql_query_range will be walked in steps of this size. For example, if the min and max IDs fetched are 12 and 3456 respectively, and the step is 1000, indexer will call sql_query several times with the following substitutions: $start=12, $end=1011 $start=1012, $end=2011 $start=2012, $end=3011 $start=3012, $end=3456 Example: sql_range_step = 1000 sql_query_killlist Kill-list query. Optional, default is empty (no query). Applies to SQL source types (mysql, pgsql, mssql) only. Introduced in version 0.9.9-rc1. This query is expected to return a number of 1-column rows, each containing just the document ID. The returned document IDs are stored within an index. The kill-list for a given index suppresses results from other indexes, depending on index order in the query. The intended use is to help implement deletions and updates on existing indexes without rebuilding (or even touching) them, and especially to fight the phantom results problem. Let us dissect an example. Assume we have two indexes, 'main' and 'delta'. Assume that documents 2, 3, and 5 were deleted since the last reindex of 'main', and documents 7 and 11 were updated (ie. their text contents were changed). Assume that a keyword 'test' occurred in all these mentioned documents when we were indexing 'main'; still occurs in document 7 as we index 'delta'; but does not occur in document 11 any more. We now reindex delta and then search through both these indexes in proper (least to most recent) order: $res = $cl->Query ( "test", "main delta" ); First, we need to properly handle deletions. The result set should not contain documents 2, 3, or 5. Second, we also need to avoid phantom results. Unless we do something about it, document 11 will appear in search results! It will be found in 'main' (but not 'delta'). And it will make it to the final result set unless something stops it. Kill-list, or K-list for short, is that something. The kill-list attached to 'delta' will suppress the specified rows from all the preceding indexes, in this case just 'main'. So to get the expected results, we should put all the updated and deleted document IDs into it. Example: sql_query_killlist = \ SELECT id FROM documents WHERE updated_ts>=@last_reindex UNION \ SELECT id FROM documents_deleted WHERE deleted_ts>=@last_reindex sql_attr_uint Unsigned integer attribute declaration. Multi-value (there might be multiple attributes declared), optional. Applies to SQL source types (mysql, pgsql, mssql) only. The column value should fit into the 32-bit unsigned integer range. Values outside this range will be accepted but wrapped around. For instance, -1 will be wrapped around to 2^32-1 or 4,294,967,295. You can specify a bit count for integer attributes by appending ':BITCOUNT' to the attribute name (see example below). Attributes with less than the default 32-bit size (bitfields) perform slower. But they require less RAM when using extern storage: such bitfields are packed together in 32-bit chunks in the .spa attribute data file. Bit size settings are ignored if using inline storage. Example: sql_attr_uint = group_id sql_attr_uint = forum_id:9 # 9 bits for forum_id sql_attr_bool Boolean attribute declaration. Multi-value (there might be multiple attributes declared), optional.
Applies to SQL source types (, , ) only. Equivalent to sql_attr_uint declaration with a bit count of 1. Example: sql_attr_bool = is_deleted # will be packed to 1 bit sql_attr_bigint 64-bit signed integer attribute declaration. Multi-value (there might be multiple attributes declared), optional. Applies to SQL source types (, , ) only. Note that unlike sql_attr_uint, these values are signed. Introduced in version 0.9.9-rc1. Example: sql_attr_bigint = my_bigint_id sql_attr_timestamp UNIX timestamp attribute declaration. Multi-value (there might be multiple attributes declared), optional. Applies to SQL source types (, , ) only. Timestamps can store date and time in the range of Jan 01, 1970 to Jan 19, 2038 with a precision of one second. The expected column value should be a timestamp in UNIX format, ie. 32-bit unsigned integer number of seconds elapsed since midnight, January 01, 1970, GMT. Timestamps are internally stored and handled as integers everywhere. But in addition to working with timestamps as integers, it's also legal to use them along with different date-based functions, such as time segments sorting mode, or day/week/month/year extraction for GROUP BY. Note that DATE or DATETIME column types in MySQL can not be directly used as timestamp attributes in Sphinx; you need to explicitly convert such columns using UNIX_TIMESTAMP function (if data is in range). Note timestamps can not represent dates before January 01, 1970, and UNIX_TIMESTAMP() in MySQL will not return anything expected. If you only needs to work with dates, not times, consider TO_DAYS() function in MySQL instead. Example: # sql_query = ... UNIX_TIMESTAMP(added_datetime) AS added_ts ... sql_attr_timestamp = added_ts sql_attr_str2ordinal Ordinal string number attribute declaration. Multi-value (there might be multiple attributes declared), optional. Applies to SQL source types (, , ) only. This attribute type (so-called ordinal, for brevity) is intended to allow sorting by string values, but without storing the strings themselves. When indexing ordinals, string values are fetched from database, temporarily stored, sorted, and then replaced by their respective ordinal numbers in the array of sorted strings. So, the ordinal number is an integer such that sorting by it produces the same result as if lexicographically sorting by original strings. by string values lexicographically. Earlier versions could consume a lot of RAM for indexing ordinals. Starting with revision r1112, ordinals accumulation and sorting also runs in fixed memory (at the cost of using additional temporary disk space), and honors mem_limit settings. Ideally the strings should be sorted differently, depending on the encoding and locale. For instance, if the strings are known to be Russian text in KOI8R encoding, sorting the bytes 0xE0, 0xE1, and 0xE2 should produce 0xE1, 0xE2 and 0xE0, because in KOI8R value 0xE0 encodes a character that is (noticeably) after characters encoded by 0xE1 and 0xE2. Unfortunately, Sphinx does not support that at the moment and will simply sort the strings bytewise. Note that the ordinals are by construction local to each index, and it's therefore impossible to merge ordinals while retaining the proper order. The processed strings are replaced by their sequential number in the index they occurred in, but different indexes have different sets of strings. For instance, if 'main' index contains strings "aaa", "bbb", "ccc", and so on up to "zzz", they'll be assigned numbers 1, 2, 3, and so on up to 26, respectively. 
But then if 'delta' only contains "zzz" the assigned number will be 1. And after the merge, the order will be broken. Unfortunately, this is impossible to workaround without storing the original strings (and once Sphinx supports storing the original strings, ordinals will not be necessary any more). Example: sql_attr_str2ordinal = author_name sql_attr_float Floating point attribute declaration. Multi-value (there might be multiple attributes declared), optional. Applies to SQL source types (, , ) only. The values will be stored in single precision, 32-bit IEEE 754 format. Represented range is approximately from 1e-38 to 1e+38. The amount of decimal digits that can be stored precisely is approximately 7. One important usage of the float attributes is storing latitude and longitude values (in radians), for further usage in query-time geosphere distance calculations. Example: sql_attr_float = lat_radians sql_attr_float = long_radians sql_attr_multi Multi-valued attribute (MVA) declaration. Multi-value (ie. there may be more than one such attribute declared), optional. Applies to SQL source types (, , ) only. Plain attributes only allow to attach 1 value per each document. However, there are cases (such as tags or categories) when it is desired to attach multiple values of the same attribute and be able to apply filtering or grouping to value lists. The declaration format is as follows (backslashes are for clarity only; everything can be declared in a single line as well): sql_attr_multi = ATTR-TYPE ATTR-NAME 'from' SOURCE-TYPE \ [;QUERY] \ [;RANGE-QUERY] where ATTR-TYPE is 'uint', 'bigint' or 'timestamp' SOURCE-TYPE is 'field', 'query', or 'ranged-query' QUERY is SQL query used to fetch all ( docid, attrvalue ) pairs RANGE-QUERY is SQL query used to fetch min and max ID values, similar to 'sql_query_range' Example: sql_attr_multi = uint tag from query; SELECT id, tag FROM tags sql_attr_multi = bigint tag from ranged-query; \ SELECT id, tag FROM tags WHERE id>=$start AND id<=$end; \ SELECT MIN(id), MAX(id) FROM tags sql_attr_string String attribute declaration. Multi-value (ie. there may be more than one such attribute declared), optional. Applies to SQL source types (, , ) only. Introduced in version 1.10-beta. String attributes can store arbitrary strings attached to every document. There's a fixed size limit of 4 MB per value. Also, searchd will currently cache all the values in RAM, which is an additional implicit limit. As of 1.10-beta, strings can only be used for storage and retrieval. They can not participate in expressions, be used for filtering, sorting, or grouping (ie. in WHERE, ORDER or GROUP clauses). Note that attributes declared using will not be full-text indexed; you can use sql_field_string directive for that. Example: sql_attr_string = title # will be stored but will not be indexed sql_attr_str2wordcount Word-count attribute declaration. Multi-value (ie. there may be more than one such attribute declared), optional. Applies to SQL source types (, , ) only. Introduced in version 1.10-beta. Word-count attribute takes a string column, tokenizes it according to index settings, and stores the resulting number of tokens in an attribute. This number of tokens ("word count") is a normal integer that can be later used, for instance, in custom ranking expressions (boost shorter titles, help identify exact field matches, etc). Example: sql_attr_str2wordcount = title_wc sql_column_buffers Per-column buffer sizes. Optional, default is empty (deduce the sizes automatically). 
Applies to , source types only. Introduced in version 2.0.1-beta. ODBC and MS SQL drivers sometimes can not return the maximum actual column size to be expected. For instance, NVARCHAR(MAX) columns always report their length as 2147483647 bytes to indexer even though the actually used length is likely considerably less. However, the receiving buffers still need to be allocated upfront, and their sizes have to be determined. When the driver does not report the column length at all, Sphinx allocates default 1 KB buffers for each non-char column, and 1 MB buffers for each char column. Driver-reported column length also gets clamped by an upper limie of 8 MB, so in case the driver reports (almost) a 2 GB column length, it will be clamped and a 8 MB buffer will be allocated instead for that column. These hard-coded limits can be overridden using the sql_column_buffers directive, either in order to save memory on actually shorter columns, or overcome the 8 MB limit on actually longer columns. The directive values must be a comma-separated lists of selected column names and sizes: sql_column_buffers = <colname>=<size>[K|M] [, ...] Example: sql_query = SELECT id, mytitle, mycontent FROM documents sql_column_buffers = mytitle=64K, mycontent=10M sql_field_string Combined string attribute and full-text field declaration. Multi-value (ie. there may be more than one such attribute declared), optional. Applies to SQL source types (, , ) only. Introduced in version 1.10-beta. sql_attr_string only stores the column value but does not full-text index it. In some cases it might be desired to both full-text index the column and store it as attribute. lets you do exactly that. Both the field and the attribute will be named the same. Example: sql_field_string = title # will be both indexed and stored sql_field_str2wordcount Combined word-count attribute and full-text field declaration. Multi-value (ie. there may be more than one such attribute declared), optional. Applies to SQL source types (, , ) only. Introduced in version 1.10-beta. sql_attr_str2wordcount only stores the column word count but does not full-text index it. In some cases it might be desired to both full-text index the column and also have the count. lets you do exactly that. Both the field and the attribute will be named the same. Example: sql_field_str2wordcount = title # will be indexed, and counted/stored sql_file_field File based field declaration. Applies to SQL source types (, , ) only. Introduced in version 1.10-beta. This directive makes indexer interpret field contents as a file name, and load and index the referred file. Files larger than max_file_field_buffer in size are skipped. Any errors during the file loading (IO errors, missed limits, etc) will be reported as indexing warnings and will not early terminate the indexing. No content will be indexed for such files. Example: sql_file_field = my_file_path # load and index files referred to by my_file_path sql_query_post Post-fetch query. Optional, default value is empty. Applies to SQL source types (, , ) only. This query is executed immediately after sql_query completes successfully. When post-fetch query produces errors, they are reported as warnings, but indexing is not terminated. It's result set is ignored. Note that indexing is not yet completed at the point when this query gets executed, and further indexing still may fail. Therefore, any permanent updates should not be done from here. 
For instance, updates on a helper table that permanently change the last successfully indexed ID should not be run from the post-fetch query; they should be run from the post-index query instead. Example: sql_query_post = DROP TABLE my_tmp_table sql_query_post_index Post-index query. Optional, default value is empty. Applies to SQL source types only. This query is executed when indexing is fully and successfully completed. If this query produces errors, they are reported as warnings, but indexing is not terminated. Its result set is ignored. The $maxid macro can be used in its text; it will be expanded to the maximum document ID that was actually fetched from the database during indexing. If no documents were indexed, $maxid will be expanded to 0. Example: sql_query_post_index = REPLACE INTO counters ( id, val ) \ VALUES ( 'max_indexed_id', $maxid ) sql_ranged_throttle Ranged query throttling period, in milliseconds. Optional, default is 0 (no throttling). Applies to SQL source types only. Throttling can be useful when indexer imposes too much load on the database server. It causes the indexer to sleep for the given amount of milliseconds once per ranged query step. This sleep is unconditional, and is performed before the fetch query. Example: sql_ranged_throttle = 1000 # sleep for 1 sec before each query step sql_query_info Document info query. Optional, default is empty. Applies to the mysql source type only. Only used by CLI search to fetch and display document information, only works with MySQL at the moment, and only intended for debugging purposes. This query fetches the row that will be displayed by the CLI search utility for each document ID. It is required to contain the $id macro, which expands to the queried document ID. Example: sql_query_info = SELECT * FROM documents WHERE id=$id xmlpipe_command Shell command that invokes xmlpipe stream producer. Mandatory. Applies to the xmlpipe and xmlpipe2 source types only. Specifies a command that will be executed and whose output will be parsed for documents. Refer to the xmlpipe or xmlpipe2 format description for details. Example: xmlpipe_command = cat /home/sphinx/test.xml xmlpipe_field xmlpipe field declaration. Multi-value, optional. Applies to the xmlpipe2 source type only. Refer to the xmlpipe2 data source section. Example: xmlpipe_field = subject xmlpipe_field = content xmlpipe_field_string xmlpipe field and string attribute declaration. Multi-value, optional. Applies to the xmlpipe2 source type only. Refer to the xmlpipe2 data source section. Introduced in version 1.10-beta. Makes the specified XML element indexed as both a full-text field and a string attribute. Equivalent to the respective in-stream schema declaration within the XML file. Example: xmlpipe_field_string = subject xmlpipe_field_wordcount xmlpipe field and word count attribute declaration. Multi-value, optional. Applies to the xmlpipe2 source type only. Refer to the xmlpipe2 data source section. Introduced in version 1.10-beta. Makes the specified XML element indexed as both a full-text field and a word count attribute. Equivalent to the respective in-stream schema declaration within the XML file. Example: xmlpipe_field_wordcount = subject xmlpipe_attr_uint xmlpipe integer attribute declaration. Multi-value, optional. Applies to the xmlpipe2 source type only. Syntax fully matches that of sql_attr_uint. Example: xmlpipe_attr_uint = author_id xmlpipe_attr_bigint xmlpipe signed 64-bit integer attribute declaration. Multi-value, optional. Applies to the xmlpipe2 source type only. Syntax fully matches that of sql_attr_bigint. Example: xmlpipe_attr_bigint = my_bigint_id xmlpipe_attr_bool xmlpipe boolean attribute declaration. Multi-value, optional. Applies to the xmlpipe2 source type only. Syntax fully matches that of sql_attr_bool. 
Example: xmlpipe_attr_bool = is_deleted # will be packed to 1 bit xmlpipe_attr_timestamp xmlpipe UNIX timestamp attribute declaration. Multi-value, optional. Applies to the xmlpipe2 source type only. Syntax fully matches that of sql_attr_timestamp. Example: xmlpipe_attr_timestamp = published xmlpipe_attr_str2ordinal xmlpipe string ordinal attribute declaration. Multi-value, optional. Applies to the xmlpipe2 source type only. Syntax fully matches that of sql_attr_str2ordinal. Example: xmlpipe_attr_str2ordinal = author_sort xmlpipe_attr_float xmlpipe floating point attribute declaration. Multi-value, optional. Applies to the xmlpipe2 source type only. Syntax fully matches that of sql_attr_float. Example: xmlpipe_attr_float = lat_radians xmlpipe_attr_float = long_radians xmlpipe_attr_multi xmlpipe MVA attribute declaration. Multi-value, optional. Applies to the xmlpipe2 source type only. This setting declares an MVA attribute tag in the xmlpipe2 stream. The contents of the specified tag will be parsed and a list of integers that will constitute the MVA will be extracted, similar to how sql_attr_multi parses SQL column contents when 'field' MVA source type is specified. Example: xmlpipe_attr_multi = taglist xmlpipe_attr_multi_64 xmlpipe MVA attribute declaration. Declares the BIGINT (signed 64-bit integer) MVA attribute. Multi-value, optional. Applies to the xmlpipe2 source type only. This setting declares an MVA attribute tag in the xmlpipe2 stream. The contents of the specified tag will be parsed and a list of integers that will constitute the MVA will be extracted, similar to how sql_attr_multi parses SQL column contents when 'field' MVA source type is specified. Example: xmlpipe_attr_multi_64 = taglist xmlpipe_attr_string xmlpipe string declaration. Multi-value, optional. Applies to the xmlpipe2 source type only. Introduced in version 1.10-beta. This setting declares a string attribute tag in the xmlpipe2 stream. The contents of the specified tag will be parsed and stored as a string value. Example: xmlpipe_attr_string = subject xmlpipe_fixup_utf8 Perform Sphinx-side UTF-8 validation and filtering to prevent the XML parser from choking on non-UTF-8 documents. Optional, default is 0. Applies to the xmlpipe2 source type only. On certain occasions it might be hard or even impossible to guarantee that the incoming XMLpipe2 document bodies are in perfectly valid and conforming UTF-8 encoding. For instance, documents with national single-byte encodings could sneak into the stream. The libexpat XML parser is fragile, meaning that it will stop processing in such cases. The UTF-8 fixup feature lets you avoid that. When fixup is enabled, Sphinx will preprocess the incoming stream before passing it to the XML parser and replace invalid UTF-8 sequences with spaces. Example: xmlpipe_fixup_utf8 = 1 mssql_winauth MS SQL Windows authentication flag. Boolean, optional, default value is 0 (false). Applies to the mssql source type only. Introduced in version 0.9.9-rc1. Whether to use the currently logged-in Windows account credentials for authentication when connecting to MS SQL Server. Note that when running searchd as a service, the service account can differ from the account you used to install the service. Example: mssql_winauth = 1 mssql_unicode MS SQL encoding type flag. Boolean, optional, default value is 0 (false). Applies to the mssql source type only. Introduced in version 0.9.9-rc1. Whether to ask for Unicode or single-byte data when querying MS SQL Server. This flag must be in sync with the charset_type directive; that is, to index Unicode data, you must set both in the index (to 'utf-8') and in the source (to 1). 
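For illustration, a minimal pairing of the two settings might look like the following sketch (the source and index names are hypothetical, and unrelated mandatory directives are omitted):

    source src_mssql
    {
        type          = mssql
        mssql_unicode = 1       # ask MS SQL Server for Unicode data
        # ...
    }

    index idx_mssql
    {
        source       = src_mssql
        charset_type = utf-8    # keep the index encoding in sync with mssql_unicode
        # ...
    }
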
For reference, MS SQL will actually return data in UCS-2 encoding instead of UTF-8, but Sphinx will automatically handle that. Example: mssql_unicode = 1 unpack_zlib Columns to unpack using zlib (aka deflate, aka gunzip). Multi-value, optional, default value is an empty list of columns. Applies to SQL source types only. Introduced in version 0.9.9-rc1. Columns specified using this directive will be unpacked by indexer using the standard zlib algorithm (called deflate and also implemented by gunzip). When indexing on a different box than the database, this lets you offload the database, and save on network traffic. The feature is only available if zlib and zlib-devel were both available during build time. Example: unpack_zlib = col1 unpack_zlib = col2 unpack_mysqlcompress Columns to unpack using MySQL UNCOMPRESS() algorithm. Multi-value, optional, default value is an empty list of columns. Applies to SQL source types only. Introduced in version 0.9.9-rc1. Columns specified using this directive will be unpacked by indexer using the modified zlib algorithm used by MySQL COMPRESS() and UNCOMPRESS() functions. When indexing on a different box than the database, this lets you offload the database, and save on network traffic. The feature is only available if zlib and zlib-devel were both available during build time. Example: unpack_mysqlcompress = body_compressed unpack_mysqlcompress = description_compressed unpack_mysqlcompress_maxsize Buffer size for UNCOMPRESS()ed data. Optional, default value is 16M. Introduced in version 0.9.9-rc1. When using unpack_mysqlcompress, due to implementation intricacies it is not possible to deduce the required buffer size from the compressed data. So the buffer must be preallocated in advance, and unpacked data can not go over the buffer size. This option lets you control the buffer size, both to limit indexer memory use, and to enable unpacking of really long data fields if necessary. Example: unpack_mysqlcompress_maxsize = 1M Index configuration options type Index type. Known values are 'plain', 'distributed', and 'rt'. Optional, default is 'plain' (plain local index). Sphinx supports several different types of indexes. Versions 0.9.x supported two index types: plain local indexes that are stored and processed on the local machine; and distributed indexes, which involve not only local searching but querying remote searchd instances over the network as well. Version 1.10-beta also adds support for so-called real-time indexes (or RT indexes for short) that are also stored and processed locally, but additionally allow for on-the-fly updates of the full-text index. Note that attributes can be updated on-the-fly using either plain local indexes or RT ones. The index type setting lets you choose the needed type. By default, the plain local index type will be assumed. Example: type = distributed source Adds a document source to the local index. Multi-value, mandatory. Specifies the document source to get documents from when the current index is indexed. There must be at least one source. There may be multiple sources, without any restrictions on the source types: ie. you can pull part of the data from a MySQL server, part from PostgreSQL, part from the filesystem using the xmlpipe2 wrapper. However, there are some restrictions on the source data. First, document IDs must be globally unique across all sources. If that condition is not met, you might get unexpected search results. Second, source schemas must be the same in order to be stored within the same index. 
No source ID is stored automatically. Therefore, in order to be able to tell what source the matched document came from, you will need to store some additional information yourself. Two typical approaches include: mangling the document ID and encoding the source ID in it: source src1 { sql_query = SELECT id*10+1, ... FROM table1 ... } source src2 { sql_query = SELECT id*10+2, ... FROM table2 ... } storing the source ID simply as an attribute: source src1 { sql_query = SELECT id, 1 AS source_id FROM table1 sql_attr_uint = source_id ... } source src2 { sql_query = SELECT id, 2 AS source_id FROM table2 sql_attr_uint = source_id ... } Example: source = srcpart1 source = srcpart2 source = srcpart3 path Index files path and file name (without extension). Mandatory. Path specifies both directory and file name, but without extension. indexer will append different extensions to this path when generating final names for both permanent and temporary index files. Permanent data files have several different extensions starting with '.sp'; temporary files' extensions start with '.tmp'. It is safe to remove .tmp* files if indexer fails to remove them automatically. For reference, different index files store the following data: .spa stores document attributes (used in extern docinfo storage mode only); .spd stores matching document ID lists for each word ID; .sph stores index header information; .spi stores word lists (word IDs and pointers to .spd file); .spk stores kill-lists; .spm stores MVA data; .spp stores hit (aka posting, aka word occurrence) lists for each word ID; .sps stores string attribute data. Example: path = /var/data/test1 docinfo Document attribute values (docinfo) storage mode. Optional, default is 'extern'. Known values are 'none', 'extern' and 'inline'. Docinfo storage mode defines how exactly docinfo will be physically stored on disk and in RAM. "none" means that there will be no docinfo at all (ie. no attributes). Normally you do not need to set "none" explicitly because Sphinx will automatically select "none" when there are no attributes configured. "inline" means that the docinfo will be stored in the .spd file, along with the document ID lists. "extern" means that the docinfo will be stored separately (externally) from document ID lists, in a special .spa file. Basically, externally stored docinfo must be kept in RAM when querying, for performance reasons. So in some cases "inline" might be the only option. However, such cases are infrequent, and docinfo defaults to "extern". Refer to the attributes section for an in-depth discussion and RAM usage estimates. Example: docinfo = inline mlock Memory locking for cached data. Optional, default is 0 (do not call mlock()). For search performance, searchd preloads a copy of .spa and .spi files in RAM, and keeps that copy in RAM at all times. But if there are no searches on the index for some time, there are no accesses to that cached copy, and the OS might decide to swap it out to disk. The first queries to such a "cooled down" index will cause swap-in, and their latency will suffer. Setting the mlock option to 1 makes Sphinx lock the physical RAM used for that cached data using the mlock(2) system call, and that prevents swapping (see man 2 mlock for details). mlock(2) is a privileged call, so it will require searchd to be either run from the root account, or be granted enough privileges otherwise. If mlock() fails, a warning is emitted, but the index continues working. Example: mlock = 1 morphology A list of morphology preprocessors to apply. Optional, default is empty (do not apply any preprocessor). 
Morphology preprocessors can be applied to the words being indexed to replace different forms of the same word with the base, normalized form. For instance, the English stemmer will normalize both "dogs" and "dog" to "dog", making search results for both searches the same. Built-in preprocessors include the English stemmer, the Russian stemmer (that supports UTF-8 and Windows-1251 encodings), Soundex, and Metaphone. The latter two replace the words with special phonetic codes that are equal if the words are phonetically close. Additional stemmers provided by the Snowball project's libstemmer library can be enabled at compile time using the respective configure option. The built-in English and Russian stemmers should be faster than their libstemmer counterparts, but can produce slightly different results, because they are based on an older version. The Metaphone implementation is based on the Double Metaphone algorithm and indexes the primary code. Built-in values that are available for use in the morphology option are as follows: none - do not perform any morphology processing; stem_en - apply Porter's English stemmer; stem_ru - apply Porter's Russian stemmer; stem_enru - apply Porter's English and Russian stemmers; stem_cz - apply Czech stemmer; soundex - replace keywords with their SOUNDEX code; metaphone - replace keywords with their METAPHONE code. Additional values provided by libstemmer are in 'libstemmer_XXX' format, where XXX is the libstemmer algorithm codename (refer to libstemmer_c/libstemmer/modules.txt for a complete list). Several stemmers can be specified (comma-separated). They will be applied to incoming words in the order they are listed, and the processing will stop once one of the stemmers actually modifies the word. Also, when the wordforms feature is enabled, the word will be looked up in the word forms dictionary first, and if there is a matching entry in the dictionary, stemmers will not be applied at all. Or in other words, wordforms can be used to implement stemming exceptions. Example: morphology = stem_en, libstemmer_sv dict The keywords dictionary type. Known values are 'crc' and 'keywords'. Optional, default is 'crc'. Introduced in version 2.0.1-beta. CRC dictionary mode (dict=crc) is the default dictionary type in Sphinx, and the only one available until version 2.0.1-beta. Keywords dictionary mode (dict=keywords) was added in 2.0.1-beta, primarily to (greatly) reduce indexing impact and enable substring searches on huge collections. It also eliminates the chance of CRC32 collisions. In 2.0.1-beta, that mode was only supported for disk indexes. Starting with 2.0.2-beta, RT indexes are also supported. CRC dictionaries never store the original keyword text in the index. Instead, keywords are replaced with their control sum value (either CRC32 or FNV64, depending on how Sphinx was built) both when searching and indexing, and that value is used internally in the index. That approach has two drawbacks. First, in the CRC32 case there is a chance of control sum collision between several pairs of different keywords, growing quadratically with the number of unique keywords in the index. (The FNV64 case is unaffected in practice, as a chance of a single FNV64 collision in a dictionary of 1 billion entries is approximately 1:16, or 6.25 percent. And most dictionaries will be much more compact than a billion keywords, as a typical spoken human language has in the region of 1 to 10 million word forms.) Second, and more importantly, substring searches are not directly possible with control sums. 
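As a rough illustration of the first drawback (a standard birthday-paradox estimate, offered here only as an unofficial back-of-the-envelope figure): with n unique keywords in a CRC32 dictionary, the expected number of colliding keyword pairs is approximately

    expected CRC32 collisions ~= n * (n - 1) / (2 * 2^32)

which works out to roughly 116 expected collisions at n = 1 million unique keywords and roughly 11,600 at n = 10 million - the quadratic growth mentioned above.
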
Sphinx alleviates the substring limitation by pre-indexing all the possible substrings as separate keywords (see the min_prefix_len and min_infix_len directives). That actually has an added benefit of matching substrings in the quickest way possible. But at the same time pre-indexing all substrings grows the index size a lot (factors of 3-10x and even more would not be unusual) and impacts the indexing time respectively, rendering substring searches on big indexes rather impractical. The keywords dictionary, introduced in 2.0.1-beta, fixes both of these drawbacks. It stores the keywords in the index and performs search-time wildcard expansion. For example, a search for a 'test*' prefix could internally expand to a 'test|tests|testing' query based on the dictionary contents. That expansion is fully transparent to the application, except that the separate per-keyword statistics for all the actually matched keywords would now also be reported. Indexing with the keywords dictionary should be 1.1x to 1.3x slower than regular, non-substring indexing - but several times faster than substring indexing (either prefix or infix). Index size should only be slightly bigger than that of the regular non-substring index, with a 1 to 10 percent total difference. Regular keyword searching time should be very close to or identical across all three discussed index kinds (CRC non-substring, CRC substring, keywords). Substring searching time can vary greatly depending on how many actual keywords match the given substring (in other words, how many keywords the search term expands into). The maximum number of keywords matched is restricted by the expansion_limit directive. Essentially, keywords and CRC dictionaries represent two different trade-offs for substring searching. You can choose to either sacrifice indexing time and index size in favor of top-speed worst-case searches (CRC dictionary), or only slightly impact indexing time but sacrifice worst-case searching time when the prefix expands into very many keywords (keywords dictionary). Example: dict = keywords index_sp Whether to detect and index sentence and paragraph boundaries. Optional, default is 0 (do not detect and index). Introduced in version 2.0.1-beta. This directive enables sentence and paragraph boundary indexing. It's required for the SENTENCE and PARAGRAPH operators to work. Sentence boundary detection is based on plain text analysis, so you only need to set index_sp = 1 to enable it. Paragraph detection is however based on HTML markup, and happens in the HTML stripper. So to index paragraph locations you also need to enable the stripper by specifying html_strip = 1. Both types of boundaries are detected based on a few built-in rules enumerated just below. Sentence boundary detection rules are as follows. Question and exclamation marks (? and !) are always a sentence boundary. Trailing dot (.) is a sentence boundary, except: When followed by a letter. That's considered a part of an abbreviation (as in "S.T.A.L.K.E.R" or "Goldman Sachs S.p.A."). When followed by a comma. That's considered an abbreviation followed by a comma (as in "Telecom Italia S.p.A., founded in 1994"). When followed by a space and a small letter. That's considered an abbreviation within a sentence (as in "News Corp. announced in February"). When preceded by a space and a capital letter, and followed by a space. That's considered a middle initial (as in "John D. Doe"). Paragraph boundaries are inserted at every block-level HTML tag. 
Namely, those are (as taken from the HTML 4 standard) ADDRESS, BLOCKQUOTE, CAPTION, CENTER, DD, DIV, DL, DT, H1, H2, H3, H4, H5, LI, MENU, OL, P, PRE, TABLE, TBODY, TD, TFOOT, TH, THEAD, TR, and UL. Both sentences and paragraphs increment the keyword position counter by 1. Example: index_sp = 1 index_zones A list of in-field HTML/XML zones to index. Optional, default is empty (do not index zones). Introduced in version 2.0.1-beta. Zones can be formally defined as follows. Everything between an opening and a matching closing tag is called a span, and the aggregate of all spans sharing the same tag name is called a zone. For instance, everything between the occurrences of <H1> and </H1> in the document field belongs to the H1 zone. Zone indexing, enabled by the index_zones directive, is an optional extension of the HTML stripper. So it will also require that the stripper is enabled (with html_strip = 1). The value of index_zones should be a comma-separated list of those tag names and wildcards (ending with a star) that should be indexed as zones. Zones can nest and overlap arbitrarily. The only requirement is that every opening tag has a matching closing tag. You can also have an arbitrary number of both zones (as in unique zone names, such as H1) and spans (all the occurrences of those H1 tags) in a document. Once indexed, zones can then be used for matching with the ZONE operator. Example: index_zones = h*, th, title min_stemming_len Minimum word length at which to enable stemming. Optional, default is 1 (stem everything). Introduced in version 0.9.9-rc1. Stemmers are not perfect, and might sometimes produce undesired results. For instance, running the "gps" keyword through the Porter stemmer for English results in "gp", which is not really the intent. The min_stemming_len feature lets you suppress stemming based on the source word length, ie. to avoid stemming too short words. Keywords that are shorter than the given threshold will not be stemmed. Note that keywords that are exactly as long as specified will be stemmed. So in order to avoid stemming 3-character keywords, you should specify 4 for the value. For finer-grained control, refer to the wordforms feature. Example: min_stemming_len = 4 stopwords Stopword files list (space separated). Optional, default is empty. Stopwords are the words that will not be indexed. Typically you'd put the most frequent words in the stopwords list because they do not add much value to search results but consume a lot of resources to process. You can specify several file names, separated by spaces. All the files will be loaded. The stopwords file format is simple plain text. The encoding must match the index encoding specified in charset_type. File data will be tokenized with respect to charset_table settings, so you can use the same separators as in the indexed data. The stemmers will also be applied when parsing the stopwords file. While stopwords are not indexed, they still do affect the keyword positions. For instance, assume that "the" is a stopword, that document 1 contains the line "in office", and that document 2 contains "in the office". Searching for "in office" as an exact phrase will only return the first document, as expected, even though "the" in the second one is stopped. Stopwords files can either be created manually, or semi-automatically. indexer provides a mode that creates a frequency dictionary of the index, sorted by the keyword frequency; see the --buildstops and --buildfreqs switches in the indexer command reference. Top keywords from that dictionary can usually be used as stopwords. 
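As a sketch of that semi-automatic route (the index and file names below are hypothetical; see the indexer command reference for the exact switches and their semantics):

    indexer myindex --buildstops word_freq.txt 1000 --buildfreqs

Assuming the switches behave as described in the indexer reference, this dumps the 1000 most frequent keywords of 'myindex' into word_freq.txt together with their occurrence counts; the top entries can then be reviewed, trimmed, and saved as a stopwords file.
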
Example: stopwords = /usr/local/sphinx/data/stopwords.txt stopwords = stopwords-ru.txt stopwords-en.txt wordforms Word forms dictionary. Optional, default is empty. Word forms are applied after tokenizing the incoming text by charset_table rules. They essentially let you replace one word with another. Normally, that would be used to bring different word forms to a single normal form (eg. to normalize all the variants such as "walks", "walked", "walking" to the normal form "walk"). It can also be used to implement stemming exceptions, because stemming is not applied to words found in the forms list. Dictionaries are used to normalize incoming words both during indexing and searching. Therefore, to pick up changes in the wordforms file it's required to reindex and restart searchd. Word forms support in Sphinx is designed to support big dictionaries well. They moderately affect indexing speed: for instance, a dictionary with 1 million entries slows down indexing about 1.5 times. Searching speed is not affected at all. Additional RAM impact is roughly equal to the dictionary file size, and dictionaries are shared across indexes: ie. if the very same 50 MB wordforms file is specified for 10 different indexes, additional searchd RAM usage will be about 50 MB. The dictionary file should be in a simple plain text format. Each line should contain source and destination word forms, in exactly the same encoding as specified in charset_type, separated by a "greater than" sign. Rules from the charset_table will be applied when the file is loaded. So basically it's as case sensitive as your other full-text indexed data, ie. typically case insensitive. Here's the file contents sample: walks > walk walked > walk walking > walk There is a bundled spelldump utility that helps you create a dictionary file in the format Sphinx can read from source .dict and .aff dictionary files in ispell or MySpell format (as bundled with OpenOffice). Starting with version 0.9.9-rc1, you can map several source words to a single destination word. Because the work happens on tokens, not the source text, differences in whitespace and markup are ignored. core 2 duo > c2d e6600 > c2d core 2duo > c2d Notice however that the destination wordforms are still always interpreted as a single keyword! Having a mapping like "St John > Saint John" will result in not matching "St John" when searching for "Saint" or "John", because the destination keyword will be "Saint John" with a space character in it (and it's barely possible to input a destination keyword with a space). Example: wordforms = /usr/local/sphinx/data/wordforms.txt exceptions Tokenizing exceptions file. Optional, default is empty. Exceptions allow mapping one or more tokens (including tokens with characters that would normally be excluded) to a single keyword. They are similar to wordforms in that they also perform mapping, but have a number of important differences. A short summary of the differences is as follows: exceptions are case sensitive, wordforms are not; exceptions can use special characters that are not in charset_table, wordforms fully obey charset_table; exceptions can underperform on huge dictionaries, wordforms handle millions of entries well. 
The expected file format is also plain text, with one line per exception, and the line format is as follows: map-from-tokens => map-to-token Example file: AT & T => AT&T AT&T => AT&T Standarten Fuehrer => standartenfuhrer Standarten Fuhrer => standartenfuhrer MS Windows => ms windows Microsoft Windows => ms windows C++ => cplusplus c++ => cplusplus C plus plus => cplusplus All tokens here are case sensitive: they will not be processed by charset_table rules. Thus, with the example exceptions file above, "At&t" text will be tokenized as two keywords "at" and "t", because of lowercase letters. On the other hand, "AT&T" will match exactly and produce a single "AT&T" keyword. Note that this map-to keyword is a) always interpreted as a single word, and b) is both case and space sensitive! In our sample, "ms windows" query will not match the document with "MS Windows" text. The query will be interpreted as a query for two keywords, "ms" and "windows". And what "MS Windows" gets mapped to is a single keyword "ms windows", with a space in the middle. On the other hand, "standartenfuhrer" will retrieve documents with "Standarten Fuhrer" or "Standarten Fuehrer" contents (capitalized exactly like this), or any capitalization variant of the keyword itself, eg. "staNdarTenfUhreR". (It won't catch "standarten fuhrer", however: this text does not match any of the listed exceptions because of case sensitivity, and gets indexed as two separate keywords.) Whitespace in the map-from tokens list matters, but its amount does not. Any amount of the whitespace in the map-from list will match any other amount of whitespace in the indexed document or query. For instance, the "AT & T" map-from token will match "AT    &  T" text, whatever the amount of space in both the map-from part and the indexed text. Such text will therefore be indexed as a special "AT&T" keyword, thanks to the very first entry from the sample. Exceptions also allow capturing special characters (that are exceptions from general charset_table rules; hence the name). Assume that you generally do not want to treat '+' as a valid character, but still want to be able to search for some exceptions from this rule such as 'C++'. The sample above will do just that, totally independent of what characters are in the table and what are not. Exceptions are applied to raw incoming document and query data during indexing and searching respectively. Therefore, to pick up changes in the file it's required to reindex and restart searchd. Example: exceptions = /usr/local/sphinx/data/exceptions.txt min_word_len Minimum indexed word length. Optional, default is 1 (index everything). Only those words that are not shorter than this minimum will be indexed. For instance, if min_word_len is 4, then 'the' won't be indexed, but 'they' will be. Example: min_word_len = 4 charset_type Character set encoding type. Optional, default is 'sbcs'. Known values are 'sbcs' and 'utf-8'. Different encodings have different methods for mapping their internal character codes into specific byte sequences. The two most common methods in use today are single-byte encoding and UTF-8. Their corresponding charset_type values are 'sbcs' (stands for Single Byte Character Set) and 'utf-8'. The selected encoding type will be used everywhere where the index is used: when indexing the data, when parsing the query against this index, when generating snippets, etc. 
Note that while 'utf-8' implies that the decoded values must be treated as Unicode codepoint numbers, there's a family of 'sbcs' encodings that may in turn treat different byte values differently, and that should be properly reflected in your charset_table settings. For example, the same byte value of 224 (0xE0 hex) maps to different Russian letters depending on whether koi-8r or windows-1251 encoding is used. Example: charset_type = utf-8 charset_table Accepted characters table, with case folding rules. Optional, default value depends on charset_type value. charset_table is the main workhorse of the Sphinx tokenizing process, ie. the process of extracting keywords from document text or query text. It controls what characters are accepted as valid and what are not, and how the accepted characters should be transformed (eg. should the case be removed or not). You can think of charset_table as a big table that has a mapping for each and every one of the 100K+ characters in Unicode (or as a small 256-character table if you're using SBCS). By default, every character maps to 0, which means that it does not occur within keywords and should be treated as a separator. Once mentioned in the table, a character is mapped to some other character (most frequently, either to itself or to a lowercase letter), and is treated as a valid keyword part. The expected value format is a comma-separated list of mappings. The two simplest mappings simply declare a character as valid, and map a single character to another single character, respectively. But specifying the whole table in such a form would result in bloated and barely manageable specifications. So there are several syntax shortcuts that let you map ranges of characters at once. The complete list is as follows: A->a Single char mapping, declares source char 'A' as allowed to occur within keywords and maps it to destination char 'a' (but does not declare 'a' as allowed). A..Z->a..z Range mapping, declares all chars in the source range as allowed and maps them to the destination range. Does not declare the destination range as allowed. Also checks ranges' lengths (the lengths must be equal). a Stray char mapping, declares a character as allowed and maps it to itself. Equivalent to a->a single char mapping. a..z Stray range mapping, declares all characters in the range as allowed and maps them to themselves. Equivalent to a..z->a..z range mapping. A..Z/2 Checkerboard range map. Maps every pair of chars to the second char. More formally, declares odd characters in the range as allowed and maps them to the even ones; also declares even characters as allowed and maps them to themselves. For instance, A..Z/2 is equivalent to A->B, B->B, C->D, D->D, ..., Y->Z, Z->Z. This mapping shortcut is helpful for a number of Unicode blocks where uppercase and lowercase letters go in such an interleaved order instead of contiguous chunks. Control characters with codes from 0 to 31 are always treated as separators. Characters with codes 32 to 127, ie. 7-bit ASCII characters, can be used in the mappings as is. To avoid configuration file encoding issues, 8-bit ASCII characters and Unicode characters must be specified in U+xxx form, where 'xxx' is the hexadecimal codepoint number. This form can also be used for 7-bit ASCII characters to encode special ones: eg. use U+20 to encode space, U+2E to encode dot, U+2C to encode comma. 
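As an additional illustration of the shortcut syntax and the U+xxx notation (a hypothetical table given purely to demonstrate the directive format, not a recommended default), the following accepts digits and Latin letters with case folding, plus the dash and the plus sign as keyword characters:

    charset_table = 0..9, a..z, A..Z->a..z, U+2D, U+2B

Here 0..9 and a..z are stray range mappings, A..Z->a..z is a range mapping with case folding, and U+2D (the dash) and U+2B (the plus sign) are stray char mappings written in U+xxx form.
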
Example: # 'sbcs' defaults for English and Russian charset_table = 0..9, A..Z->a..z, _, a..z, \ U+A8->U+B8, U+B8, U+C0..U+DF->U+E0..U+FF, U+E0..U+FF # 'utf-8' defaults for English and Russian charset_table = 0..9, A..Z->a..z, _, a..z, \ U+410..U+42F->U+430..U+44F, U+430..U+44F ignore_chars Ignored characters list. Optional, default is empty. Useful in the cases when some characters, such as the soft hyphenation mark (U+00AD), should be not just treated as separators but rather fully ignored. For example, if '-' is simply not in the charset_table, "abc-def" text will be indexed as "abc" and "def" keywords. On the contrary, if '-' is added to the ignore_chars list, the same text will be indexed as a single "abcdef" keyword. The syntax is the same as for charset_table, but it's only allowed to declare characters, and not allowed to map them. Also, the ignored characters must not be present in charset_table. Example: ignore_chars = U+AD min_prefix_len Minimum word prefix length to index. Optional, default is 0 (do not index prefixes). Prefix indexing allows implementing wildcard searching with 'wordstart*' wildcards (refer to the enable_star option for details on wildcard syntax). When the minimum prefix length is set to a positive number, indexer will index all the possible keyword prefixes (ie. word beginnings) in addition to the keywords themselves. Too short prefixes (below the minimum allowed length) will not be indexed. For instance, indexing a keyword "example" with min_prefix_len=3 will result in indexing "exa", "exam", "examp", "exampl" prefixes along with the word itself. Searches against such an index for "exam" will match documents that contain the word "example", even if they do not contain "exam" by itself. However, indexing prefixes will make the index grow significantly (because of many more indexed keywords), and will degrade both indexing and searching times. There's no automatic way to rank perfect word matches higher in a prefix index, but there are a number of tricks to achieve that. First, you can set up two indexes, one with prefix indexing and one without it, search through both, and use the SetIndexWeights() call to combine weights. Second, you can enable star-syntax and rewrite your extended-mode queries: # in sphinx.conf enable_star = 1 // in query $cl->Query ( "( keyword | keyword* ) other keywords" ); Example: min_prefix_len = 3 min_infix_len Minimum infix length to index. Optional, default is 0 (do not index infixes). Infix indexing allows implementing wildcard searching with 'start*', '*end', and '*middle*' wildcards (refer to the enable_star option for details on wildcard syntax). When the minimum infix length is set to a positive number, indexer will index all the possible keyword infixes (ie. substrings) in addition to the keywords themselves. Too short infixes (below the minimum allowed length) will not be indexed. For instance, indexing a keyword "test" with min_infix_len=2 will result in indexing "te", "es", "st", "tes", "est" infixes along with the word itself. Searches against such an index for "es" will match documents that contain the word "test", even if they do not contain "es" by itself. However, indexing infixes will make the index grow significantly (because of many more indexed keywords), and will degrade both indexing and searching times. There's no automatic way to rank perfect word matches higher in an infix index, but the same tricks as with prefix indexes can be applied. Example: min_infix_len = 3 prefix_fields The list of full-text fields to limit prefix indexing to. 
Optional, default is empty (index all fields in prefix mode). Because prefix indexing impacts both indexing and searching performance, it might be desired to limit it to specific full-text fields only: for instance, to provide prefix searching through URLs, but not through page contents. prefix_fields specifies what fields will be prefix-indexed; all other fields will be indexed in normal mode. The value format is a comma-separated list of field names. Example: prefix_fields = url, domain infix_fields The list of full-text fields to limit infix indexing to. Optional, default is empty (index all fields in infix mode). Similar to prefix_fields, but lets you limit infix-indexing to the given fields. Example: infix_fields = url, domain enable_star Enables star-syntax (or wildcard syntax) when searching through prefix/infix indexes. Optional, default is 0 (do not use wildcard syntax), for compatibility with 0.9.7. Known values are 0 and 1. This feature enables "star-syntax", or wildcard syntax, when searching through indexes which were created with prefix or infix indexing enabled. It only affects searching; so it can be changed without reindexing by simply restarting searchd. The default value is 0, which means to disable star-syntax and treat all keywords as prefixes or infixes respectively, depending on the indexing-time min_prefix_len and min_infix_len settings. The value of 1 means that the star ('*') can be used at the start and/or the end of the keyword. The star will match zero or more characters. For example, assume that the index was built with infixes and that enable_star is 1. Searching should work as follows: "abcdef" query will match only those documents that contain the exact "abcdef" word in them. "abc*" query will match those documents that contain any words starting with "abc" (including the documents which contain the exact "abc" word only); "*cde*" query will match those documents that contain any words which have "cde" characters in any part of the word (including the documents which contain the exact "cde" word only). "*def" query will match those documents that contain any words ending with "def" (including the documents that contain the exact "def" word only). Example: enable_star = 1 ngram_len N-gram lengths for N-gram indexing. Optional, default is 0 (disable n-gram indexing). Known values are 0 and 1 (other lengths to be implemented). N-grams provide basic CJK (Chinese, Japanese, Korean) support for unsegmented texts. The issue with CJK searching is that there may be no clear separators between the words. Ideally, the texts would be filtered through a special program called a segmenter that would insert separators in proper locations. However, segmenters are slow and error prone, and it's common to index contiguous groups of N characters, or n-grams, instead. When this feature is enabled, streams of CJK characters are indexed as N-grams. For example, if the incoming text is "ABCDEF" (where A to F represent some CJK characters) and length is 1, it will be indexed as if it were "A B C D E F". (With length equal to 2, it would produce "AB BC CD DE EF"; but only 1 is supported at the moment.) Only those characters that are listed in the ngram_chars table will be split this way; other ones will not be affected. Note that if the search query is segmented, ie. there are separators between individual words, then wrapping the words in quotes and using extended mode will result in proper matches being found even if the text was not segmented. For instance, assume that the original query is BC DEF. 
After wrapping in quotes on the application side, it should look like "BC" "DEF" (with quotes). This query will be passed to Sphinx and internally split into 1-grams too, resulting in a "B C" "D E F" query, still with quotes that are the phrase matching operator. And it will match the text even though there were no separators in the text. Even if the search query is not segmented, Sphinx should still produce good results, thanks to phrase-based ranking: it will pull closer phrase matches (which in the case of N-gram CJK words can mean closer multi-character word matches) to the top. Example: ngram_len = 1 ngram_chars N-gram characters list. Optional, default is empty. To be used in conjunction with ngram_len, this list defines characters, sequences of which are subject to N-gram extraction. Words comprised of other characters will not be affected by the N-gram indexing feature. The value format is identical to charset_table. Example: ngram_chars = U+3000..U+2FA1F phrase_boundary Phrase boundary characters list. Optional, default is empty. This list controls what characters will be treated as phrase boundaries, in order to adjust word positions and enable phrase-level search emulation through proximity search. The syntax is similar to charset_table. Mappings are not allowed and the boundary characters must not overlap with anything else. On a phrase boundary, an additional word position increment (specified by phrase_boundary_step) will be added to the current word position. This enables phrase-level searching through proximity queries: words in different phrases will be guaranteed to be more than phrase_boundary_step distance away from each other; so proximity search within that distance will be equivalent to phrase-level search. A phrase boundary condition will be raised if and only if such a character is followed by a separator; this is to avoid abbreviations such as S.T.A.L.K.E.R or URLs being treated as several phrases. Example: phrase_boundary = ., ?, !, U+2026 # horizontal ellipsis phrase_boundary_step Phrase boundary word position increment. Optional, default is 0. On a phrase boundary, the current word position will be additionally incremented by this number. See phrase_boundary for details. Example: phrase_boundary_step = 100 html_strip Whether to strip HTML markup from incoming full-text data. Optional, default is 0. Known values are 0 (disable stripping) and 1 (enable stripping). Both HTML tags and entities are considered markup and get processed. HTML tags are removed, their contents (i.e., everything between <P> and </P>) are left intact by default. You can choose to keep and index attributes of the tags (e.g., the HREF attribute in an A tag, or ALT in an IMG one). Several well-known inline tags are completely removed, all other tags are treated as block level and replaced with whitespace. For example, 'te<B>st</B>' text will be indexed as a single keyword 'test', however, 'te<P>st</P>' will be indexed as two keywords 'te' and 'st'. Known inline tags are as follows: A, B, I, S, U, BASEFONT, BIG, EM, FONT, IMG, LABEL, SMALL, SPAN, STRIKE, STRONG, SUB, SUP, TT. HTML entities get decoded and replaced with corresponding UTF-8 characters. The stripper supports both numeric forms (such as &#239;) and text forms (such as &oacute; or &nbsp;). All entities specified by the HTML4 standard are supported. Stripping does not work with the xmlpipe source type (it's suggested to upgrade to xmlpipe2 anyway). 
It should work with properly formed HTML and XHTML, but, just like most browsers, may produce unexpected results on malformed input (such as HTML with stray <'s or unclosed >'s). Only the tags themselves, and also HTML comments, are stripped. To strip the contents of the tags too (eg. to strip embedded scripts), see the html_remove_elements option. There are no restrictions on tag names; ie. everything that looks like a valid tag start, or end, or a comment will be stripped. Example: html_strip = 1 html_index_attrs A list of markup attributes to index when stripping HTML. Optional, default is empty (do not index markup attributes). Specifies HTML markup attributes whose contents should be retained and indexed even though other HTML markup is stripped. The format is a per-tag enumeration of indexable attributes, as shown in the example below. Example: html_index_attrs = img=alt,title; a=title; html_remove_elements A list of HTML elements for which to strip contents along with the elements themselves. Optional, default is an empty string (do not strip contents of any elements). This feature allows stripping element contents, ie. everything that is between the opening and the closing tags. It is useful to remove embedded scripts, CSS, etc. The short tag form for empty elements (ie. <br />) is properly supported; ie. the text that follows such a tag will not be removed. The value is a comma-separated list of element (tag) names whose contents should be removed. Tag names are case insensitive. Example: html_remove_elements = style, script local Local index declaration in the distributed index. Multi-value, optional, default is empty. This setting is used to declare local indexes that will be searched when the given distributed index is searched. Many local indexes can be declared per distributed index. Any local index can also be mentioned several times in different distributed indexes. Note that by default all local indexes will be searched sequentially, utilizing only 1 CPU or core. To parallelize processing of the local parts in the distributed index, you should use the dist_threads directive. Before dist_threads, there also was a legacy solution of configuring searchd to query itself instead of using local indexes (refer to the agent directive for the details). However, that creates redundant CPU and network load, and using local indexes with dist_threads is now strongly suggested instead. Example: local = chunk1 local = chunk2 agent Remote agent declaration in the distributed index. Multi-value, optional, default is empty. This setting is used to declare remote agents that will be searched when the given distributed index is searched. The agents can be thought of as network pointers that specify host, port, and index names. In the basic case, agents would correspond to remote physical machines. Strictly speaking, that is not always the case: you can point several agents to the same remote machine; or you can even point agents to the very same single instance of searchd (in order to utilize many CPUs or cores). The value format is as follows: agent = specification:remote-indexes-list specification = hostname ":" port | path Where 'hostname' is the remote host name; 'port' is the remote TCP port; 'path' is a Unix-domain socket path; and 'remote-indexes-list' is a comma-separated list of remote index names. All agents will be searched in parallel. However, all indexes specified for a given agent will be searched sequentially in this agent. This lets you fine-tune the configuration to the hardware. 
For instance, if two remote indexes are stored on the same physical HDD, it's better to configure one agent with several sequentially searched indexes to avoid HDD stepping. If they are stored on different HDDs, having two agents will be advantageous, because the work will be fully parallelized. The same applies to CPUs; though the CPU performance impact caused by two processes stepping on each other is somewhat smaller and frequently can be ignored altogether. On machines with many CPUs and/or HDDs, agents can be pointed to the same machine to utilize all of the hardware in parallel and reduce query latency. There is no need to set up several searchd instances for that; it's legal to configure the instance to contact itself. Here's an example setup, intended for a 4-CPU machine, that will use up to 4 CPUs in parallel to process each query: index dist { type = distributed local = chunk1 agent = localhost:9312:chunk2 agent = localhost:9312:chunk3 agent = localhost:9312:chunk4 } Note how one of the chunks is searched locally and the same instance of searchd queries itself to launch searches through three other ones in parallel. Example: agent = localhost:9312:chunk2 # contact itself agent = /var/run/searchd.s:chunk2 agent = searchbox2:9312:chunk3,chunk4 # search remote indexes agent_blackhole Remote blackhole agent declaration in the distributed index. Multi-value, optional, default is empty. Introduced in version 0.9.9-rc1. agent_blackhole lets you fire-and-forget queries to remote agents. That is useful for debugging (or just testing) production clusters: you can set up a separate debugging/testing searchd instance, and forward the requests to this instance from your production master (aggregator) instance without interfering with production work. The master searchd will attempt to connect to and query the blackhole agent normally, but it will neither wait nor process any responses. Also, all network errors on blackhole agents will be ignored. The value format is completely identical to the regular agent directive. Example: agent_blackhole = testbox:9312:testindex1,testindex2 agent_connect_timeout Remote agent connection timeout, in milliseconds. Optional, default is 1000 (ie. 1 second). When connecting to remote agents, searchd will wait at most this much time for the connect() call to complete successfully. If the timeout is reached but connect() does not complete, and retries are enabled, a retry will be initiated. Example: agent_connect_timeout = 300 agent_query_timeout Remote agent query timeout, in milliseconds. Optional, default is 3000 (ie. 3 seconds). After connection, searchd will wait at most this much time for remote queries to complete. This timeout is fully separate from the connection timeout; so the maximum possible delay caused by a remote agent equals the sum of agent_connect_timeout and agent_query_timeout. Queries will not be retried if this timeout is reached; a warning will be produced instead. Example: agent_query_timeout = 10000 # our query can be long, allow up to 10 sec preopen Whether to pre-open all index files, or open them per each query. Optional, default is 0 (do not preopen). This option tells searchd that it should pre-open all index files on startup (or rotation) and keep them open while it runs. Currently, the default mode is not to pre-open the files (this may change in the future). Preopened indexes take a few (currently 2) file descriptors per index. 
However, they save on per-query open() calls; and also they are invulnerable to subtle race conditions that may happen during index rotation under high load. On the other hand, when serving many indexes (100s to 1000s), it still might be desired to open them on a per-query basis in order to save file descriptors. This directive does not affect indexer in any way, it only affects searchd. Example: preopen = 1 ondisk_dict Whether to keep the dictionary file (.spi) for this index on disk, or precache it in RAM. Optional, default is 0 (precache in RAM). Introduced in version 0.9.9-rc1. The dictionary (.spi) can be either kept in RAM or on disk. The default is to fully cache it in RAM. That improves performance, but might cause too much RAM pressure, especially if prefixes or infixes were used. Enabling ondisk_dict results in 1 additional disk IO per keyword per query, but reduces the memory footprint. This directive does not affect indexer in any way, it only affects searchd. Example: ondisk_dict = 1 inplace_enable Whether to enable in-place index inversion. Optional, default is 0 (use separate temporary files). Introduced in version 0.9.9-rc1. inplace_enable greatly reduces the indexing disk footprint, at the cost of slightly slower indexing (it uses around 2x less disk, but yields around 90-95% the original performance). Indexing involves two major phases. The first phase collects, processes, and partially sorts documents by keyword, and writes the intermediate result to temporary files (.tmp*). The second phase fully sorts the documents, and creates the final index files. Thus, rebuilding a production index on the fly involves around 3x peak disk footprint: 1st copy for the intermediate temporary files, 2nd copy for the newly constructed index, and 3rd copy for the old index that will be serving production queries in the meantime. (Intermediate data is comparable in size to the final index.) That might be too much disk footprint for big data collections, and inplace_enable allows reducing it. When enabled, it reuses the temporary files, outputs the final data back to them, and renames them on completion. However, this might require additional temporary data chunk relocation, which is where the performance impact comes from. This directive does not affect searchd in any way, it only affects indexer. Example: inplace_enable = 1 inplace_hit_gap In-place inversion fine-tuning option. Controls the preallocated hitlist gap size. Optional, default is 0. Introduced in version 0.9.9-rc1. This directive does not affect searchd in any way, it only affects indexer. Example: inplace_hit_gap = 1M inplace_docinfo_gap In-place inversion fine-tuning option. Controls the preallocated docinfo gap size. Optional, default is 0. Introduced in version 0.9.9-rc1. This directive does not affect searchd in any way, it only affects indexer. Example: inplace_docinfo_gap = 1M inplace_reloc_factor In-place inversion fine-tuning option. Controls the relocation buffer size within the indexing memory arena. Optional, default is 0.1. Introduced in version 0.9.9-rc1. This directive does not affect searchd in any way, it only affects indexer. Example: inplace_reloc_factor = 0.1 inplace_write_factor In-place inversion fine-tuning option. Controls the in-place write buffer size within the indexing memory arena. Optional, default is 0.1. Introduced in version 0.9.9-rc1. This directive does not affect searchd in any way, it only affects indexer. Example: inplace_write_factor = 0.1 index_exact_words Whether to index the original keywords along with the stemmed/remapped versions. 
Optional, default is 0 (do not index). Introduced in version 0.9.9-rc1. When enabled, it forces indexer to put the raw keywords in the index along with the stemmed versions. That, in turn, enables the exact form operator in the query language to work. This impacts the index size and the indexing time. However, searching performance is not impacted at all. Example: index_exact_words = 1 overshort_step Position increment on overshort (less than min_word_len) keywords. Optional, allowed values are 0 and 1, default is 1. Introduced in version 0.9.9-rc1. This directive does not affect searchd in any way, it only affects indexer. Example: overshort_step = 1 stopword_step Position increment on stopwords. Optional, allowed values are 0 and 1, default is 1. Introduced in version 0.9.9-rc1. This directive does not affect searchd in any way, it only affects indexer. Example: stopword_step = 1 hitless_words Hitless words list. Optional, allowed values are 'all', or a list file name. Introduced in version 1.10-beta. By default, a Sphinx full-text index stores not only a list of matching documents for every given keyword, but also a list of its in-document positions (aka hitlist). Hitlists enable phrase, proximity, strict order and other advanced types of searching, as well as phrase proximity ranking. However, hitlists for specific frequent keywords (that can not be stopped for some reason despite being frequent) can get huge and thus slow to process while querying. Also, in some cases we might only care about boolean keyword matching, and never need position-based searching operators (such as phrase matching) nor phrase ranking. hitless_words lets you create indexes that either do not have positional information (hitlists) at all, or skip it for specific keywords. A hitless index will generally use less space than the respective regular index (about 1.5x can be expected). Both indexing and searching should be faster, at the cost of missing positional query and ranking support. When searching, positional queries (eg. phrase queries) will be automatically converted to the respective non-positional (document-level) or combined queries. For instance, if keywords "hello" and "world" are hitless, the "hello world" phrase query will be converted to a (hello & world) bag-of-words query, matching all documents that mention either of the keywords but not necessarily the exact phrase. And if, in addition, keywords "simon" and "says" are not hitless, "simon says hello world" will be converted to a ("simon says" & hello & world) query, matching all documents that contain "hello" and "world" anywhere in the document, and also "simon says" as an exact phrase. Example: hitless_words = all expand_keywords Expand keywords with exact forms and/or stars when possible. Optional, default is 0 (do not expand keywords). Introduced in version 1.10-beta. Queries against indexes with this feature enabled are internally expanded as follows. If the index was built with prefix or infix indexing enabled, every keyword gets internally replaced with a disjunction of the keyword itself and the respective prefix or infix (the keyword with stars). If the index was built with both stemming and index_exact_words enabled, the exact form is also added. 
Here's an example that shows how internal expansion works when all of the above (infixes, stemming, and exact words) are combined:

running -> ( running | *running* | =running )

Expanded queries naturally take longer to complete, but can possibly improve the search quality, as the documents with exact form matches should generally be ranked higher than documents with stemmed or infix matches. Note that the existing query syntax does not allow emulating this kind of expansion, because internal expansion works on keyword level and expands keywords within phrase or quorum operators too (which is not possible through the query syntax). This directive does not affect indexer in any way, it only affects searchd.

Example:
expand_keywords = 1

blend_chars

Blended characters list. Optional, default is empty. Introduced in version 1.10-beta.

Blended characters are indexed both as separators and valid characters. For instance, assume that & is configured as blended and AT&T occurs in an indexed document. Three different keywords will get indexed, namely "at&t", treating blended characters as valid, plus "at" and "t", treating them as separators. Positions for tokens obtained by replacing blended characters with whitespace are assigned as usual, so regular keywords will be indexed just as if there were no blend_chars specified at all. An additional token that mixes blended and non-blended characters will be put at the starting position. For instance, if "AT&T company" occurs at the very beginning of the text field, "at" will be given position 1, "t" position 2, "company" position 3, and "AT&T" will also be given position 1 ("blending" with the opening regular keyword). Thus, querying for either AT&T or just AT will match that document, and querying for "AT T" as a phrase will also match it. Last but not least, phrase query for "AT&T company" will also match it, despite the position gap ("at&t" is at position 1 and "company" at position 3).

Blended characters can overlap with special characters used in query syntax (think of T-Mobile or @twitter). Where possible, the query parser will automatically handle a blended character as blended. For instance, "hello @twitter" within quotes (a phrase operator) would handle the @-sign as blended, because @-syntax for the field operator is not allowed within phrases. Otherwise, the character would be handled as an operator. So you might want to escape the keywords. Starting with version 2.0.1-beta, blended characters can be remapped, so that multiple different blended characters could be normalized into just one base form. This is useful when indexing multiple alternative Unicode codepoints with equivalent glyphs.

Example:
blend_chars = +, &, U+23
blend_chars = +, &->+ # 2.0.1 and above

blend_mode

Blended tokens indexing mode. Optional, default is trim_none. Introduced in version 2.0.1-beta.

By default, tokens that mix blended and non-blended characters get indexed in their entirety. For instance, when both the at-sign and the exclamation mark are in blend_chars, "@dude!" will result in two tokens indexed: "@dude!" (with all the blended characters) and "dude" (without any). Therefore an "@dude" query will not match it. The blend_mode directive adds flexibility to this indexing behavior. It takes a comma-separated list of options.

blend_mode = option [, option [, ...]]
option = trim_none | trim_head | trim_tail | trim_both | skip_pure

Options specify token indexing variants. If multiple options are specified, multiple variants of the same token will be indexed. Regular keywords (resulting from that token by replacing blended with whitespace) are always indexed.
trim_none
Index the entire token.

trim_head
Trim heading blended characters, and index the resulting token.

trim_tail
Trim trailing blended characters, and index the resulting token.

trim_both
Trim both heading and trailing blended characters, and index the resulting token.

skip_pure
Do not index the token if it's purely blended, that is, consists of blended characters only.

Returning to the "@dude!" example above, setting blend_mode = trim_head, trim_tail will result in two tokens being indexed, "@dude" and "dude!". In this particular example, trim_both would have no effect, because trimming both blended characters results in "dude" which is already indexed as a regular keyword. Indexing "@U.S.A." with trim_both (and assuming that the dot is blended too) would result in "U.S.A" being indexed. Last but not least, skip_pure enables you to fully ignore sequences of blended characters only. For example, "one @@@ two" would be indexed exactly as "one two", and match that as a phrase. That is not the case by default because a fully blended token gets indexed and offsets the second keyword position. Default behavior is to index the entire token, equivalent to trim_none.

Example:
blend_mode = trim_tail, skip_pure

rt_mem_limit

RAM chunk size limit. Optional, default is empty. Introduced in version 1.10-beta.

RT index keeps some data in memory (so-called RAM chunk) and also maintains a number of on-disk indexes (so-called disk chunks). This directive lets you control the RAM chunk size. Once there's too much data to keep in RAM, RT index will flush it to disk, activate a newly created disk chunk, and reset the RAM chunk. The limit is pretty strict; RT index should never allocate more memory than it's limited to. The memory is not preallocated either, hence, specifying a 512 MB limit and only inserting 3 MB of data should result in allocating 3 MB, not 512 MB.

Example:
rt_mem_limit = 512M

rt_field

Full-text field declaration. Multi-value, mandatory. Introduced in version 1.10-beta.

Full-text fields to be indexed are declared using the rt_field directive. The names must be unique. The order is preserved; and so field values in INSERT statements without an explicit list of inserted columns will have to be in the same order as configured.

Example:
rt_field = author
rt_field = title
rt_field = content

rt_attr_uint

Unsigned integer attribute declaration. Multi-value (an arbitrary number of attributes is allowed), optional. Declares an unsigned 32-bit attribute. Introduced in version 1.10-beta.

Example:
rt_attr_uint = gid

rt_attr_bigint

BIGINT attribute declaration. Multi-value (an arbitrary number of attributes is allowed), optional. Declares a signed 64-bit attribute. Introduced in version 1.10-beta.

Example:
rt_attr_bigint = guid

rt_attr_float

Floating point attribute declaration. Multi-value (an arbitrary number of attributes is allowed), optional. Declares a single precision, 32-bit IEEE 754 format float attribute. Introduced in version 1.10-beta.

Example:
rt_attr_float = gpa

rt_attr_multi

Multi-valued attribute (MVA) declaration. Declares the UNSIGNED INTEGER (unsigned 32-bit) MVA attribute. Multi-value (ie. there may be more than one such attribute declared), optional. Applies to RT indexes only.

Example:
rt_attr_multi = my_tags

rt_attr_multi_64

Multi-valued attribute (MVA) declaration. Declares the BIGINT (signed 64-bit) MVA attribute. Multi-value (ie. there may be more than one such attribute declared), optional. Applies to RT indexes only.

Example:
rt_attr_multi_64 = my_wide_tags
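As a quick illustration of how these rt_* declarations combine, here is a minimal sketch of a real-time index definition; the index name, path, and attribute names are illustrative, and type = rt plus a path are assumed to be required as for any RT index:

index rt_example
{
    type          = rt
    path          = /var/data/rt_example
    rt_mem_limit  = 512M      # RAM chunk cap before a disk chunk is written

    rt_field      = title
    rt_field      = content

    rt_attr_uint  = gid       # unsigned 32-bit attribute
    rt_attr_multi = my_tags   # 32-bit MVA attribute
}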
rt_attr_timestamp

Timestamp attribute declaration. Multi-value (an arbitrary number of attributes is allowed), optional. Introduced in version 1.10-beta.

Example:
rt_attr_timestamp = date_added

rt_attr_string

String attribute declaration. Multi-value (an arbitrary number of attributes is allowed), optional. Introduced in version 1.10-beta.

Example:
rt_attr_string = author

indexer program configuration options

mem_limit

Indexing RAM usage limit. Optional, default is 32M.

Enforced memory usage limit that the indexer will not go above. Can be specified in bytes, or kilobytes (using K postfix), or megabytes (using M postfix); see the example. This limit will be automatically raised if set to an extremely low value causing I/O buffers to be less than 8 KB; the exact lower bound for that depends on the indexed data size. If the buffers are less than 256 KB, a warning will be produced. Maximum possible limit is 2047M. Too low values can hurt indexing speed, but 256M to 1024M should be enough for most if not all datasets. Setting this value too high can cause SQL server timeouts. During the document collection phase, there will be periods when the memory buffer is partially sorted and no communication with the database is performed; and the database server can time out. You can resolve that either by raising timeouts on the SQL server side or by lowering mem_limit.

Example:
mem_limit = 256M
# mem_limit = 262144K # same, but in KB
# mem_limit = 268435456 # same, but in bytes

max_iops

Maximum I/O operations per second, for I/O throttling. Optional, default is 0 (unlimited).

I/O throttling related option. It limits the maximum count of I/O operations (reads or writes) per any given second. A value of 0 means that no limit is imposed. indexer can cause bursts of intensive disk I/O during indexing, and it might be desired to limit its disk activity (and keep something for other programs running on the same machine, such as searchd). I/O throttling helps to do that. It works by enforcing a minimum guaranteed delay between subsequent disk I/O operations performed by indexer. Modern SATA HDDs are able to perform up to 70-100+ I/O operations per second (that's mostly limited by disk heads seek time). Limiting indexing I/O to a fraction of that can help reduce search performance degradation caused by indexing.

Example:
max_iops = 40

max_iosize

Maximum allowed I/O operation size, in bytes, for I/O throttling. Optional, default is 0 (unlimited).

I/O throttling related option. It limits the maximum file I/O operation (read or write) size for all operations performed by indexer. A value of 0 means that no limit is imposed. Reads or writes that are bigger than the limit will be split into several smaller operations, and counted as several operations by the max_iops setting. At the time of this writing, all I/O calls should be under 256 KB (default internal buffer size) anyway, so max_iosize values higher than 256 KB should not affect anything.

Example:
max_iosize = 1048576

max_xmlpipe2_field

Maximum allowed field size for XMLpipe2 source type, bytes. Optional, default is 2 MB.

Example:
max_xmlpipe2_field = 8M
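For instance, a minimal sketch of an indexer section that combines the memory limit with I/O throttling might look as follows; the specific values simply reuse the individual examples above and are not tuned recommendations:

indexer
{
    mem_limit  = 256M      # indexing RAM usage cap
    max_iops   = 40        # at most 40 disk reads/writes per second
    max_iosize = 1048576   # split larger I/O calls into 1 MB chunks
}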
write_buffer

Write buffer size, bytes. Optional, default is 1 MB.

Write buffers are used to write both temporary and final index files when indexing. Larger buffers reduce the number of required disk writes. Memory for the buffers is allocated in addition to mem_limit. Note that several (currently up to 4) buffers for different files will be allocated, proportionally increasing the RAM usage.

Example:
write_buffer = 4M

max_file_field_buffer

Maximum file field adaptive buffer size, bytes. Optional, default is 8 MB, minimum is 1 MB.

File field buffer is used to load files referred to from sql_file_field columns. This buffer is adaptive, starting at 1 MB at first allocation, and growing in 2x steps until either the file contents can be loaded, or the maximum buffer size, specified by the max_file_field_buffer directive, is reached. Thus, if no file fields are specified, no buffer is allocated at all. If all files loaded during indexing are under (for example) 2 MB in size, but the max_file_field_buffer value is 128 MB, peak buffer usage would still be only 2 MB. However, files over 128 MB would be entirely skipped.

Example:
max_file_field_buffer = 128M

on_file_field_error

How to handle IO errors in file fields. Optional, default is ignore_field. Introduced in version 2.0.2-beta.

When there is a problem indexing a file referenced by a file field (sql_file_field), indexer can either index the document, assuming empty content in this particular field, or skip the document, or fail indexing entirely. The on_file_field_error directive controls that behavior. The values it takes are: ignore_field, index the current document without the field; skip_document, skip the current document but continue indexing; fail_index, fail indexing with an error message.

The problems that can arise are: open error, size error (file too big), and data read error. Warning messages on any problem will be given at all times, regardless of the phase and the on_file_field_error setting. Note that with skip_document, documents will only be ignored if problems are detected during an early check phase, and not during the actual file parsing phase. indexer will open every referenced file and check its size before doing any work, and then open it again when doing actual parsing work. So in case a file goes away between these two open attempts, the document will still be indexed.

Example:
on_file_field_error = skip_document

searchd program configuration options

listen

This setting lets you specify the IP address and port, or Unix-domain socket path, that searchd will listen on. Introduced in version 0.9.9-rc1.

The informal grammar for the listen setting is:

listen = ( address ":" port | port | path ) [ ":" protocol ]

I.e. you can specify either an IP address (or hostname) and port number, or just a port number, or a Unix socket path. If you specify a port number but not the address, searchd will listen on all network interfaces. Unix path is identified by a leading slash. Starting with version 0.9.9-rc2, you can also specify a protocol handler (listener) to be used for connections on this socket. Supported protocol values are 'sphinx' (Sphinx 0.9.x API protocol) and 'mysql41' (MySQL protocol used since 4.1 up to at least 5.1). More details on MySQL protocol support can be found in the respective documentation section.

Examples:
listen = localhost
listen = localhost:5000
listen = 192.168.0.1:5000
listen = /var/run/sphinx.s
listen = 9312
listen = localhost:9306:mysql41

There can be multiple listen directives, searchd will listen for client connections on all specified ports and sockets. If no listen directives are found then the server will listen on all available interfaces using the default SphinxAPI port 9312. Starting with 1.10-beta, it will also listen on the default SphinxQL port 9306. Both port numbers are assigned by IANA (see http://www.iana.org/assignments/port-numbers for details) and should therefore be available. Unix-domain sockets are not supported on Windows.
address

Interface IP address to bind on. Optional, default is 0.0.0.0 (ie. listen on all interfaces). DEPRECATED, use listen instead.

The address setting lets you specify which network interface searchd will bind to, listen on, and accept incoming network connections on. The default value is 0.0.0.0 which means to listen on all interfaces. At this time, you can not specify multiple interfaces.

Example:
address = 192.168.0.1

port

searchd TCP port number. DEPRECATED, use listen instead. Used to be mandatory. Default port number is 9312.

Example:
port = 9312

log

Log file name. Optional, default is 'searchd.log'.

All searchd run time events will be logged in this file. You can also use 'syslog' as the file name. In this case the events will be sent to the syslog daemon. To use the syslog option, Sphinx must be configured with '--with-syslog' at build time.

Example:
log = /var/log/searchd.log

query_log

Query log file name. Optional, default is empty (do not log queries).

All search queries will be logged in this file. The format is described in the query log format documentation. In case of 'plain' format, you can use 'syslog' as the path to the log file. In this case all search queries will be sent to the syslog daemon with LOG_INFO priority, prefixed with '[query]' instead of timestamp. To use the syslog option, Sphinx must be configured with '--with-syslog' at build time.

Example:
query_log = /var/log/query.log

query_log_format

Query log format. Optional, allowed values are 'plain' and 'sphinxql', default is 'plain'. Introduced in version 2.0.1-beta.

Starting with version 2.0.1-beta, two different log formats are supported. The default one logs queries in a custom text format. The new one logs valid SphinxQL statements. This directive allows switching between the two formats on search daemon startup. The log format can also be altered on the fly, using the SET GLOBAL query_log_format=sphinxql syntax. Refer to the query log format documentation for more discussion and format details.

Example:
query_log_format = sphinxql

read_timeout

Network client request read timeout, in seconds. Optional, default is 5 seconds.

searchd will forcibly close the client connections which fail to send a query within this timeout.

Example:
read_timeout = 1

client_timeout

Maximum time to wait between requests (in seconds) when using persistent connections. Optional, default is five minutes.

Example:
client_timeout = 3600

max_children

Maximum amount of children to fork (or in other words, concurrent searches to run in parallel). Optional, default is 0 (unlimited).

Useful to control server load. There will be no more than this many concurrent searches running, at all times. When the limit is reached, additional incoming clients are dismissed with a temporary failure (SEARCHD_RETRY) status code and a message stating that the server is maxed out.

Example:
max_children = 10

pid_file

searchd process ID file name. Mandatory.

The PID file will be re-created (and locked) on startup. It will contain the head daemon process ID while the daemon is running, and it will be unlinked on daemon shutdown. It's mandatory because Sphinx uses it internally for a number of things: to check whether there already is a running instance of searchd; to stop searchd; to notify it that it should rotate the indexes. Can also be used for different external automation scripts.

Example:
pid_file = /var/run/searchd.pid
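Putting the basic daemon settings above together, a minimal searchd section might look like the following sketch; the paths and port choices are illustrative only:

searchd
{
    listen           = 9312                   # SphinxAPI clients
    listen           = localhost:9306:mysql41 # SphinxQL over MySQL protocol
    log              = /var/log/sphinx/searchd.log
    query_log        = /var/log/sphinx/query.log
    query_log_format = sphinxql
    pid_file         = /var/run/sphinx/searchd.pid
}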
max_matches

Maximum amount of matches that the daemon keeps in RAM for each index and can return to the client. Optional, default is 1000.

Introduced in order to control and limit RAM usage, the max_matches setting defines how many matches will be kept in RAM while searching each index. Every match found will still be processed; but only the best N of them will be kept in memory and returned to the client in the end. Assume that the index contains 2,000,000 matches for the query. You rarely (if ever) need to retrieve all of them. Rather, you need to scan all of them, but only choose the "best", at most, say, 500 by some criteria (ie. sorted by relevance, or price, or anything else), and display those 500 matches to the end user in pages of 20 to 100 matches. And tracking only the best 500 matches is much more RAM and CPU efficient than keeping all 2,000,000 matches, sorting them, and then discarding everything but the first 20 needed to display the search results page. max_matches controls N in that "best N" amount.

This parameter noticeably affects per-query RAM and CPU usage. Values of 1,000 to 10,000 are generally fine, but higher limits must be used with care. Recklessly raising max_matches to 1,000,000 means that searchd will have to allocate and initialize a 1-million-entry match buffer for every query. That will obviously increase per-query RAM usage, and in some cases can also noticeably impact performance. CAVEAT EMPTOR! Note that there also is another place where this limit is enforced. max_matches can be decreased on the fly through the corresponding API call, and the default value in the API is also set to 1,000. So in order to retrieve more than 1,000 matches in your application, you will have to change the configuration file, restart searchd, and set a proper limit in the SetLimits() call. Also note that you can not set the value in the API higher than the value in the .conf file. This is prohibited in order to have some protection against malicious and/or malformed requests.

Example:
max_matches = 10000

seamless_rotate

Prevents searchd stalls while rotating indexes with huge amounts of data to precache. Optional, default is 1 (enable seamless rotation).

Indexes may contain some data that needs to be precached in RAM. At the moment, .spa, .spi and .spm files are fully precached (they contain attribute data, MVA data, and keyword index, respectively.) Without seamless rotate, rotating an index tries to use as little RAM as possible and works as follows:

1. new queries are temporarily rejected (with "retry" error code);
2. searchd waits for all currently running queries to finish;
3. old index is deallocated and its files are renamed;
4. new index files are renamed and required RAM is allocated;
5. new index attribute and dictionary data is preloaded to RAM;
6. searchd resumes serving queries from the new index.

However, if there's a lot of attribute or dictionary data, then the preloading step could take noticeable time - up to several minutes in case of preloading 1-5+ GB files. With seamless rotate enabled, rotation works as follows:

1. new index RAM storage is allocated;
2. new index attribute and dictionary data is asynchronously preloaded to RAM;
3. on success, old index is deallocated and both indexes' files are renamed;
4. on failure, new index is deallocated;
5. at any given moment, queries are served either from the old or the new index copy.

Seamless rotate comes at the cost of higher peak memory usage during the rotation (because both old and new copies of .spa/.spi/.spm data need to be in RAM while preloading the new copy). Average usage stays the same.

Example:
seamless_rotate = 1
preopen_indexes

Whether to forcibly preopen all indexes on startup. Optional, default is 1 (preopen everything).

Starting with 2.0.1-beta, the default value for this option is now 1 (forcibly preopen all indexes). In prior versions, it used to be 0 (use per-index settings). When set to 1, this directive overrides and enforces preopen on all indexes. They will be preopened, no matter what the per-index preopen setting is. When set to 0, per-index settings can take effect. (And they default to 0.) Pre-opened indexes avoid races between search queries and rotations that can cause queries to fail occasionally. They also make searchd use more file handles. In most scenarios it's therefore preferred and recommended to preopen indexes.

Example:
preopen_indexes = 1

unlink_old

Whether to unlink .old index copies on successful rotation. Optional, default is 1 (do unlink).

Example:
unlink_old = 0

attr_flush_period

When calling UpdateAttributes() to update document attributes in real-time, changes are first written to the in-memory copy of attributes (docinfo must be set to extern). Then, once searchd shuts down normally (via SIGTERM being sent), the changes are written to disk. Introduced in version 0.9.9-rc1.

Starting with 0.9.9-rc1, it is possible to tell searchd to periodically write these changes back to disk, to avoid them being lost. The time between those flushes is set with attr_flush_period, in seconds. It defaults to 0, which disables the periodic flushing, but flushing will still occur at normal shut-down.

Example:
attr_flush_period = 900 # persist updates to disk every 15 minutes

ondisk_dict_default

Instance-wide default for the ondisk_dict directive. Optional, default is 0 (precache dictionaries in RAM). Introduced in version 0.9.9-rc1.

This directive lets you specify the default value of ondisk_dict for all the indexes served by this copy of searchd. The per-index directive takes precedence, and will overwrite this instance-wide default value, allowing for fine-grain control.

Example:
ondisk_dict_default = 1 # keep all dictionaries on disk

max_packet_size

Maximum allowed network packet size. Limits both query packets from clients, and response packets from remote agents in a distributed environment. Only used for internal sanity checks, does not directly affect RAM use or performance. Optional, default is 8M. Introduced in version 0.9.9-rc1.

Example:
max_packet_size = 32M

mva_updates_pool

Shared pool size for in-memory MVA updates storage. Optional, default size is 1M. Introduced in version 0.9.9-rc1.

This setting controls the size of the shared storage pool for updated MVA values. Specifying 0 for the size disables MVA updates altogether. Once the pool size limit is hit, MVA update attempts will result in an error. However, updates on regular (scalar) attributes will still work. Due to internal technical difficulties, currently it is not possible to store (flush) any updates on indexes where MVA were updated; though this might be implemented in the future. In the meantime, MVA updates are intended to be used as a measure to quickly catch up with the latest changes in the database until the next index rebuild; not as a persistent storage mechanism.

Example:
mva_updates_pool = 16M

crash_log_path

Deprecated debugging setting, path (formerly prefix) for crash log files. Introduced in version 0.9.9-rc1. Deprecated in version 2.0.1-beta, as crash debugging information now gets logged into searchd.log in text form, and separate binary crash logs are no longer needed.
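For example, a daemon that serves frequent UpdateAttributes() calls (including MVA updates) might combine the attr_flush_period and mva_updates_pool settings described above as in this sketch; the interval and pool size are illustrative only:

searchd
{
    attr_flush_period = 900  # write in-memory attribute updates to disk every 15 minutes
    mva_updates_pool  = 16M  # shared pool for updated MVA values
}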
max_filters

Maximum allowed per-query filter count. Only used for internal sanity checks, does not directly affect RAM use or performance. Optional, default is 256. Introduced in version 0.9.9-rc1.

Example:
max_filters = 1024

max_filter_values

Maximum allowed per-filter values count. Only used for internal sanity checks, does not directly affect RAM use or performance. Optional, default is 4096. Introduced in version 0.9.9-rc1.

Example:
max_filter_values = 16384

listen_backlog

TCP listen backlog. Optional, default is 5.

Windows builds currently (as of 0.9.9) can only process the requests one by one. Concurrent requests will be enqueued by the TCP stack on OS level, and requests that can not be enqueued will immediately fail with a "connection refused" message. The listen_backlog directive controls the length of the connection queue. Non-Windows builds should work fine with the default value.

Example:
listen_backlog = 20

read_buffer

Per-keyword read buffer size. Optional, default is 256K.

For every keyword occurrence in every search query, there are two associated read buffers (one for the document list and one for the hit list). This setting lets you control their sizes, increasing per-query RAM use, but possibly decreasing IO time.

Example:
read_buffer = 1M

read_unhinted

Unhinted read size. Optional, default is 32K.

When querying, some reads know in advance exactly how much data is there to be read, but some currently do not. Most prominently, hit list size is not currently known in advance. This setting lets you control how much data to read in such cases. It will impact hit list IO time, reducing it for lists larger than the unhinted read size, but raising it for smaller lists. It will not affect RAM use because the read buffer will be already allocated. So it should not be greater than read_buffer.

Example:
read_unhinted = 32K

max_batch_queries

Limits the amount of queries per batch. Optional, default is 32.

Makes searchd perform a sanity check of the amount of the queries submitted in a single batch when using multi-queries. Set it to 0 to skip the check.

Example:
max_batch_queries = 256

subtree_docs_cache

Max common subtree document cache size, per-query. Optional, default is 0 (disabled).

Limits RAM usage of the common subtree optimizer. At most this much RAM will be spent to cache document entries per each query. Setting the limit to 0 disables the optimizer.

Example:
subtree_docs_cache = 8M

subtree_hits_cache

Max common subtree hit cache size, per-query. Optional, default is 0 (disabled).

Limits RAM usage of the common subtree optimizer. At most this much RAM will be spent to cache keyword occurrences (hits) per each query. Setting the limit to 0 disables the optimizer.

Example:
subtree_hits_cache = 16M

workers

Multi-processing mode (MPM). Optional; allowed values are none, fork, prefork, and threads. Default is fork on Unix based systems, and threads on Windows. Introduced in version 1.10-beta.

Lets you choose how searchd processes multiple concurrent requests. The possible values are:

none
All requests will be handled serially, one-by-one. Prior to 1.x, this was the only mode available on Windows.

fork
A new child process will be forked to handle every incoming request. Historically, this is the default mode.

prefork
On startup, searchd will pre-fork a number of worker processes, and pass the incoming requests to one of those children.

threads
A new thread will be created to handle every incoming request. This is the only mode compatible with the RT indexing backend.
Historically, searchd used the fork-based model, which generally performs OK but spends a noticeable amount of CPU in the fork() system call when there's a high number of (tiny) requests per second. Prefork mode was implemented to alleviate that; with prefork, worker processes are basically only created on startup and re-created on index rotation, somewhat reducing fork() call pressure. Threads mode was implemented along with the RT backend and is required to use RT indexes. (Regular disk-based indexes work in all the available modes.)

Example:
workers = threads

dist_threads

Max local worker threads to use for parallelizable requests (searching a distributed index; building a batch of snippets). Optional, default is 0, which means to disable in-request parallelism. Introduced in version 1.10-beta.

A distributed index can include several local indexes. dist_threads lets you easily utilize multiple CPUs/cores for that (the previously existing alternative was to specify the indexes as remote agents, pointing searchd to itself and paying some network overheads). When set to a value N greater than 1, this directive will create up to N threads for every query, and schedule the specific searches within these threads. For example, if there are 7 local indexes to search and dist_threads is set to 2, then 2 parallel threads would be created: one that sequentially searches 4 indexes, and another one that searches the other 3 indexes. In case of a CPU bound workload, setting dist_threads to 1x the number of cores is advised (creating more threads than cores will not improve query time). In case of a mixed CPU/disk bound workload it might sometimes make sense to use more (so that all cores could be utilized even when there are threads that wait for I/O completion).

Note that dist_threads does not require the threads MPM. You can perfectly use it with fork or prefork MPMs too. Starting with version 2.0.1-beta, building a batch of snippets with the load_files flag enabled can also be parallelized. Up to dist_threads threads are created to process those files. That speeds up snippet extraction when the total amount of document data to process is significant (hundreds of megabytes).

Example:
index dist_test
{
    type = distributed
    local = chunk1
    local = chunk2
    local = chunk3
    local = chunk4
}
# ...
dist_threads = 4

binlog_path

Binary log (aka transaction log) files path. Optional, default is the build-time configured data directory. Introduced in version 1.10-beta.

Binary logs are used for crash recovery of RT index data, and also of attribute updates of plain disk indices that would otherwise only be stored in RAM until flush. When logging is enabled, every transaction COMMIT-ted into an RT index gets written into a log file. Logs are then automatically replayed on startup after an unclean shutdown, recovering the logged changes.

The binlog_path directive specifies the binary log files location. It should contain just the path; searchd will create and unlink multiple binlog.* files in that path as necessary (binlog data, metadata, and lock files, etc). Empty value disables binary logging. That improves performance, but puts RT index data at risk.

WARNING! It is strongly recommended to always explicitly define the 'binlog_path' option in your config. Otherwise, the default path, which in most cases is the same as the working folder, may point to a folder with no write access (for example, /usr/local/var/data). In this case, searchd will not start at all.

Example:
binlog_path = # disable logging
binlog_path = /var/data # /var/data/binlog.001 etc will be created
binlog_flush

Binary log transaction flush/sync mode. Optional, default is 2 (flush every transaction, sync every second). Introduced in version 1.10-beta.

This directive controls how frequently the binary log will be flushed to the OS and synced to disk. Three modes are supported:

0, flush and sync every second. Best performance, but up to 1 second worth of committed transactions can be lost on either a daemon crash or an OS/hardware crash.
1, flush and sync every transaction. Worst performance, but every committed transaction data is guaranteed to be saved.
2, flush every transaction, sync every second. Good performance, and every committed transaction is guaranteed to be saved in case of a daemon crash. However, in case of an OS/hardware crash up to 1 second worth of committed transactions can be lost.

For those familiar with MySQL and InnoDB, this directive is entirely similar to innodb_flush_log_at_trx_commit. In most cases, the default hybrid mode 2 provides a nice balance of speed and safety, with full RT index data protection against daemon crashes, and some protection against hardware ones.

Example:
binlog_flush = 1 # ultimate safety, low speed

binlog_max_log_size

Maximum binary log file size. Optional, default is 0 (do not reopen binlog file based on size). Introduced in version 1.10-beta.

A new binlog file will be forcibly opened once the current binlog file reaches this limit. This achieves a finer granularity of logs and can yield more efficient binlog disk usage under certain borderline workloads.

Example:
binlog_max_log_size = 16M

collation_server

Default server collation. Optional, default is libc_ci. Introduced in version 2.0.1-beta.

Specifies the default collation used for incoming requests. The collation can be overridden on a per-query basis. Refer to the collations documentation section for the list of available collations and other details.

Example:
collation_server = utf8_ci

collation_libc_locale

Server libc locale. Optional, default is C. Introduced in version 2.0.1-beta.

Specifies the libc locale, affecting the libc-based collations. Refer to the collations documentation section for the details.

Example:
collation_libc_locale = fr_FR

plugin_dir

Trusted location for the dynamic libraries (UDFs). Optional, default is empty (no location). Introduced in version 2.0.1-beta.

Specifies the trusted directory from which the UDF libraries can be loaded. Requires workers = threads to take effect.

Example:
workers = threads
plugin_dir = /usr/local/sphinx/lib

mysql_version_string

A server version string to return via MySQL protocol. Optional, default is empty (return Sphinx version). Introduced in version 2.0.1-beta.

Several picky MySQL client libraries depend on a particular version number format used by MySQL, and moreover, sometimes choose a different execution path based on the reported version number (rather than the indicated capabilities flags). For instance, Python MySQLdb 1.2.2 throws an exception when the version number is not in X.Y.ZZ format; MySQL .NET connector 6.3.x fails internally on version numbers 1.x along with a certain combination of flags, etc. To work around that, you can use the mysql_version_string directive and have searchd report a different version to clients connecting over MySQL protocol. (By default, it reports its own version.)

Example:
mysql_version_string = 5.0.37
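As a combined illustration of the MySQL protocol related settings above, the following sketch exposes searchd over SphinxQL to picky MySQL client libraries; the port and the reported version string are illustrative:

searchd
{
    listen               = localhost:9306:mysql41   # SphinxQL over MySQL protocol
    mysql_version_string = 5.0.37                   # version reported to clients
    collation_server     = utf8_ci                  # default collation for requests
}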
rt_flush_period

RT indexes RAM chunk flush check period, in seconds. Optional, default is 0 (do not flush). Introduced in version 2.0.1-beta.

Actively updated RT indexes that nevertheless fully fit into their RAM chunks can result in ever-growing binlogs, impacting disk use and crash recovery time. With this directive the search daemon performs periodic flush checks, and eligible RAM chunks can get saved, enabling subsequent binlog cleanup. See the binary logging section for more details.

Example:
rt_flush_period = 3600

thread_stack

Per-thread stack size. Optional, default is 64K. Introduced in version 2.0.1-beta.

In the workers = threads mode, every request is processed with a separate thread that needs its own stack space. By default, 64K per thread are allocated for stack. However, extremely complex search requests might eventually exhaust the default stack and require more. For instance, a query that matches a few thousand keywords (either directly or through term expansion) can eventually run out of stack. Previously, that resulted in crashes. Starting with 2.0.1-beta, searchd attempts to estimate the expected stack use, and blocks the potentially dangerous queries. To process such queries, you can either increase the thread stack size using the thread_stack directive, or switch to a different workers setting if that is possible. A query with N levels of nesting is estimated to require approximately 30+0.12*N KB of stack, meaning that the default 64K is enough for queries with up to 300 levels, 150K for up to 1000 levels, etc. If the estimated stack requirement exceeds the configured limit, searchd fails the query and reports the required stack size in the error message.

Example:
thread_stack = 256K

expansion_limit

The maximum number of expanded keywords for a single wildcard. Optional, default is 0 (no limit). Introduced in version 2.0.1-beta.

When doing substring searches against indexes built with dict = keywords enabled, a single wildcard may potentially result in thousands and even millions of matched keywords (think of matching 'a*' against the entire Oxford dictionary). This directive lets you limit the impact of such expansions. Setting expansion_limit = N restricts expansions to no more than N of the most frequent matching keywords (per each wildcard in the query).

Example:
expansion_limit = 16

compat_sphinxql_magics

Legacy SphinxQL quirks compatibility mode. Optional, default is 1 (keep compatibility). Introduced in version 2.0.1-beta.

Starting with version 2.0.1-beta, we're bringing SphinxQL in closer compliance with standard SQL. However, existing applications must not get broken, and compat_sphinxql_magics lets you upgrade safely. It defaults to 1, which enables the compatibility mode. However, SphinxQL compatibility mode is now deprecated and will be removed once we complete bringing SphinxQL in line with standard SQL syntax. So it's advised to update the applications utilizing SphinxQL and then switch the daemon to the new, more SQL compliant mode by setting compat_sphinxql_magics = 0. Please refer to the relevant documentation section for the details and update instructions.

Example:
compat_sphinxql_magics = 0 # the future is now

watchdog

Threaded server watchdog. Optional, default is 1 (watchdog enabled). Introduced in version 2.0.1-beta.

A crashed query in threads multi-processing mode (workers = threads) can take down the entire server. With the watchdog feature enabled, searchd additionally keeps a separate lightweight process that monitors the main server process, and automatically restarts the latter in case of abnormal termination. The watchdog is enabled by default.

Example:
watchdog = 0 # disable watchdog
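Tying the threading related settings together, a sketch of a threaded searchd setup could look as follows; the specific thread and stack values are illustrative and should be sized to your own workload:

searchd
{
    workers      = threads  # required for RT indexes and for plugin_dir (UDFs)
    dist_threads = 4        # parallelize distributed searches and snippet batches
    thread_stack = 128K     # roomier per-thread stack for complex queries
    watchdog     = 1        # auto-restart the daemon on abnormal termination
}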
prefork_rotation_throttle

Delay between restarting preforked children on index rotation, in milliseconds. Optional, default is 0 (no delay). Introduced in version 2.0.2-beta.

When running in workers = prefork mode, every index rotation needs to restart all children to propagate the newly loaded index data changes. Restarting all of them at once might put excessive strain on CPU and/or network connections. (For instance, when the application keeps a bunch of open persistent connections to different children, and all those children restart.) Those bursts can be throttled down with the prefork_rotation_throttle directive. Note that the children will be restarted sequentially, and thus "old" results might persist for a few more seconds. For instance, if prefork_rotation_throttle is set to 50 (milliseconds), and there are 30 children, then the last one would only be actually restarted 1.5 seconds (50*30=1500 milliseconds) after the "rotation finished" message in the searchd event log.

Example:
prefork_rotation_throttle = 50 # throttle children restarts by 50 msec each

Sphinx revision history

Version 2.0.4-release, 02 mar 2012

Bug fixes

fixed #605, pack vs mysql compress fixed #783, #862, #917, #985, #990, #1032 documentation bugs fixed #885, bitwise AND/OR were not available via API fixed #984, crash on indexing data with MAGIC_CODE_ZONE symbol fixed #1004, RT index loses words from dictionary on segments merging with id64 enabled fixed #1035, daemon doesn't properly handle FDs in case of socket overflow FD_SETSIZE ( *nix, preopen_indexes=0, worker=threads ) fixed #1038, quoted string for API select fixed #1046, head SPZ overflow, snippet generation at non fast with SPZ fixed #1048, distributed index can't sort \ filter because of missed attributes fixed #1050, expression ranker vs agents fixed #1051, added MVA64 support to UDFs fixed #1054, max_query_time not handled properly on searching at RT index fixed #1055, expansion_limit on searching at RT disk chunks fixed #1057, daemon crashes on generating snippet with 0 documents provided fixed #1060, load_files_scattered don't work fixed #1065, libsphinxclient vs distribute index (agents) fixed #1067, modifiers were not escaped in legacy query emulation fixed #1071, master - agent communication got slower for a large query fixed #1076, #1077, (redundant copying, and a possible mutex leak with uservars) fixed #1078, blended vs FIELD_END fixed #1084 crash \ index corruption on loading persist MVA fixed #1091, RT attach of plain index with string \ MVA attributes prior regular attributes fixed #1092, update got binloged with wrong TID fixed #1098, crash on creating large expression fixed #1099, cleaning up temporary files on fail of indexing fixed #1100, missing xmlpipe_attr_bigint config directive fixed #1101, now ignoring dashes within keywords when dash is not in charset_table fixed #1103, ZONE operator incorrectly works on more than one keywords in a simple zone fixed #1106, optimized WHERE id=value, WHERE id IN (values_list) clauses used in SELECT, UPDATE statements fixed #1112, Sphinx doesn't work out-of-the-box because the collision of binlog_path option fixed #1116, crash on FLUSH RTINDEX unknown-index-name fixed #1117, occasional RT headers corruption (leading to crashes and/or missing results) fixed #1119, missing expression ranker support in SphinxSE fixed #1120, negative total_found, docs and hits counter on huge indexes

Version 2.0.3-release, 23 dec 2011

Bug fixes

fixed #1031, SphinxQL parsing syntax for MVA at insert \ replace statements fixed #1027, stalls on attribute update in high-concurrency load fixed #1026, daemon crash on malformed API command fixed #1021, max_children option has been ignored with worker=threads fixed #1020, crash on large attribute files
loading fixed #1014, crash on rotation when index has been removed from config file (worker=threads, *nix box) fixed #1001, broken MVA files in RT index while saving disk chunk fixed #995, crash on empty MVA updates fixed #994, crash on daemon shutdown with seamless_rotate=0 and workers=threads fixed #993, #998, crash on replay DELETE statement vs RT index with dict=keywords, fixed sequential INSERT into dict=keywords index right after INSERT into dict=crc index fixed #991, crash on indexing mssql source with mssql_unicode enabled fixed #983, #950, crash on host name lookup (SphinxSE with MySQL 5.5) fixed #981, snippet inconsistency with allow_empty=0 fixed #980, broken index produced by index merge in rare cases fixed #971, absent error message at master on agent "maxed out" fixed #695, #815, #835, #866, malformed warnings in SphinxQL fixed build of SphinxSE with MySQL 5.1 fixed crash log for 'fork' and 'prefork' workers Version 2.0.2-beta, 15 nov 2011 Major new features added keywords dictionary (dict=keywords) support to RT indexes added MVA, index_exact_words support to RT indexes (#888) added MVA64 (a set of BIGINTs) support to both disk and RT indexes (rt_attr_multi_64 directive) added an expression-based ranker, and a number of new ranking factors added ATTACH INDEX statement that converts a disk index to RT index added WHERE clause support to UPDATE statement added bigint, float, and MVA attribute support to UPDATE statement New features added support for upto 256 searchable fields (was upto 32 before) added FIBONACCI() function to expressions added load_files_scattered option to snippets added implicit attribute type promotions in multi-index result sets (#939) added index names to indexer progress message on merge (#928) added switch to searchd added string attribute support and a few previously missing snippets options to SphinxSE added previously missing Status(), SetConnectTimeout() API calls to Python API added ORDER BY RAND() support to SELECT statement added Sphinx version to Windows crash log added RT index support to indextool --check (checks disk chunks only) (#877) added prefork_rotation_throttle directive (preforked children restart delay, in milliseconds) (#873) added on_file_field_error directive (different sql_file_field handling modes) added manpages for all the programs added syslog logging support added sentence, paragraph, and zone support in html_strip_mode=retain mode to snippets optimized search performance with many ZONE operators improved suggestion tool (added Levenshtein limit, removed extra DB fetch) improved sentence extraction (handles salutations, starting initials better now) changed max_filter_values sanity check to 10M values New SphinxQL features added FLUSH RTINDEX statement added dist_threads directive (parallel processing), load_files, load_files_scattered, batch syntax (multiple documents) support to CALL SNIPPETS statement added OPTION comment='...' 
support to SELECT statement (#944) added SHOW VARIABLES statement added dummy handlers for SET TRANSACTION, SET NAMES, SELECT @@sysvar statements, and for sql_auto_is_null, sql_mode, and @@-style variables (like @@tx_isolation) in SET statement (better MySQL frameworks/connectors support) added complete SphinxQL error logging (all errors are logged now, not just SELECTs) improved SELECT statement syntax, made expressions aliases optional Bug fixes fixed #982, empty binlogs prevented upgraded daemon from starting up fixed #978, libsphinxclient build failed on sparc/sparc64 solaris fixed #977, eliminated (most) compiler warnings fixed #969, broken expression MVA/string argument type check prevented IF(IN(mva..)) and other valid expressions from working fixed #966, NOT IN @global_var syntax was not supported fixed #958, mem_limit over INT_MAX was not clamped fixed #954, UTF-8 snippets could crash on malformed data fixed #951, UTF-8 snippets could hang on malformed data fixed #947, bad float column type was reported via SphinxQL, breaking some clients fixed #940, group-by with a small enough max_matches limit could occasionaly crash and/or sort wrongly fixed #932, sending huge queries to agents occasionally failed (mainly on Windows) fixed #926, snippets did not highlight widlcard matches with morphology enabled fixed #918, crash logger did not report a proper query in dist_threads case fixed #916, watchdog caused (endless) respawns if there was a crash during shutdown fixed #904, attribute names were not forcibly case-folded in some API calls (eg. SetGroupDistinct) fixed #902, query parser did not support stopword_step=0 fixed #897, network sockets dangled (open but unattended) while replaying binlog fixed #855, allow_empty option in snippets did not always work correctly fixed #854, indexing with many bigint attributes and docinfo=inline crashed fixed #838, RT MVA insertion did not sort MVA values, caused matching issues fixed #833, duplicate MVA values were not eliminated on update fixed #832, certain (overshort/incorrect) documents crashed indexing MS SQL Unicode columns fixed #829, query parser did not properly handle numerics with blend_chars fixed #814, group-by string attributes in RT indexes dit not always work correctly fixed #812, utf8 stemming produced unexpected stems on words with single-byte chars fixed #808, huge queries crashed logging with query_log_format=sphinxql fixed #806, stray single-star keyword crashed on querying fixed #798, snippets ignored index_exact_words in query_mode fixed #797, RT klist loader had an occasional off-by-one crash fixed #791, preopen_indexes erroneously defaulted to 0 on Windows fixed #790, huge dictionaries (over 4 GB) did not work fixed #786, inplace_enable could occasionally corrupt the indexes fixed #775, doc had a typo (soundex vs metaphone) fixed #772, snippets duplicated blended chars on a SPZ boundary fixed #762, query parser truncated digit-only keywords over 15 digits fixed #736, query parser dit not properly handle blended/special char sequence fixed #726, rotation of an index with a changed attribute count crashed fixed #687, querying multiple indexes with index weights and sort-by expression produced incorrect (unadjusted) weights fixed #585, (unsupported) string ordinals were silently zeroed out with docinfo=inline (instead of failing) fixed #583, certain keywords could occasionally crash multiforms fixed that concurrent MVA updates could crash fixed that query parser did not ignore a pure blended token with a leading modifier fixed 
that query parser did not properly handle a modifier followed by a dash fixed that substring indexing with dict=crc did not support index_exact_words and zones fixed that in a rare edge case common subtree cache could crash fixed that empty result set returned the full schema (rather than SELECT-ed columns) fixed that SphinxQL did not have a sanity check for (currently unsupported) result set schemas over 250 attributes fixed that updates on regular indexes were not binlogged fixed that multi-query optimization check for expressions did not handle multi-index case fixed that SphinxSE did not build vs MySQL 5.5 release fixed that proximity_bm25 ranker could yield incorrect weight on duplicated keywords fixed that prefix expansion with dict=keyword occasionally crashed fixed that strip_path did not work on RT disk chunks fixed that exclude filters were not properly logged in query_log_format=sphinxql mode fixed that plain string attribute check in indextool --check was broken fixed that Java API did not let specify a connection timeout fixed that ordinal and wordcount attributes could not be fetched via SphinxQL fixed that in a rare edge case OR/ORDER would not match properly fixed that sending (huge) query response did not handle EINTR properly fixed that SPH04 ranker could yield incorrectly high weight in some cases fixed that C API did not let zero out cutoff, max_matches settings fixed that on a persistent connection there were occasionally issues handling signals while doing network reads/waitss fixed that in a rare edge case (field start modifier in a certain complex query) querying crashed fixed that snippets did not support dist_threads with load_files=0 fixed that in some extremely rare edge cases tiny parts of an index could end up corrupted with dict=keywords fixed that field/zone conditions were not propagated to expanded keywords with dict=keywords Version 2.0.1-beta, 22 apr 2011 New general features added remapping support to blend_chars directive added multi-threaded snippet batches support (requires a batch sent via API, dist_threads, and load_files) added collations (collation_server, collation_libc_locale directives) added support for sorting and grouping on string attributes (ORDER BY, GROUP BY, WITHING GROUP ORDER BY) added UDF support (plugin_dir directive; CREATE FUNCTION, DROP FUNCTION statements) added query_log_format directive, SET GLOBAL query_log_format | log_level = ... 
statements; and connection id tracking added sql_column_buffers directive, fixed out-of-buffer column handling in ODBC/MS SQL sources added blend_mode directive that enables indexing multiple variants of a blended sequence added UNIX socket support to C, Ruby APIs added ranged query support to sql_joined_field added rt_flush_period directive added thread_stack directive added SENTENCE, PARAGRAPH, ZONE operators (and index_sp, index_zones directives) added keywords dictionary support (and dict, expansion_limit directives) added passage_boundary, emit_zones options to snippets added a watchdog process in threaded mode added persistent MVA updates added crash dumps to searchd.log, deprecated crash_log_path directive added id32 index support in id64 binaries (EXPERIMENTAL) added SphinxSE support for DELETE and REPLACE on SphinxQL tables New SphinxQL features added new, more SQL compliant SphinxQL syntax; and a compat_sphinxql_magics directive added CRC32(), DAY(), MONTH(), YEAR(), YEARMONTH(), YEARMONTHDAY() functions added DIV, MOD, and % operators added reverse_scan=(0|1) option to SELECT added support for MySQL packets over 16M added dummy SHOW VARIABLES, SHOW COLLATION, and SET character_set_results support (to support handshake with certain client libraries and frameworks) added mysql_version_string directive (to workaround picky MySQL client libraries) added support for global filter variables, SET GLOBAL @uservar=(int_list) added DELETE ... IN (id_list) syntax support added C-style comments syntax (for example, SELECT /*!40000 some comment*/ id FROM test) added UPDATE ... WHERE id=X syntax support added SphinxQL multi-query support added DESCRIBE, SHOW TABLES statements New command-line switches added --print-queries switch to indexer that dumps SQL queries it runs added --sighup-each switch to indexer that rotates indexes one by one added --strip-path switch to searchd that skips file paths embedded in the index(-es) added --dumpconfig switch to indextool that dumps an index header in sphinx.conf format Major changes and optimizations changed default preopen_indexes value to 1 optimized English stemmer (results in 1.3x faster snippets and indexing with morphology=stem_en) optimized snippets, 1.6x general speedup optimized const-list parsing in SphinxQL optimized full-document highlighting CPU/RAM use optimized binlog replay (improved performance on K-list update) Bug fixes fixed #767, joined fields vs ODBC sources fixed #757, wordforms shared by indexes with different settings fixed #733, loading of indexes in formats prior to v.14 fixed #763, occasional snippets failures fixed #648, occasionally missed rotations on multiple SIGHUPs fixed #750, an RT segment merge leading to false positives and/or crashes in some cases fixed #755, zones in snippets output fixed #754, stopwords counting at snippet passage generation fixed #723, fork/prefork index rotation in children processes fixed #696, freeze on zero threshold in quorum operator fixed #732, query escaping in SphinxSE fixed #739, occasional crashes in MT mode on result set send fixed #746, crash with a named list in SphinxQL option fixed #674, AVG vs group order fixed #734, occasional crashes attempting to report NULL errors fixed #829, tail hits within field position modifier fixed #712, missing query_mode, force_all_words snippet option defaults in Java API fixed #721, added dupe removal on RT batch INSERT/REPLACE fixed #720, potential extraneous highlighting after a blended keyword fixed #702, exceptions vs star search fixed #666, 
ext2 query grouping vs exceptions fixed #688, WITHIN GROUP ORDER BY related crash fixed #660, multi-queue batches vs dist_threads fixed #678, crash on dict=keywords vs xmlpipe vs min_prefix_len fixed #596, ECHILD vs scripted configs fixed #653, dependency in expression, sorting, grouping fixed #661, concurrent distributed searches vs workers=threads fixed #646, crash on status query via UNIX socket fixed #589, libexpat.dll missing from some Win32 build types fixed #574, quorum match order fixed multiple documentation issues (#372, #483, #495, #601, #623, #632, #654) fixed that ondisk_dict did not affect RT indexes fixed that string attributes check in indextool --check was erroneously sensitive to string data order fixed a rare crash when using BEFORE operator fixed an issue with multiforms vs BuildKeywords() fixed an edge case in OR operator (emitted wrong hits order sometimes) fixed aliasing in docinfo accessors that lead to very rare crashes and/or missing results fixed a syntax error on a short token at the end of a query fixed id64 filtering and performance degradation with range filters fixed missing rankers in libsphinxclient fixed missing SPH04 ranker in SphinxSE fixed column names in sql_attr_multi sample (works with example.sql now) fixed an issue with distributed local+remote setup vs aggregate functions fixed case sensitive columns names in RT indexes fixed a crash vs strings from multiple indexes in result set fixed blended keywords vs snippets fixed secure_connection vs MySQL protocol vs MySQL.NET connector fixed that Python API did not works with Python 2.3 fixed overshort_step vs snippets fixed keyword staistics vs dist_threads searching fixed multiforms vs query parsing (vs quorum) fixed missed quorum words vs RT segments fixed blended keywords occasionally skipping extra character when querying (eg "abc[]") fixed Python API to handle int32 values fixed prefix and infix indexing of joined fields fixed MVA ranged query fixed missing blended state reset on document boundary fixed a crash on missing index while replaying binlog fixed an error message on filter values overrun fixed passage duplication in snippets in weight_order mode fixed select clauses over 1K vs remote agents fixed overshort accounting vs soft-whitespace tokens fixed rotation vs workers=threads fixed schema issues vs distributed indexes fixed blended-escaped sequence parsing issue fixed MySQL IN clause (values order etc) fixed that post_index did not execute when 0 documents were succesfully indexed fixed field position limit vs many hits fixed that joined fields missed an end marker at field end fixed that xxx_step settings were missing from .sph index header fixed libsphinxclient missing request cleanup in sphinx_query() (eg after network errors) fixed that index_weights were ignored when grouping fixed multi wordforms vs blend_chars fixed broken MVA output in SphinxQL fixed a few RT leaks fixed an issue with RT string storage going missing fixed an issue with repeated queries vs dist_threads fixed an issue with string attributes vs buffer overrun in SphinxQL fixed unexpected character data warnings within ignored xmlpipe tags fixed a crash in snippets with NEAR syntax query fixed passage duplication in snippets fixed libsphinxclient SIGPIPE handling fixed libsphinxclient vs VS2003 compiler bug Version 1.10-beta, 19 jul 2010 added RT indexes support () added prefork and threads support (workers directives) added multi-threaded local searches in distributed indexes (dist_threads directive) added common 
Version 1.10-beta, 19 jul 2010
added RT indexes support ()
added prefork and threads support (workers directives)
added multi-threaded local searches in distributed indexes (dist_threads directive)
added common subquery cache (subtree_docs_cache, subtree_hits_cache directives)
added string attributes support (sql_attr_string, sql_field_string, xml_attr_string, xml_field_string directives)
added indexing-time word counter (sql_attr_str2wordcount, sql_field_str2wordcount directives)
added CALL SNIPPETS(), CALL KEYWORDS() SphinxQL statements (example below)
added , options to SphinxQL SELECT statement
added insert-only SphinxQL-talking tables to SphinxSE (connection='sphinxql://host[:port]/index')
added option to SphinxSE queries
added backtrace on crash to searchd
added SQL+FS indexing, aka loading files by names fetched from SQL (sql_file_field directive)
added a watchdog in threads mode to searchd
added automatic row phantoms elimination to index merge
added hitless indexing support (hitless_words directive)
added --check, --strip-path, --htmlstrip, --dumphitlist ... --wordid switches to indextool
added --stopwait, --logdebug switches to searchd
added --dump-rows, --verbose switches to indexer
added "blended" characters indexing support (blend_chars directive)
added joined/payload field indexing (sql_joined_field directive)
added FlushAttributes() API call
added query_mode, force_all_words, limit_passages, limit_words, start_passage_id, load_files, html_strip_mode, allow_empty options, and %PASSAGE_ID% macro in before_match, after_match options to BuildExcerpts() API call
added @groupby/@count/@distinct columns support to SELECT (but not to expressions)
added query-time keyword expansion support (expand_keywords directive, SPH_RANK_SPH04 ranker)
added query batch size limit option (max_batch_queries directive; was hardcoded)
added SINT() function to expressions
improved SphinxQL syntax error reporting
improved expression optimizer (better constant handling)
improved dash handling within keywords (no longer treated as an operator)
improved snippets (better passage selection/trimming, around option now a hard limit)
optimized index format that yields ~20-30% smaller indexes
optimized sorting code (indexing time 1-5% faster on average; 100x faster in worst case)
optimized searchd startup time (moved .spa preindexing to indexer), added a progress bar
optimized queries against indexes with many attributes (eliminated redundant copying)
optimized 1-keyword queries (performance regression introduced in 0.9.9)
optimized SphinxQL protocol overheads, and performance on bigger result sets
optimized unbuffered attributes writes on index merge
changed attribute handling, duplicate names are strictly forbidden now
fixed that SphinxQL sessions could stall shutdown
fixed consts with leading minus in SphinxQL
fixed AND/OR precedence in expressions
fixed #334, AVG() on integers was not computed in floats
fixed #371, attribute flush vs 2+ GB files
fixed #373, segfault on distributed queries vs certain libc versions
fixed #398, stopwords not stopped in prefix/infix indexes
fixed #404, erroneous MVA failures in indextool --check
fixed #408, segfault on certain query batches (regular scan, plus a scan with MVA groupby)
fixed #431, occasional shutdown hangs in preforked workers
fixed #436, trunk checkout builds vs Solaris sh
fixed #440, escaping vs parentheses declared as valid in charset_table
fixed #442, occasional non-aligned free in MVA indexing
fixed #447, occasional crashes in MVA indexing
fixed #449, pconn busyloop on aborted clients on certain arches
fixed #465, build issue on Alpha
fixed #468, build issue in libsphinxclient
fixed #472, multiple stopword files failing to load
fixed #489, buffer overflow in query logging
fixed #493, Python API assertion after error returned from Query()
fixed #500, malformed MySQL packet when sending MVAs
fixed #504, SIGPIPE in libsphinxclient
fixed #506, better MySQL protocol commands support in SphinxQL (PING etc)
fixed #509, indexing ranged results from stored procedures
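For illustration, the CALL SNIPPETS() and CALL KEYWORDS() statements flagged above, issued over the MySQL protocol (a sketch, not part of the original release notes). The index name test1 is a placeholder, and the argument order shown is an assumption; check the bundled documentation for the exact signatures.

    import pymysql  # SphinxQL statements go over searchd's MySQL-protocol listener

    conn = pymysql.connect(host="127.0.0.1", port=9306, user="", password="")
    cur = conn.cursor()

    # build an excerpt for an arbitrary piece of text, using index 'test1' settings
    cur.execute("CALL SNIPPETS('this is my document text', 'test1', 'document')")
    print(cur.fetchall())

    # show how the given keywords are tokenized/normalized by index 'test1'
    cur.execute("CALL KEYWORDS('documents', 'test1')")
    print(cur.fetchall())

    conn.close()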
Version 0.9.9-release, 02 dec 2009
added Open, Close, Status calls to libsphinxclient (C API)
added automatic persistent connection reopening to PHP, Python APIs
added 64-bit value/range filters, fullscan mode support to SphinxSE
MAJOR CHANGE, our IANA assigned ports are 9312 and 9306 respectively (goodbye, trusty 3312) (connection example below)
MAJOR CHANGE, erroneous filters now fail with an error (were silently ignored before)
optimized unbuffered .spa writes on merge
optimized 1-keyword queries ranking in extended2 mode
fixed #441 (IO race in case of highly concurrent load on a preopened)
fixed #434 (distributed indexes were not searchable via MySQL protocol)
fixed #317 (indexer MVA progress counter)
fixed #398 (stopwords not removed from search query)
fixed #328 (broken cutoff)
fixed #250 (now quoting paths w/spaces when installing Windows service)
fixed #348 (K-list was not updated on merge)
fixed #357 (destination index was not K-list-filtered on merge)
fixed #369 (precaching .spi files over 2 GBs)
fixed #438 (missing boundary proximity matches)
fixed #371 (.spa flush in case of files over 2 GBs)
fixed #373 (crashes on distributed queries via mysql proto)
fixed critical bugs in hit merging code
fixed #424 (ordinals could be misplaced during indexing in case of bitfields etc)
fixed #426 (failing SE build on Solaris; thanks to Ben Beecher)
fixed #423 (typo in SE caused crash on SHOW STATUS)
fixed #363 (handling of read_timeout over 2147 seconds)
fixed #376 (minor error message mismatch)
fixed #413 (minus in SphinxQL)
fixed #417 (floats w/o leading digit in SphinxQL)
fixed #403 (typo in SetFieldWeights name in Java API)
fixed index rotation vs persistent connections
fixed backslash handling in SphinxQL parser
fixed uint unpacking vs. PHP 5.2.9 (possibly other versions)
fixed #325 (filter settings sent from SphinxSE)
fixed #352 (removed mysql wrapper around close() in SphinxSE)
fixed #389 (display error messages through SphinxSE status variable)
fixed linking with port-installed iconv on OS X
fixed negative 64-bit unpacking in PHP API
fixed #349 (escaping backslash in query emulation mode)
fixed #320 (disabled multi-query route when select items differ)
fixed #353 (better quorum counts check)
fixed #341 (merging of trailing hits; maybe other ranking issues too)
fixed #368 (partially; @field "" caused crashes; now resets field limit)
fixed #365 (field mask was leaking on field-limited terms)
fixed #339 (updated debug query dumper)
fixed #361 (added SetConnectTimeout() to Java API)
fixed #338 (added missing fullscan to mode check in Java API)
fixed #323 (added floats support to SphinxQL)
fixed #340 (support listen=port:proto syntax too)
fixed #332 (\r is legal SphinxQL space now)
fixed xmlpipe2 K-lists
fixed #322 (safety gaps in mysql protocol row buffer)
fixed #313 (return keyword stats for empty indexes too)
fixed #344 (invalid checkpoints after merge)
fixed #326 (missing CLOCK_xxx on FreeBSD)
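For illustration, the new default ports and the persistent connection calls (a sketch, not part of the original release notes): the native API now listens on 9312 and SphinxQL on 9306. It assumes the Python client shipped in this source tree as api/sphinxapi.py, and the index name is a placeholder.

    import sphinxapi  # bundled Python client (api/sphinxapi.py)

    cl = sphinxapi.SphinxClient()
    cl.SetServer('localhost', 9312)   # native API port (was 3312 in older releases)
    cl.Open()                         # keep one persistent connection open

    res = cl.Query('hello world', 'test1')   # 'test1' is a placeholder index name
    if not res:
        print('query failed: %s' % cl.GetLastError())
    else:
        print('%d matches found' % res['total_found'])

    cl.Close()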
Version 0.9.9-rc2, 08 apr 2009
added IsConnectError(), Open(), Close() calls to Java API (bug #240)
added read_buffer, read_unhinted directives
added checks for build options returned by mysql_config (builds on Solaris now)
added fixed-RAM index merge (bug #169)
added logging chained queries count in case of (optimized) multi-queries
added GEODIST() function
added --status switch to searchd
added MySpell (OpenOffice) affix file support (bug #281)
added ODBC support (both Windows and UnixODBC)
added support for @id in IN() (bug #292)
added support for aggregate functions in GROUP BY (namely AVG, MAX, MIN, SUM)
added MySQL UDF that builds snippets using searchd
added write_buffer directive (defaults to 1M)
added xmlpipe_fixup_utf8 directive
added suggestions sample
added microsecond precision int64 timer (bug #282)
added listen_backlog directive
added max_xmlpipe2_field directive
added initial SphinxQL support to mysql41 handler, SELECT .../SHOW WARNINGS/STATUS/META are handled
added support for different network protocols, and mysql41 protocol
added fieldmask ranker, updated SphinxSE list of rankers
added mysql_ssl_xxx directives
added --cpustats (requires clock_gettime()) and --status switches to searchd
added performance counters, Status() API call
added overshort_step and stopword_step directives
added strict order operator (aka operator before, eg. "one << two << three") (example below)
added indextool utility, moved --dumpheader there, added --debugdocids, --dumphitlist options
added own RNG, reseeded on @random sort query (bug #183)
added field-start and field-end modifiers support (syntax is "^hello world$"; field-end requires reindex)
added MVA attribute support to IN() function
added AND, OR, and NOT support to expressions
improved logging of (optimized) multi-queries (now logging chained query count)
improved handshake error handling, fixed protocol version byte order (omg)
updated SphinxSE to protocol 1.22
allowed phrase_boundary_step=-1 (trick to emulate keyword expansion)
removed SPH_MAX_QUERY_WORDS limit
fixed CLI search vs documents missing from DB (bug #257)
fixed libsphinxclient results leak on subsequent sphinx_run_queries call (bug #256)
fixed libsphinxclient handling of zero max_matches and cutoff (bug #208)
fixed over-64K string reads (eg. big snippets) in Java API (bug #181)
fixed Java API 2nd Query() after network error in 1st Query() call (bug #308)
fixed typo-class bugs in SetFilterFloatRange (bug #259), SetSortMode (bug #248)
fixed missing @@relaxed support (bug #276), fixed missing error on @nosuchfield queries, documented @@relaxed
fixed UNIX socket permissions to 0777 (bug #288)
fixed xmlpipe2 crash on schemas with no fields, added better document structure checks
fixed (and optimized) expr parser vs IN() with huge (10K+) args count
fixed double EarlyCalc() in fullscan mode (minor performance impact)
fixed phrase boundary handling in some cases (on buffer end, on trailing whitespace)
fixes in snippets (aka excerpts) generation
fixed inline attrs vs id64 index corruption
fixed head searchd crash on config re-parse failure
fixed handling of numeric keywords with leading zeroes such as "007" (bug #251)
fixed junk in SphinxSE status variables (bug #304)
fixed wordlist checkpoints serialization (bug #236)
fixed unaligned docinfo id access (bug #230)
fixed GetRawBytes() vs oversized blocks (headers with over 32K charset_table should now work, bug #300)
fixed buffer overflow caused by too long dest wordform, updated tests
fixed IF() return type (was always int, is deduced now)
fixed legacy queries vs. special chars vs. multiple indexes
fixed write-write-read socket access pattern vs Nagle vs delays vs FreeBSD (oh wow)
fixed exceptions vs query-parser issue
fixed late calc vs @weight in expressions (bug #285)
fixed early lookup/calc vs filters (bug #284)
fixed emulated MATCH_ANY queries (empty proximity and phrase queries are allowed now)
fixed MATCH_ANY ranker vs fields with no matches
fixed index file size vs inplace_enable (bug #245)
fixed that old logs were not closed on USR1 (bug #221)
fixed handling of '!' alias to NOT operator (bug #237)
fixed error handling vs query steps (step failure was not reported)
fixed querying vs inline attributes
fixed stupid bug in escaping code, fixed EscapeString() and made it static
fixed parser vs @field -keyword, foo|@field bar, "" queries (bug #310)
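For illustration, the 0.9.9-rc2 query-language additions quoted above, the strict order operator and the field start/end modifiers, issued through the bundled Python client (a sketch, not part of the original release notes); field and index names are placeholders.

    import sphinxapi  # bundled Python client (api/sphinxapi.py)

    cl = sphinxapi.SphinxClient()
    cl.SetServer('localhost', 9312)
    cl.SetMatchMode(sphinxapi.SPH_MATCH_EXTENDED2)

    # strict order: "one" must occur before "two", which must occur before "three"
    cl.Query('one << two << three', 'test1')

    # field start/end modifiers: the 'title' field must be exactly "hello world"
    cl.Query('@title ^hello world$', 'test1')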
Version 0.9.9-rc1, 17 nov 2008
added min_stemming_len directive
added IsConnectError() API call (helps distinguish API vs remote errors)
added duplicate log messages filter to searchd
added --nodetach debugging switch to searchd
added blackhole agents support for debugging/testing (agent_blackhole directive)
added max_filters, max_filter_values directives (were hardcoded before)
added int64 expression evaluation path, automatic inference, and BIGINT() enforcer function
added crash handler for debugging (crash_log_path directive)
added MS SQL (aka SQL Server) source support (Windows only, mssql_winauth and mssql_unicode directives)
added indexer-side column unpacking feature (unpack_zlib, unpack_mysqlcompress directives)
added nested brackets and NOTs support to query language, rewritten query parser
added persistent connections support (Open() and Close() API calls)
added index_exact_words feature, and exact form operator to query language ("hello =world")
added status variables support to SphinxSE (SHOW STATUS LIKE 'sphinx_%')
added max_packet_size directive (was hardcoded at 8M before)
added UNIX socket support, and multi-interface support (listen directive)
added star-syntax support to BuildExcerpts() API call
added inplace inversion of .spa and .spp (inplace_enable directive, 1.5-2x less disk space for indexing)
added builtin Czech stemmer (morphology=stem_cz)
added IDIV(), NOW(), INTERVAL(), IN() functions to expressions
added index-level early-reject based on filters
added MVA updates feature (mva_updates_pool directive)
added select-list feature with computed expressions support (see SetSelect() API call, test.php --select switch), protocol 1.22 (example below)
added integer expressions support (2x faster than float)
added multiforms support (multiple source words in wordforms file)
added legacy rankers (MATCH_ALL/MATCH_ANY/etc), removed legacy matching code (everything runs on V2 engine now)
added field position limit modifier to field operator (syntax: @title[50] hello world)
added killlist support (sql_query_killlist directive, --merge-killlists switch)
added on-disk SPI support (ondisk_dict directive)
added indexer IO stats
added periodic .spa flush (attr_flush_period directive)
added config reload on SIGHUP
added per-query attribute overrides feature (see SetOverride() API call); protocol 1.21
added signed 64bit attrs support (sql_attr_bigint directive)
improved HTML stripper to also skip PIs (<? ... ?>, such as <?php ... ?>)
improved excerpts speed (up to 50x faster on big documents)
fixed a short window of searchd inaccessibility on startup (started listen()ing too early before)
fixed .spa loading on systems where read() is 2GB capped
fixed infixes vs morphology issues
fixed backslash escaping, added backslash to EscapeString()
fixed handling of over-2GB dictionary files (.spi)
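For illustration, the select-list and expression features flagged above (a sketch, not part of the original release notes), assuming the bundled Python client exposes SetSelect() the same way the PHP client does; attribute and index names are placeholders.

    import sphinxapi  # bundled Python client (api/sphinxapi.py)

    cl = sphinxapi.SphinxClient()
    cl.SetServer('localhost', 9312)

    # computed columns using the new expression functions
    cl.SetSelect('*, IDIV(price, 100) AS price_bucket, IN(group_id, 1, 2, 3) AS is_special')

    res = cl.Query('test', 'test1')
    if res:
        for match in res['matches']:
            print(match['id'], match['attrs'])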
Version 0.9.8.1, 30 oct 2008
added configure script to libsphinxclient
changed proximity/quorum operator syntax to require whitespace after length
fixed potential head process crash on SIGPIPE during "maxed out" message
fixed handling of incomplete remote replies (caused over-degraded distributed results, in rare cases)
fixed sending of big remote requests (caused distributed requests to fail, in rare cases)
fixed FD_SET() overflow (caused searchd to crash on startup, in rare cases)
fixed MVA vs distributed indexes (caused loss of 1st MVA value in result set)
fixed tokenizing of exceptions terminated by specials (eg. "GPS AT&T" in extended mode)
fixed buffer overrun in stemmer on overlong tokens occasionally emitted by proximity/quorum operator parser (caused crashes on certain proximity/quorum queries)
fixed wordcount ranker (could be dropping hits)
fixed --merge feature (numerous different fixes, caused broken indexes)
fixed --merge-dst-range performance
fixed prefix/infix generation for stopwords
fixed ignore_chars vs specials
fixed misplaced F_SETLKW check (caused certain build types, eg. RPM build on FC8, to fail)
fixed dictionary-defined charsets support in spelldump, added \x-style wordchars support
fixed Java API to properly send long strings (over 64K; eg. long document bodies for excerpts)
fixed Python API to accept offset/limit of 'long' type
fixed default ID range (that filtered out all 64-bit values) in Java and Python APIs

Version 0.9.8, 14 jul 2008

Indexing
added support for 64-bit document and keyword IDs, --enable-id64 switch to configure
added support for floating point attributes
added support for bitfields in attributes, sql_attr_bool directive and bit-widths part in sql_attr_uint directive
added support for multi-valued attributes (MVA)
added metaphone preprocessor
added libstemmer library support, provides stemmers for a number of additional languages
added xmlpipe2 source type, that supports arbitrary fields and attributes
added word form dictionaries, wordforms directive (and spelldump utility)
added tokenizing exceptions, exceptions directive
added an option to fully remove element contents to HTML stripper, html_remove_elements directive
added HTML entities decoder (with full XHTML1 set support) to HTML stripper
added per-index HTML stripping settings, html_strip, html_index_attrs, and html_remove_elements directives
added IO load throttling, max_iops and max_iosize directives
added SQL load throttling, sql_ranged_throttle directive
added an option to index prefixes/infixes for given fields only, prefix_fields and infix_fields directives
added an option to ignore certain characters (instead of just treating them as whitespace), ignore_chars directive
added an option to increment word position on phrase boundary characters, phrase_boundary and phrase_boundary_step directives
added --merge-dst-range switch (and filters) to index merging feature (--merge switch)
added mysql_connect_flags directive (eg. to reduce indexing time MySQL network traffic and/or time)
improved ordinals sorting; now runs in fixed RAM
improved handling of documents with zero/NULL ids, now skipping them instead of aborting

Search daemon
added an option to unlink old index on successful rotation, unlink_old directive
added an option to keep index files open at all times (fixes subtle races on rotation), preopen and preopen_indexes directives
added an option to profile searchd disk I/O, --iostats command-line option
added an option to rotate index seamlessly (fully avoids query stalls), seamless_rotate directive
added HTML stripping support to excerpts (uses per-index settings)
added 'exact_phrase', 'single_passage', 'use_boundaries', 'weight_order' options to BuildExcerpts() API call
added distributed attribute updates propagation
added distributed retries on master node side
added log reopen on SIGUSR1
added --stop switch (sends SIGTERM to running instance)
added Windows service mode, and --servicename switch
added Windows --rotate support
improved log timestamping, now with millisecond precision

Querying
added extended engine V2 (faster, cleaner, better; SPH_MATCH_EXTENDED2 mode)
added ranking modes support (V2 engine only; SetRankingMode() API call)
added quorum searching support to query language (V2 engine only; example: "any three of all these words"/3)
added query escaping support to query language, and EscapeString() API call
added multi-field syntax support to query language (example: "@(field1,field2) something"), and @@relaxed field checks option
added optional star-syntax ('word*') support in keywords, enable_star directive (for prefix/infix indexes only)
added full-scan support (query must be fully empty; can perform block-reject optimization)
added COUNT(DISTINCT(attr)) calculation support, SetGroupDistinct() API call
added group-by on MVA support, SetArrayResult() PHP API call
added per-index weights feature, SetIndexWeights() API call
added geodistance support, SetGeoAnchor() API call
"@weight+log(price)*2.5"), SPH_SORT_EXPR mode added result set sorting by @custom compile-time sorting function (see src/sphinxcustomsort.inl) added result set sorting by @random value added result set merging for indexes with different schemas added query comments support (3rd arg to Query()/AddQuery() API calls, copied verbatim to query log) added keyword extraction support, BuildKeywords() API call added binding field weights by name, SetFieldWeights() API call added optional limit on query time, SetMaxQueryTime() API call added optional limit on found matches count (4rd arg to SetLimits() API call, so-called 'cutoff') APIs and SphinxSE added pure C API (libsphinxclient) added Ruby API (thanks to Dmytro Shteflyuk) added Java API added SphinxSE support for MVAs (use varchar), floats (use float), 64bit docids (use bigint) added SphinxSE options "floatrange", "geoanchor", "fieldweights", "indexweights", "maxquerytime", "comment", "host" and "port"; and support for "expr:CLAUSE" improved SphinxSE max query size (using MySQL condition pushdown), upto 256K now General added scripting (shebang syntax) support to config files (example: #!/usr/bin/php in the first line) added unified config handling and validation to all programs added unified documentation added .spec file for RPM builds added automated testing suite improved index locking, now fcntl()-based instead of buggy file-existence-based fixed unaligned RAM accesses, now works on SPARC and ARM Changes and fixes since 0.9.8-rc2 added pure C API (libsphinxclient) added Ruby API added SetConnectTimeout() PHP API call added allowed type check to UpdateAttributes() handler (bug #174) added defensive MVA checks on index preload (protection against broken indexes, bug #168) added sphinx-min.conf sample file added --without-iconv switch to configure removed redundant -lz dependency in searchd removed erroneous "xmlpipe2 deprecated" warning fixed EINTR handling in piped read (bug #166) fixup query time before logging and sending to client (bug #153) fixed attribute updates vs full-scan early-reject index (bug #149) fixed gcc warnings (bug #160) fixed mysql connection attempt vs pgsql source type (bug #165) fixed 32-bit wraparound when preloading over 2 GB files fixed "out of memory" message vs over 2 GB allocs (bug #116) fixed unaligned RAM access detection on ARM (where unaligned reads do not crash but produce wrong results) fixed missing full scan results in some cases fixed several bugs in --merge, --merge-dst-range fixed @geodist vs MultiQuery and filters, @expr vs MultiQuery fixed GetTokenEnd() vs 1-grams (was causing crash in excerpts) fixed sql_query_range to handle empty strings in addition to NULL strings (Postgres specific) fixed morphology=none vs infixes fixed case sensitive attributes names in UpdateAttributes() fixed ext2 ranking vs. stopwords (now using atompos from query parser) fixed EscapeString() call fixed escaped specials (now handled as whitespace if not in charset) fixed schema minimizer (now handles type/size mismatches) fixed word stats in extended2; stemmed form is now returned fixed spelldump case folding vs dictionary-defined character sets fixed Postgres BOOLEAN handling fixed enforced "inline" docinfo on empty indexes (normally ok, but index merge was really confused) fixed rare count(distinct) out-of-bounds issue (it occasionaly caused too high @distinct values) fixed hangups on documents with id=DOCID_MAX in some cases fixed rare crash in tokenizer (prefixed synonym vs. 
Changes and fixes since 0.9.8-rc2
added pure C API (libsphinxclient)
added Ruby API
added SetConnectTimeout() PHP API call
added allowed type check to UpdateAttributes() handler (bug #174) (example below)
added defensive MVA checks on index preload (protection against broken indexes, bug #168)
added sphinx-min.conf sample file
added --without-iconv switch to configure
removed redundant -lz dependency in searchd
removed erroneous "xmlpipe2 deprecated" warning
fixed EINTR handling in piped read (bug #166)
fixed up query time before logging and sending to client (bug #153)
fixed attribute updates vs full-scan early-reject index (bug #149)
fixed gcc warnings (bug #160)
fixed mysql connection attempt vs pgsql source type (bug #165)
fixed 32-bit wraparound when preloading over 2 GB files
fixed "out of memory" message vs over 2 GB allocs (bug #116)
fixed unaligned RAM access detection on ARM (where unaligned reads do not crash but produce wrong results)
fixed missing full scan results in some cases
fixed several bugs in --merge, --merge-dst-range
fixed @geodist vs MultiQuery and filters, @expr vs MultiQuery
fixed GetTokenEnd() vs 1-grams (was causing crash in excerpts)
fixed sql_query_range to handle empty strings in addition to NULL strings (Postgres specific)
fixed morphology=none vs infixes
fixed case sensitive attribute names in UpdateAttributes()
fixed ext2 ranking vs. stopwords (now using atompos from query parser)
fixed EscapeString() call
fixed escaped specials (now handled as whitespace if not in charset)
fixed schema minimizer (now handles type/size mismatches)
fixed word stats in extended2; stemmed form is now returned
fixed spelldump case folding vs dictionary-defined character sets
fixed Postgres BOOLEAN handling
fixed enforced "inline" docinfo on empty indexes (normally ok, but index merge was really confused)
fixed rare count(distinct) out-of-bounds issue (it occasionally caused too high @distinct values)
fixed hangups on documents with id=DOCID_MAX in some cases
fixed rare crash in tokenizer (prefixed synonym vs. input stream eof)
fixed query parser vs "aaa (bbb ccc)|ddd" queries
fixed BuildExcerpts() request in Java API
fixed Postgres specific memory leak
fixed handling of overshort keywords (less than min_word_len)
fixed HTML stripper (now emits space after indexed attributes)
fixed 32-field case in query parser
fixed rare count(distinct) vs. querying multiple local indexes vs. reusable sorter issue
fixed sorting of negative floats in SPH_SORT_EXTENDED mode
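For illustration, an on-the-fly attribute update through the UpdateAttributes() call flagged above, via the bundled Python client (a sketch, not part of the original release notes); the index name, attribute name, and document ids are placeholders.

    import sphinxapi  # bundled Python client (api/sphinxapi.py)

    cl = sphinxapi.SphinxClient()
    cl.SetServer('localhost', 9312)

    # set group_id=42 for document 123 and group_id=43 for document 124 in index 'test1';
    # returns the number of actually updated documents, or -1 on error
    updated = cl.UpdateAttributes('test1', ['group_id'], {123: [42], 124: [43]})
    print(updated, cl.GetLastError())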
Version 0.9.7, 02 apr 2007
added support for
added support for up to 5 sort-by attrs (in extended sorting mode)
added support for separate groups sorting clause (in group-by mode)
added support for on-the-fly attribute updates (PRE-ALPHA; will change heavily; use for preliminary testing ONLY)
added support for zero/NULL attributes
added support for 0.9.7 features to SphinxSE
added support for n-grams (alpha, 1-grams only for now)
added support for warnings reported to client
added support for exclude-filters
added support for prefix and infix indexing (see , )
added syntax to reset current field to query language
added removal of duplicate entries in query index order
added PHP API workarounds for PHP signed/unsigned braindamage
added locks to avoid two concurrent indexers working on same index
added check for existing attributes vs. case
improved groupby code a lot (better precision, and up to 25x faster in extreme cases)
improved error handling and reporting
improved handling of broken indexes (reports error instead of hanging/crashing)
improved limits for attributes and wordlists (now able to map over 4 GB on x64 and over 2 GB on x32 where possible)
improved pressure in head daemon (search time should not degrade with time any more)
improved test.php command line options
improved error reporting (distributed query, broken index etc issues now reported to client)
changed default network packet size to be 8M, added extra checks
fixed division by zero in BM25 on 1-document collections (in extended matching mode)
fixed .spl files getting unlinked
fixed crash in schema compatibility test
fixed UTF-8 Russian stemmer
fixed requested matches count when querying distributed agents
fixed signed vs. unsigned issues everywhere (ranged queries, CLI search output, and obtaining docid)
fixed potential crashes vs. negative query offsets
fixed 0-match docs vs. extended mode vs. stats
fixed group/timestamp filters being ignored if querying from older clients
fixed docs to mention source type
fixed issues with explicit '&' in extended matching mode
fixed wrong assertion in SBCS encoder
fixed crashes with no-attribute indexes after rotate

Version 0.9.7-rc2, 15 dec 2006
added support for extended matching mode (query language)
added support for extended sorting mode (sorting clauses)
added support for SBCS excerpts
added for attributes and wordlist (improves search time, speeds up greatly)
fixed attribute name handling to be case insensitive
fixed default compiler options to simplify post-mortem debugging (added , removed )
fixed rare memory leak
fixed "hello hello" queries in "match phrase" mode
fixed issue with excerpts, texts and overlong queries
fixed logging multiple index name (no longer tokenized)
fixed trailing stopword not flushed from tokenizer
fixed boolean evaluation
fixed pidfile being wrongly on failure
fixed (they conflicted with well-known paths)
fixes for 64-bit platforms

Version 0.9.7-rc1, 26 oct 2006
added alpha index merging code
added an option to decrease per-query
added an option to specify IP address for searchd to listen on
added support for unlimited amount of configured sources and indexes
added support for group-by queries
added support for /2 range modifier in charset_table
added support for arbitrary amount of document attributes
added logging filter count and index name
added option to configure to compile in debug mode
added when compiling in default mode
improved search time (added doclist size hints, in-memory wordlist cache, and used VLB coding everywhere)
improved (refactored) SQL driver code (adding new drivers should be very easy now)
improved excerpts generation
fixed issue with empty sources and ranged queries
fixed querying purely remote distributed indexes
fixed suffix length check in English stemmer in some cases
fixed UTF-8 decoder for codes over U+20000 (for CJK)
fixed UTF-8 encoder for 3-byte sequences (for CJK)
fixed overshort (less than ) words prepended to next field
fixed source connection order (indexer does not connect to all sources at once now)
fixed line numbering in config parser
fixed some issues with index rotation

Version 0.9.6, 24 jul 2006
added support for empty indexes
added support for multiple sql_query_pre/post/post_index
fixed timestamp ranges filter in "match any" mode
fixed configure issues with --without-mysql and --with-pgsql options
fixed building on Solaris 9

Version 0.9.6-rc1, 26 jun 2006
added boolean queries support (experimental, beta version)
added simple file-based query cache (experimental, beta version)
added storage engine for MySQL 5.0 and 5.1 (experimental, beta version)
added GNU style configure script
added new searchd protocol (all binary, and should be backwards compatible)
added distributed searching support to searchd
added PostgreSQL driver
added excerpts generation
added option to index
added option to searchd, removed hardcoded MAX_MATCHES limit
added initial documentation, and a working example.sql
added support for multiple sources per index
added soundex support
added group ID ranges support
added command-line option to search utility
added option to indexer
added option to search
fixed UTF-8 decoder (3-byte codepoints did not work)
fixed PHP API to handle big result sets faster
fixed config parser to handle empty values properly
fixed redundant time(NULL) calls in time-segments mode
sphinx-2.0.4-release/libstemmer_c/0000755000176700017710000000000011724063141016407 5ustar 
deogardeogarsphinx-2.0.4-release/libstemmer_c/Makefile.am0000644000176700017710000000024410627001641020441 0ustar deogardeogarif USE_LIBSTEMMER noinst_LIBRARIES = libstemmer.a include $(srcdir)/mkinc.mak noinst_HEADERS = $(snowball_headers) libstemmer_a_SOURCES = $(snowball_sources) endif sphinx-2.0.4-release/libstemmer_c/libstemmer_c.sln0000644000176700017710000000154010627023661021576 0ustar deogardeogarMicrosoft Visual Studio Solution File, Format Version 9.00 # Visual Studio 2005 Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "libstemmer_c", "libstemmer_c.vcproj", "{0F64D11C-6E40-4F63-8B7F-E5B615D38301}" EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Win32 = Debug|Win32 Release|Win32 = Release|Win32 EndGlobalSection GlobalSection(ProjectConfigurationPlatforms) = postSolution {0F64D11C-6E40-4F63-8B7F-E5B615D38301}.Debug|Win32.ActiveCfg = Debug|Win32 {0F64D11C-6E40-4F63-8B7F-E5B615D38301}.Debug|Win32.Build.0 = Debug|Win32 {0F64D11C-6E40-4F63-8B7F-E5B615D38301}.Release|Win32.ActiveCfg = Release|Win32 {0F64D11C-6E40-4F63-8B7F-E5B615D38301}.Release|Win32.Build.0 = Release|Win32 EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE EndGlobalSection EndGlobal sphinx-2.0.4-release/libstemmer_c/Makefile.in0000644000176700017710000026203411403722010020452 0ustar deogardeogar# Makefile.in generated by automake 1.11.1 from Makefile.am. # @configure_input@ # Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, # 2003, 2004, 2005, 2006, 2007, 2008, 2009 Free Software Foundation, # Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ # libstemmer/mkinc.mak: List of stemming module source files # # This file is generated by mkmodules.pl from a list of module names. # Do not edit manually. 
# # Modules included by this file are: danish, dutch, english, finnish, french, # german, hungarian, italian, norwegian, porter, portuguese, romanian, # russian, spanish, swedish, turkish VPATH = @srcdir@ pkgdatadir = $(datadir)/@PACKAGE@ pkgincludedir = $(includedir)/@PACKAGE@ pkglibdir = $(libdir)/@PACKAGE@ pkglibexecdir = $(libexecdir)/@PACKAGE@ am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : DIST_COMMON = README $(am__noinst_HEADERS_DIST) $(srcdir)/Makefile.am \ $(srcdir)/Makefile.in $(srcdir)/mkinc.mak subdir = libstemmer_c ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = $(top_srcdir)/acinclude.m4 \ $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = LIBRARIES = $(noinst_LIBRARIES) AR = ar ARFLAGS = cru libstemmer_a_AR = $(AR) $(ARFLAGS) libstemmer_a_LIBADD = am__libstemmer_a_SOURCES_DIST = src_c/stem_ISO_8859_1_danish.c \ src_c/stem_UTF_8_danish.c src_c/stem_ISO_8859_1_dutch.c \ src_c/stem_UTF_8_dutch.c src_c/stem_ISO_8859_1_english.c \ src_c/stem_UTF_8_english.c src_c/stem_ISO_8859_1_finnish.c \ src_c/stem_UTF_8_finnish.c src_c/stem_ISO_8859_1_french.c \ src_c/stem_UTF_8_french.c src_c/stem_ISO_8859_1_german.c \ src_c/stem_UTF_8_german.c src_c/stem_ISO_8859_1_hungarian.c \ src_c/stem_UTF_8_hungarian.c src_c/stem_ISO_8859_1_italian.c \ src_c/stem_UTF_8_italian.c src_c/stem_ISO_8859_1_norwegian.c \ src_c/stem_UTF_8_norwegian.c src_c/stem_ISO_8859_1_porter.c \ src_c/stem_UTF_8_porter.c src_c/stem_ISO_8859_1_portuguese.c \ src_c/stem_UTF_8_portuguese.c src_c/stem_ISO_8859_2_romanian.c \ src_c/stem_UTF_8_romanian.c src_c/stem_KOI8_R_russian.c \ src_c/stem_UTF_8_russian.c src_c/stem_ISO_8859_1_spanish.c \ src_c/stem_UTF_8_spanish.c src_c/stem_ISO_8859_1_swedish.c \ src_c/stem_UTF_8_swedish.c src_c/stem_UTF_8_turkish.c \ runtime/api.c runtime/utilities.c libstemmer/libstemmer.c @USE_LIBSTEMMER_TRUE@am__objects_1 = stem_ISO_8859_1_danish.$(OBJEXT) \ @USE_LIBSTEMMER_TRUE@ stem_UTF_8_danish.$(OBJEXT) \ @USE_LIBSTEMMER_TRUE@ stem_ISO_8859_1_dutch.$(OBJEXT) \ @USE_LIBSTEMMER_TRUE@ stem_UTF_8_dutch.$(OBJEXT) \ @USE_LIBSTEMMER_TRUE@ stem_ISO_8859_1_english.$(OBJEXT) \ @USE_LIBSTEMMER_TRUE@ stem_UTF_8_english.$(OBJEXT) \ @USE_LIBSTEMMER_TRUE@ stem_ISO_8859_1_finnish.$(OBJEXT) \ @USE_LIBSTEMMER_TRUE@ stem_UTF_8_finnish.$(OBJEXT) \ @USE_LIBSTEMMER_TRUE@ stem_ISO_8859_1_french.$(OBJEXT) \ @USE_LIBSTEMMER_TRUE@ stem_UTF_8_french.$(OBJEXT) \ @USE_LIBSTEMMER_TRUE@ stem_ISO_8859_1_german.$(OBJEXT) \ @USE_LIBSTEMMER_TRUE@ stem_UTF_8_german.$(OBJEXT) \ @USE_LIBSTEMMER_TRUE@ stem_ISO_8859_1_hungarian.$(OBJEXT) \ @USE_LIBSTEMMER_TRUE@ stem_UTF_8_hungarian.$(OBJEXT) \ @USE_LIBSTEMMER_TRUE@ stem_ISO_8859_1_italian.$(OBJEXT) \ @USE_LIBSTEMMER_TRUE@ stem_UTF_8_italian.$(OBJEXT) \ @USE_LIBSTEMMER_TRUE@ stem_ISO_8859_1_norwegian.$(OBJEXT) \ @USE_LIBSTEMMER_TRUE@ stem_UTF_8_norwegian.$(OBJEXT) \ @USE_LIBSTEMMER_TRUE@ stem_ISO_8859_1_porter.$(OBJEXT) \ @USE_LIBSTEMMER_TRUE@ stem_UTF_8_porter.$(OBJEXT) \ @USE_LIBSTEMMER_TRUE@ stem_ISO_8859_1_portuguese.$(OBJEXT) \ @USE_LIBSTEMMER_TRUE@ 
stem_UTF_8_portuguese.$(OBJEXT) \ @USE_LIBSTEMMER_TRUE@ stem_ISO_8859_2_romanian.$(OBJEXT) \ @USE_LIBSTEMMER_TRUE@ stem_UTF_8_romanian.$(OBJEXT) \ @USE_LIBSTEMMER_TRUE@ stem_KOI8_R_russian.$(OBJEXT) \ @USE_LIBSTEMMER_TRUE@ stem_UTF_8_russian.$(OBJEXT) \ @USE_LIBSTEMMER_TRUE@ stem_ISO_8859_1_spanish.$(OBJEXT) \ @USE_LIBSTEMMER_TRUE@ stem_UTF_8_spanish.$(OBJEXT) \ @USE_LIBSTEMMER_TRUE@ stem_ISO_8859_1_swedish.$(OBJEXT) \ @USE_LIBSTEMMER_TRUE@ stem_UTF_8_swedish.$(OBJEXT) \ @USE_LIBSTEMMER_TRUE@ stem_UTF_8_turkish.$(OBJEXT) \ @USE_LIBSTEMMER_TRUE@ api.$(OBJEXT) utilities.$(OBJEXT) \ @USE_LIBSTEMMER_TRUE@ libstemmer.$(OBJEXT) @USE_LIBSTEMMER_TRUE@am_libstemmer_a_OBJECTS = $(am__objects_1) libstemmer_a_OBJECTS = $(am_libstemmer_a_OBJECTS) DEFAULT_INCLUDES = -I.@am__isrc@ -I$(top_builddir)/config depcomp = $(SHELL) $(top_srcdir)/config/depcomp am__depfiles_maybe = depfiles am__mv = mv -f COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \ $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) CCLD = $(CC) LINK = $(CCLD) $(AM_CFLAGS) $(CFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ SOURCES = $(libstemmer_a_SOURCES) DIST_SOURCES = $(am__libstemmer_a_SOURCES_DIST) am__noinst_HEADERS_DIST = src_c/stem_ISO_8859_1_danish.h \ src_c/stem_UTF_8_danish.h src_c/stem_ISO_8859_1_dutch.h \ src_c/stem_UTF_8_dutch.h src_c/stem_ISO_8859_1_english.h \ src_c/stem_UTF_8_english.h src_c/stem_ISO_8859_1_finnish.h \ src_c/stem_UTF_8_finnish.h src_c/stem_ISO_8859_1_french.h \ src_c/stem_UTF_8_french.h src_c/stem_ISO_8859_1_german.h \ src_c/stem_UTF_8_german.h src_c/stem_ISO_8859_1_hungarian.h \ src_c/stem_UTF_8_hungarian.h src_c/stem_ISO_8859_1_italian.h \ src_c/stem_UTF_8_italian.h src_c/stem_ISO_8859_1_norwegian.h \ src_c/stem_UTF_8_norwegian.h src_c/stem_ISO_8859_1_porter.h \ src_c/stem_UTF_8_porter.h src_c/stem_ISO_8859_1_portuguese.h \ src_c/stem_UTF_8_portuguese.h src_c/stem_ISO_8859_2_romanian.h \ src_c/stem_UTF_8_romanian.h src_c/stem_KOI8_R_russian.h \ src_c/stem_UTF_8_russian.h src_c/stem_ISO_8859_1_spanish.h \ src_c/stem_UTF_8_spanish.h src_c/stem_ISO_8859_1_swedish.h \ src_c/stem_UTF_8_swedish.h src_c/stem_UTF_8_turkish.h \ include/libstemmer.h libstemmer/modules.h runtime/api.h \ runtime/header.h HEADERS = $(noinst_HEADERS) ETAGS = etags CTAGS = ctags DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) ACLOCAL = @ACLOCAL@ AMTAR = @AMTAR@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CONFDIR = @CONFDIR@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CXX = @CXX@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ GREP = @GREP@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ LDFLAGS = @LDFLAGS@ LIBOBJS = @LIBOBJS@ LIBRT = @LIBRT@ LIBS = @LIBS@ LTLIBOBJS = @LTLIBOBJS@ MAINT = @MAINT@ MAKEINFO = @MAKEINFO@ MKDIR_P = @MKDIR_P@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ OBJEXT = @OBJEXT@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PGSQL_CFLAGS = @PGSQL_CFLAGS@ PGSQL_LIBS = @PGSQL_LIBS@ RANLIB = @RANLIB@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ STRIP = 
@STRIP@ VERSION = @VERSION@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ bindir = @bindir@ build_alias = @build_alias@ builddir = @builddir@ datadir = @datadir@ datarootdir = @datarootdir@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ host_alias = @host_alias@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pgconfig = @pgconfig@ prefix = @prefix@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ @USE_LIBSTEMMER_TRUE@noinst_LIBRARIES = libstemmer.a @USE_LIBSTEMMER_TRUE@snowball_sources = \ @USE_LIBSTEMMER_TRUE@ src_c/stem_ISO_8859_1_danish.c \ @USE_LIBSTEMMER_TRUE@ src_c/stem_UTF_8_danish.c \ @USE_LIBSTEMMER_TRUE@ src_c/stem_ISO_8859_1_dutch.c \ @USE_LIBSTEMMER_TRUE@ src_c/stem_UTF_8_dutch.c \ @USE_LIBSTEMMER_TRUE@ src_c/stem_ISO_8859_1_english.c \ @USE_LIBSTEMMER_TRUE@ src_c/stem_UTF_8_english.c \ @USE_LIBSTEMMER_TRUE@ src_c/stem_ISO_8859_1_finnish.c \ @USE_LIBSTEMMER_TRUE@ src_c/stem_UTF_8_finnish.c \ @USE_LIBSTEMMER_TRUE@ src_c/stem_ISO_8859_1_french.c \ @USE_LIBSTEMMER_TRUE@ src_c/stem_UTF_8_french.c \ @USE_LIBSTEMMER_TRUE@ src_c/stem_ISO_8859_1_german.c \ @USE_LIBSTEMMER_TRUE@ src_c/stem_UTF_8_german.c \ @USE_LIBSTEMMER_TRUE@ src_c/stem_ISO_8859_1_hungarian.c \ @USE_LIBSTEMMER_TRUE@ src_c/stem_UTF_8_hungarian.c \ @USE_LIBSTEMMER_TRUE@ src_c/stem_ISO_8859_1_italian.c \ @USE_LIBSTEMMER_TRUE@ src_c/stem_UTF_8_italian.c \ @USE_LIBSTEMMER_TRUE@ src_c/stem_ISO_8859_1_norwegian.c \ @USE_LIBSTEMMER_TRUE@ src_c/stem_UTF_8_norwegian.c \ @USE_LIBSTEMMER_TRUE@ src_c/stem_ISO_8859_1_porter.c \ @USE_LIBSTEMMER_TRUE@ src_c/stem_UTF_8_porter.c \ @USE_LIBSTEMMER_TRUE@ src_c/stem_ISO_8859_1_portuguese.c \ @USE_LIBSTEMMER_TRUE@ src_c/stem_UTF_8_portuguese.c \ @USE_LIBSTEMMER_TRUE@ src_c/stem_ISO_8859_2_romanian.c \ @USE_LIBSTEMMER_TRUE@ src_c/stem_UTF_8_romanian.c \ @USE_LIBSTEMMER_TRUE@ src_c/stem_KOI8_R_russian.c \ @USE_LIBSTEMMER_TRUE@ src_c/stem_UTF_8_russian.c \ @USE_LIBSTEMMER_TRUE@ src_c/stem_ISO_8859_1_spanish.c \ @USE_LIBSTEMMER_TRUE@ src_c/stem_UTF_8_spanish.c \ @USE_LIBSTEMMER_TRUE@ src_c/stem_ISO_8859_1_swedish.c \ @USE_LIBSTEMMER_TRUE@ src_c/stem_UTF_8_swedish.c \ @USE_LIBSTEMMER_TRUE@ src_c/stem_UTF_8_turkish.c \ @USE_LIBSTEMMER_TRUE@ runtime/api.c \ @USE_LIBSTEMMER_TRUE@ runtime/utilities.c \ @USE_LIBSTEMMER_TRUE@ libstemmer/libstemmer.c @USE_LIBSTEMMER_TRUE@snowball_headers = \ @USE_LIBSTEMMER_TRUE@ src_c/stem_ISO_8859_1_danish.h \ @USE_LIBSTEMMER_TRUE@ src_c/stem_UTF_8_danish.h \ @USE_LIBSTEMMER_TRUE@ src_c/stem_ISO_8859_1_dutch.h \ @USE_LIBSTEMMER_TRUE@ src_c/stem_UTF_8_dutch.h \ @USE_LIBSTEMMER_TRUE@ src_c/stem_ISO_8859_1_english.h \ @USE_LIBSTEMMER_TRUE@ src_c/stem_UTF_8_english.h \ @USE_LIBSTEMMER_TRUE@ src_c/stem_ISO_8859_1_finnish.h \ @USE_LIBSTEMMER_TRUE@ src_c/stem_UTF_8_finnish.h \ @USE_LIBSTEMMER_TRUE@ src_c/stem_ISO_8859_1_french.h \ 
@USE_LIBSTEMMER_TRUE@ src_c/stem_UTF_8_french.h \ @USE_LIBSTEMMER_TRUE@ src_c/stem_ISO_8859_1_german.h \ @USE_LIBSTEMMER_TRUE@ src_c/stem_UTF_8_german.h \ @USE_LIBSTEMMER_TRUE@ src_c/stem_ISO_8859_1_hungarian.h \ @USE_LIBSTEMMER_TRUE@ src_c/stem_UTF_8_hungarian.h \ @USE_LIBSTEMMER_TRUE@ src_c/stem_ISO_8859_1_italian.h \ @USE_LIBSTEMMER_TRUE@ src_c/stem_UTF_8_italian.h \ @USE_LIBSTEMMER_TRUE@ src_c/stem_ISO_8859_1_norwegian.h \ @USE_LIBSTEMMER_TRUE@ src_c/stem_UTF_8_norwegian.h \ @USE_LIBSTEMMER_TRUE@ src_c/stem_ISO_8859_1_porter.h \ @USE_LIBSTEMMER_TRUE@ src_c/stem_UTF_8_porter.h \ @USE_LIBSTEMMER_TRUE@ src_c/stem_ISO_8859_1_portuguese.h \ @USE_LIBSTEMMER_TRUE@ src_c/stem_UTF_8_portuguese.h \ @USE_LIBSTEMMER_TRUE@ src_c/stem_ISO_8859_2_romanian.h \ @USE_LIBSTEMMER_TRUE@ src_c/stem_UTF_8_romanian.h \ @USE_LIBSTEMMER_TRUE@ src_c/stem_KOI8_R_russian.h \ @USE_LIBSTEMMER_TRUE@ src_c/stem_UTF_8_russian.h \ @USE_LIBSTEMMER_TRUE@ src_c/stem_ISO_8859_1_spanish.h \ @USE_LIBSTEMMER_TRUE@ src_c/stem_UTF_8_spanish.h \ @USE_LIBSTEMMER_TRUE@ src_c/stem_ISO_8859_1_swedish.h \ @USE_LIBSTEMMER_TRUE@ src_c/stem_UTF_8_swedish.h \ @USE_LIBSTEMMER_TRUE@ src_c/stem_UTF_8_turkish.h \ @USE_LIBSTEMMER_TRUE@ include/libstemmer.h \ @USE_LIBSTEMMER_TRUE@ libstemmer/modules.h \ @USE_LIBSTEMMER_TRUE@ runtime/api.h \ @USE_LIBSTEMMER_TRUE@ runtime/header.h @USE_LIBSTEMMER_TRUE@noinst_HEADERS = $(snowball_headers) @USE_LIBSTEMMER_TRUE@libstemmer_a_SOURCES = $(snowball_sources) all: all-am .SUFFIXES: .SUFFIXES: .c .o .obj $(srcdir)/Makefile.in: @MAINTAINER_MODE_TRUE@ $(srcdir)/Makefile.am $(srcdir)/mkinc.mak $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign libstemmer_c/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign libstemmer_c/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' 
in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: @MAINTAINER_MODE_TRUE@ $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): @MAINTAINER_MODE_TRUE@ $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): clean-noinstLIBRARIES: -test -z "$(noinst_LIBRARIES)" || rm -f $(noinst_LIBRARIES) libstemmer.a: $(libstemmer_a_OBJECTS) $(libstemmer_a_DEPENDENCIES) -rm -f libstemmer.a $(libstemmer_a_AR) libstemmer.a $(libstemmer_a_OBJECTS) $(libstemmer_a_LIBADD) $(RANLIB) libstemmer.a mostlyclean-compile: -rm -f *.$(OBJEXT) distclean-compile: -rm -f *.tab.c @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/api.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libstemmer.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/stem_ISO_8859_1_danish.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/stem_ISO_8859_1_dutch.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/stem_ISO_8859_1_english.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/stem_ISO_8859_1_finnish.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/stem_ISO_8859_1_french.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/stem_ISO_8859_1_german.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/stem_ISO_8859_1_hungarian.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/stem_ISO_8859_1_italian.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/stem_ISO_8859_1_norwegian.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/stem_ISO_8859_1_porter.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/stem_ISO_8859_1_portuguese.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/stem_ISO_8859_1_spanish.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/stem_ISO_8859_1_swedish.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/stem_ISO_8859_2_romanian.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/stem_KOI8_R_russian.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/stem_UTF_8_danish.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/stem_UTF_8_dutch.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/stem_UTF_8_english.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/stem_UTF_8_finnish.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/stem_UTF_8_french.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/stem_UTF_8_german.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/stem_UTF_8_hungarian.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/stem_UTF_8_italian.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/stem_UTF_8_norwegian.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/stem_UTF_8_porter.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/stem_UTF_8_portuguese.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/stem_UTF_8_romanian.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/stem_UTF_8_russian.Po@am__quote@ 
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/stem_UTF_8_spanish.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/stem_UTF_8_swedish.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/stem_UTF_8_turkish.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/utilities.Po@am__quote@ .c.o: @am__fastdepCC_TRUE@ $(COMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(COMPILE) -c $< .c.obj: @am__fastdepCC_TRUE@ $(COMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'` @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(COMPILE) -c `$(CYGPATH_W) '$<'` stem_ISO_8859_1_danish.o: src_c/stem_ISO_8859_1_danish.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_ISO_8859_1_danish.o -MD -MP -MF $(DEPDIR)/stem_ISO_8859_1_danish.Tpo -c -o stem_ISO_8859_1_danish.o `test -f 'src_c/stem_ISO_8859_1_danish.c' || echo '$(srcdir)/'`src_c/stem_ISO_8859_1_danish.c @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_ISO_8859_1_danish.Tpo $(DEPDIR)/stem_ISO_8859_1_danish.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_ISO_8859_1_danish.c' object='stem_ISO_8859_1_danish.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_ISO_8859_1_danish.o `test -f 'src_c/stem_ISO_8859_1_danish.c' || echo '$(srcdir)/'`src_c/stem_ISO_8859_1_danish.c stem_ISO_8859_1_danish.obj: src_c/stem_ISO_8859_1_danish.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_ISO_8859_1_danish.obj -MD -MP -MF $(DEPDIR)/stem_ISO_8859_1_danish.Tpo -c -o stem_ISO_8859_1_danish.obj `if test -f 'src_c/stem_ISO_8859_1_danish.c'; then $(CYGPATH_W) 'src_c/stem_ISO_8859_1_danish.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_ISO_8859_1_danish.c'; fi` @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_ISO_8859_1_danish.Tpo $(DEPDIR)/stem_ISO_8859_1_danish.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_ISO_8859_1_danish.c' object='stem_ISO_8859_1_danish.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_ISO_8859_1_danish.obj `if test -f 'src_c/stem_ISO_8859_1_danish.c'; then $(CYGPATH_W) 'src_c/stem_ISO_8859_1_danish.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_ISO_8859_1_danish.c'; fi` stem_UTF_8_danish.o: src_c/stem_UTF_8_danish.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_UTF_8_danish.o -MD -MP -MF $(DEPDIR)/stem_UTF_8_danish.Tpo -c -o stem_UTF_8_danish.o `test -f 'src_c/stem_UTF_8_danish.c' || echo '$(srcdir)/'`src_c/stem_UTF_8_danish.c @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_UTF_8_danish.Tpo 
$(DEPDIR)/stem_UTF_8_danish.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_UTF_8_danish.c' object='stem_UTF_8_danish.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_UTF_8_danish.o `test -f 'src_c/stem_UTF_8_danish.c' || echo '$(srcdir)/'`src_c/stem_UTF_8_danish.c stem_UTF_8_danish.obj: src_c/stem_UTF_8_danish.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_UTF_8_danish.obj -MD -MP -MF $(DEPDIR)/stem_UTF_8_danish.Tpo -c -o stem_UTF_8_danish.obj `if test -f 'src_c/stem_UTF_8_danish.c'; then $(CYGPATH_W) 'src_c/stem_UTF_8_danish.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_UTF_8_danish.c'; fi` @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_UTF_8_danish.Tpo $(DEPDIR)/stem_UTF_8_danish.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_UTF_8_danish.c' object='stem_UTF_8_danish.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_UTF_8_danish.obj `if test -f 'src_c/stem_UTF_8_danish.c'; then $(CYGPATH_W) 'src_c/stem_UTF_8_danish.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_UTF_8_danish.c'; fi` stem_ISO_8859_1_dutch.o: src_c/stem_ISO_8859_1_dutch.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_ISO_8859_1_dutch.o -MD -MP -MF $(DEPDIR)/stem_ISO_8859_1_dutch.Tpo -c -o stem_ISO_8859_1_dutch.o `test -f 'src_c/stem_ISO_8859_1_dutch.c' || echo '$(srcdir)/'`src_c/stem_ISO_8859_1_dutch.c @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_ISO_8859_1_dutch.Tpo $(DEPDIR)/stem_ISO_8859_1_dutch.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_ISO_8859_1_dutch.c' object='stem_ISO_8859_1_dutch.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_ISO_8859_1_dutch.o `test -f 'src_c/stem_ISO_8859_1_dutch.c' || echo '$(srcdir)/'`src_c/stem_ISO_8859_1_dutch.c stem_ISO_8859_1_dutch.obj: src_c/stem_ISO_8859_1_dutch.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_ISO_8859_1_dutch.obj -MD -MP -MF $(DEPDIR)/stem_ISO_8859_1_dutch.Tpo -c -o stem_ISO_8859_1_dutch.obj `if test -f 'src_c/stem_ISO_8859_1_dutch.c'; then $(CYGPATH_W) 'src_c/stem_ISO_8859_1_dutch.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_ISO_8859_1_dutch.c'; fi` @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_ISO_8859_1_dutch.Tpo $(DEPDIR)/stem_ISO_8859_1_dutch.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_ISO_8859_1_dutch.c' object='stem_ISO_8859_1_dutch.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_ISO_8859_1_dutch.obj `if test -f 'src_c/stem_ISO_8859_1_dutch.c'; then $(CYGPATH_W) 'src_c/stem_ISO_8859_1_dutch.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_ISO_8859_1_dutch.c'; fi` stem_UTF_8_dutch.o: 
src_c/stem_UTF_8_dutch.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_UTF_8_dutch.o -MD -MP -MF $(DEPDIR)/stem_UTF_8_dutch.Tpo -c -o stem_UTF_8_dutch.o `test -f 'src_c/stem_UTF_8_dutch.c' || echo '$(srcdir)/'`src_c/stem_UTF_8_dutch.c @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_UTF_8_dutch.Tpo $(DEPDIR)/stem_UTF_8_dutch.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_UTF_8_dutch.c' object='stem_UTF_8_dutch.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_UTF_8_dutch.o `test -f 'src_c/stem_UTF_8_dutch.c' || echo '$(srcdir)/'`src_c/stem_UTF_8_dutch.c stem_UTF_8_dutch.obj: src_c/stem_UTF_8_dutch.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_UTF_8_dutch.obj -MD -MP -MF $(DEPDIR)/stem_UTF_8_dutch.Tpo -c -o stem_UTF_8_dutch.obj `if test -f 'src_c/stem_UTF_8_dutch.c'; then $(CYGPATH_W) 'src_c/stem_UTF_8_dutch.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_UTF_8_dutch.c'; fi` @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_UTF_8_dutch.Tpo $(DEPDIR)/stem_UTF_8_dutch.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_UTF_8_dutch.c' object='stem_UTF_8_dutch.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_UTF_8_dutch.obj `if test -f 'src_c/stem_UTF_8_dutch.c'; then $(CYGPATH_W) 'src_c/stem_UTF_8_dutch.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_UTF_8_dutch.c'; fi` stem_ISO_8859_1_english.o: src_c/stem_ISO_8859_1_english.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_ISO_8859_1_english.o -MD -MP -MF $(DEPDIR)/stem_ISO_8859_1_english.Tpo -c -o stem_ISO_8859_1_english.o `test -f 'src_c/stem_ISO_8859_1_english.c' || echo '$(srcdir)/'`src_c/stem_ISO_8859_1_english.c @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_ISO_8859_1_english.Tpo $(DEPDIR)/stem_ISO_8859_1_english.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_ISO_8859_1_english.c' object='stem_ISO_8859_1_english.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_ISO_8859_1_english.o `test -f 'src_c/stem_ISO_8859_1_english.c' || echo '$(srcdir)/'`src_c/stem_ISO_8859_1_english.c stem_ISO_8859_1_english.obj: src_c/stem_ISO_8859_1_english.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_ISO_8859_1_english.obj -MD -MP -MF $(DEPDIR)/stem_ISO_8859_1_english.Tpo -c -o stem_ISO_8859_1_english.obj `if test -f 'src_c/stem_ISO_8859_1_english.c'; then $(CYGPATH_W) 'src_c/stem_ISO_8859_1_english.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_ISO_8859_1_english.c'; fi` @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_ISO_8859_1_english.Tpo $(DEPDIR)/stem_ISO_8859_1_english.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_ISO_8859_1_english.c' object='stem_ISO_8859_1_english.obj' libtool=no @AMDEPBACKSLASH@ 
@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_ISO_8859_1_english.obj `if test -f 'src_c/stem_ISO_8859_1_english.c'; then $(CYGPATH_W) 'src_c/stem_ISO_8859_1_english.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_ISO_8859_1_english.c'; fi` stem_UTF_8_english.o: src_c/stem_UTF_8_english.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_UTF_8_english.o -MD -MP -MF $(DEPDIR)/stem_UTF_8_english.Tpo -c -o stem_UTF_8_english.o `test -f 'src_c/stem_UTF_8_english.c' || echo '$(srcdir)/'`src_c/stem_UTF_8_english.c @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_UTF_8_english.Tpo $(DEPDIR)/stem_UTF_8_english.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_UTF_8_english.c' object='stem_UTF_8_english.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_UTF_8_english.o `test -f 'src_c/stem_UTF_8_english.c' || echo '$(srcdir)/'`src_c/stem_UTF_8_english.c stem_UTF_8_english.obj: src_c/stem_UTF_8_english.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_UTF_8_english.obj -MD -MP -MF $(DEPDIR)/stem_UTF_8_english.Tpo -c -o stem_UTF_8_english.obj `if test -f 'src_c/stem_UTF_8_english.c'; then $(CYGPATH_W) 'src_c/stem_UTF_8_english.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_UTF_8_english.c'; fi` @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_UTF_8_english.Tpo $(DEPDIR)/stem_UTF_8_english.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_UTF_8_english.c' object='stem_UTF_8_english.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_UTF_8_english.obj `if test -f 'src_c/stem_UTF_8_english.c'; then $(CYGPATH_W) 'src_c/stem_UTF_8_english.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_UTF_8_english.c'; fi` stem_ISO_8859_1_finnish.o: src_c/stem_ISO_8859_1_finnish.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_ISO_8859_1_finnish.o -MD -MP -MF $(DEPDIR)/stem_ISO_8859_1_finnish.Tpo -c -o stem_ISO_8859_1_finnish.o `test -f 'src_c/stem_ISO_8859_1_finnish.c' || echo '$(srcdir)/'`src_c/stem_ISO_8859_1_finnish.c @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_ISO_8859_1_finnish.Tpo $(DEPDIR)/stem_ISO_8859_1_finnish.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_ISO_8859_1_finnish.c' object='stem_ISO_8859_1_finnish.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_ISO_8859_1_finnish.o `test -f 'src_c/stem_ISO_8859_1_finnish.c' || echo '$(srcdir)/'`src_c/stem_ISO_8859_1_finnish.c stem_ISO_8859_1_finnish.obj: src_c/stem_ISO_8859_1_finnish.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_ISO_8859_1_finnish.obj -MD -MP -MF 
$(DEPDIR)/stem_ISO_8859_1_finnish.Tpo -c -o stem_ISO_8859_1_finnish.obj `if test -f 'src_c/stem_ISO_8859_1_finnish.c'; then $(CYGPATH_W) 'src_c/stem_ISO_8859_1_finnish.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_ISO_8859_1_finnish.c'; fi` @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_ISO_8859_1_finnish.Tpo $(DEPDIR)/stem_ISO_8859_1_finnish.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_ISO_8859_1_finnish.c' object='stem_ISO_8859_1_finnish.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_ISO_8859_1_finnish.obj `if test -f 'src_c/stem_ISO_8859_1_finnish.c'; then $(CYGPATH_W) 'src_c/stem_ISO_8859_1_finnish.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_ISO_8859_1_finnish.c'; fi` stem_UTF_8_finnish.o: src_c/stem_UTF_8_finnish.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_UTF_8_finnish.o -MD -MP -MF $(DEPDIR)/stem_UTF_8_finnish.Tpo -c -o stem_UTF_8_finnish.o `test -f 'src_c/stem_UTF_8_finnish.c' || echo '$(srcdir)/'`src_c/stem_UTF_8_finnish.c @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_UTF_8_finnish.Tpo $(DEPDIR)/stem_UTF_8_finnish.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_UTF_8_finnish.c' object='stem_UTF_8_finnish.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_UTF_8_finnish.o `test -f 'src_c/stem_UTF_8_finnish.c' || echo '$(srcdir)/'`src_c/stem_UTF_8_finnish.c stem_UTF_8_finnish.obj: src_c/stem_UTF_8_finnish.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_UTF_8_finnish.obj -MD -MP -MF $(DEPDIR)/stem_UTF_8_finnish.Tpo -c -o stem_UTF_8_finnish.obj `if test -f 'src_c/stem_UTF_8_finnish.c'; then $(CYGPATH_W) 'src_c/stem_UTF_8_finnish.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_UTF_8_finnish.c'; fi` @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_UTF_8_finnish.Tpo $(DEPDIR)/stem_UTF_8_finnish.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_UTF_8_finnish.c' object='stem_UTF_8_finnish.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_UTF_8_finnish.obj `if test -f 'src_c/stem_UTF_8_finnish.c'; then $(CYGPATH_W) 'src_c/stem_UTF_8_finnish.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_UTF_8_finnish.c'; fi` stem_ISO_8859_1_french.o: src_c/stem_ISO_8859_1_french.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_ISO_8859_1_french.o -MD -MP -MF $(DEPDIR)/stem_ISO_8859_1_french.Tpo -c -o stem_ISO_8859_1_french.o `test -f 'src_c/stem_ISO_8859_1_french.c' || echo '$(srcdir)/'`src_c/stem_ISO_8859_1_french.c @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_ISO_8859_1_french.Tpo $(DEPDIR)/stem_ISO_8859_1_french.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_ISO_8859_1_french.c' object='stem_ISO_8859_1_french.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ 
@am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_ISO_8859_1_french.o `test -f 'src_c/stem_ISO_8859_1_french.c' || echo '$(srcdir)/'`src_c/stem_ISO_8859_1_french.c stem_ISO_8859_1_french.obj: src_c/stem_ISO_8859_1_french.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_ISO_8859_1_french.obj -MD -MP -MF $(DEPDIR)/stem_ISO_8859_1_french.Tpo -c -o stem_ISO_8859_1_french.obj `if test -f 'src_c/stem_ISO_8859_1_french.c'; then $(CYGPATH_W) 'src_c/stem_ISO_8859_1_french.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_ISO_8859_1_french.c'; fi` @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_ISO_8859_1_french.Tpo $(DEPDIR)/stem_ISO_8859_1_french.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_ISO_8859_1_french.c' object='stem_ISO_8859_1_french.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_ISO_8859_1_french.obj `if test -f 'src_c/stem_ISO_8859_1_french.c'; then $(CYGPATH_W) 'src_c/stem_ISO_8859_1_french.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_ISO_8859_1_french.c'; fi` stem_UTF_8_french.o: src_c/stem_UTF_8_french.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_UTF_8_french.o -MD -MP -MF $(DEPDIR)/stem_UTF_8_french.Tpo -c -o stem_UTF_8_french.o `test -f 'src_c/stem_UTF_8_french.c' || echo '$(srcdir)/'`src_c/stem_UTF_8_french.c @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_UTF_8_french.Tpo $(DEPDIR)/stem_UTF_8_french.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_UTF_8_french.c' object='stem_UTF_8_french.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_UTF_8_french.o `test -f 'src_c/stem_UTF_8_french.c' || echo '$(srcdir)/'`src_c/stem_UTF_8_french.c stem_UTF_8_french.obj: src_c/stem_UTF_8_french.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_UTF_8_french.obj -MD -MP -MF $(DEPDIR)/stem_UTF_8_french.Tpo -c -o stem_UTF_8_french.obj `if test -f 'src_c/stem_UTF_8_french.c'; then $(CYGPATH_W) 'src_c/stem_UTF_8_french.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_UTF_8_french.c'; fi` @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_UTF_8_french.Tpo $(DEPDIR)/stem_UTF_8_french.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_UTF_8_french.c' object='stem_UTF_8_french.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_UTF_8_french.obj `if test -f 'src_c/stem_UTF_8_french.c'; then $(CYGPATH_W) 'src_c/stem_UTF_8_french.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_UTF_8_french.c'; fi` stem_ISO_8859_1_german.o: src_c/stem_ISO_8859_1_german.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_ISO_8859_1_german.o -MD -MP -MF $(DEPDIR)/stem_ISO_8859_1_german.Tpo -c -o stem_ISO_8859_1_german.o `test -f 
'src_c/stem_ISO_8859_1_german.c' || echo '$(srcdir)/'`src_c/stem_ISO_8859_1_german.c @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_ISO_8859_1_german.Tpo $(DEPDIR)/stem_ISO_8859_1_german.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_ISO_8859_1_german.c' object='stem_ISO_8859_1_german.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_ISO_8859_1_german.o `test -f 'src_c/stem_ISO_8859_1_german.c' || echo '$(srcdir)/'`src_c/stem_ISO_8859_1_german.c stem_ISO_8859_1_german.obj: src_c/stem_ISO_8859_1_german.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_ISO_8859_1_german.obj -MD -MP -MF $(DEPDIR)/stem_ISO_8859_1_german.Tpo -c -o stem_ISO_8859_1_german.obj `if test -f 'src_c/stem_ISO_8859_1_german.c'; then $(CYGPATH_W) 'src_c/stem_ISO_8859_1_german.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_ISO_8859_1_german.c'; fi` @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_ISO_8859_1_german.Tpo $(DEPDIR)/stem_ISO_8859_1_german.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_ISO_8859_1_german.c' object='stem_ISO_8859_1_german.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_ISO_8859_1_german.obj `if test -f 'src_c/stem_ISO_8859_1_german.c'; then $(CYGPATH_W) 'src_c/stem_ISO_8859_1_german.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_ISO_8859_1_german.c'; fi` stem_UTF_8_german.o: src_c/stem_UTF_8_german.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_UTF_8_german.o -MD -MP -MF $(DEPDIR)/stem_UTF_8_german.Tpo -c -o stem_UTF_8_german.o `test -f 'src_c/stem_UTF_8_german.c' || echo '$(srcdir)/'`src_c/stem_UTF_8_german.c @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_UTF_8_german.Tpo $(DEPDIR)/stem_UTF_8_german.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_UTF_8_german.c' object='stem_UTF_8_german.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_UTF_8_german.o `test -f 'src_c/stem_UTF_8_german.c' || echo '$(srcdir)/'`src_c/stem_UTF_8_german.c stem_UTF_8_german.obj: src_c/stem_UTF_8_german.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_UTF_8_german.obj -MD -MP -MF $(DEPDIR)/stem_UTF_8_german.Tpo -c -o stem_UTF_8_german.obj `if test -f 'src_c/stem_UTF_8_german.c'; then $(CYGPATH_W) 'src_c/stem_UTF_8_german.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_UTF_8_german.c'; fi` @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_UTF_8_german.Tpo $(DEPDIR)/stem_UTF_8_german.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_UTF_8_german.c' object='stem_UTF_8_german.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_UTF_8_german.obj `if test -f 
'src_c/stem_UTF_8_german.c'; then $(CYGPATH_W) 'src_c/stem_UTF_8_german.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_UTF_8_german.c'; fi` stem_ISO_8859_1_hungarian.o: src_c/stem_ISO_8859_1_hungarian.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_ISO_8859_1_hungarian.o -MD -MP -MF $(DEPDIR)/stem_ISO_8859_1_hungarian.Tpo -c -o stem_ISO_8859_1_hungarian.o `test -f 'src_c/stem_ISO_8859_1_hungarian.c' || echo '$(srcdir)/'`src_c/stem_ISO_8859_1_hungarian.c @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_ISO_8859_1_hungarian.Tpo $(DEPDIR)/stem_ISO_8859_1_hungarian.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_ISO_8859_1_hungarian.c' object='stem_ISO_8859_1_hungarian.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_ISO_8859_1_hungarian.o `test -f 'src_c/stem_ISO_8859_1_hungarian.c' || echo '$(srcdir)/'`src_c/stem_ISO_8859_1_hungarian.c stem_ISO_8859_1_hungarian.obj: src_c/stem_ISO_8859_1_hungarian.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_ISO_8859_1_hungarian.obj -MD -MP -MF $(DEPDIR)/stem_ISO_8859_1_hungarian.Tpo -c -o stem_ISO_8859_1_hungarian.obj `if test -f 'src_c/stem_ISO_8859_1_hungarian.c'; then $(CYGPATH_W) 'src_c/stem_ISO_8859_1_hungarian.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_ISO_8859_1_hungarian.c'; fi` @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_ISO_8859_1_hungarian.Tpo $(DEPDIR)/stem_ISO_8859_1_hungarian.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_ISO_8859_1_hungarian.c' object='stem_ISO_8859_1_hungarian.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_ISO_8859_1_hungarian.obj `if test -f 'src_c/stem_ISO_8859_1_hungarian.c'; then $(CYGPATH_W) 'src_c/stem_ISO_8859_1_hungarian.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_ISO_8859_1_hungarian.c'; fi` stem_UTF_8_hungarian.o: src_c/stem_UTF_8_hungarian.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_UTF_8_hungarian.o -MD -MP -MF $(DEPDIR)/stem_UTF_8_hungarian.Tpo -c -o stem_UTF_8_hungarian.o `test -f 'src_c/stem_UTF_8_hungarian.c' || echo '$(srcdir)/'`src_c/stem_UTF_8_hungarian.c @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_UTF_8_hungarian.Tpo $(DEPDIR)/stem_UTF_8_hungarian.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_UTF_8_hungarian.c' object='stem_UTF_8_hungarian.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_UTF_8_hungarian.o `test -f 'src_c/stem_UTF_8_hungarian.c' || echo '$(srcdir)/'`src_c/stem_UTF_8_hungarian.c stem_UTF_8_hungarian.obj: src_c/stem_UTF_8_hungarian.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_UTF_8_hungarian.obj -MD -MP -MF $(DEPDIR)/stem_UTF_8_hungarian.Tpo -c -o stem_UTF_8_hungarian.obj `if test -f 'src_c/stem_UTF_8_hungarian.c'; then $(CYGPATH_W) 
'src_c/stem_UTF_8_hungarian.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_UTF_8_hungarian.c'; fi` @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_UTF_8_hungarian.Tpo $(DEPDIR)/stem_UTF_8_hungarian.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_UTF_8_hungarian.c' object='stem_UTF_8_hungarian.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_UTF_8_hungarian.obj `if test -f 'src_c/stem_UTF_8_hungarian.c'; then $(CYGPATH_W) 'src_c/stem_UTF_8_hungarian.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_UTF_8_hungarian.c'; fi` stem_ISO_8859_1_italian.o: src_c/stem_ISO_8859_1_italian.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_ISO_8859_1_italian.o -MD -MP -MF $(DEPDIR)/stem_ISO_8859_1_italian.Tpo -c -o stem_ISO_8859_1_italian.o `test -f 'src_c/stem_ISO_8859_1_italian.c' || echo '$(srcdir)/'`src_c/stem_ISO_8859_1_italian.c @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_ISO_8859_1_italian.Tpo $(DEPDIR)/stem_ISO_8859_1_italian.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_ISO_8859_1_italian.c' object='stem_ISO_8859_1_italian.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_ISO_8859_1_italian.o `test -f 'src_c/stem_ISO_8859_1_italian.c' || echo '$(srcdir)/'`src_c/stem_ISO_8859_1_italian.c stem_ISO_8859_1_italian.obj: src_c/stem_ISO_8859_1_italian.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_ISO_8859_1_italian.obj -MD -MP -MF $(DEPDIR)/stem_ISO_8859_1_italian.Tpo -c -o stem_ISO_8859_1_italian.obj `if test -f 'src_c/stem_ISO_8859_1_italian.c'; then $(CYGPATH_W) 'src_c/stem_ISO_8859_1_italian.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_ISO_8859_1_italian.c'; fi` @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_ISO_8859_1_italian.Tpo $(DEPDIR)/stem_ISO_8859_1_italian.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_ISO_8859_1_italian.c' object='stem_ISO_8859_1_italian.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_ISO_8859_1_italian.obj `if test -f 'src_c/stem_ISO_8859_1_italian.c'; then $(CYGPATH_W) 'src_c/stem_ISO_8859_1_italian.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_ISO_8859_1_italian.c'; fi` stem_UTF_8_italian.o: src_c/stem_UTF_8_italian.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_UTF_8_italian.o -MD -MP -MF $(DEPDIR)/stem_UTF_8_italian.Tpo -c -o stem_UTF_8_italian.o `test -f 'src_c/stem_UTF_8_italian.c' || echo '$(srcdir)/'`src_c/stem_UTF_8_italian.c @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_UTF_8_italian.Tpo $(DEPDIR)/stem_UTF_8_italian.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_UTF_8_italian.c' object='stem_UTF_8_italian.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) 
$(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_UTF_8_italian.o `test -f 'src_c/stem_UTF_8_italian.c' || echo '$(srcdir)/'`src_c/stem_UTF_8_italian.c stem_UTF_8_italian.obj: src_c/stem_UTF_8_italian.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_UTF_8_italian.obj -MD -MP -MF $(DEPDIR)/stem_UTF_8_italian.Tpo -c -o stem_UTF_8_italian.obj `if test -f 'src_c/stem_UTF_8_italian.c'; then $(CYGPATH_W) 'src_c/stem_UTF_8_italian.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_UTF_8_italian.c'; fi` @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_UTF_8_italian.Tpo $(DEPDIR)/stem_UTF_8_italian.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_UTF_8_italian.c' object='stem_UTF_8_italian.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_UTF_8_italian.obj `if test -f 'src_c/stem_UTF_8_italian.c'; then $(CYGPATH_W) 'src_c/stem_UTF_8_italian.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_UTF_8_italian.c'; fi` stem_ISO_8859_1_norwegian.o: src_c/stem_ISO_8859_1_norwegian.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_ISO_8859_1_norwegian.o -MD -MP -MF $(DEPDIR)/stem_ISO_8859_1_norwegian.Tpo -c -o stem_ISO_8859_1_norwegian.o `test -f 'src_c/stem_ISO_8859_1_norwegian.c' || echo '$(srcdir)/'`src_c/stem_ISO_8859_1_norwegian.c @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_ISO_8859_1_norwegian.Tpo $(DEPDIR)/stem_ISO_8859_1_norwegian.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_ISO_8859_1_norwegian.c' object='stem_ISO_8859_1_norwegian.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_ISO_8859_1_norwegian.o `test -f 'src_c/stem_ISO_8859_1_norwegian.c' || echo '$(srcdir)/'`src_c/stem_ISO_8859_1_norwegian.c stem_ISO_8859_1_norwegian.obj: src_c/stem_ISO_8859_1_norwegian.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_ISO_8859_1_norwegian.obj -MD -MP -MF $(DEPDIR)/stem_ISO_8859_1_norwegian.Tpo -c -o stem_ISO_8859_1_norwegian.obj `if test -f 'src_c/stem_ISO_8859_1_norwegian.c'; then $(CYGPATH_W) 'src_c/stem_ISO_8859_1_norwegian.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_ISO_8859_1_norwegian.c'; fi` @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_ISO_8859_1_norwegian.Tpo $(DEPDIR)/stem_ISO_8859_1_norwegian.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_ISO_8859_1_norwegian.c' object='stem_ISO_8859_1_norwegian.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_ISO_8859_1_norwegian.obj `if test -f 'src_c/stem_ISO_8859_1_norwegian.c'; then $(CYGPATH_W) 'src_c/stem_ISO_8859_1_norwegian.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_ISO_8859_1_norwegian.c'; fi` stem_UTF_8_norwegian.o: src_c/stem_UTF_8_norwegian.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_UTF_8_norwegian.o -MD -MP -MF 
$(DEPDIR)/stem_UTF_8_norwegian.Tpo -c -o stem_UTF_8_norwegian.o `test -f 'src_c/stem_UTF_8_norwegian.c' || echo '$(srcdir)/'`src_c/stem_UTF_8_norwegian.c @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_UTF_8_norwegian.Tpo $(DEPDIR)/stem_UTF_8_norwegian.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_UTF_8_norwegian.c' object='stem_UTF_8_norwegian.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_UTF_8_norwegian.o `test -f 'src_c/stem_UTF_8_norwegian.c' || echo '$(srcdir)/'`src_c/stem_UTF_8_norwegian.c stem_UTF_8_norwegian.obj: src_c/stem_UTF_8_norwegian.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_UTF_8_norwegian.obj -MD -MP -MF $(DEPDIR)/stem_UTF_8_norwegian.Tpo -c -o stem_UTF_8_norwegian.obj `if test -f 'src_c/stem_UTF_8_norwegian.c'; then $(CYGPATH_W) 'src_c/stem_UTF_8_norwegian.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_UTF_8_norwegian.c'; fi` @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_UTF_8_norwegian.Tpo $(DEPDIR)/stem_UTF_8_norwegian.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_UTF_8_norwegian.c' object='stem_UTF_8_norwegian.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_UTF_8_norwegian.obj `if test -f 'src_c/stem_UTF_8_norwegian.c'; then $(CYGPATH_W) 'src_c/stem_UTF_8_norwegian.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_UTF_8_norwegian.c'; fi` stem_ISO_8859_1_porter.o: src_c/stem_ISO_8859_1_porter.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_ISO_8859_1_porter.o -MD -MP -MF $(DEPDIR)/stem_ISO_8859_1_porter.Tpo -c -o stem_ISO_8859_1_porter.o `test -f 'src_c/stem_ISO_8859_1_porter.c' || echo '$(srcdir)/'`src_c/stem_ISO_8859_1_porter.c @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_ISO_8859_1_porter.Tpo $(DEPDIR)/stem_ISO_8859_1_porter.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_ISO_8859_1_porter.c' object='stem_ISO_8859_1_porter.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_ISO_8859_1_porter.o `test -f 'src_c/stem_ISO_8859_1_porter.c' || echo '$(srcdir)/'`src_c/stem_ISO_8859_1_porter.c stem_ISO_8859_1_porter.obj: src_c/stem_ISO_8859_1_porter.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_ISO_8859_1_porter.obj -MD -MP -MF $(DEPDIR)/stem_ISO_8859_1_porter.Tpo -c -o stem_ISO_8859_1_porter.obj `if test -f 'src_c/stem_ISO_8859_1_porter.c'; then $(CYGPATH_W) 'src_c/stem_ISO_8859_1_porter.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_ISO_8859_1_porter.c'; fi` @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_ISO_8859_1_porter.Tpo $(DEPDIR)/stem_ISO_8859_1_porter.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_ISO_8859_1_porter.c' object='stem_ISO_8859_1_porter.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) 
$(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_ISO_8859_1_porter.obj `if test -f 'src_c/stem_ISO_8859_1_porter.c'; then $(CYGPATH_W) 'src_c/stem_ISO_8859_1_porter.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_ISO_8859_1_porter.c'; fi` stem_UTF_8_porter.o: src_c/stem_UTF_8_porter.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_UTF_8_porter.o -MD -MP -MF $(DEPDIR)/stem_UTF_8_porter.Tpo -c -o stem_UTF_8_porter.o `test -f 'src_c/stem_UTF_8_porter.c' || echo '$(srcdir)/'`src_c/stem_UTF_8_porter.c @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_UTF_8_porter.Tpo $(DEPDIR)/stem_UTF_8_porter.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_UTF_8_porter.c' object='stem_UTF_8_porter.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_UTF_8_porter.o `test -f 'src_c/stem_UTF_8_porter.c' || echo '$(srcdir)/'`src_c/stem_UTF_8_porter.c stem_UTF_8_porter.obj: src_c/stem_UTF_8_porter.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_UTF_8_porter.obj -MD -MP -MF $(DEPDIR)/stem_UTF_8_porter.Tpo -c -o stem_UTF_8_porter.obj `if test -f 'src_c/stem_UTF_8_porter.c'; then $(CYGPATH_W) 'src_c/stem_UTF_8_porter.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_UTF_8_porter.c'; fi` @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_UTF_8_porter.Tpo $(DEPDIR)/stem_UTF_8_porter.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_UTF_8_porter.c' object='stem_UTF_8_porter.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_UTF_8_porter.obj `if test -f 'src_c/stem_UTF_8_porter.c'; then $(CYGPATH_W) 'src_c/stem_UTF_8_porter.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_UTF_8_porter.c'; fi` stem_ISO_8859_1_portuguese.o: src_c/stem_ISO_8859_1_portuguese.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_ISO_8859_1_portuguese.o -MD -MP -MF $(DEPDIR)/stem_ISO_8859_1_portuguese.Tpo -c -o stem_ISO_8859_1_portuguese.o `test -f 'src_c/stem_ISO_8859_1_portuguese.c' || echo '$(srcdir)/'`src_c/stem_ISO_8859_1_portuguese.c @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_ISO_8859_1_portuguese.Tpo $(DEPDIR)/stem_ISO_8859_1_portuguese.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_ISO_8859_1_portuguese.c' object='stem_ISO_8859_1_portuguese.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_ISO_8859_1_portuguese.o `test -f 'src_c/stem_ISO_8859_1_portuguese.c' || echo '$(srcdir)/'`src_c/stem_ISO_8859_1_portuguese.c stem_ISO_8859_1_portuguese.obj: src_c/stem_ISO_8859_1_portuguese.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_ISO_8859_1_portuguese.obj -MD -MP -MF $(DEPDIR)/stem_ISO_8859_1_portuguese.Tpo -c -o stem_ISO_8859_1_portuguese.obj `if test -f 
'src_c/stem_ISO_8859_1_portuguese.c'; then $(CYGPATH_W) 'src_c/stem_ISO_8859_1_portuguese.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_ISO_8859_1_portuguese.c'; fi` @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_ISO_8859_1_portuguese.Tpo $(DEPDIR)/stem_ISO_8859_1_portuguese.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_ISO_8859_1_portuguese.c' object='stem_ISO_8859_1_portuguese.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_ISO_8859_1_portuguese.obj `if test -f 'src_c/stem_ISO_8859_1_portuguese.c'; then $(CYGPATH_W) 'src_c/stem_ISO_8859_1_portuguese.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_ISO_8859_1_portuguese.c'; fi` stem_UTF_8_portuguese.o: src_c/stem_UTF_8_portuguese.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_UTF_8_portuguese.o -MD -MP -MF $(DEPDIR)/stem_UTF_8_portuguese.Tpo -c -o stem_UTF_8_portuguese.o `test -f 'src_c/stem_UTF_8_portuguese.c' || echo '$(srcdir)/'`src_c/stem_UTF_8_portuguese.c @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_UTF_8_portuguese.Tpo $(DEPDIR)/stem_UTF_8_portuguese.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_UTF_8_portuguese.c' object='stem_UTF_8_portuguese.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_UTF_8_portuguese.o `test -f 'src_c/stem_UTF_8_portuguese.c' || echo '$(srcdir)/'`src_c/stem_UTF_8_portuguese.c stem_UTF_8_portuguese.obj: src_c/stem_UTF_8_portuguese.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_UTF_8_portuguese.obj -MD -MP -MF $(DEPDIR)/stem_UTF_8_portuguese.Tpo -c -o stem_UTF_8_portuguese.obj `if test -f 'src_c/stem_UTF_8_portuguese.c'; then $(CYGPATH_W) 'src_c/stem_UTF_8_portuguese.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_UTF_8_portuguese.c'; fi` @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_UTF_8_portuguese.Tpo $(DEPDIR)/stem_UTF_8_portuguese.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_UTF_8_portuguese.c' object='stem_UTF_8_portuguese.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_UTF_8_portuguese.obj `if test -f 'src_c/stem_UTF_8_portuguese.c'; then $(CYGPATH_W) 'src_c/stem_UTF_8_portuguese.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_UTF_8_portuguese.c'; fi` stem_ISO_8859_2_romanian.o: src_c/stem_ISO_8859_2_romanian.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_ISO_8859_2_romanian.o -MD -MP -MF $(DEPDIR)/stem_ISO_8859_2_romanian.Tpo -c -o stem_ISO_8859_2_romanian.o `test -f 'src_c/stem_ISO_8859_2_romanian.c' || echo '$(srcdir)/'`src_c/stem_ISO_8859_2_romanian.c @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_ISO_8859_2_romanian.Tpo $(DEPDIR)/stem_ISO_8859_2_romanian.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_ISO_8859_2_romanian.c' object='stem_ISO_8859_2_romanian.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ 
DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_ISO_8859_2_romanian.o `test -f 'src_c/stem_ISO_8859_2_romanian.c' || echo '$(srcdir)/'`src_c/stem_ISO_8859_2_romanian.c stem_ISO_8859_2_romanian.obj: src_c/stem_ISO_8859_2_romanian.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_ISO_8859_2_romanian.obj -MD -MP -MF $(DEPDIR)/stem_ISO_8859_2_romanian.Tpo -c -o stem_ISO_8859_2_romanian.obj `if test -f 'src_c/stem_ISO_8859_2_romanian.c'; then $(CYGPATH_W) 'src_c/stem_ISO_8859_2_romanian.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_ISO_8859_2_romanian.c'; fi` @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_ISO_8859_2_romanian.Tpo $(DEPDIR)/stem_ISO_8859_2_romanian.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_ISO_8859_2_romanian.c' object='stem_ISO_8859_2_romanian.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_ISO_8859_2_romanian.obj `if test -f 'src_c/stem_ISO_8859_2_romanian.c'; then $(CYGPATH_W) 'src_c/stem_ISO_8859_2_romanian.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_ISO_8859_2_romanian.c'; fi` stem_UTF_8_romanian.o: src_c/stem_UTF_8_romanian.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_UTF_8_romanian.o -MD -MP -MF $(DEPDIR)/stem_UTF_8_romanian.Tpo -c -o stem_UTF_8_romanian.o `test -f 'src_c/stem_UTF_8_romanian.c' || echo '$(srcdir)/'`src_c/stem_UTF_8_romanian.c @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_UTF_8_romanian.Tpo $(DEPDIR)/stem_UTF_8_romanian.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_UTF_8_romanian.c' object='stem_UTF_8_romanian.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_UTF_8_romanian.o `test -f 'src_c/stem_UTF_8_romanian.c' || echo '$(srcdir)/'`src_c/stem_UTF_8_romanian.c stem_UTF_8_romanian.obj: src_c/stem_UTF_8_romanian.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_UTF_8_romanian.obj -MD -MP -MF $(DEPDIR)/stem_UTF_8_romanian.Tpo -c -o stem_UTF_8_romanian.obj `if test -f 'src_c/stem_UTF_8_romanian.c'; then $(CYGPATH_W) 'src_c/stem_UTF_8_romanian.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_UTF_8_romanian.c'; fi` @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_UTF_8_romanian.Tpo $(DEPDIR)/stem_UTF_8_romanian.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_UTF_8_romanian.c' object='stem_UTF_8_romanian.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_UTF_8_romanian.obj `if test -f 'src_c/stem_UTF_8_romanian.c'; then $(CYGPATH_W) 'src_c/stem_UTF_8_romanian.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_UTF_8_romanian.c'; fi` stem_KOI8_R_russian.o: src_c/stem_KOI8_R_russian.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) 
$(AM_CFLAGS) $(CFLAGS) -MT stem_KOI8_R_russian.o -MD -MP -MF $(DEPDIR)/stem_KOI8_R_russian.Tpo -c -o stem_KOI8_R_russian.o `test -f 'src_c/stem_KOI8_R_russian.c' || echo '$(srcdir)/'`src_c/stem_KOI8_R_russian.c @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_KOI8_R_russian.Tpo $(DEPDIR)/stem_KOI8_R_russian.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_KOI8_R_russian.c' object='stem_KOI8_R_russian.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_KOI8_R_russian.o `test -f 'src_c/stem_KOI8_R_russian.c' || echo '$(srcdir)/'`src_c/stem_KOI8_R_russian.c stem_KOI8_R_russian.obj: src_c/stem_KOI8_R_russian.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_KOI8_R_russian.obj -MD -MP -MF $(DEPDIR)/stem_KOI8_R_russian.Tpo -c -o stem_KOI8_R_russian.obj `if test -f 'src_c/stem_KOI8_R_russian.c'; then $(CYGPATH_W) 'src_c/stem_KOI8_R_russian.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_KOI8_R_russian.c'; fi` @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_KOI8_R_russian.Tpo $(DEPDIR)/stem_KOI8_R_russian.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_KOI8_R_russian.c' object='stem_KOI8_R_russian.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_KOI8_R_russian.obj `if test -f 'src_c/stem_KOI8_R_russian.c'; then $(CYGPATH_W) 'src_c/stem_KOI8_R_russian.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_KOI8_R_russian.c'; fi` stem_UTF_8_russian.o: src_c/stem_UTF_8_russian.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_UTF_8_russian.o -MD -MP -MF $(DEPDIR)/stem_UTF_8_russian.Tpo -c -o stem_UTF_8_russian.o `test -f 'src_c/stem_UTF_8_russian.c' || echo '$(srcdir)/'`src_c/stem_UTF_8_russian.c @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_UTF_8_russian.Tpo $(DEPDIR)/stem_UTF_8_russian.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_UTF_8_russian.c' object='stem_UTF_8_russian.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_UTF_8_russian.o `test -f 'src_c/stem_UTF_8_russian.c' || echo '$(srcdir)/'`src_c/stem_UTF_8_russian.c stem_UTF_8_russian.obj: src_c/stem_UTF_8_russian.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_UTF_8_russian.obj -MD -MP -MF $(DEPDIR)/stem_UTF_8_russian.Tpo -c -o stem_UTF_8_russian.obj `if test -f 'src_c/stem_UTF_8_russian.c'; then $(CYGPATH_W) 'src_c/stem_UTF_8_russian.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_UTF_8_russian.c'; fi` @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_UTF_8_russian.Tpo $(DEPDIR)/stem_UTF_8_russian.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_UTF_8_russian.c' object='stem_UTF_8_russian.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) 
$(AM_CFLAGS) $(CFLAGS) -c -o stem_UTF_8_russian.obj `if test -f 'src_c/stem_UTF_8_russian.c'; then $(CYGPATH_W) 'src_c/stem_UTF_8_russian.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_UTF_8_russian.c'; fi` stem_ISO_8859_1_spanish.o: src_c/stem_ISO_8859_1_spanish.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_ISO_8859_1_spanish.o -MD -MP -MF $(DEPDIR)/stem_ISO_8859_1_spanish.Tpo -c -o stem_ISO_8859_1_spanish.o `test -f 'src_c/stem_ISO_8859_1_spanish.c' || echo '$(srcdir)/'`src_c/stem_ISO_8859_1_spanish.c @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_ISO_8859_1_spanish.Tpo $(DEPDIR)/stem_ISO_8859_1_spanish.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_ISO_8859_1_spanish.c' object='stem_ISO_8859_1_spanish.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_ISO_8859_1_spanish.o `test -f 'src_c/stem_ISO_8859_1_spanish.c' || echo '$(srcdir)/'`src_c/stem_ISO_8859_1_spanish.c stem_ISO_8859_1_spanish.obj: src_c/stem_ISO_8859_1_spanish.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_ISO_8859_1_spanish.obj -MD -MP -MF $(DEPDIR)/stem_ISO_8859_1_spanish.Tpo -c -o stem_ISO_8859_1_spanish.obj `if test -f 'src_c/stem_ISO_8859_1_spanish.c'; then $(CYGPATH_W) 'src_c/stem_ISO_8859_1_spanish.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_ISO_8859_1_spanish.c'; fi` @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_ISO_8859_1_spanish.Tpo $(DEPDIR)/stem_ISO_8859_1_spanish.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_ISO_8859_1_spanish.c' object='stem_ISO_8859_1_spanish.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_ISO_8859_1_spanish.obj `if test -f 'src_c/stem_ISO_8859_1_spanish.c'; then $(CYGPATH_W) 'src_c/stem_ISO_8859_1_spanish.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_ISO_8859_1_spanish.c'; fi` stem_UTF_8_spanish.o: src_c/stem_UTF_8_spanish.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_UTF_8_spanish.o -MD -MP -MF $(DEPDIR)/stem_UTF_8_spanish.Tpo -c -o stem_UTF_8_spanish.o `test -f 'src_c/stem_UTF_8_spanish.c' || echo '$(srcdir)/'`src_c/stem_UTF_8_spanish.c @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_UTF_8_spanish.Tpo $(DEPDIR)/stem_UTF_8_spanish.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_UTF_8_spanish.c' object='stem_UTF_8_spanish.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_UTF_8_spanish.o `test -f 'src_c/stem_UTF_8_spanish.c' || echo '$(srcdir)/'`src_c/stem_UTF_8_spanish.c stem_UTF_8_spanish.obj: src_c/stem_UTF_8_spanish.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_UTF_8_spanish.obj -MD -MP -MF $(DEPDIR)/stem_UTF_8_spanish.Tpo -c -o stem_UTF_8_spanish.obj `if test -f 'src_c/stem_UTF_8_spanish.c'; then $(CYGPATH_W) 'src_c/stem_UTF_8_spanish.c'; else 
$(CYGPATH_W) '$(srcdir)/src_c/stem_UTF_8_spanish.c'; fi` @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_UTF_8_spanish.Tpo $(DEPDIR)/stem_UTF_8_spanish.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_UTF_8_spanish.c' object='stem_UTF_8_spanish.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_UTF_8_spanish.obj `if test -f 'src_c/stem_UTF_8_spanish.c'; then $(CYGPATH_W) 'src_c/stem_UTF_8_spanish.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_UTF_8_spanish.c'; fi` stem_ISO_8859_1_swedish.o: src_c/stem_ISO_8859_1_swedish.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_ISO_8859_1_swedish.o -MD -MP -MF $(DEPDIR)/stem_ISO_8859_1_swedish.Tpo -c -o stem_ISO_8859_1_swedish.o `test -f 'src_c/stem_ISO_8859_1_swedish.c' || echo '$(srcdir)/'`src_c/stem_ISO_8859_1_swedish.c @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_ISO_8859_1_swedish.Tpo $(DEPDIR)/stem_ISO_8859_1_swedish.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_ISO_8859_1_swedish.c' object='stem_ISO_8859_1_swedish.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_ISO_8859_1_swedish.o `test -f 'src_c/stem_ISO_8859_1_swedish.c' || echo '$(srcdir)/'`src_c/stem_ISO_8859_1_swedish.c stem_ISO_8859_1_swedish.obj: src_c/stem_ISO_8859_1_swedish.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_ISO_8859_1_swedish.obj -MD -MP -MF $(DEPDIR)/stem_ISO_8859_1_swedish.Tpo -c -o stem_ISO_8859_1_swedish.obj `if test -f 'src_c/stem_ISO_8859_1_swedish.c'; then $(CYGPATH_W) 'src_c/stem_ISO_8859_1_swedish.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_ISO_8859_1_swedish.c'; fi` @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_ISO_8859_1_swedish.Tpo $(DEPDIR)/stem_ISO_8859_1_swedish.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_ISO_8859_1_swedish.c' object='stem_ISO_8859_1_swedish.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_ISO_8859_1_swedish.obj `if test -f 'src_c/stem_ISO_8859_1_swedish.c'; then $(CYGPATH_W) 'src_c/stem_ISO_8859_1_swedish.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_ISO_8859_1_swedish.c'; fi` stem_UTF_8_swedish.o: src_c/stem_UTF_8_swedish.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_UTF_8_swedish.o -MD -MP -MF $(DEPDIR)/stem_UTF_8_swedish.Tpo -c -o stem_UTF_8_swedish.o `test -f 'src_c/stem_UTF_8_swedish.c' || echo '$(srcdir)/'`src_c/stem_UTF_8_swedish.c @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_UTF_8_swedish.Tpo $(DEPDIR)/stem_UTF_8_swedish.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_UTF_8_swedish.c' object='stem_UTF_8_swedish.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o 
stem_UTF_8_swedish.o `test -f 'src_c/stem_UTF_8_swedish.c' || echo '$(srcdir)/'`src_c/stem_UTF_8_swedish.c stem_UTF_8_swedish.obj: src_c/stem_UTF_8_swedish.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_UTF_8_swedish.obj -MD -MP -MF $(DEPDIR)/stem_UTF_8_swedish.Tpo -c -o stem_UTF_8_swedish.obj `if test -f 'src_c/stem_UTF_8_swedish.c'; then $(CYGPATH_W) 'src_c/stem_UTF_8_swedish.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_UTF_8_swedish.c'; fi` @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_UTF_8_swedish.Tpo $(DEPDIR)/stem_UTF_8_swedish.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_UTF_8_swedish.c' object='stem_UTF_8_swedish.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_UTF_8_swedish.obj `if test -f 'src_c/stem_UTF_8_swedish.c'; then $(CYGPATH_W) 'src_c/stem_UTF_8_swedish.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_UTF_8_swedish.c'; fi` stem_UTF_8_turkish.o: src_c/stem_UTF_8_turkish.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_UTF_8_turkish.o -MD -MP -MF $(DEPDIR)/stem_UTF_8_turkish.Tpo -c -o stem_UTF_8_turkish.o `test -f 'src_c/stem_UTF_8_turkish.c' || echo '$(srcdir)/'`src_c/stem_UTF_8_turkish.c @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_UTF_8_turkish.Tpo $(DEPDIR)/stem_UTF_8_turkish.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_UTF_8_turkish.c' object='stem_UTF_8_turkish.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_UTF_8_turkish.o `test -f 'src_c/stem_UTF_8_turkish.c' || echo '$(srcdir)/'`src_c/stem_UTF_8_turkish.c stem_UTF_8_turkish.obj: src_c/stem_UTF_8_turkish.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT stem_UTF_8_turkish.obj -MD -MP -MF $(DEPDIR)/stem_UTF_8_turkish.Tpo -c -o stem_UTF_8_turkish.obj `if test -f 'src_c/stem_UTF_8_turkish.c'; then $(CYGPATH_W) 'src_c/stem_UTF_8_turkish.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_UTF_8_turkish.c'; fi` @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/stem_UTF_8_turkish.Tpo $(DEPDIR)/stem_UTF_8_turkish.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src_c/stem_UTF_8_turkish.c' object='stem_UTF_8_turkish.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o stem_UTF_8_turkish.obj `if test -f 'src_c/stem_UTF_8_turkish.c'; then $(CYGPATH_W) 'src_c/stem_UTF_8_turkish.c'; else $(CYGPATH_W) '$(srcdir)/src_c/stem_UTF_8_turkish.c'; fi` api.o: runtime/api.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT api.o -MD -MP -MF $(DEPDIR)/api.Tpo -c -o api.o `test -f 'runtime/api.c' || echo '$(srcdir)/'`runtime/api.c @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/api.Tpo $(DEPDIR)/api.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='runtime/api.c' object='api.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) 
$(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o api.o `test -f 'runtime/api.c' || echo '$(srcdir)/'`runtime/api.c api.obj: runtime/api.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT api.obj -MD -MP -MF $(DEPDIR)/api.Tpo -c -o api.obj `if test -f 'runtime/api.c'; then $(CYGPATH_W) 'runtime/api.c'; else $(CYGPATH_W) '$(srcdir)/runtime/api.c'; fi` @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/api.Tpo $(DEPDIR)/api.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='runtime/api.c' object='api.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o api.obj `if test -f 'runtime/api.c'; then $(CYGPATH_W) 'runtime/api.c'; else $(CYGPATH_W) '$(srcdir)/runtime/api.c'; fi` utilities.o: runtime/utilities.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT utilities.o -MD -MP -MF $(DEPDIR)/utilities.Tpo -c -o utilities.o `test -f 'runtime/utilities.c' || echo '$(srcdir)/'`runtime/utilities.c @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/utilities.Tpo $(DEPDIR)/utilities.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='runtime/utilities.c' object='utilities.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o utilities.o `test -f 'runtime/utilities.c' || echo '$(srcdir)/'`runtime/utilities.c utilities.obj: runtime/utilities.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT utilities.obj -MD -MP -MF $(DEPDIR)/utilities.Tpo -c -o utilities.obj `if test -f 'runtime/utilities.c'; then $(CYGPATH_W) 'runtime/utilities.c'; else $(CYGPATH_W) '$(srcdir)/runtime/utilities.c'; fi` @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/utilities.Tpo $(DEPDIR)/utilities.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='runtime/utilities.c' object='utilities.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o utilities.obj `if test -f 'runtime/utilities.c'; then $(CYGPATH_W) 'runtime/utilities.c'; else $(CYGPATH_W) '$(srcdir)/runtime/utilities.c'; fi` libstemmer.o: libstemmer/libstemmer.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT libstemmer.o -MD -MP -MF $(DEPDIR)/libstemmer.Tpo -c -o libstemmer.o `test -f 'libstemmer/libstemmer.c' || echo '$(srcdir)/'`libstemmer/libstemmer.c @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/libstemmer.Tpo $(DEPDIR)/libstemmer.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='libstemmer/libstemmer.c' object='libstemmer.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o libstemmer.o `test -f 'libstemmer/libstemmer.c' || echo '$(srcdir)/'`libstemmer/libstemmer.c 
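# The per-object rules in this generated Makefile all follow the same
# automake dependency-tracking pattern.  In fast-dependency mode
# (@am__fastdepCC_TRUE@) each compile also writes its prerequisites to a
# temporary .Tpo file, which is only renamed into place on success, so an
# interrupted build cannot leave a truncated .Po file behind; roughly
# (with "foo" as a purely illustrative name):
#
#   foo.o: src_c/foo.c
#       $(CC) ... -MT foo.o -MD -MP -MF $(DEPDIR)/foo.Tpo -c -o foo.o src_c/foo.c
#       $(am__mv) $(DEPDIR)/foo.Tpo $(DEPDIR)/foo.Po
#
# In the fallback branch (@am__fastdepCC_FALSE@) the "depcomp" helper is
# invoked with source=/object=/DEPDIR= set so it can compute the
# dependencies before the plain compile runs.  The *.o rules locate
# sources via $(srcdir) for VPATH builds, while the *.obj rules pass
# paths through $(CYGPATH_W) for Windows tool chains.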
libstemmer.obj: libstemmer/libstemmer.c @am__fastdepCC_TRUE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT libstemmer.obj -MD -MP -MF $(DEPDIR)/libstemmer.Tpo -c -o libstemmer.obj `if test -f 'libstemmer/libstemmer.c'; then $(CYGPATH_W) 'libstemmer/libstemmer.c'; else $(CYGPATH_W) '$(srcdir)/libstemmer/libstemmer.c'; fi` @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/libstemmer.Tpo $(DEPDIR)/libstemmer.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='libstemmer/libstemmer.c' object='libstemmer.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o libstemmer.obj `if test -f 'libstemmer/libstemmer.c'; then $(CYGPATH_W) 'libstemmer/libstemmer.c'; else $(CYGPATH_W) '$(srcdir)/libstemmer/libstemmer.c'; fi` ID: $(HEADERS) $(SOURCES) $(LISP) $(TAGS_FILES) list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in files) print i; }; }'`; \ mkid -fID $$unique tags: TAGS TAGS: $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \ $(TAGS_FILES) $(LISP) set x; \ here=`pwd`; \ list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in files) print i; }; }'`; \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: CTAGS CTAGS: $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \ $(TAGS_FILES) $(LISP) list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in files) print i; }; }'`; \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am check: check-am all-am: Makefile $(LIBRARIES) $(HEADERS) installdirs: install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ `test -z '$(STRIP)' || \ echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." clean: clean-am clean-am: clean-generic clean-noinstLIBRARIES mostlyclean-am distclean: distclean-am -rm -rf ./$(DEPDIR) -rm -f Makefile distclean-am: clean-am distclean-compile distclean-generic \ distclean-tags dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -rf ./$(DEPDIR) -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-compile mostlyclean-generic pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: .MAKE: install-am install-strip .PHONY: CTAGS GTAGS all all-am check check-am clean clean-generic \ clean-noinstLIBRARIES ctags distclean distclean-compile \ distclean-generic distclean-tags distdir dvi dvi-am html \ html-am info info-am install install-am install-data \ install-data-am install-dvi install-dvi-am install-exec \ install-exec-am install-html install-html-am install-info \ install-info-am install-man install-pdf install-pdf-am \ install-ps install-ps-am install-strip installcheck \ installcheck-am installdirs maintainer-clean \ maintainer-clean-generic mostlyclean mostlyclean-compile \ mostlyclean-generic pdf pdf-am ps ps-am tags uninstall \ uninstall-am # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: sphinx-2.0.4-release/libstemmer_c/mkinc.mak0000644000176700017710000000506710641233210020203 0ustar deogardeogar# libstemmer/mkinc.mak: List of stemming module source files # # This file is generated by mkmodules.pl from a list of module names. # Do not edit manually. 
# # Modules included by this file are: danish, dutch, english, finnish, french, # german, hungarian, italian, norwegian, porter, portuguese, romanian, # russian, spanish, swedish, turkish snowball_sources= \ src_c/stem_ISO_8859_1_danish.c \ src_c/stem_UTF_8_danish.c \ src_c/stem_ISO_8859_1_dutch.c \ src_c/stem_UTF_8_dutch.c \ src_c/stem_ISO_8859_1_english.c \ src_c/stem_UTF_8_english.c \ src_c/stem_ISO_8859_1_finnish.c \ src_c/stem_UTF_8_finnish.c \ src_c/stem_ISO_8859_1_french.c \ src_c/stem_UTF_8_french.c \ src_c/stem_ISO_8859_1_german.c \ src_c/stem_UTF_8_german.c \ src_c/stem_ISO_8859_1_hungarian.c \ src_c/stem_UTF_8_hungarian.c \ src_c/stem_ISO_8859_1_italian.c \ src_c/stem_UTF_8_italian.c \ src_c/stem_ISO_8859_1_norwegian.c \ src_c/stem_UTF_8_norwegian.c \ src_c/stem_ISO_8859_1_porter.c \ src_c/stem_UTF_8_porter.c \ src_c/stem_ISO_8859_1_portuguese.c \ src_c/stem_UTF_8_portuguese.c \ src_c/stem_ISO_8859_2_romanian.c \ src_c/stem_UTF_8_romanian.c \ src_c/stem_KOI8_R_russian.c \ src_c/stem_UTF_8_russian.c \ src_c/stem_ISO_8859_1_spanish.c \ src_c/stem_UTF_8_spanish.c \ src_c/stem_ISO_8859_1_swedish.c \ src_c/stem_UTF_8_swedish.c \ src_c/stem_UTF_8_turkish.c \ runtime/api.c \ runtime/utilities.c \ libstemmer/libstemmer.c snowball_headers= \ src_c/stem_ISO_8859_1_danish.h \ src_c/stem_UTF_8_danish.h \ src_c/stem_ISO_8859_1_dutch.h \ src_c/stem_UTF_8_dutch.h \ src_c/stem_ISO_8859_1_english.h \ src_c/stem_UTF_8_english.h \ src_c/stem_ISO_8859_1_finnish.h \ src_c/stem_UTF_8_finnish.h \ src_c/stem_ISO_8859_1_french.h \ src_c/stem_UTF_8_french.h \ src_c/stem_ISO_8859_1_german.h \ src_c/stem_UTF_8_german.h \ src_c/stem_ISO_8859_1_hungarian.h \ src_c/stem_UTF_8_hungarian.h \ src_c/stem_ISO_8859_1_italian.h \ src_c/stem_UTF_8_italian.h \ src_c/stem_ISO_8859_1_norwegian.h \ src_c/stem_UTF_8_norwegian.h \ src_c/stem_ISO_8859_1_porter.h \ src_c/stem_UTF_8_porter.h \ src_c/stem_ISO_8859_1_portuguese.h \ src_c/stem_UTF_8_portuguese.h \ src_c/stem_ISO_8859_2_romanian.h \ src_c/stem_UTF_8_romanian.h \ src_c/stem_KOI8_R_russian.h \ src_c/stem_UTF_8_russian.h \ src_c/stem_ISO_8859_1_spanish.h \ src_c/stem_UTF_8_spanish.h \ src_c/stem_ISO_8859_1_swedish.h \ src_c/stem_UTF_8_swedish.h \ src_c/stem_UTF_8_turkish.h \ include/libstemmer.h \ libstemmer/modules.h \ runtime/api.h \ runtime/header.h sphinx-2.0.4-release/libstemmer_c/libstemmer_c.vcproj0000644000176700017710000001372410627023661022314 0ustar deogardeogar sphinx-2.0.4-release/libstemmer_c/README0000644000176700017710000000026211471516735017302 0ustar deogardeogarDummy file for automake. Should be overwritten after libstemmer_c.tgz is properly extracted. The latest should be available at http://snowball.tartarus.org/dist/libstemmer_c.tgz sphinx-2.0.4-release/Makefile.in0000644000176700017710000006014411574674605016034 0ustar deogardeogar# Makefile.in generated by automake 1.11.1 from Makefile.am. # @configure_input@ # Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, # 2003, 2004, 2005, 2006, 2007, 2008, 2009 Free Software Foundation, # Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. 
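The stemming modules listed in libstemmer_c/mkinc.mak above are reached through the Snowball libstemmer C API declared in include/libstemmer.h (one of the snowball_headers entries). The following is a minimal, illustrative sketch of that usage; it is not part of this tarball and assumes the stock Snowball interface, picking the UTF-8 English module from the list:

#include <cstdio>
#include <cstring>
#include "libstemmer.h"

int main ()
{
    // pick one of the modules listed above: the UTF-8 English stemmer
    sb_stemmer * pStemmer = sb_stemmer_new ( "english", "UTF_8" );
    if ( !pStemmer )
        return 1; // unknown algorithm/encoding combination

    const char * sWord = "running";
    const sb_symbol * sStem = sb_stemmer_stem ( pStemmer, (const sb_symbol *)sWord, (int)strlen(sWord) );
    if ( sStem )
        printf ( "%.*s\n", sb_stemmer_length ( pStemmer ), (const char *)sStem ); // prints "run"

    sb_stemmer_delete ( pStemmer );
    return 0;
}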
@SET_MAKE@ VPATH = @srcdir@ pkgdatadir = $(datadir)/@PACKAGE@ pkgincludedir = $(includedir)/@PACKAGE@ pkglibdir = $(libdir)/@PACKAGE@ pkglibexecdir = $(libexecdir)/@PACKAGE@ am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : subdir = . DIST_COMMON = $(am__configure_deps) $(srcdir)/Makefile.am \ $(srcdir)/Makefile.in $(srcdir)/sphinx-min.conf.in \ $(srcdir)/sphinx.conf.in $(top_srcdir)/config/config.h.in \ $(top_srcdir)/configure COPYING INSTALL config/depcomp \ config/install-sh config/missing ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = $(top_srcdir)/acinclude.m4 \ $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) am__CONFIG_DISTCLEAN_FILES = config.status config.cache config.log \ configure.lineno config.status.lineno mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config/config.h CONFIG_CLEAN_FILES = sphinx.conf.dist sphinx-min.conf.dist CONFIG_CLEAN_VPATH_FILES = SOURCES = DIST_SOURCES = RECURSIVE_TARGETS = all-recursive check-recursive dvi-recursive \ html-recursive info-recursive install-data-recursive \ install-dvi-recursive install-exec-recursive \ install-html-recursive install-info-recursive \ install-pdf-recursive install-ps-recursive install-recursive \ installcheck-recursive installdirs-recursive pdf-recursive \ ps-recursive uninstall-recursive am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; am__install_max = 40 am__nobase_strip_setup = \ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` am__nobase_strip = \ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" am__nobase_list = $(am__nobase_strip_setup); \ for p in $$list; do echo "$$p $$p"; done | \ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ if (++n[$$2] == $(am__install_max)) \ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ END { for (dir in files) print dir, files[dir] }' am__base_list = \ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' am__installdirs = "$(DESTDIR)$(sysconfdir)" DATA = $(sysconf_DATA) RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive clean-recursive \ distclean-recursive maintainer-clean-recursive AM_RECURSIVE_TARGETS = $(RECURSIVE_TARGETS:-recursive=) \ $(RECURSIVE_CLEAN_TARGETS:-recursive=) tags TAGS ctags CTAGS \ distdir dist dist-all distcheck ETAGS = etags CTAGS = ctags DIST_SUBDIRS = src test doc libstemmer_c DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) distdir = $(PACKAGE)-$(VERSION) top_distdir = $(distdir) am__remove_distdir = \ { test ! -d "$(distdir)" \ || { find "$(distdir)" -type d ! 
-perm -200 -exec chmod u+w {} ';' \ && rm -fr "$(distdir)"; }; } am__relativize = \ dir0=`pwd`; \ sed_first='s,^\([^/]*\)/.*$$,\1,'; \ sed_rest='s,^[^/]*/*,,'; \ sed_last='s,^.*/\([^/]*\)$$,\1,'; \ sed_butlast='s,/*[^/]*$$,,'; \ while test -n "$$dir1"; do \ first=`echo "$$dir1" | sed -e "$$sed_first"`; \ if test "$$first" != "."; then \ if test "$$first" = ".."; then \ dir2=`echo "$$dir0" | sed -e "$$sed_last"`/"$$dir2"; \ dir0=`echo "$$dir0" | sed -e "$$sed_butlast"`; \ else \ first2=`echo "$$dir2" | sed -e "$$sed_first"`; \ if test "$$first2" = "$$first"; then \ dir2=`echo "$$dir2" | sed -e "$$sed_rest"`; \ else \ dir2="../$$dir2"; \ fi; \ dir0="$$dir0"/"$$first"; \ fi; \ fi; \ dir1=`echo "$$dir1" | sed -e "$$sed_rest"`; \ done; \ reldir="$$dir2" DIST_ARCHIVES = $(distdir).tar.gz GZIP_ENV = --best distuninstallcheck_listfiles = find . -type f -print distcleancheck_listfiles = find . -type f -print ACLOCAL = @ACLOCAL@ AMTAR = @AMTAR@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CONFDIR = @CONFDIR@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CXX = @CXX@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ GREP = @GREP@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ LDFLAGS = @LDFLAGS@ LIBOBJS = @LIBOBJS@ LIBRT = @LIBRT@ LIBS = @LIBS@ LTLIBOBJS = @LTLIBOBJS@ MAINT = @MAINT@ MAKEINFO = @MAKEINFO@ MKDIR_P = @MKDIR_P@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ OBJEXT = @OBJEXT@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PGSQL_CFLAGS = @PGSQL_CFLAGS@ PGSQL_LIBS = @PGSQL_LIBS@ RANLIB = @RANLIB@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ STRIP = @STRIP@ VERSION = @VERSION@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ bindir = @bindir@ build_alias = @build_alias@ builddir = @builddir@ datadir = @datadir@ datarootdir = @datarootdir@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ host_alias = @host_alias@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pgconfig = @pgconfig@ prefix = @prefix@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ @USE_LIBSTEMMER_FALSE@SUBDIRS = src test doc @USE_LIBSTEMMER_TRUE@SUBDIRS = libstemmer_c src test doc EXTRA_DIST = api storage sphinx.conf.in sphinx-min.conf.in example.sql sysconf_DATA = sphinx.conf.dist sphinx-min.conf.dist example.sql all: all-recursive .SUFFIXES: 
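# The paired @USE_LIBSTEMMER_FALSE@ / @USE_LIBSTEMMER_TRUE@ SUBDIRS assignments
# above are the expansion of an automake conditional: config.status rewrites one
# of the two markers to "#", so libstemmer_c is only descended into when
# libstemmer support was enabled at configure time (presumably via the
# --with-libstemmer switch).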
am--refresh: @: $(srcdir)/Makefile.in: @MAINTAINER_MODE_TRUE@ $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ echo ' cd $(srcdir) && $(AUTOMAKE) --foreign'; \ $(am__cd) $(srcdir) && $(AUTOMAKE) --foreign \ && exit 0; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ echo ' $(SHELL) ./config.status'; \ $(SHELL) ./config.status;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) $(SHELL) ./config.status --recheck $(top_srcdir)/configure: @MAINTAINER_MODE_TRUE@ $(am__configure_deps) $(am__cd) $(srcdir) && $(AUTOCONF) $(ACLOCAL_M4): @MAINTAINER_MODE_TRUE@ $(am__aclocal_m4_deps) $(am__cd) $(srcdir) && $(ACLOCAL) $(ACLOCAL_AMFLAGS) $(am__aclocal_m4_deps): config/config.h: config/stamp-h1 @if test ! -f $@; then \ rm -f config/stamp-h1; \ $(MAKE) $(AM_MAKEFLAGS) config/stamp-h1; \ else :; fi config/stamp-h1: $(top_srcdir)/config/config.h.in $(top_builddir)/config.status @rm -f config/stamp-h1 cd $(top_builddir) && $(SHELL) ./config.status config/config.h $(top_srcdir)/config/config.h.in: @MAINTAINER_MODE_TRUE@ $(am__configure_deps) ($(am__cd) $(top_srcdir) && $(AUTOHEADER)) rm -f config/stamp-h1 touch $@ distclean-hdr: -rm -f config/config.h config/stamp-h1 sphinx.conf.dist: $(top_builddir)/config.status $(srcdir)/sphinx.conf.in cd $(top_builddir) && $(SHELL) ./config.status $@ sphinx-min.conf.dist: $(top_builddir)/config.status $(srcdir)/sphinx-min.conf.in cd $(top_builddir) && $(SHELL) ./config.status $@ install-sysconfDATA: $(sysconf_DATA) @$(NORMAL_INSTALL) test -z "$(sysconfdir)" || $(MKDIR_P) "$(DESTDIR)$(sysconfdir)" @list='$(sysconf_DATA)'; test -n "$(sysconfdir)" || list=; \ for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ echo "$$d$$p"; \ done | $(am__base_list) | \ while read files; do \ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(sysconfdir)'"; \ $(INSTALL_DATA) $$files "$(DESTDIR)$(sysconfdir)" || exit $$?; \ done uninstall-sysconfDATA: @$(NORMAL_UNINSTALL) @list='$(sysconf_DATA)'; test -n "$(sysconfdir)" || list=; \ files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ test -n "$$files" || exit 0; \ echo " ( cd '$(DESTDIR)$(sysconfdir)' && rm -f" $$files ")"; \ cd "$(DESTDIR)$(sysconfdir)" && rm -f $$files # This directory's subdirectories are mostly independent; you can cd # into them and run `make' without going through this Makefile. # To change the values of `make' variables: instead of editing Makefiles, # (1) if the variable is set in `config.status', edit `config.status' # (which will cause the Makefiles to be regenerated when you run `make'); # (2) otherwise, pass the desired values on the `make' command line. 
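# (For instance, running "make CXXFLAGS='-g -O0'" from the top level overrides
# the configured C++ flags for that build only, without editing config.status;
# the setting is propagated to the sub-makes through MAKEFLAGS.)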
$(RECURSIVE_TARGETS): @fail= failcom='exit 1'; \ for f in x $$MAKEFLAGS; do \ case $$f in \ *=* | --[!k]*);; \ *k*) failcom='fail=yes';; \ esac; \ done; \ dot_seen=no; \ target=`echo $@ | sed s/-recursive//`; \ list='$(SUBDIRS)'; for subdir in $$list; do \ echo "Making $$target in $$subdir"; \ if test "$$subdir" = "."; then \ dot_seen=yes; \ local_target="$$target-am"; \ else \ local_target="$$target"; \ fi; \ ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ || eval $$failcom; \ done; \ if test "$$dot_seen" = "no"; then \ $(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \ fi; test -z "$$fail" $(RECURSIVE_CLEAN_TARGETS): @fail= failcom='exit 1'; \ for f in x $$MAKEFLAGS; do \ case $$f in \ *=* | --[!k]*);; \ *k*) failcom='fail=yes';; \ esac; \ done; \ dot_seen=no; \ case "$@" in \ distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \ *) list='$(SUBDIRS)' ;; \ esac; \ rev=''; for subdir in $$list; do \ if test "$$subdir" = "."; then :; else \ rev="$$subdir $$rev"; \ fi; \ done; \ rev="$$rev ."; \ target=`echo $@ | sed s/-recursive//`; \ for subdir in $$rev; do \ echo "Making $$target in $$subdir"; \ if test "$$subdir" = "."; then \ local_target="$$target-am"; \ else \ local_target="$$target"; \ fi; \ ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ || eval $$failcom; \ done && test -z "$$fail" tags-recursive: list='$(SUBDIRS)'; for subdir in $$list; do \ test "$$subdir" = . || ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) tags); \ done ctags-recursive: list='$(SUBDIRS)'; for subdir in $$list; do \ test "$$subdir" = . || ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) ctags); \ done ID: $(HEADERS) $(SOURCES) $(LISP) $(TAGS_FILES) list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in files) print i; }; }'`; \ mkid -fID $$unique tags: TAGS TAGS: tags-recursive $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \ $(TAGS_FILES) $(LISP) set x; \ here=`pwd`; \ if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \ include_option=--etags-include; \ empty_fix=.; \ else \ include_option=--include; \ empty_fix=; \ fi; \ list='$(SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ test ! 
-f $$subdir/TAGS || \ set "$$@" "$$include_option=$$here/$$subdir/TAGS"; \ fi; \ done; \ list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in files) print i; }; }'`; \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: CTAGS CTAGS: ctags-recursive $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \ $(TAGS_FILES) $(LISP) list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in files) print i; }; }'`; \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) $(am__remove_distdir) test -d "$(distdir)" || mkdir "$(distdir)" @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done @list='$(DIST_SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ test -d "$(distdir)/$$subdir" \ || $(MKDIR_P) "$(distdir)/$$subdir" \ || exit 1; \ fi; \ done @list='$(DIST_SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ dir1=$$subdir; dir2="$(distdir)/$$subdir"; \ $(am__relativize); \ new_distdir=$$reldir; \ dir1=$$subdir; dir2="$(top_distdir)"; \ $(am__relativize); \ new_top_distdir=$$reldir; \ echo " (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) top_distdir="$$new_top_distdir" distdir="$$new_distdir" \\"; \ echo " am__remove_distdir=: am__skip_length_check=: am__skip_mode_fix=: distdir)"; \ ($(am__cd) $$subdir && \ $(MAKE) $(AM_MAKEFLAGS) \ top_distdir="$$new_top_distdir" \ distdir="$$new_distdir" \ am__remove_distdir=: \ am__skip_length_check=: \ am__skip_mode_fix=: \ distdir) \ || exit 1; \ fi; \ done -test -n "$(am__skip_mode_fix)" \ || find "$(distdir)" -type d ! -perm -755 \ -exec chmod u+rwx,go+rx {} \; -o \ ! -type d ! -perm -444 -links 1 -exec chmod a+r {} \; -o \ ! -type d ! -perm -400 -exec chmod a+r {} \; -o \ ! -type d ! 
-perm -444 -exec $(install_sh) -c -m a+r {} {} \; \ || chmod -R a+r "$(distdir)" dist-gzip: distdir tardir=$(distdir) && $(am__tar) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).tar.gz $(am__remove_distdir) dist-bzip2: distdir tardir=$(distdir) && $(am__tar) | bzip2 -9 -c >$(distdir).tar.bz2 $(am__remove_distdir) dist-lzma: distdir tardir=$(distdir) && $(am__tar) | lzma -9 -c >$(distdir).tar.lzma $(am__remove_distdir) dist-xz: distdir tardir=$(distdir) && $(am__tar) | xz -c >$(distdir).tar.xz $(am__remove_distdir) dist-tarZ: distdir tardir=$(distdir) && $(am__tar) | compress -c >$(distdir).tar.Z $(am__remove_distdir) dist-shar: distdir shar $(distdir) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).shar.gz $(am__remove_distdir) dist-zip: distdir -rm -f $(distdir).zip zip -rq $(distdir).zip $(distdir) $(am__remove_distdir) dist dist-all: distdir tardir=$(distdir) && $(am__tar) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).tar.gz $(am__remove_distdir) # This target untars the dist file and tries a VPATH configuration. Then # it guarantees that the distribution is self-contained by making another # tarfile. distcheck: dist case '$(DIST_ARCHIVES)' in \ *.tar.gz*) \ GZIP=$(GZIP_ENV) gzip -dc $(distdir).tar.gz | $(am__untar) ;;\ *.tar.bz2*) \ bzip2 -dc $(distdir).tar.bz2 | $(am__untar) ;;\ *.tar.lzma*) \ lzma -dc $(distdir).tar.lzma | $(am__untar) ;;\ *.tar.xz*) \ xz -dc $(distdir).tar.xz | $(am__untar) ;;\ *.tar.Z*) \ uncompress -c $(distdir).tar.Z | $(am__untar) ;;\ *.shar.gz*) \ GZIP=$(GZIP_ENV) gzip -dc $(distdir).shar.gz | unshar ;;\ *.zip*) \ unzip $(distdir).zip ;;\ esac chmod -R a-w $(distdir); chmod a+w $(distdir) mkdir $(distdir)/_build mkdir $(distdir)/_inst chmod a-w $(distdir) test -d $(distdir)/_build || exit 0; \ dc_install_base=`$(am__cd) $(distdir)/_inst && pwd | sed -e 's,^[^:\\/]:[\\/],/,'` \ && dc_destdir="$${TMPDIR-/tmp}/am-dc-$$$$/" \ && am__cwd=`pwd` \ && $(am__cd) $(distdir)/_build \ && ../configure --srcdir=.. --prefix="$$dc_install_base" \ $(DISTCHECK_CONFIGURE_FLAGS) \ && $(MAKE) $(AM_MAKEFLAGS) \ && $(MAKE) $(AM_MAKEFLAGS) dvi \ && $(MAKE) $(AM_MAKEFLAGS) check \ && $(MAKE) $(AM_MAKEFLAGS) install \ && $(MAKE) $(AM_MAKEFLAGS) installcheck \ && $(MAKE) $(AM_MAKEFLAGS) uninstall \ && $(MAKE) $(AM_MAKEFLAGS) distuninstallcheck_dir="$$dc_install_base" \ distuninstallcheck \ && chmod -R a-w "$$dc_install_base" \ && ({ \ (cd ../.. && umask 077 && mkdir "$$dc_destdir") \ && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" install \ && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" uninstall \ && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" \ distuninstallcheck_dir="$$dc_destdir" distuninstallcheck; \ } || { rm -rf "$$dc_destdir"; exit 1; }) \ && rm -rf "$$dc_destdir" \ && $(MAKE) $(AM_MAKEFLAGS) dist \ && rm -rf $(DIST_ARCHIVES) \ && $(MAKE) $(AM_MAKEFLAGS) distcleancheck \ && cd "$$am__cwd" \ || exit 1 $(am__remove_distdir) @(echo "$(distdir) archives ready for distribution: "; \ list='$(DIST_ARCHIVES)'; for i in $$list; do echo $$i; done) | \ sed -e 1h -e 1s/./=/g -e 1p -e 1x -e '$$p' -e '$$x' distuninstallcheck: @$(am__cd) '$(distuninstallcheck_dir)' \ && test `$(distuninstallcheck_listfiles) | wc -l` -le 1 \ || { echo "ERROR: files left after uninstall:" ; \ if test -n "$(DESTDIR)"; then \ echo " (check DESTDIR support)"; \ fi ; \ $(distuninstallcheck_listfiles) ; \ exit 1; } >&2 distcleancheck: distclean @if test '$(srcdir)' = . 
; then \ echo "ERROR: distcleancheck can only run from a VPATH build" ; \ exit 1 ; \ fi @test `$(distcleancheck_listfiles) | wc -l` -eq 0 \ || { echo "ERROR: files left in build directory after distclean:" ; \ $(distcleancheck_listfiles) ; \ exit 1; } >&2 check-am: all-am check: check-recursive all-am: Makefile $(DATA) installdirs: installdirs-recursive installdirs-am: for dir in "$(DESTDIR)$(sysconfdir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: install-recursive install-exec: install-exec-recursive install-data: install-data-recursive uninstall: uninstall-recursive install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-recursive install-strip: $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ `test -z '$(STRIP)' || \ echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." clean: clean-recursive clean-am: clean-generic mostlyclean-am distclean: distclean-recursive -rm -f $(am__CONFIG_DISTCLEAN_FILES) -rm -f Makefile distclean-am: clean-am distclean-generic distclean-hdr distclean-tags dvi: dvi-recursive dvi-am: html: html-recursive html-am: info: info-recursive info-am: install-data-am: @$(NORMAL_INSTALL) $(MAKE) $(AM_MAKEFLAGS) install-data-hook install-dvi: install-dvi-recursive install-dvi-am: install-exec-am: install-sysconfDATA install-html: install-html-recursive install-html-am: install-info: install-info-recursive install-info-am: install-man: install-pdf: install-pdf-recursive install-pdf-am: install-ps: install-ps-recursive install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-recursive -rm -f $(am__CONFIG_DISTCLEAN_FILES) -rm -rf $(top_srcdir)/autom4te.cache -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-recursive mostlyclean-am: mostlyclean-generic pdf: pdf-recursive pdf-am: ps: ps-recursive ps-am: uninstall-am: uninstall-sysconfDATA .MAKE: $(RECURSIVE_CLEAN_TARGETS) $(RECURSIVE_TARGETS) ctags-recursive \ install-am install-data-am install-strip tags-recursive .PHONY: $(RECURSIVE_CLEAN_TARGETS) $(RECURSIVE_TARGETS) CTAGS GTAGS \ all all-am am--refresh check check-am clean clean-generic \ ctags ctags-recursive dist dist-all dist-bzip2 dist-gzip \ dist-lzma dist-shar dist-tarZ dist-xz dist-zip distcheck \ distclean distclean-generic distclean-hdr distclean-tags \ distcleancheck distdir distuninstallcheck dvi dvi-am html \ html-am info info-am install install-am install-data \ install-data-am install-data-hook install-dvi install-dvi-am \ install-exec install-exec-am install-html install-html-am \ install-info install-info-am install-man install-pdf \ install-pdf-am install-ps install-ps-am install-strip \ install-sysconfDATA installcheck installcheck-am installdirs \ installdirs-am maintainer-clean maintainer-clean-generic \ mostlyclean mostlyclean-generic pdf pdf-am ps ps-am tags \ tags-recursive uninstall uninstall-am uninstall-sysconfDATA install-data-hook: mkdir -p $(DESTDIR)$(localstatedir)/data && mkdir -p $(DESTDIR)$(localstatedir)/log # Tell versions [3.59,3.63) of GNU make to not export 
all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: sphinx-2.0.4-release/.hgignore0000644000176700017710000000044511430744557015563 0ustar deogardeogarglob:.svn/ glob:bin/ glob:*.user glob:junk/ glob:*.suo glob:*.ncb glob:Makefile glob:config.status glob:config.log glob:sphinx-min.conf.dist glob:sphinx.conf.dist glob:config/* glob:*.Po glob:test/*/report.txt glob:codeblocks/*.depend glob:codeblocks/*.layout glob:*/sphinxversion.h glob:*.pycsphinx-2.0.4-release/sphinx.workspace0000644000176700017710000000176611420614767017216 0ustar deogardeogar sphinx-2.0.4-release/libexpat/0000755000176700017710000000000011724063141015552 5ustar deogardeogarsphinx-2.0.4-release/libexpat/libexpat.vcproj0000644000176700017710000000715610740500572020621 0ustar deogardeogar sphinx-2.0.4-release/libexpat/libexpat.sln0000644000176700017710000000153011356430305020100 0ustar deogardeogarMicrosoft Visual Studio Solution File, Format Version 9.00 # Visual Studio 2005 Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "libexpat", "libexpat.vcproj", "{7010C937-28B3-46A2-903C-BEBA2F5EAE4D}" EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Win32 = Debug|Win32 Release|Win32 = Release|Win32 EndGlobalSection GlobalSection(ProjectConfigurationPlatforms) = postSolution {7010C937-28B3-46A2-903C-BEBA2F5EAE4D}.Debug|Win32.ActiveCfg = Debug|Win32 {7010C937-28B3-46A2-903C-BEBA2F5EAE4D}.Debug|Win32.Build.0 = Debug|Win32 {7010C937-28B3-46A2-903C-BEBA2F5EAE4D}.Release|Win32.ActiveCfg = Release|Win32 {7010C937-28B3-46A2-903C-BEBA2F5EAE4D}.Release|Win32.Build.0 = Release|Win32 EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE EndGlobalSection EndGlobal sphinx-2.0.4-release/mysqlse/0000755000176700017710000000000011724063141015437 5ustar deogardeogarsphinx-2.0.4-release/mysqlse/INSTALL0000644000176700017710000000161311723677010016476 0ustar deogardeogarBuilding MySQL with SphinxSE ============================= Note: BUILD/autorun.sh step on Linux might malfunction with some versions of automake; autorun.sh will not fail but the build will. automake 1.9.6 is known to work. 
MySQL 5.0.x on Linux --------------------- tar zxvf mysql-5.0.91.tar.gz cp -R mysqlse mysql-5.0.91/sql/sphinx cd mysql-5.0.91 patch -p1 -i sql/sphinx/sphinx.5.0.91.diff sh BUILD/autorun.sh ./configure --with-sphinx-storage-engine make MySQL 5.1.x on Linux --------------------- tar zxvf mysql-5.1.47.tar.gz cp -R -p mysqlse mysql-5.1.47/storage/sphinx cd mysql-5.1.47 sh BUILD/autorun.sh ./configure --with-plugins=sphinx make MySQL 5.0.x on Windows ----------------------- tar zxvf mysql-5.0.91.tar.gz cp -R mysqlse mysql-5.0.91/sql/sphinx cd mysql-5.0.91 patch -p1 -i sql/sphinx/sphinx.5.0.91.diff win\configure.js WITH_SPHINX_STORAGE_ENGINE win\build-vs8 --eof-- sphinx-2.0.4-release/mysqlse/ha_sphinx.h0000644000176700017710000001245511624275006017604 0ustar deogardeogar// // $Id: ha_sphinx.h 2921 2011-08-21 21:35:02Z tomat $ // #ifdef USE_PRAGMA_INTERFACE #pragma interface // gcc class implementation #endif #if MYSQL_VERSION_ID>=50515 #define TABLE_ARG TABLE_SHARE #elif MYSQL_VERSION_ID>50100 #define TABLE_ARG st_table_share #else #define TABLE_ARG st_table #endif #if MYSQL_VERSION_ID>=50120 typedef uchar byte; #endif /// forward decls class THD; struct CSphReqQuery; struct CSphSEShare; struct CSphSEAttr; struct CSphSEStats; struct CSphSEThreadData; /// Sphinx SE handler class class ha_sphinx : public handler { protected: THR_LOCK_DATA m_tLock; ///< MySQL lock CSphSEShare * m_pShare; ///< shared lock info uint m_iMatchesTotal; uint m_iCurrentPos; const byte * m_pCurrentKey; uint m_iCurrentKeyLen; char * m_pResponse; ///< searchd response storage char * m_pResponseEnd; ///< searchd response storage end (points to wilderness!) char * m_pCur; ///< current position into response bool m_bUnpackError; ///< any errors while unpacking response public: #if MYSQL_VERSION_ID<50100 ha_sphinx ( TABLE_ARG * table_arg ); // NOLINT #else ha_sphinx ( handlerton * hton, TABLE_ARG * table_arg ); #endif ~ha_sphinx () {} const char * table_type () const { return "SPHINX"; } ///< SE name for display purposes const char * index_type ( uint ) { return "HASH"; } ///< index type name for display purposes const char ** bas_ext () const; ///< my file extensions #if MYSQL_VERSION_ID>50100 ulonglong table_flags () const { return HA_CAN_INDEX_BLOBS; } ///< bitmap of implemented flags (see handler.h for more info) #else ulong table_flags () const { return HA_CAN_INDEX_BLOBS; } ///< bitmap of implemented flags (see handler.h for more info) #endif ulong index_flags ( uint, uint, bool ) const { return 0; } ///< bitmap of flags that says how SE implements indexes uint max_supported_record_length () const { return HA_MAX_REC_LENGTH; } uint max_supported_keys () const { return 1; } uint max_supported_key_parts () const { return 1; } uint max_supported_key_length () const { return MAX_KEY_LENGTH; } uint max_supported_key_part_length () const { return MAX_KEY_LENGTH; } #if MYSQL_VERSION_ID>50100 virtual double scan_time () { return (double)( stats.records+stats.deleted )/20.0 + 10; } ///< called in test_quick_select to determine if indexes should be used #else virtual double scan_time () { return (double)( records+deleted )/20.0 + 10; } ///< called in test_quick_select to determine if indexes should be used #endif virtual double read_time ( ha_rows rows ) { return (double)rows/20.0 + 1; } ///< index read time estimate public: int open ( const char * name, int mode, uint test_if_locked ); int close (); int write_row ( byte * buf ); int update_row ( const byte * old_data, byte * new_data ); int delete_row ( const byte * buf ); int 
extra ( enum ha_extra_function op ); int index_init ( uint keynr, bool sorted ); // 5.1.x int index_init ( uint keynr ) { return index_init ( keynr, false ); } // 5.0.x int index_end (); int index_read ( byte * buf, const byte * key, uint key_len, enum ha_rkey_function find_flag ); int index_read_idx ( byte * buf, uint idx, const byte * key, uint key_len, enum ha_rkey_function find_flag ); int index_next ( byte * buf ); int index_next_same ( byte * buf, const byte * key, uint keylen ); int index_prev ( byte * buf ); int index_first ( byte * buf ); int index_last ( byte * buf ); int get_rec ( byte * buf, const byte * key, uint keylen ); int rnd_init ( bool scan ); int rnd_end (); int rnd_next ( byte * buf ); int rnd_pos ( byte * buf, byte * pos ); void position ( const byte * record ); #if MYSQL_VERSION_ID>=50030 int info ( uint ); #else void info ( uint ); #endif int reset(); int external_lock ( THD * thd, int lock_type ); int delete_all_rows (); ha_rows records_in_range ( uint inx, key_range * min_key, key_range * max_key ); int delete_table ( const char * from ); int rename_table ( const char * from, const char * to ); int create ( const char * name, TABLE * form, HA_CREATE_INFO * create_info ); THR_LOCK_DATA ** store_lock ( THD * thd, THR_LOCK_DATA ** to, enum thr_lock_type lock_type ); public: virtual const COND * cond_push ( const COND *cond ); virtual void cond_pop (); private: uint32 m_iFields; char ** m_dFields; uint32 m_iAttrs; CSphSEAttr * m_dAttrs; int m_bId64; int * m_dUnboundFields; private: int Connect ( const char * sQueryHost, ushort uPort ); int ConnectAPI ( const char * sQueryHost, int iQueryPort ); int HandleMysqlError ( struct st_mysql * pConn, int iErrCode ); uint32 UnpackDword (); char * UnpackString (); bool UnpackSchema (); bool UnpackStats ( CSphSEStats * pStats ); bool CheckResponcePtr ( int iLen ); CSphSEThreadData * GetTls (); }; #if MYSQL_VERSION_ID < 50100 bool sphinx_show_status ( THD * thd ); #endif int sphinx_showfunc_total_found ( THD *, SHOW_VAR *, char * ); int sphinx_showfunc_total ( THD *, SHOW_VAR *, char * ); int sphinx_showfunc_time ( THD *, SHOW_VAR *, char * ); int sphinx_showfunc_word_count ( THD *, SHOW_VAR *, char * ); int sphinx_showfunc_words ( THD *, SHOW_VAR *, char * ); // // $Id: ha_sphinx.h 2921 2011-08-21 21:35:02Z tomat $ // sphinx-2.0.4-release/mysqlse/gen_data.php0000644000176700017710000000132210441500053017701 0ustar deogardeogar sphinx-2.0.4-release/mysqlse/Makefile.am0000644000176700017710000000412411145362626017503 0ustar deogardeogar# Copyright (C) 2000 MySQL AB & MySQL Finland AB & TCX DataKonsult AB # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. 
# # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA #called from the top level Makefile MYSQLDATAdir = $(localstatedir) MYSQLSHAREdir = $(pkgdatadir) MYSQLBASEdir= $(prefix) MYSQLLIBdir= $(pkglibdir) pkgplugindir = $(pkglibdir)/plugin INCLUDES = -I$(top_srcdir)/include -I$(top_builddir)/include \ -I$(top_srcdir)/regex \ -I$(top_srcdir)/sql \ -I$(srcdir) SUBDIRS = ../../include ../../mysys ../../strings ../../dbug ../../extra WRAPLIBS= LDADD = DEFS= @DEFS@ \ -D_REENTRANT -D_PTHREADS -DENGINE -DSTORAGE_ENGINE -DMYSQL_SERVER noinst_HEADERS = ha_sphinx.h EXTRA_LTLIBRARIES = ha_sphinx.la pkgplugin_LTLIBRARIES = @plugin_sphinx_shared_target@ sphinx.la ha_sphinx_la_LDFLAGS = -module -rpath $(MYSQLLIBdir) ha_sphinx_la_CXXFLAGS= $(AM_CFLAGS) -DMYSQL_DYNAMIC_PLUGIN ha_sphinx_la_CFLAGS = $(AM_CFLAGS) -DMYSQL_DYNAMIC_PLUGIN ha_sphinx_la_SOURCES = ha_sphinx.cc sphinx_la_LDFLAGS = -module sphinx_la_CXXFLAGS = $(AM_CFLAGS) -DMYSQL_DYNAMIC_PLUGIN sphinx_la_CFLAGS = $(AM_CFLAGS) -DMYSQL_DYNAMIC_PLUGIN sphinx_la_SOURCES = snippets_udf.cc EXTRA_LIBRARIES = libsphinx.a noinst_LIBRARIES = @plugin_sphinx_static_target@ libsphinx_a_CXXFLAGS = $(AM_CFLAGS) libsphinx_a_CFLAGS = $(AM_CFLAGS) libsphinx_a_SOURCES= ha_sphinx.cc EXTRA_DIST = cmakelists.txt # Don't update the files from bitkeeper %::SCCS/s.% sphinx-2.0.4-release/mysqlse/ha_sphinx.cc0000644000176700017710000026165311723677010017751 0ustar deogardeogar// // $Id: ha_sphinx.cc 3133 2012-03-01 13:47:52Z shodan $ // // // Copyright (c) 2001-2012, Andrew Aksyonoff // Copyright (c) 2008-2012, Sphinx Technologies Inc // All rights reserved // // This program is free software; you can redistribute it and/or modify // it under the terms of the GNU General Public License. You should have // received a copy of the GPL license along with this program; if you // did not, you can find it at http://www.gnu.org/ // #ifdef USE_PRAGMA_IMPLEMENTATION #pragma implementation // gcc: Class implementation #endif #if _MSC_VER>=1400 #define _CRT_SECURE_NO_DEPRECATE 1 #define _CRT_NONSTDC_NO_DEPRECATE 1 #endif #include #if MYSQL_VERSION_ID>=50515 #include "sql_class.h" #include "sql_array.h" #elif MYSQL_VERSION_ID>50100 #include "mysql_priv.h" #include #else #include "../mysql_priv.h" #endif #include #include #include // include client for INSERT table (sort of redoing federated..) #ifndef __WIN__ // UNIX-specific #include #include #include #define RECV_FLAGS MSG_WAITALL #define sphSockClose(_sock) ::close(_sock) #else // Windows-specific #include #define strcasecmp stricmp #define snprintf _snprintf #define RECV_FLAGS 0 #define sphSockClose(_sock) ::closesocket(_sock) #endif #include #include "ha_sphinx.h" #ifndef MSG_WAITALL #define MSG_WAITALL 0 #endif #if _MSC_VER>=1400 #pragma warning(push,4) #endif ///////////////////////////////////////////////////////////////////////////// /// there might be issues with min() on different platforms (eg. 
Gentoo, they say) #define Min(a,b) ((a)<(b)?(a):(b)) /// unaligned RAM accesses are forbidden on SPARC #if defined(sparc) || defined(__sparc__) #define UNALIGNED_RAM_ACCESS 0 #else #define UNALIGNED_RAM_ACCESS 1 #endif #if UNALIGNED_RAM_ACCESS /// pass-through wrapper template < typename T > inline T sphUnalignedRead ( const T & tRef ) { return tRef; } /// pass-through wrapper template < typename T > void sphUnalignedWrite ( void * pPtr, const T & tVal ) { *(T*)pPtr = tVal; } #else /// unaligned read wrapper for some architectures (eg. SPARC) template < typename T > inline T sphUnalignedRead ( const T & tRef ) { T uTmp; byte * pSrc = (byte *) &tRef; byte * pDst = (byte *) &uTmp; for ( int i=0; i<(int)sizeof(T); i++ ) *pDst++ = *pSrc++; return uTmp; } /// unaligned write wrapper for some architectures (eg. SPARC) template < typename T > void sphUnalignedWrite ( void * pPtr, const T & tVal ) { byte * pDst = (byte *) pPtr; byte * pSrc = (byte *) &tVal; for ( int i=0; i<(int)sizeof(T); i++ ) *pDst++ = *pSrc++; } #endif #if MYSQL_VERSION_ID>=50515 #define sphinx_hash_init my_hash_init #define sphinx_hash_free my_hash_free #define sphinx_hash_search my_hash_search #define sphinx_hash_delete my_hash_delete #else #define sphinx_hash_init hash_init #define sphinx_hash_free hash_free #define sphinx_hash_search hash_search #define sphinx_hash_delete hash_delete #endif ///////////////////////////////////////////////////////////////////////////// // FIXME! make this all dynamic #define SPHINXSE_MAX_FILTERS 32 #define SPHINXAPI_DEFAULT_HOST "127.0.0.1" #define SPHINXAPI_DEFAULT_PORT 9312 #define SPHINXAPI_DEFAULT_INDEX "*" #define SPHINXQL_DEFAULT_PORT 9306 #define SPHINXSE_SYSTEM_COLUMNS 3 #define SPHINXSE_MAX_ALLOC (16*1024*1024) #define SPHINXSE_MAX_KEYWORDSTATS 4096 #define SPHINXSE_VERSION "2.0.4-release" // FIXME? the following is cut-n-paste from sphinx.h and searchd.cpp // cut-n-paste is somewhat simpler that adding dependencies however.. enum { SPHINX_SEARCHD_PROTO = 1, SEARCHD_COMMAND_SEARCH = 0, VER_COMMAND_SEARCH = 0x119, }; /// search query sorting orders enum ESphSortOrder { SPH_SORT_RELEVANCE = 0, ///< sort by document relevance desc, then by date SPH_SORT_ATTR_DESC = 1, ///< sort by document date desc, then by relevance desc SPH_SORT_ATTR_ASC = 2, ///< sort by document date asc, then by relevance desc SPH_SORT_TIME_SEGMENTS = 3, ///< sort by time segments (hour/day/week/etc) desc, then by relevance desc SPH_SORT_EXTENDED = 4, ///< sort by SQL-like expression (eg. 
"@relevance DESC, price ASC, @id DESC") SPH_SORT_EXPR = 5, ///< sort by expression SPH_SORT_TOTAL }; /// search query matching mode enum ESphMatchMode { SPH_MATCH_ALL = 0, ///< match all query words SPH_MATCH_ANY, ///< match any query word SPH_MATCH_PHRASE, ///< match this exact phrase SPH_MATCH_BOOLEAN, ///< match this boolean query SPH_MATCH_EXTENDED, ///< match this extended query SPH_MATCH_FULLSCAN, ///< match all document IDs w/o fulltext query, apply filters SPH_MATCH_EXTENDED2, ///< extended engine V2 SPH_MATCH_TOTAL }; /// search query relevance ranking mode enum ESphRankMode { SPH_RANK_PROXIMITY_BM25 = 0, ///< default mode, phrase proximity major factor and BM25 minor one SPH_RANK_BM25 = 1, ///< statistical mode, BM25 ranking only (faster but worse quality) SPH_RANK_NONE = 2, ///< no ranking, all matches get a weight of 1 SPH_RANK_WORDCOUNT = 3, ///< simple word-count weighting, rank is a weighted sum of per-field keyword occurence counts SPH_RANK_PROXIMITY = 4, ///< phrase proximity SPH_RANK_MATCHANY = 5, ///< emulate old match-any weighting SPH_RANK_FIELDMASK = 6, ///< sets bits where there were matches SPH_RANK_SPH04 = 7, ///< codename SPH04, phrase proximity + bm25 + head/exact boost SPH_RANK_EXPR = 8, ///< expression based ranker SPH_RANK_TOTAL, SPH_RANK_DEFAULT = SPH_RANK_PROXIMITY_BM25 }; /// search query grouping mode enum ESphGroupBy { SPH_GROUPBY_DAY = 0, ///< group by day SPH_GROUPBY_WEEK = 1, ///< group by week SPH_GROUPBY_MONTH = 2, ///< group by month SPH_GROUPBY_YEAR = 3, ///< group by year SPH_GROUPBY_ATTR = 4 ///< group by attribute value }; /// known attribute types enum { SPH_ATTR_NONE = 0, ///< not an attribute at all SPH_ATTR_INTEGER = 1, ///< this attr is just an integer SPH_ATTR_TIMESTAMP = 2, ///< this attr is a timestamp SPH_ATTR_ORDINAL = 3, ///< this attr is an ordinal string number (integer at search time, specially handled at indexing time) SPH_ATTR_BOOL = 4, ///< this attr is a boolean bit field SPH_ATTR_FLOAT = 5, SPH_ATTR_BIGINT = 6, SPH_ATTR_STRING = 7, ///< string (binary; in-memory) SPH_ATTR_UINT32SET = 0x40000001UL, ///< this attr is multiple int32 values (0 or more) SPH_ATTR_UINT64SET = 0x40000002UL ///< this attr is multiple int64 values (0 or more) }; /// known answers enum { SEARCHD_OK = 0, ///< general success, command-specific reply follows SEARCHD_ERROR = 1, ///< general failure, error message follows SEARCHD_RETRY = 2, ///< temporary failure, error message follows, client should retry later SEARCHD_WARNING = 3 ///< general success, warning message and command-specific reply follow }; ////////////////////////////////////////////////////////////////////////////// #define SPHINX_DEBUG_OUTPUT 0 #define SPHINX_DEBUG_CALLS 0 #include #if SPHINX_DEBUG_OUTPUT inline void SPH_DEBUG ( const char * format, ... ) { va_list ap; va_start ( ap, format ); fprintf ( stderr, "SphinxSE: " ); vfprintf ( stderr, format, ap ); fprintf ( stderr, "\n" ); va_end ( ap ); } #else inline void SPH_DEBUG ( const char *, ... 
) {} #endif #if SPHINX_DEBUG_CALLS #define SPH_ENTER_FUNC() { SPH_DEBUG ( "enter %s", __FUNCTION__ ); } #define SPH_ENTER_METHOD() { SPH_DEBUG ( "enter %s(this=%08x)", __FUNCTION__, this ); } #define SPH_RET(_arg) { SPH_DEBUG ( "leave %s", __FUNCTION__ ); return _arg; } #define SPH_VOID_RET() { SPH_DEBUG ( "leave %s", __FUNCTION__ ); return; } #else #define SPH_ENTER_FUNC() #define SPH_ENTER_METHOD() #define SPH_RET(_arg) { return(_arg); } #define SPH_VOID_RET() { return; } #endif #define SafeDelete(_arg) { if ( _arg ) delete ( _arg ); (_arg) = NULL; } #define SafeDeleteArray(_arg) { if ( _arg ) delete [] ( _arg ); (_arg) = NULL; } ////////////////////////////////////////////////////////////////////////////// /// per-table structure that will be shared among all open Sphinx SE handlers struct CSphSEShare { pthread_mutex_t m_tMutex; THR_LOCK m_tLock; char * m_sTable; char * m_sScheme; ///< our connection string char * m_sHost; ///< points into m_sScheme buffer, DO NOT FREE EXPLICITLY char * m_sSocket; ///< points into m_sScheme buffer, DO NOT FREE EXPLICITLY char * m_sIndex; ///< points into m_sScheme buffer, DO NOT FREE EXPLICITLY ushort m_iPort; bool m_bSphinxQL; ///< is this read-only SphinxAPI table, or write-only SphinxQL table? uint m_iTableNameLen; uint m_iUseCount; CHARSET_INFO * m_pTableQueryCharset; int m_iTableFields; char ** m_sTableField; enum_field_types * m_eTableFieldType; CSphSEShare () : m_sTable ( NULL ) , m_sScheme ( NULL ) , m_sHost ( NULL ) , m_sSocket ( NULL ) , m_sIndex ( NULL ) , m_iPort ( 0 ) , m_bSphinxQL ( false ) , m_iTableNameLen ( 0 ) , m_iUseCount ( 1 ) , m_pTableQueryCharset ( NULL ) , m_iTableFields ( 0 ) , m_sTableField ( NULL ) , m_eTableFieldType ( NULL ) { thr_lock_init ( &m_tLock ); pthread_mutex_init ( &m_tMutex, MY_MUTEX_INIT_FAST ); } ~CSphSEShare () { pthread_mutex_destroy ( &m_tMutex ); thr_lock_delete ( &m_tLock ); SafeDeleteArray ( m_sTable ); SafeDeleteArray ( m_sScheme ); ResetTable (); } void ResetTable () { for ( int i=0; i m_dIds; Dynamic_array m_dValues; }; Dynamic_array m_dOverrides; public: char m_sParseError[256]; public: CSphSEQuery ( const char * sQuery, int iLength, const char * sIndex ); ~CSphSEQuery (); bool Parse (); int BuildRequest ( char ** ppBuffer ); protected: char * m_pBuf; char * m_pCur; int m_iBufLeft; bool m_bBufOverrun; template < typename T > int ParseArray ( T ** ppValues, const char * sValue ); bool ParseField ( char * sField ); void SendBytes ( const void * pBytes, int iBytes ); void SendWord ( short int v ) { v = ntohs(v); SendBytes ( &v, sizeof(v) ); } void SendInt ( int v ) { v = ntohl(v); SendBytes ( &v, sizeof(v) ); } void SendDword ( uint v ) { v = ntohl(v) ;SendBytes ( &v, sizeof(v) ); } void SendUint64 ( ulonglong v ) { SendDword ( (uint)(v>>32) ); SendDword ( (uint)(v&0xFFFFFFFFUL) ); } void SendString ( const char * v ) { int iLen = strlen(v); SendDword(iLen); SendBytes ( v, iLen ); } void SendFloat ( float v ) { SendDword ( sphF2DW(v) ); } }; template int CSphSEQuery::ParseArray ( uint32 **, const char * ); template int CSphSEQuery::ParseArray ( longlong **, const char * ); ////////////////////////////////////////////////////////////////////////////// #if MYSQL_VERSION_ID>50100 #if MYSQL_VERSION_ID<50114 #error Sphinx SE requires MySQL 5.1.14 or higher if compiling for 5.1.x series! 
#endif static handler * sphinx_create_handler ( handlerton * hton, TABLE_SHARE * table, MEM_ROOT * mem_root ); static int sphinx_init_func ( void * p ); static int sphinx_close_connection ( handlerton * hton, THD * thd ); static int sphinx_panic ( handlerton * hton, enum ha_panic_function flag ); static bool sphinx_show_status ( handlerton * hton, THD * thd, stat_print_fn * stat_print, enum ha_stat_type stat_type ); #else static bool sphinx_init_func_for_handlerton (); static int sphinx_close_connection ( THD * thd ); bool sphinx_show_status ( THD * thd ); #endif // >50100 ////////////////////////////////////////////////////////////////////////////// static const char sphinx_hton_name[] = "SPHINX"; static const char sphinx_hton_comment[] = "Sphinx storage engine " SPHINXSE_VERSION; #if MYSQL_VERSION_ID<50100 handlerton sphinx_hton = { #ifdef MYSQL_HANDLERTON_INTERFACE_VERSION MYSQL_HANDLERTON_INTERFACE_VERSION, #endif sphinx_hton_name, SHOW_OPTION_YES, sphinx_hton_comment, DB_TYPE_SPHINX_DB, sphinx_init_func_for_handlerton, 0, // slot 0, // savepoint size sphinx_close_connection, // close_connection NULL, // savepoint NULL, // rollback to savepoint NULL, // release savepoint NULL, // commit NULL, // rollback NULL, // prepare NULL, // recover NULL, // commit_by_xid NULL, // rollback_by_xid NULL, // create_cursor_read_view NULL, // set_cursor_read_view NULL, // close_cursor_read_view HTON_CAN_RECREATE }; #else static handlerton * sphinx_hton_ptr = NULL; #endif ////////////////////////////////////////////////////////////////////////////// // variables for Sphinx shared methods pthread_mutex_t sphinx_mutex; // mutex to init the hash static int sphinx_init = 0; // flag whether the hash was initialized static HASH sphinx_open_tables; // hash used to track open tables ////////////////////////////////////////////////////////////////////////////// // INITIALIZATION AND SHUTDOWN ////////////////////////////////////////////////////////////////////////////// // hashing function #if MYSQL_VERSION_ID>=50120 typedef size_t GetKeyLength_t; #else typedef uint GetKeyLength_t; #endif static byte * sphinx_get_key ( const byte * pSharePtr, GetKeyLength_t * pLength, my_bool ) { CSphSEShare * pShare = (CSphSEShare *) pSharePtr; *pLength = (size_t) pShare->m_iTableNameLen; return (byte*) pShare->m_sTable; } #if MYSQL_VERSION_ID<50100 static int sphinx_init_func ( void * ) // to avoid unused arg warning #else static int sphinx_init_func ( void * p ) #endif { SPH_ENTER_FUNC(); if ( !sphinx_init ) { sphinx_init = 1; void ( pthread_mutex_init ( &sphinx_mutex, MY_MUTEX_INIT_FAST ) ); sphinx_hash_init ( &sphinx_open_tables, system_charset_info, 32, 0, 0, sphinx_get_key, 0, 0 ); #if MYSQL_VERSION_ID > 50100 handlerton * hton = (handlerton*) p; hton->state = SHOW_OPTION_YES; hton->db_type = DB_TYPE_FIRST_DYNAMIC; hton->create = sphinx_create_handler; hton->close_connection = sphinx_close_connection; hton->show_status = sphinx_show_status; hton->panic = sphinx_panic; hton->flags = HTON_CAN_RECREATE; #endif } SPH_RET(0); } #if MYSQL_VERSION_ID<50100 static bool sphinx_init_func_for_handlerton () { return sphinx_init_func ( &sphinx_hton ); } #endif #if MYSQL_VERSION_ID>50100 static int sphinx_close_connection ( handlerton * hton, THD * thd ) { // deallocate common handler data SPH_ENTER_FUNC(); void ** tmp = thd_ha_data ( thd, hton ); CSphSEThreadData * pTls = (CSphSEThreadData*) (*tmp); SafeDelete ( pTls ); *tmp = NULL; SPH_RET(0); } static int sphinx_done_func ( void * ) { SPH_ENTER_FUNC(); int error = 0; if ( sphinx_init 
) { sphinx_init = 0; if ( sphinx_open_tables.records ) error = 1; sphinx_hash_free ( &sphinx_open_tables ); pthread_mutex_destroy ( &sphinx_mutex ); } SPH_RET(0); } static int sphinx_panic ( handlerton * hton, enum ha_panic_function ) { return sphinx_done_func ( hton ); } #else static int sphinx_close_connection ( THD * thd ) { // deallocate common handler data SPH_ENTER_FUNC(); CSphSEThreadData * pTls = (CSphSEThreadData*) thd->ha_data[sphinx_hton.slot]; SafeDelete ( pTls ); thd->ha_data[sphinx_hton.slot] = NULL; SPH_RET(0); } #endif // >50100 ////////////////////////////////////////////////////////////////////////////// // SHOW STATUS ////////////////////////////////////////////////////////////////////////////// #if MYSQL_VERSION_ID>50100 static bool sphinx_show_status ( handlerton * hton, THD * thd, stat_print_fn * stat_print, enum ha_stat_type ) #else bool sphinx_show_status ( THD * thd ) #endif { SPH_ENTER_FUNC(); #if MYSQL_VERSION_ID<50100 Protocol * protocol = thd->protocol; List field_list; #endif char buf1[IO_SIZE]; uint buf1len; char buf2[IO_SIZE]; uint buf2len = 0; String words; buf1[0] = '\0'; buf2[0] = '\0'; #if MYSQL_VERSION_ID>50100 // 5.1.x style stats CSphSEThreadData * pTls = (CSphSEThreadData*) ( *thd_ha_data ( thd, hton ) ); #define LOC_STATS(_key,_keylen,_val,_vallen) \ stat_print ( thd, sphinx_hton_name, strlen(sphinx_hton_name), _key, _keylen, _val, _vallen ); #else // 5.0.x style stats if ( have_sphinx_db!=SHOW_OPTION_YES ) { my_message ( ER_NOT_SUPPORTED_YET, "failed to call SHOW SPHINX STATUS: --skip-sphinx was specified", MYF(0) ); SPH_RET(TRUE); } CSphSEThreadData * pTls = (CSphSEThreadData*) thd->ha_data[sphinx_hton.slot]; field_list.push_back ( new Item_empty_string ( "Type", 10 ) ); field_list.push_back ( new Item_empty_string ( "Name", FN_REFLEN ) ); field_list.push_back ( new Item_empty_string ( "Status", 10 ) ); if ( protocol->send_fields ( &field_list, Protocol::SEND_NUM_ROWS | Protocol::SEND_EOF ) ) SPH_RET(TRUE); #define LOC_STATS(_key,_keylen,_val,_vallen) \ protocol->prepare_for_resend (); \ protocol->store ( "SPHINX", 6, system_charset_info ); \ protocol->store ( _key, _keylen, system_charset_info ); \ protocol->store ( _val, _vallen, system_charset_info ); \ if ( protocol->write() ) \ SPH_RET(TRUE); #endif // show query stats if ( pTls && pTls->m_bStats ) { const CSphSEStats * pStats = &pTls->m_tStats; buf1len = my_snprintf ( buf1, sizeof(buf1), "total: %d, total found: %d, time: %d, words: %d", pStats->m_iMatchesTotal, pStats->m_iMatchesFound, pStats->m_iQueryMsec, pStats->m_iWords ); LOC_STATS ( "stats", 5, buf1, buf1len ); if ( pStats->m_iWords ) { for ( int i=0; im_iWords; i++ ) { CSphSEWordStats & tWord = pStats->m_dWords[i]; buf2len = my_snprintf ( buf2, sizeof(buf2), "%s%s:%d:%d ", buf2, tWord.m_sWord, tWord.m_iDocs, tWord.m_iHits ); } // convert it if we can const char * sWord = buf2; int iWord = buf2len; String sBuf3; if ( pTls->m_pQueryCharset ) { uint iErrors; sBuf3.copy ( buf2, buf2len, pTls->m_pQueryCharset, system_charset_info, &iErrors ); sWord = sBuf3.c_ptr(); iWord = sBuf3.length(); } LOC_STATS ( "words", 5, sWord, iWord ); } } // show last error or warning (either in addition to stats, or on their own) if ( pTls && pTls->m_tStats.m_sLastMessage && pTls->m_tStats.m_sLastMessage[0] ) { const char * sMessageType = pTls->m_tStats.m_bLastError ? 
"error" : "warning"; LOC_STATS ( sMessageType, strlen ( sMessageType ), pTls->m_tStats.m_sLastMessage, strlen ( pTls->m_tStats.m_sLastMessage ) ); } else { // well, nothing to show just yet #if MYSQL_VERSION_ID < 50100 LOC_STATS ( "stats", 5, "no query has been executed yet", sizeof("no query has been executed yet")-1 ); #endif } #if MYSQL_VERSION_ID < 50100 send_eof(thd); #endif SPH_RET(FALSE); } ////////////////////////////////////////////////////////////////////////////// // HELPERS ////////////////////////////////////////////////////////////////////////////// static char * sphDup ( const char * sSrc, int iLen=-1 ) { if ( !sSrc ) return NULL; if ( iLen<0 ) iLen = strlen(sSrc); char * sRes = new char [ 1+iLen ]; memcpy ( sRes, sSrc, iLen ); sRes[iLen] = '\0'; return sRes; } static void sphLogError ( const char * sFmt, ... ) { // emit timestamp #ifdef __WIN__ SYSTEMTIME t; GetLocalTime ( &t ); fprintf ( stderr, "%02d%02d%02d %2d:%02d:%02d SphinxSE: internal error: ", (int)t.wYear % 100, (int)t.wMonth, (int)t.wDay, (int)t.wHour, (int)t.wMinute, (int)t.wSecond ); #else // Unix version time_t tStamp; time ( &tStamp ); struct tm * pParsed; #ifdef HAVE_LOCALTIME_R struct tm tParsed; localtime_r ( &tStamp, &tParsed ); pParsed = &tParsed; #else pParsed = localtime ( &tStamp ); #endif // HAVE_LOCALTIME_R fprintf ( stderr, "%02d%02d%02d %2d:%02d:%02d SphinxSE: internal error: ", pParsed->tm_year % 100, pParsed->tm_mon + 1, pParsed->tm_mday, pParsed->tm_hour, pParsed->tm_min, pParsed->tm_sec); #endif // __WIN__ // emit message va_list ap; va_start ( ap, sFmt ); vfprintf ( stderr, sFmt, ap ); va_end ( ap ); // emit newline fprintf ( stderr, "\n" ); } // the following scheme variants are recognized // // sphinx://host[:port]/index // sphinxql://host[:port]/index // unix://unix/domain/socket[:index] static bool ParseUrl ( CSphSEShare * share, TABLE * table, bool bCreate ) { SPH_ENTER_FUNC(); if ( share ) { // check incoming stuff if ( !table ) { sphLogError ( "table==NULL in ParseUrl()" ); return false; } if ( !table->s ) { sphLogError ( "(table->s)==NULL in ParseUrl()" ); return false; } // free old stuff share->ResetTable (); // fill new stuff share->m_iTableFields = table->s->fields; if ( share->m_iTableFields ) { share->m_sTableField = new char * [ share->m_iTableFields ]; share->m_eTableFieldType = new enum_field_types [ share->m_iTableFields ]; for ( int i=0; im_iTableFields; i++ ) { share->m_sTableField[i] = sphDup ( table->field[i]->field_name ); share->m_eTableFieldType[i] = table->field[i]->type(); } } } // defaults bool bOk = true; bool bQL = false; char * sScheme = NULL; char * sHost = SPHINXAPI_DEFAULT_HOST; char * sIndex = SPHINXAPI_DEFAULT_INDEX; int iPort = SPHINXAPI_DEFAULT_PORT; // parse connection string, if any while ( table->s->connect_string.length!=0 ) { sScheme = sphDup ( table->s->connect_string.str, table->s->connect_string.length ); sHost = strstr ( sScheme, "://" ); if ( !sHost ) { bOk = false; break; } sHost[0] = '\0'; sHost += 3; ///////////////////////////// // sphinxapi via unix socket ///////////////////////////// if ( !strcmp ( sScheme, "unix" ) ) { sHost--; // reuse last slash iPort = 0; if (!( sIndex = strrchr ( sHost, ':' ) )) sIndex = SPHINXAPI_DEFAULT_INDEX; else { *sIndex++ = '\0'; if ( !*sIndex ) sIndex = SPHINXAPI_DEFAULT_INDEX; } bOk = true; break; } ///////////////////// // sphinxapi via tcp ///////////////////// if ( !strcmp ( sScheme, "sphinx" ) ) { char * sPort = strchr ( sHost, ':' ); if ( sPort ) { *sPort++ = '\0'; if ( *sPort ) { sIndex = strchr ( 
sPort, '/' ); if ( sIndex ) *sIndex++ = '\0'; else sIndex = SPHINXAPI_DEFAULT_INDEX; iPort = atoi(sPort); if ( !iPort ) iPort = SPHINXAPI_DEFAULT_PORT; } } else { sIndex = strchr ( sHost, '/' ); if ( sIndex ) *sIndex++ = '\0'; else sIndex = SPHINXAPI_DEFAULT_INDEX; } bOk = true; break; } //////////// // sphinxql //////////// if ( !strcmp ( sScheme, "sphinxql" ) ) { bQL = true; iPort = SPHINXQL_DEFAULT_PORT; // handle port char * sPort = strchr ( sHost, ':' ); sIndex = sHost; // starting point for index name search if ( sPort ) { *sPort++ = '\0'; sIndex = sPort; iPort = atoi(sPort); if ( !iPort ) { bOk = false; // invalid port; can report ER_FOREIGN_DATA_STRING_INVALID break; } } // find index sIndex = strchr ( sIndex, '/' ); if ( sIndex ) *sIndex++ = '\0'; // final checks // host and index names are required bOk = ( sHost && *sHost && sIndex && *sIndex ); break; } // unknown case bOk = false; break; } if ( !bOk ) { my_error ( bCreate ? ER_FOREIGN_DATA_STRING_INVALID_CANT_CREATE : ER_FOREIGN_DATA_STRING_INVALID, MYF(0), table->s->connect_string ); } else { if ( share ) { SafeDeleteArray ( share->m_sScheme ); share->m_sScheme = sScheme; share->m_sHost = sHost; share->m_sIndex = sIndex; share->m_iPort = (ushort)iPort; share->m_bSphinxQL = bQL; } } if ( !bOk && !share ) SafeDeleteArray ( sScheme ); SPH_RET(bOk); } // Example of simple lock controls. The "share" it creates is structure we will // pass to each sphinx handler. Do you have to have one of these? Well, you have // pieces that are used for locking, and they are needed to function. static CSphSEShare * get_share ( const char * table_name, TABLE * table ) { SPH_ENTER_FUNC(); pthread_mutex_lock ( &sphinx_mutex ); CSphSEShare * pShare = NULL; for ( ;; ) { // check if we already have this share #if MYSQL_VERSION_ID>=50120 pShare = (CSphSEShare*) sphinx_hash_search ( &sphinx_open_tables, (const uchar *) table_name, strlen(table_name) ); #else #ifdef __WIN__ pShare = (CSphSEShare*) sphinx_hash_search ( &sphinx_open_tables, (const byte *) table_name, strlen(table_name) ); #else pShare = (CSphSEShare*) sphinx_hash_search ( &sphinx_open_tables, table_name, strlen(table_name) ); #endif // win #endif // pre-5.1.20 if ( pShare ) { pShare->m_iUseCount++; break; } // try to allocate new share pShare = new CSphSEShare (); if ( !pShare ) break; // try to setup it if ( !ParseUrl ( pShare, table, false ) ) { SafeDelete ( pShare ); break; } if ( !pShare->m_bSphinxQL ) pShare->m_pTableQueryCharset = table->field[2]->charset(); // try to hash it pShare->m_iTableNameLen = strlen(table_name); pShare->m_sTable = sphDup ( table_name ); if ( my_hash_insert ( &sphinx_open_tables, (const byte *)pShare ) ) { SafeDelete ( pShare ); break; } // all seems fine break; } pthread_mutex_unlock ( &sphinx_mutex ); SPH_RET(pShare); } // Free lock controls. We call this whenever we close a table. If the table had // the last reference to the share then we free memory associated with it. 
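// ----------------------------------------------------------------------------
// Editor's sketch (illustrative only, not part of the original SphinxSE code).
// ParseUrl() above recognizes sphinx://host[:port]/index and
// sphinxql://host[:port]/index over TCP, plus unix://path[:index] for local
// sockets. The standalone helper below only mirrors the split logic for the
// two TCP forms; its name, signature, and "0/NULL means use the default"
// conventions are assumptions of this sketch. It relies on strstr/strchr/atoi,
// which the surrounding file already uses.
static bool SketchSplitTcpUrl ( char * sUrl, char ** ppScheme, char ** ppHost, int * pPort, char ** ppIndex )
{
	*pPort = 0; // 0 means "use the scheme default"

	char * sHost = strstr ( sUrl, "://" );
	if ( !sHost )
		return false;

	sHost[0] = '\0'; // terminate the scheme
	sHost += 3; // skip "://"

	char * sPort = strchr ( sHost, ':' );
	char * sIndex = strchr ( sPort ? sPort : sHost, '/' );
	if ( sIndex )
		*sIndex++ = '\0'; // terminate the host (or port) part

	if ( sPort )
	{
		*sPort++ = '\0'; // terminate the host part
		*pPort = atoi ( sPort );
	}

	*ppScheme = sUrl;
	*ppHost = sHost;
	*ppIndex = sIndex; // NULL means "use the default index"
	return true;
}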
static int free_share ( CSphSEShare * pShare ) { SPH_ENTER_FUNC(); pthread_mutex_lock ( &sphinx_mutex ); if ( !--pShare->m_iUseCount ) { sphinx_hash_delete ( &sphinx_open_tables, (byte *)pShare ); SafeDelete ( pShare ); } pthread_mutex_unlock ( &sphinx_mutex ); SPH_RET(0); } #if MYSQL_VERSION_ID>50100 static handler * sphinx_create_handler ( handlerton * hton, TABLE_SHARE * table, MEM_ROOT * mem_root ) { sphinx_hton_ptr = hton; return new ( mem_root ) ha_sphinx ( hton, table ); } #endif ////////////////////////////////////////////////////////////////////////////// // CLIENT-SIDE REQUEST STUFF ////////////////////////////////////////////////////////////////////////////// CSphSEQuery::CSphSEQuery ( const char * sQuery, int iLength, const char * sIndex ) : m_sHost ( "" ) , m_iPort ( 0 ) , m_sIndex ( sIndex ? sIndex : "*" ) , m_iOffset ( 0 ) , m_iLimit ( 20 ) , m_bQuery ( false ) , m_sQuery ( "" ) , m_pWeights ( NULL ) , m_iWeights ( 0 ) , m_eMode ( SPH_MATCH_ALL ) , m_eRanker ( SPH_RANK_PROXIMITY_BM25 ) , m_sRankExpr ( NULL ) , m_eSort ( SPH_SORT_RELEVANCE ) , m_sSortBy ( "" ) , m_iMaxMatches ( 1000 ) , m_iMaxQueryTime ( 0 ) , m_iMinID ( 0 ) , m_iMaxID ( 0 ) , m_iFilters ( 0 ) , m_eGroupFunc ( SPH_GROUPBY_DAY ) , m_sGroupBy ( "" ) , m_sGroupSortBy ( "@group desc" ) , m_iCutoff ( 0 ) , m_iRetryCount ( 0 ) , m_iRetryDelay ( 0 ) , m_sGroupDistinct ( "" ) , m_iIndexWeights ( 0 ) , m_iFieldWeights ( 0 ) , m_bGeoAnchor ( false ) , m_sGeoLatAttr ( "" ) , m_sGeoLongAttr ( "" ) , m_fGeoLatitude ( 0.0f ) , m_fGeoLongitude ( 0.0f ) , m_sComment ( "" ) , m_sSelect ( "" ) , m_pBuf ( NULL ) , m_pCur ( NULL ) , m_iBufLeft ( 0 ) , m_bBufOverrun ( false ) { m_sQueryBuffer = new char [ iLength+2 ]; memcpy ( m_sQueryBuffer, sQuery, iLength ); m_sQueryBuffer[iLength] = ';'; m_sQueryBuffer[iLength+1] = '\0'; } CSphSEQuery::~CSphSEQuery () { SPH_ENTER_METHOD(); SafeDeleteArray ( m_sQueryBuffer ); SafeDeleteArray ( m_pWeights ); SafeDeleteArray ( m_pBuf ); for ( int i=0; i int CSphSEQuery::ParseArray ( T ** ppValues, const char * sValue ) { SPH_ENTER_METHOD(); assert ( ppValues ); assert ( !(*ppValues) ); const char * pValue; bool bPrevDigit = false; int iValues = 0; // count the values for ( pValue=sValue; *pValue; pValue++ ) { bool bDigit = (*pValue)>='0' && (*pValue)<='9'; if ( bDigit && !bPrevDigit ) iValues++; bPrevDigit = bDigit; } if ( !iValues ) SPH_RET(0); // extract the values T * pValues = new T [ iValues ]; *ppValues = pValues; int iIndex = 0, iSign = 1; T uValue = 0; bPrevDigit = false; for ( pValue=sValue ;; pValue++ ) { bool bDigit = (*pValue)>='0' && (*pValue)<='9'; if ( bDigit ) { if ( !bPrevDigit ) uValue = 0; uValue = uValue*10 + ( (*pValue)-'0' ); } else if ( bPrevDigit ) { assert ( iIndexs && isspace ( p[-1] ) ) p--; *p = '\0'; return s; } static bool myisattr ( char c ) { return ( c>='0' && c<='9' ) || ( c>='a' && c<='z' ) || ( c>='A' && c<='Z' ) || c=='_'; } bool CSphSEQuery::ParseField ( char * sField ) { SPH_ENTER_METHOD(); // look for option name/value separator char * sValue = strchr ( sField, '=' ); if ( !sValue || sValue==sField || sValue[-1]=='\\' ) { // by default let's assume it's just query if ( sField[0] ) { if ( m_bQuery ) { snprintf ( m_sParseError, sizeof(m_sParseError), "search query already specified; '%s' is redundant", sField ); SPH_RET(false); } else { m_sQuery = sField; m_bQuery = true; // unescape only 1st one char *s = sField, *d = sField; int iSlashes = 0; while ( *s ) { iSlashes = ( *s=='\\' ) ? 
iSlashes+1 : 0; if ( ( iSlashes%2 )==0 ) *d++ = *s; s++; } *d = '\0'; } } SPH_RET(true); } // split *sValue++ = '\0'; sValue = chop ( sValue ); int iValue = atoi ( sValue ); // handle options char * sName = chop ( sField ); if ( !strcmp ( sName, "query" ) ) m_sQuery = sValue; else if ( !strcmp ( sName, "host" ) ) m_sHost = sValue; else if ( !strcmp ( sName, "port" ) ) m_iPort = iValue; else if ( !strcmp ( sName, "index" ) ) m_sIndex = sValue; else if ( !strcmp ( sName, "offset" ) ) m_iOffset = iValue; else if ( !strcmp ( sName, "limit" ) ) m_iLimit = iValue; else if ( !strcmp ( sName, "weights" ) ) m_iWeights = ParseArray ( &m_pWeights, sValue ); else if ( !strcmp ( sName, "minid" ) ) m_iMinID = iValue; else if ( !strcmp ( sName, "maxid" ) ) m_iMaxID = iValue; else if ( !strcmp ( sName, "maxmatches" ) ) m_iMaxMatches = iValue; else if ( !strcmp ( sName, "maxquerytime" ) ) m_iMaxQueryTime = iValue; else if ( !strcmp ( sName, "groupsort" ) ) m_sGroupSortBy = sValue; else if ( !strcmp ( sName, "distinct" ) ) m_sGroupDistinct = sValue; else if ( !strcmp ( sName, "cutoff" ) ) m_iCutoff = iValue; else if ( !strcmp ( sName, "comment" ) ) m_sComment = sValue; else if ( !strcmp ( sName, "select" ) ) m_sSelect = sValue; else if ( !strcmp ( sName, "mode" ) ) { m_eMode = SPH_MATCH_ALL; if ( !strcmp ( sValue, "any" ) ) m_eMode = SPH_MATCH_ANY; else if ( !strcmp ( sValue, "phrase" ) ) m_eMode = SPH_MATCH_PHRASE; else if ( !strcmp ( sValue, "boolean" ) ) m_eMode = SPH_MATCH_BOOLEAN; else if ( !strcmp ( sValue, "ext" ) ) m_eMode = SPH_MATCH_EXTENDED; else if ( !strcmp ( sValue, "extended" ) ) m_eMode = SPH_MATCH_EXTENDED; else if ( !strcmp ( sValue, "ext2" ) ) m_eMode = SPH_MATCH_EXTENDED2; else if ( !strcmp ( sValue, "extended2" ) ) m_eMode = SPH_MATCH_EXTENDED2; else if ( !strcmp ( sValue, "all" ) ) m_eMode = SPH_MATCH_ALL; else if ( !strcmp ( sValue, "fullscan" ) ) m_eMode = SPH_MATCH_FULLSCAN; else { snprintf ( m_sParseError, sizeof(m_sParseError), "unknown matching mode '%s'", sValue ); SPH_RET(false); } } else if ( !strcmp ( sName, "ranker" ) ) { m_eRanker = SPH_RANK_PROXIMITY_BM25; if ( !strcmp ( sValue, "proximity_bm25" ) ) m_eRanker = SPH_RANK_PROXIMITY_BM25; else if ( !strcmp ( sValue, "bm25" ) ) m_eRanker = SPH_RANK_BM25; else if ( !strcmp ( sValue, "none" ) ) m_eRanker = SPH_RANK_NONE; else if ( !strcmp ( sValue, "wordcount" ) ) m_eRanker = SPH_RANK_WORDCOUNT; else if ( !strcmp ( sValue, "proximity" ) ) m_eRanker = SPH_RANK_PROXIMITY; else if ( !strcmp ( sValue, "matchany" ) ) m_eRanker = SPH_RANK_MATCHANY; else if ( !strcmp ( sValue, "fieldmask" ) ) m_eRanker = SPH_RANK_FIELDMASK; else if ( !strcmp ( sValue, "sph04" ) ) m_eRanker = SPH_RANK_SPH04; else if ( !strncmp ( sValue, "expr:", 5 ) ) { m_eRanker = SPH_RANK_EXPR; m_sRankExpr = sValue+5; } else { snprintf ( m_sParseError, sizeof(m_sParseError), "unknown ranking mode '%s'", sValue ); SPH_RET(false); } } else if ( !strcmp ( sName, "sort" ) ) { static const struct { const char * m_sName; ESphSortOrder m_eSort; } dSortModes[] = { { "relevance", SPH_SORT_RELEVANCE }, { "attr_desc:", SPH_SORT_ATTR_DESC }, { "attr_asc:", SPH_SORT_ATTR_ASC }, { "time_segments:", SPH_SORT_TIME_SEGMENTS }, { "extended:", SPH_SORT_EXTENDED }, { "expr:", SPH_SORT_EXPR } }; int i; const int nModes = sizeof(dSortModes)/sizeof(dSortModes[0]); for ( i=0; i ( &tFilter.m_pValues, sValue ); if ( !tFilter.m_iValues ) { assert ( !tFilter.m_pValues ); break; } // all ok m_iFilters++; break; } } else if ( !strcmp ( sName, "indexweights" ) || !strcmp ( sName, "fieldweights" ) ) 
{ bool bIndex = !strcmp ( sName, "indexweights" ); int * pCount = bIndex ? &m_iIndexWeights : &m_iFieldWeights; char ** pNames = bIndex ? &m_sIndexWeight[0] : &m_sFieldWeight[0]; int * pWeights = bIndex ? &m_iIndexWeight[0] : &m_iFieldWeight[0]; *pCount = 0; char * p = sValue; while ( *p && *pCountm_sName = chop(sName); pOverride->m_iType = iType; m_dOverrides.append ( pOverride ); } ulonglong uId = strtoull ( sId, NULL, 10 ); CSphSEQuery::Override_t::Value_t tValue; if ( iType==SPH_ATTR_FLOAT ) tValue.m_fValue = (float)atof(sValue); else if ( iType==SPH_ATTR_BIGINT ) tValue.m_iValue64 = strtoll ( sValue, NULL, 10 ); else tValue.m_uValue = (uint32)strtoul ( sValue, NULL, 10 ); pOverride->m_dIds.append ( uId ); pOverride->m_dValues.append ( tValue ); } if ( !pOverride ) { snprintf ( m_sParseError, sizeof(m_sParseError), "override: id:value mapping expected" ); SPH_RET(false); } SPH_RET(true); } else { snprintf ( m_sParseError, sizeof(m_sParseError), "unknown parameter '%s'", sName ); SPH_RET(false); } // !COMMIT handle syntax errors SPH_RET(true); } bool CSphSEQuery::Parse () { SPH_ENTER_METHOD(); SPH_DEBUG ( "query [[ %s ]]", m_sQueryBuffer ); m_bQuery = false; char * pCur = m_sQueryBuffer; char * pNext = pCur; while ( ( pNext = strchr ( pNext, ';' ) )!=NULL ) { // handle escaped semicolons if ( pNext>m_sQueryBuffer && pNext[-1]=='\\' && pNext[1]!='\0' ) { pNext++; continue; } // handle semicolon-separated clauses *pNext++ = '\0'; if ( !ParseField ( pCur ) ) SPH_RET(false); pCur = pNext; } SPH_DEBUG ( "q [[ %s ]]", m_sQuery ); SPH_RET(true); } void CSphSEQuery::SendBytes ( const void * pBytes, int iBytes ) { SPH_ENTER_METHOD(); if ( m_iBufLeftm_iType==SPH_ATTR_BIGINT ? 16 : 12; // id64 + value iReqSize += strlen ( pOverride->m_sName ) + 12 + uSize*pOverride->m_dIds.elements(); } // select iReqSize += 4; m_iBufLeft = 0; SafeDeleteArray ( m_pBuf ); m_pBuf = new char [ iReqSize ]; if ( !m_pBuf ) SPH_RET(-1); m_pCur = m_pBuf; m_iBufLeft = iReqSize; m_bBufOverrun = false; (*ppBuffer) = m_pBuf; // build request SendWord ( SEARCHD_COMMAND_SEARCH ); // command id SendWord ( VER_COMMAND_SEARCH ); // command version SendInt ( iReqSize-8 ); // packet body length SendInt ( 0 ); // its a client SendInt ( 1 ); // number of queries SendInt ( m_iOffset ); SendInt ( m_iLimit ); SendInt ( m_eMode ); SendInt ( m_eRanker ); // 1.16+ if ( m_eRanker==SPH_RANK_EXPR ) SendString ( m_sRankExpr ); SendInt ( m_eSort ); SendString ( m_sSortBy ); // sort attr SendString ( m_sQuery ); // query SendInt ( m_iWeights ); for ( int j=0; jm_sName ); SendDword ( pOverride->m_iType ); SendInt ( pOverride->m_dIds.elements() ); for ( int j=0; jm_dIds.elements(); j++ ) { SendUint64 ( pOverride->m_dIds.at(j) ); if ( pOverride->m_iType==SPH_ATTR_FLOAT ) SendFloat ( pOverride->m_dValues.at(j).m_fValue ); else if ( pOverride->m_iType==SPH_ATTR_BIGINT ) SendUint64 ( pOverride->m_dValues.at(j).m_iValue64 ); else SendDword ( pOverride->m_dValues.at(j).m_uValue ); } } // select SendString ( m_sSelect ); // detect buffer overruns and underruns, and report internal error if ( m_bBufOverrun || m_iBufLeft!=0 || m_pCur-m_pBuf!=iReqSize ) SPH_RET(-1); // all fine SPH_RET ( iReqSize ); } ////////////////////////////////////////////////////////////////////////////// // SPHINX HANDLER ////////////////////////////////////////////////////////////////////////////// static const char * ha_sphinx_exts[] = { NullS }; #if MYSQL_VERSION_ID<50100 ha_sphinx::ha_sphinx ( TABLE_ARG * table ) : handler ( &sphinx_hton, table ) #else ha_sphinx::ha_sphinx ( 
handlerton * hton, TABLE_ARG * table ) : handler ( hton, table ) #endif , m_pShare ( NULL ) , m_iMatchesTotal ( 0 ) , m_iCurrentPos ( 0 ) , m_pCurrentKey ( NULL ) , m_iCurrentKeyLen ( 0 ) , m_pResponse ( NULL ) , m_pResponseEnd ( NULL ) , m_pCur ( NULL ) , m_bUnpackError ( false ) , m_iFields ( 0 ) , m_dFields ( NULL ) , m_iAttrs ( 0 ) , m_dAttrs ( NULL ) , m_bId64 ( 0 ) , m_dUnboundFields ( NULL ) { SPH_ENTER_METHOD(); if ( current_thd ) current_thd->variables.engine_condition_pushdown = true; SPH_VOID_RET(); } // If frm_error() is called then we will use this to to find out what file extentions // exist for the storage engine. This is also used by the default rename_table and // delete_table method in handler.cc. const char ** ha_sphinx::bas_ext() const { return ha_sphinx_exts; } // Used for opening tables. The name will be the name of the file. // A table is opened when it needs to be opened. For instance // when a request comes in for a select on the table (tables are not // open and closed for each request, they are cached). // // Called from handler.cc by handler::ha_open(). The server opens all tables by // calling ha_open() which then calls the handler specific open(). int ha_sphinx::open ( const char * name, int, uint ) { SPH_ENTER_METHOD(); m_pShare = get_share ( name, table ); if ( !m_pShare ) SPH_RET(1); thr_lock_data_init ( &m_pShare->m_tLock, &m_tLock, NULL ); #if MYSQL_VERSION_ID>50100 *thd_ha_data ( table->in_use, ht ) = NULL; #else table->in_use->ha_data [ sphinx_hton.slot ] = NULL; #endif SPH_RET(0); } int ha_sphinx::Connect ( const char * sHost, ushort uPort ) { struct sockaddr_in sin; #ifndef __WIN__ struct sockaddr_un saun; #endif int iDomain = 0; int iSockaddrSize = 0; struct sockaddr * pSockaddr = NULL; in_addr_t ip_addr; if ( uPort ) { iDomain = AF_INET; iSockaddrSize = sizeof(sin); pSockaddr = (struct sockaddr *) &sin; memset ( &sin, 0, sizeof(sin) ); sin.sin_family = AF_INET; sin.sin_port = htons(uPort); // prepare host address if ( (int)( ip_addr = inet_addr(sHost) )!=(int)INADDR_NONE ) { memcpy ( &sin.sin_addr, &ip_addr, sizeof(ip_addr) ); } else { int tmp_errno; bool bError = false; #if MYSQL_VERSION_ID>=50515 struct addrinfo *hp = NULL; tmp_errno = getaddrinfo ( sHost, NULL, NULL, &hp ); if ( !tmp_errno || !hp || !hp->ai_addr ) { bError = true; if ( hp ) freeaddrinfo ( hp ); } #else struct hostent tmp_hostent, *hp; char buff2 [ GETHOSTBYNAME_BUFF_SIZE ]; hp = my_gethostbyname_r ( sHost, &tmp_hostent, buff2, sizeof(buff2), &tmp_errno ); if ( !hp ) { my_gethostbyname_r_free(); bError = true; } #endif if ( bError ) { char sError[256]; my_snprintf ( sError, sizeof(sError), "failed to resolve searchd host (name=%s)", sHost ); my_error ( ER_CONNECT_TO_FOREIGN_DATA_SOURCE, MYF(0), sError ); SPH_RET(-1); } #if MYSQL_VERSION_ID>=50515 memcpy ( &sin.sin_addr, hp->ai_addr, Min ( sizeof(sin.sin_addr), (size_t)hp->ai_addrlen ) ); freeaddrinfo ( hp ); #else memcpy ( &sin.sin_addr, hp->h_addr, Min ( sizeof(sin.sin_addr), (size_t)hp->h_length ) ); my_gethostbyname_r_free(); #endif } } else { #ifndef __WIN__ iDomain = AF_UNIX; iSockaddrSize = sizeof(saun); pSockaddr = (struct sockaddr *) &saun; memset ( &saun, 0, sizeof(saun) ); saun.sun_family = AF_UNIX; strncpy ( saun.sun_path, sHost, sizeof(saun.sun_path)-1 ); #else my_error ( ER_CONNECT_TO_FOREIGN_DATA_SOURCE, MYF(0), "UNIX sockets are not supported on Windows" ); SPH_RET(-1); #endif } char sError[512]; int iSocket = socket ( iDomain, SOCK_STREAM, 0 ); if ( iSocket<0 ) { my_error ( ER_CONNECT_TO_FOREIGN_DATA_SOURCE, 
MYF(0), "failed to create client socket" ); SPH_RET(-1); } if ( connect ( iSocket, pSockaddr, iSockaddrSize )<0 ) { sphSockClose ( iSocket ); my_snprintf ( sError, sizeof(sError), "failed to connect to searchd (host=%s, errno=%d, port=%d)", sHost, errno, (int)uPort ); my_error ( ER_CONNECT_TO_FOREIGN_DATA_SOURCE, MYF(0), sError ); SPH_RET(-1); } return iSocket; } int ha_sphinx::ConnectAPI ( const char * sQueryHost, int iQueryPort ) { SPH_ENTER_METHOD(); const char * sHost = ( sQueryHost && *sQueryHost ) ? sQueryHost : m_pShare->m_sHost; ushort uPort = iQueryPort ? (ushort)iQueryPort : m_pShare->m_iPort; int iSocket = Connect ( sHost, uPort ); if ( iSocket<0 ) SPH_RET ( iSocket ); char sError[512]; int version; if ( ::recv ( iSocket, (char *)&version, sizeof(version), 0 )!=sizeof(version) ) { sphSockClose ( iSocket ); my_snprintf ( sError, sizeof(sError), "failed to receive searchd version (host=%s, port=%d)", sHost, (int)uPort ); my_error ( ER_CONNECT_TO_FOREIGN_DATA_SOURCE, MYF(0), sError ); SPH_RET(-1); } uint uClientVersion = htonl ( SPHINX_SEARCHD_PROTO ); if ( ::send ( iSocket, (char*)&uClientVersion, sizeof(uClientVersion), 0 )!=sizeof(uClientVersion) ) { sphSockClose ( iSocket ); my_snprintf ( sError, sizeof(sError), "failed to send client version (host=%s, port=%d)", sHost, (int)uPort ); my_error ( ER_CONNECT_TO_FOREIGN_DATA_SOURCE, MYF(0), sError ); SPH_RET(-1); } SPH_RET ( iSocket ); } // Closes a table. We call the free_share() function to free any resources // that we have allocated in the "shared" structure. // // Called from sql_base.cc, sql_select.cc, and table.cc. // In sql_select.cc it is only used to close up temporary tables or during // the process where a temporary table is converted over to being a // myisam table. // For sql_base.cc look at close_data_tables(). int ha_sphinx::close() { SPH_ENTER_METHOD(); SPH_RET ( free_share ( m_pShare ) ); } int ha_sphinx::HandleMysqlError ( MYSQL * pConn, int iErrCode ) { CSphSEThreadData * pTls = GetTls (); if ( pTls ) { strncpy ( pTls->m_tStats.m_sLastMessage, mysql_error ( pConn ), sizeof ( pTls->m_tStats.m_sLastMessage ) ); pTls->m_tStats.m_bLastError = true; } mysql_close ( pConn ); my_error ( iErrCode, MYF(0), pTls->m_tStats.m_sLastMessage ); return -1; } int ha_sphinx::extra ( enum ha_extra_function op ) { CSphSEThreadData * pTls = GetTls(); if ( pTls ) { if ( op==HA_EXTRA_WRITE_CAN_REPLACE ) pTls->m_bReplace = true; else if ( op==HA_EXTRA_WRITE_CANNOT_REPLACE ) pTls->m_bReplace = false; } return 0; } int ha_sphinx::write_row ( byte * ) { SPH_ENTER_METHOD(); if ( !m_pShare || !m_pShare->m_bSphinxQL ) SPH_RET ( HA_ERR_WRONG_COMMAND ); // SphinxQL inserts only, pretty much similar to abandoned federated char sQueryBuf[1024]; char sValueBuf[1024]; String sQuery ( sQueryBuf, sizeof(sQueryBuf), &my_charset_bin ); String sValue ( sValueBuf, sizeof(sQueryBuf), &my_charset_bin ); sQuery.length ( 0 ); sValue.length ( 0 ); CSphSEThreadData * pTls = GetTls (); sQuery.append ( pTls && pTls->m_bReplace ? 
"REPLACE INTO " : "INSERT INTO " ); sQuery.append ( m_pShare->m_sIndex ); sQuery.append ( " (" ); for ( Field ** ppField = table->field; *ppField; ppField++ ) { sQuery.append ( (*ppField)->field_name ); if ( ppField[1] ) sQuery.append ( ", " ); } sQuery.append ( ") VALUES (" ); for ( Field ** ppField = table->field; *ppField; ppField++ ) { if ( (*ppField)->is_null() ) { sQuery.append ( "''" ); } else { if ( (*ppField)->type()==MYSQL_TYPE_TIMESTAMP ) { Item_field * pWrap = new Item_field ( *ppField ); // autofreed by query arena, I assume Item_func_unix_timestamp * pConv = new Item_func_unix_timestamp ( pWrap ); pConv->quick_fix_field(); unsigned int uTs = (unsigned int) pConv->val_int(); snprintf ( sValueBuf, sizeof(sValueBuf), "'%u'", uTs ); sQuery.append ( sValueBuf ); } else { (*ppField)->val_str ( &sValue ); sQuery.append ( "'" ); sValue.print ( &sQuery ); sQuery.append ( "'" ); sValue.length(0); } } if ( ppField[1] ) sQuery.append ( ", " ); } sQuery.append ( ")" ); // FIXME? pretty inefficient to reconnect every time under high load, // but this was intentionally written for a low load scenario.. MYSQL * pConn = mysql_init ( NULL ); if ( !pConn ) SPH_RET ( ER_OUT_OF_RESOURCES ); unsigned int uTimeout = 1; mysql_options ( pConn, MYSQL_OPT_CONNECT_TIMEOUT, (const char*)&uTimeout ); if ( !mysql_real_connect ( pConn, m_pShare->m_sHost, "root", "", "", m_pShare->m_iPort, m_pShare->m_sSocket, 0 ) ) SPH_RET ( HandleMysqlError ( pConn, ER_CONNECT_TO_FOREIGN_DATA_SOURCE ) ); if ( mysql_real_query ( pConn, sQuery.ptr(), sQuery.length() ) ) SPH_RET ( HandleMysqlError ( pConn, ER_QUERY_ON_FOREIGN_DATA_SOURCE ) ); // all ok! mysql_close ( pConn ); SPH_RET(0); } static inline bool IsIntegerFieldType ( enum_field_types eType ) { return eType==MYSQL_TYPE_LONG || eType==MYSQL_TYPE_LONGLONG; } static inline bool IsIDField ( Field * pField ) { enum_field_types eType = pField->type(); if ( eType==MYSQL_TYPE_LONGLONG ) return true; if ( eType==MYSQL_TYPE_LONG && ((Field_num*)pField)->unsigned_flag ) return true; return false; } int ha_sphinx::delete_row ( const byte * ) { SPH_ENTER_METHOD(); if ( !m_pShare || !m_pShare->m_bSphinxQL ) SPH_RET ( HA_ERR_WRONG_COMMAND ); char sQueryBuf[1024]; String sQuery ( sQueryBuf, sizeof(sQueryBuf), &my_charset_bin ); sQuery.length ( 0 ); sQuery.append ( "DELETE FROM " ); sQuery.append ( m_pShare->m_sIndex ); sQuery.append ( " WHERE id=" ); char sValue[32]; snprintf ( sValue, sizeof(sValue), "%lld", table->field[0]->val_int() ); sQuery.append ( sValue ); // FIXME? pretty inefficient to reconnect every time under high load, // but this was intentionally written for a low load scenario.. MYSQL * pConn = mysql_init ( NULL ); if ( !pConn ) SPH_RET ( ER_OUT_OF_RESOURCES ); unsigned int uTimeout = 1; mysql_options ( pConn, MYSQL_OPT_CONNECT_TIMEOUT, (const char*)&uTimeout ); if ( !mysql_real_connect ( pConn, m_pShare->m_sHost, "root", "", "", m_pShare->m_iPort, m_pShare->m_sSocket, 0 ) ) SPH_RET ( HandleMysqlError ( pConn, ER_CONNECT_TO_FOREIGN_DATA_SOURCE ) ); if ( mysql_real_query ( pConn, sQuery.ptr(), sQuery.length() ) ) SPH_RET ( HandleMysqlError ( pConn, ER_QUERY_ON_FOREIGN_DATA_SOURCE ) ); // all ok! 
mysql_close ( pConn ); SPH_RET(0); } int ha_sphinx::update_row ( const byte *, byte * ) { SPH_ENTER_METHOD(); SPH_RET ( HA_ERR_WRONG_COMMAND ); } // keynr is key (index) number // sorted is 1 if result MUST be sorted according to index int ha_sphinx::index_init ( uint keynr, bool ) { SPH_ENTER_METHOD(); active_index = keynr; CSphSEThreadData * pTls = GetTls(); if ( pTls ) pTls->m_bCondDone = false; SPH_RET(0); } int ha_sphinx::index_end() { SPH_ENTER_METHOD(); SPH_RET(0); } bool ha_sphinx::CheckResponcePtr ( int iLen ) { if ( m_pCur+iLen>m_pResponseEnd ) { m_pCur = m_pResponseEnd; m_bUnpackError = true; return false; } return true; } uint32 ha_sphinx::UnpackDword () { if ( !CheckResponcePtr ( sizeof(uint32) ) ) // NOLINT { return 0; } uint32 uRes = ntohl ( sphUnalignedRead ( *(uint32*)m_pCur ) ); m_pCur += sizeof(uint32); // NOLINT return uRes; } char * ha_sphinx::UnpackString () { uint32 iLen = UnpackDword (); if ( !iLen ) return NULL; if ( !CheckResponcePtr ( iLen ) ) { return NULL; } char * sRes = new char [ 1+iLen ]; memcpy ( sRes, m_pCur, iLen ); sRes[iLen] = '\0'; m_pCur += iLen; return sRes; } static inline const char * FixNull ( const char * s ) { return s ? s : "(null)"; } bool ha_sphinx::UnpackSchema () { SPH_ENTER_METHOD(); // cleanup if ( m_dFields ) for ( int i=0; i<(int)m_iFields; i++ ) SafeDeleteArray ( m_dFields[i] ); SafeDeleteArray ( m_dFields ); // unpack network packet uint32 uStatus = UnpackDword (); char * sMessage = NULL; if ( uStatus!=SEARCHD_OK ) { sMessage = UnpackString (); CSphSEThreadData * pTls = GetTls (); if ( pTls ) { strncpy ( pTls->m_tStats.m_sLastMessage, sMessage, sizeof(pTls->m_tStats.m_sLastMessage) ); pTls->m_tStats.m_bLastError = ( uStatus==SEARCHD_ERROR ); } if ( uStatus==SEARCHD_ERROR ) { char sError[1024]; my_snprintf ( sError, sizeof(sError), "searchd error: %s", sMessage ); my_error ( ER_QUERY_ON_FOREIGN_DATA_SOURCE, MYF(0), sError ); SafeDeleteArray ( sMessage ); SPH_RET ( false ); } } m_iFields = UnpackDword (); m_dFields = new char * [ m_iFields ]; if ( !m_dFields ) { my_error ( ER_QUERY_ON_FOREIGN_DATA_SOURCE, MYF(0), "INTERNAL ERROR: UnpackSchema() failed (fields alloc error)" ); SPH_RET(false); } for ( uint32 i=0; im_iTableFields; j++ ) { const char * sTableField = m_pShare->m_sTableField[j]; const char * sAttrField = m_dAttrs[i].m_sName; if ( m_dAttrs[i].m_sName[0]=='@' ) { const char * sAtPrefix = "_sph_"; if ( strncmp ( sTableField, sAtPrefix, strlen(sAtPrefix) ) ) continue; sTableField += strlen(sAtPrefix); sAttrField++; } if ( !strcasecmp ( sAttrField, sTableField ) ) { // we're almost good, but // let's enforce that timestamp columns can only receive timestamp attributes if ( m_pShare->m_eTableFieldType[j]!=MYSQL_TYPE_TIMESTAMP || m_dAttrs[i].m_uType==SPH_ATTR_TIMESTAMP ) m_dAttrs[i].m_iField = j; break; } } } m_iMatchesTotal = UnpackDword (); m_bId64 = UnpackDword (); if ( m_bId64 && m_pShare->m_eTableFieldType[0]!=MYSQL_TYPE_LONGLONG ) { my_error ( ER_QUERY_ON_FOREIGN_DATA_SOURCE, MYF(0), "INTERNAL ERROR: 1st column must be bigint to accept 64-bit DOCIDs" ); SPH_RET(false); } // network packet unpacked; build unbound fields map SafeDeleteArray ( m_dUnboundFields ); m_dUnboundFields = new int [ m_pShare->m_iTableFields ]; for ( int i=0; im_iTableFields; i++ ) { if ( im_eTableFieldType[i]==MYSQL_TYPE_TIMESTAMP ) m_dUnboundFields[i] = SPH_ATTR_TIMESTAMP; else m_dUnboundFields[i] = SPH_ATTR_INTEGER; } for ( uint32 i=0; i=0 ) m_dUnboundFields [ m_dAttrs[i].m_iField ] = SPH_ATTR_NONE; if ( m_bUnpackError ) my_error ( 
ER_QUERY_ON_FOREIGN_DATA_SOURCE, MYF(0), "INTERNAL ERROR: UnpackSchema() failed (unpack error)" ); SPH_RET ( !m_bUnpackError ); } bool ha_sphinx::UnpackStats ( CSphSEStats * pStats ) { assert ( pStats ); char * pCurSave = m_pCur; for ( uint i=0; im_iMatchesTotal = UnpackDword (); pStats->m_iMatchesFound = UnpackDword (); pStats->m_iQueryMsec = UnpackDword (); pStats->m_iWords = UnpackDword (); if ( m_bUnpackError ) return false; SafeDeleteArray ( pStats->m_dWords ); if ( pStats->m_iWords<0 || pStats->m_iWords>=SPHINXSE_MAX_KEYWORDSTATS ) return false; pStats->m_dWords = new CSphSEWordStats [ pStats->m_iWords ]; if ( !pStats->m_dWords ) return false; for ( int i=0; im_iWords; i++ ) { CSphSEWordStats & tWord = pStats->m_dWords[i]; tWord.m_sWord = UnpackString (); tWord.m_iDocs = UnpackDword (); tWord.m_iHits = UnpackDword (); } if ( m_bUnpackError ) return false; m_pCur = pCurSave; return true; } /// condition pushdown implementation, to properly intercept WHERE clauses on my columns const COND * ha_sphinx::cond_push ( const COND * cond ) { // catch the simplest case: query_column="some text" for ( ;; ) { if ( cond->type()!=COND::FUNC_ITEM ) break; Item_func * condf = (Item_func *)cond; if ( condf->functype()!=Item_func::EQ_FUNC || condf->argument_count()!=2 ) break; // get my tls CSphSEThreadData * pTls = GetTls (); if ( !pTls ) break; Item ** args = condf->arguments(); if ( !m_pShare->m_bSphinxQL ) { // on non-QL tables, intercept query=value condition for SELECT if (!( args[0]->type()==COND::FIELD_ITEM && args[1]->type()==COND::STRING_ITEM )) break; Item_field * pField = (Item_field *) args[0]; if ( pField->field->field_index!=2 ) // FIXME! magic key index break; // copy the query, and let know that we intercepted this condition Item_string * pString = (Item_string *) args[1]; pTls->m_bQuery = true; strncpy ( pTls->m_sQuery, pString->str_value.c_ptr(), sizeof(pTls->m_sQuery) ); pTls->m_sQuery[sizeof(pTls->m_sQuery)-1] = '\0'; pTls->m_pQueryCharset = pString->str_value.charset(); } else { if (!( args[0]->type()==COND::FIELD_ITEM && args[1]->type()==COND::INT_ITEM )) break; // on QL tables, intercept id=value condition for DELETE Item_field * pField = (Item_field *) args[0]; if ( pField->field->field_index!=0 ) // FIXME! magic key index break; Item_int * pVal = (Item_int *) args[1]; pTls->m_iCondId = pVal->val_int(); pTls->m_bCondId = true; } // we intercepted this condition return NULL; } // don't change anything return cond; } /// condition popup void ha_sphinx::cond_pop () { CSphSEThreadData * pTls = GetTls (); if ( pTls ) pTls->m_bQuery = false; } /// get TLS (maybe allocate it, too) CSphSEThreadData * ha_sphinx::GetTls() { // where do we store that pointer in today's version? CSphSEThreadData ** ppTls; #if MYSQL_VERSION_ID>50100 ppTls = (CSphSEThreadData**) thd_ha_data ( table->in_use, ht ); #else ppTls = (CSphSEThreadData**) ¤t_thd->ha_data[sphinx_hton.slot]; #endif // >50100 // allocate if needed if ( !*ppTls ) *ppTls = new CSphSEThreadData (); // errors will be handled by caller return *ppTls; } // Positions an index cursor to the index specified in the handle. Fetches the // row if available. If the key value is null, begin at the first key of the // index. 
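// ----------------------------------------------------------------------------
// Editor's note (hypothetical usage example, not in the original source). With
// cond_push() above in place, a typical SphinxAPI-table query looks like
//
//   SELECT * FROM t1 WHERE query='test it;mode=any;limit=50';
//
// cond_push() stashes the string after query= in the per-connection TLS, and
// index_read() below builds the searchd request from it (ParseField() handles
// the semicolon-separated option=value clauses) instead of relying on the
// possibly truncated MySQL key buffer.
//
// The searchd reply that index_read() then parses begins with a fixed 8-byte
// header, all fields big-endian (the field names below are descriptive only):
//
//   uint16 status   // SEARCHD_OK, SEARCHD_WARNING, SEARCHD_ERROR, ...
//   uint16 version  // response format version
//   uint32 length   // body length in bytes; the body follows immediately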
int ha_sphinx::index_read ( byte * buf, const byte * key, uint key_len, enum ha_rkey_function ) { SPH_ENTER_METHOD(); char sError[256]; // set new data for thd->ha_data, it is used in show_status CSphSEThreadData * pTls = GetTls(); if ( !pTls ) { my_error ( ER_QUERY_ON_FOREIGN_DATA_SOURCE, MYF(0), "INTERNAL ERROR: TLS malloc() failed" ); SPH_RET ( HA_ERR_END_OF_FILE ); } pTls->m_tStats.Reset (); // sphinxql table, just return the key once if ( m_pShare->m_bSphinxQL ) { // over and out if ( pTls->m_bCondDone ) SPH_RET ( HA_ERR_END_OF_FILE ); // return a value from pushdown, if any if ( pTls->m_bCondId ) { table->field[0]->store ( pTls->m_iCondId, 1 ); pTls->m_bCondDone = true; SPH_RET(0); } // return a value from key longlong iRef = 0; if ( key_len==4 ) iRef = uint4korr ( key ); else if ( key_len==8 ) iRef = uint8korr ( key ); else { my_error ( ER_QUERY_ON_FOREIGN_DATA_SOURCE, MYF(0), "INTERNAL ERROR: unexpected key length" ); SPH_RET ( HA_ERR_END_OF_FILE ); } table->field[0]->store ( iRef, 1 ); pTls->m_bCondDone = true; SPH_RET(0); } // parse query if ( pTls->m_bQuery ) { // we have a query from condition pushdown m_pCurrentKey = (const byte *) pTls->m_sQuery; m_iCurrentKeyLen = strlen(pTls->m_sQuery); } else { // just use the key (might be truncated) m_pCurrentKey = key+HA_KEY_BLOB_LENGTH; m_iCurrentKeyLen = uint2korr(key); // or maybe key_len? pTls->m_pQueryCharset = m_pShare ? m_pShare->m_pTableQueryCharset : NULL; } CSphSEQuery q ( (const char*)m_pCurrentKey, m_iCurrentKeyLen, m_pShare->m_sIndex ); if ( !q.Parse () ) { my_error ( ER_QUERY_ON_FOREIGN_DATA_SOURCE, MYF(0), q.m_sParseError ); SPH_RET ( HA_ERR_END_OF_FILE ); } // do connect int iSocket = ConnectAPI ( q.m_sHost, q.m_iPort ); if ( iSocket<0 ) SPH_RET ( HA_ERR_END_OF_FILE ); // my buffer char * pBuffer; // will be free by CSphSEQuery dtor; do NOT free manually int iReqLen = q.BuildRequest ( &pBuffer ); if ( iReqLen<=0 ) { my_error ( ER_QUERY_ON_FOREIGN_DATA_SOURCE, MYF(0), "INTERNAL ERROR: q.BuildRequest() failed" ); SPH_RET ( HA_ERR_END_OF_FILE ); } // send request ::send ( iSocket, pBuffer, iReqLen, 0 ); // receive reply char sHeader[8]; int iGot = ::recv ( iSocket, sHeader, sizeof(sHeader), RECV_FLAGS ); if ( iGot!=sizeof(sHeader) ) { my_error ( ER_QUERY_ON_FOREIGN_DATA_SOURCE, MYF(0), "failed to receive response header (searchd went away?)" ); SPH_RET ( HA_ERR_END_OF_FILE ); } short int uRespStatus = ntohs ( sphUnalignedRead ( *(short int*)( &sHeader[0] ) ) ); short int uRespVersion = ntohs ( sphUnalignedRead ( *(short int*)( &sHeader[2] ) ) ); uint uRespLength = ntohl ( sphUnalignedRead ( *(uint *)( &sHeader[4] ) ) ); SPH_DEBUG ( "got response header (status=%d version=%d length=%d)", uRespStatus, uRespVersion, uRespLength ); SafeDeleteArray ( m_pResponse ); if ( uRespLength<=SPHINXSE_MAX_ALLOC ) m_pResponse = new char [ uRespLength+1 ]; if ( !m_pResponse ) { my_snprintf ( sError, sizeof(sError), "bad searchd response length (length=%u)", uRespLength ); my_error ( ER_QUERY_ON_FOREIGN_DATA_SOURCE, MYF(0), sError ); SPH_RET ( HA_ERR_END_OF_FILE ); } int iRecvLength = 0; while ( iRecvLength<(int)uRespLength ) { int iRecv = ::recv ( iSocket, m_pResponse+iRecvLength, uRespLength-iRecvLength, RECV_FLAGS ); if ( iRecv<0 ) break; iRecvLength += iRecv; } ::closesocket ( iSocket ); iSocket = -1; if ( iRecvLength!=(int)uRespLength ) { my_snprintf ( sError, sizeof(sError), "net read error (expected=%d, got=%d)", uRespLength, iRecvLength ); my_error ( ER_QUERY_ON_FOREIGN_DATA_SOURCE, MYF(0), sError ); SPH_RET ( HA_ERR_END_OF_FILE ); } 
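// ------------------------------------------------------------------
// Editor's note (added summary, not in the original source). At this
// point the whole reply body sits in m_pResponse; the remainder of
// index_read() walks it in three steps: UnpackSchema() reads the field
// and attribute lists and maps attributes onto table columns,
// UnpackStats() peeks ahead at the trailer to fill the per-query
// statistics reported via sphinx_show_status(), and get_rec() (called
// below and again from index_next()) unpacks one match per call.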
// we'll have a message, at least pTls->m_bStats = true; // parse reply m_iCurrentPos = 0; m_pCur = m_pResponse; m_pResponseEnd = m_pResponse + uRespLength; m_bUnpackError = false; if ( uRespStatus!=SEARCHD_OK ) { char * sMessage = UnpackString (); if ( !sMessage ) { my_error ( ER_QUERY_ON_FOREIGN_DATA_SOURCE, MYF(0), "no valid response from searchd (status=%d, resplen=%d)", uRespStatus, uRespLength ); SPH_RET ( HA_ERR_END_OF_FILE ); } strncpy ( pTls->m_tStats.m_sLastMessage, sMessage, sizeof(pTls->m_tStats.m_sLastMessage) ); SafeDeleteArray ( sMessage ); if ( uRespStatus!=SEARCHD_WARNING ) { my_snprintf ( sError, sizeof(sError), "searchd error: %s", pTls->m_tStats.m_sLastMessage ); my_error ( ER_QUERY_ON_FOREIGN_DATA_SOURCE, MYF(0), sError ); pTls->m_tStats.m_bLastError = true; SPH_RET ( HA_ERR_END_OF_FILE ); } } if ( !UnpackSchema () ) SPH_RET ( HA_ERR_END_OF_FILE ); if ( !UnpackStats ( &pTls->m_tStats ) ) { my_error ( ER_QUERY_ON_FOREIGN_DATA_SOURCE, MYF(0), "INTERNAL ERROR: UnpackStats() failed" ); SPH_RET ( HA_ERR_END_OF_FILE ); } SPH_RET ( get_rec ( buf, key, key_len ) ); } // Positions an index cursor to the index specified in key. Fetches the // row if any. This is only used to read whole keys. int ha_sphinx::index_read_idx ( byte *, uint, const byte *, uint, enum ha_rkey_function ) { SPH_ENTER_METHOD(); SPH_RET ( HA_ERR_WRONG_COMMAND ); } // Used to read forward through the index. int ha_sphinx::index_next ( byte * buf ) { SPH_ENTER_METHOD(); SPH_RET ( get_rec ( buf, m_pCurrentKey, m_iCurrentKeyLen ) ); } int ha_sphinx::index_next_same ( byte * buf, const byte * key, uint keylen ) { SPH_ENTER_METHOD(); SPH_RET ( get_rec ( buf, key, keylen ) ); } int ha_sphinx::get_rec ( byte * buf, const byte *, uint ) { SPH_ENTER_METHOD(); if ( m_iCurrentPos>=m_iMatchesTotal ) { SafeDeleteArray ( m_pResponse ); SPH_RET ( HA_ERR_END_OF_FILE ); } #if MYSQL_VERSION_ID>50100 my_bitmap_map * org_bitmap = dbug_tmp_use_all_columns ( table, table->write_set ); #endif Field ** field = table->field; // unpack and return the match longlong uMatchID = UnpackDword (); if ( m_bId64 ) uMatchID = ( uMatchID<<32 ) + UnpackDword(); uint32 uMatchWeight = UnpackDword (); field[0]->store ( uMatchID, 1 ); field[1]->store ( uMatchWeight, 1 ); field[2]->store ( (const char*)m_pCurrentKey, m_iCurrentKeyLen, &my_charset_bin ); for ( uint32 i=0; i0 && !m_bUnpackError; uValue-- ) UnpackDword(); } else if ( m_dAttrs[i].m_uType==SPH_ATTR_STRING && CheckResponcePtr ( uValue ) ) { m_pCur += uValue; } continue; } Field * af = field [ m_dAttrs[i].m_iField ]; switch ( m_dAttrs[i].m_uType ) { case SPH_ATTR_INTEGER: case SPH_ATTR_ORDINAL: case SPH_ATTR_BOOL: af->store ( uValue, 1 ); break; case SPH_ATTR_FLOAT: af->store ( sphDW2F(uValue) ); break; case SPH_ATTR_TIMESTAMP: if ( af->type()==MYSQL_TYPE_TIMESTAMP ) longstore ( af->ptr, uValue ); // because store() does not accept timestamps else af->store ( uValue, 1 ); break; case SPH_ATTR_BIGINT: af->store ( iValue64, 0 ); break; case SPH_ATTR_STRING: if ( !uValue ) af->store ( "", 0, &my_charset_bin ); else if ( CheckResponcePtr ( uValue ) ) { af->store ( m_pCur, uValue, &my_charset_bin ); m_pCur += uValue; } break; case SPH_ATTR_UINT64SET: case SPH_ATTR_UINT32SET : if ( uValue<=0 ) { // shortcut, empty MVA set af->store ( "", 0, &my_charset_bin ); } else { // convert MVA set to comma-separated string char sBuf[1024]; // FIXME! 
magic size char * pCur = sBuf; if ( m_dAttrs[i].m_uType==SPH_ATTR_UINT32SET ) { for ( ; uValue>0 && !m_bUnpackError; uValue-- ) { uint32 uEntry = UnpackDword (); if ( pCur < sBuf+sizeof(sBuf)-16 ) // 10 chars per 32bit value plus some safety bytes { snprintf ( pCur, sBuf+sizeof(sBuf)-pCur, "%u", uEntry ); while ( *pCur ) *pCur++; if ( uValue>1 ) *pCur++ = ','; // non-trailing commas } } } else { for ( ; uValue>0 && !m_bUnpackError; uValue-=2 ) { uint32 uEntryLo = UnpackDword (); uint32 uEntryHi = UnpackDword(); if ( pCur < sBuf+sizeof(sBuf)-24 ) // 20 chars per 64bit value plus some safety bytes { snprintf ( pCur, sBuf+sizeof(sBuf)-pCur, "%u%u", uEntryHi, uEntryLo ); while ( *pCur ) *pCur++; if ( uValue>2 ) *pCur++ = ','; // non-trailing commas } } } af->store ( sBuf, pCur-sBuf, &my_charset_bin ); } break; default: my_error ( ER_QUERY_ON_FOREIGN_DATA_SOURCE, MYF(0), "INTERNAL ERROR: unhandled attr type" ); SafeDeleteArray ( m_pResponse ); SPH_RET ( HA_ERR_END_OF_FILE ); } } if ( m_bUnpackError ) { my_error ( ER_QUERY_ON_FOREIGN_DATA_SOURCE, MYF(0), "INTERNAL ERROR: response unpacker failed" ); SafeDeleteArray ( m_pResponse ); SPH_RET ( HA_ERR_END_OF_FILE ); } // zero out unmapped fields for ( int i=SPHINXSE_SYSTEM_COLUMNS; i<(int)table->s->fields; i++ ) if ( m_dUnboundFields[i]!=SPH_ATTR_NONE ) switch ( m_dUnboundFields[i] ) { case SPH_ATTR_INTEGER: table->field[i]->store ( 0, 1 ); break; case SPH_ATTR_TIMESTAMP: longstore ( table->field[i]->ptr, 0 ); break; default: my_error ( ER_QUERY_ON_FOREIGN_DATA_SOURCE, MYF(0), "INTERNAL ERROR: unhandled unbound field type %d", m_dUnboundFields[i] ); SafeDeleteArray ( m_pResponse ); SPH_RET ( HA_ERR_END_OF_FILE ); } memset ( buf, 0, table->s->null_bytes ); m_iCurrentPos++; #if MYSQL_VERSION_ID > 50100 dbug_tmp_restore_column_map ( table->write_set, org_bitmap ); #endif SPH_RET(0); } // Used to read backwards through the index. int ha_sphinx::index_prev ( byte * ) { SPH_ENTER_METHOD(); SPH_RET ( HA_ERR_WRONG_COMMAND ); } // index_first() asks for the first key in the index. // // Called from opt_range.cc, opt_sum.cc, sql_handler.cc, // and sql_select.cc. int ha_sphinx::index_first ( byte * ) { SPH_ENTER_METHOD(); SPH_RET ( HA_ERR_END_OF_FILE ); } // index_last() asks for the last key in the index. // // Called from opt_range.cc, opt_sum.cc, sql_handler.cc, // and sql_select.cc. int ha_sphinx::index_last ( byte * ) { SPH_ENTER_METHOD(); SPH_RET ( HA_ERR_WRONG_COMMAND ); } int ha_sphinx::rnd_init ( bool ) { SPH_ENTER_METHOD(); SPH_RET(0); } int ha_sphinx::rnd_end() { SPH_ENTER_METHOD(); SPH_RET(0); } int ha_sphinx::rnd_next ( byte * ) { SPH_ENTER_METHOD(); SPH_RET ( HA_ERR_END_OF_FILE ); } void ha_sphinx::position ( const byte * ) { SPH_ENTER_METHOD(); SPH_VOID_RET(); } // This is like rnd_next, but you are given a position to use // to determine the row. The position will be of the type that you stored in // ref. You can use ha_get_ptr(pos,ref_length) to retrieve whatever key // or position you saved when position() was called. // Called from filesort.cc records.cc sql_insert.cc sql_select.cc sql_update.cc. 
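// ----------------------------------------------------------------------------
// Editor's sketch (illustrative only, not part of the original SphinxSE code).
// get_rec() above renders a 32-bit MVA attribute as comma-separated text before
// storing it into the MySQL field. The standalone helper below shows the same
// formatting idea on a plain array; the name, the buffer contract, and the
// 12-byte headroom (up to 10 digits plus a comma and the terminator) are
// assumptions of this sketch.
static int SketchFormatMva32 ( const uint32 * pValues, int iCount, char * sOut, int iOutSize )
{
	int iLen = 0;
	for ( int i=0; i<iCount && iLen<iOutSize-12; i++ )
		iLen += snprintf ( sOut+iLen, iOutSize-iLen, i ? ",%u" : "%u", (unsigned int)pValues[i] );
	return iLen; // bytes written, not counting the terminating NUL
}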
int ha_sphinx::rnd_pos ( byte *, byte * ) { SPH_ENTER_METHOD(); SPH_RET ( HA_ERR_WRONG_COMMAND ); } #if MYSQL_VERSION_ID>=50030 int ha_sphinx::info ( uint ) #else void ha_sphinx::info ( uint ) #endif { SPH_ENTER_METHOD(); if ( table->s->keys>0 ) table->key_info[0].rec_per_key[0] = 1; #if MYSQL_VERSION_ID>50100 stats.records = 20; #else records = 20; #endif #if MYSQL_VERSION_ID>=50030 SPH_RET(0); #else SPH_VOID_RET(); #endif } int ha_sphinx::reset () { SPH_ENTER_METHOD(); CSphSEThreadData * pTls = GetTls (); if ( pTls ) pTls->m_bQuery = false; SPH_RET(0); } int ha_sphinx::delete_all_rows() { SPH_ENTER_METHOD(); SPH_RET ( HA_ERR_WRONG_COMMAND ); } // First you should go read the section "locking functions for mysql" in // lock.cc to understand this. // This create a lock on the table. If you are implementing a storage engine // that can handle transacations look at ha_berkely.cc to see how you will // want to go about doing this. Otherwise you should consider calling flock() // here. // // Called from lock.cc by lock_external() and unlock_external(). Also called // from sql_table.cc by copy_data_between_tables(). int ha_sphinx::external_lock ( THD *, int ) { SPH_ENTER_METHOD(); SPH_RET(0); } THR_LOCK_DATA ** ha_sphinx::store_lock ( THD *, THR_LOCK_DATA ** to, enum thr_lock_type lock_type ) { SPH_ENTER_METHOD(); if ( lock_type!=TL_IGNORE && m_tLock.type==TL_UNLOCK ) m_tLock.type = lock_type; *to++ = &m_tLock; SPH_RET(to); } int ha_sphinx::delete_table ( const char * ) { SPH_ENTER_METHOD(); SPH_RET(0); } // Renames a table from one name to another from alter table call. // // If you do not implement this, the default rename_table() is called from // handler.cc and it will delete all files with the file extentions returned // by bas_ext(). // // Called from sql_table.cc by mysql_rename_table(). int ha_sphinx::rename_table ( const char *, const char * ) { SPH_ENTER_METHOD(); SPH_RET(0); } // Given a starting key, and an ending key estimate the number of rows that // will exist between the two. end_key may be empty which in case determine // if start_key matches any rows. // // Called from opt_range.cc by check_quick_keys(). ha_rows ha_sphinx::records_in_range ( uint, key_range *, key_range * ) { SPH_ENTER_METHOD(); SPH_RET(3); // low number to force index usage } // create() is called to create a database. The variable name will have the name // of the table. When create() is called you do not need to worry about opening // the table. Also, the FRM file will have already been created so adjusting // create_info will not do you any good. You can overwrite the frm file at this // point if you wish to change the table definition, but there are no methods // currently provided for doing that. // // Called from handle.cc by ha_create_table(). 
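// ----------------------------------------------------------------------------
// Editor's note (hypothetical usage example, not in the original source). The
// checks in create() below accept a SphinxAPI table laid out like this; the
// table, column, host, port, and index names are placeholders:
//
//   CREATE TABLE t1
//   (
//       id       BIGINT UNSIGNED NOT NULL,
//       weight   INTEGER NOT NULL,
//       query    VARCHAR(3072) NOT NULL,
//       group_id INTEGER,
//       INDEX(query)
//   ) ENGINE=SPHINX CONNECTION="sphinx://localhost:9312/test";
//
// The first three columns (document id, match weight, query string) and the
// index on the query column are mandatory; any further columns must map to
// int, bigint, timestamp, varchar, or float attributes of the Sphinx index.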
int ha_sphinx::create ( const char * name, TABLE * table, HA_CREATE_INFO * ) { SPH_ENTER_METHOD(); char sError[256]; CSphSEShare tInfo; if ( !ParseUrl ( &tInfo, table, true ) ) SPH_RET(-1); // check SphinxAPI table for ( ; !tInfo.m_bSphinxQL; ) { // check system fields (count and types) if ( table->s->fieldsfield[0] ) ) { my_snprintf ( sError, sizeof(sError), "%s: 1st column (docid) MUST be unsigned integer or bigint", name ); break; } if ( !IsIntegerFieldType ( table->field[1]->type() ) ) { my_snprintf ( sError, sizeof(sError), "%s: 2nd column (weight) MUST be integer or bigint", name ); break; } enum_field_types f2 = table->field[2]->type(); if ( f2!=MYSQL_TYPE_VARCHAR && f2!=MYSQL_TYPE_BLOB && f2!=MYSQL_TYPE_MEDIUM_BLOB && f2!=MYSQL_TYPE_LONG_BLOB && f2!=MYSQL_TYPE_TINY_BLOB ) { my_snprintf ( sError, sizeof(sError), "%s: 3rd column (search query) MUST be varchar or text", name ); break; } // check attributes int i; for ( i=3; i<(int)table->s->fields; i++ ) { enum_field_types eType = table->field[i]->type(); if ( eType!=MYSQL_TYPE_TIMESTAMP && !IsIntegerFieldType(eType) && eType!=MYSQL_TYPE_VARCHAR && eType!=MYSQL_TYPE_FLOAT ) { my_snprintf ( sError, sizeof(sError), "%s: %dth column (attribute %s) MUST be integer, bigint, timestamp, varchar, or float", name, i+1, table->field[i]->field_name ); break; } } if ( i!=(int)table->s->fields ) break; // check index if ( table->s->keys!=1 || table->key_info[0].key_parts!=1 || strcasecmp ( table->key_info[0].key_part[0].field->field_name, table->field[2]->field_name ) ) { my_snprintf ( sError, sizeof(sError), "%s: there must be an index on '%s' column", name, table->field[2]->field_name ); break; } // all good sError[0] = '\0'; break; } // check SphinxQL table for ( ; tInfo.m_bSphinxQL; ) { sError[0] = '\0'; // check that 1st column is id, is of int type, and has an index if ( strcmp ( table->field[0]->field_name, "id" ) ) { my_snprintf ( sError, sizeof(sError), "%s: 1st column must be called 'id'", name ); break; } if ( !IsIDField ( table->field[0] ) ) { my_snprintf ( sError, sizeof(sError), "%s: 'id' column must be INT UNSIGNED or BIGINT", name ); break; } // check index if ( table->s->keys!=1 || table->key_info[0].key_parts!=1 || strcasecmp ( table->key_info[0].key_part[0].field->field_name, "id" ) ) { my_snprintf ( sError, sizeof(sError), "%s: 'id' column must be indexed", name ); break; } // check column types for ( int i=1; i<(int)table->s->fields; i++ ) { enum_field_types eType = table->field[i]->type(); if ( eType!=MYSQL_TYPE_TIMESTAMP && !IsIntegerFieldType(eType) && eType!=MYSQL_TYPE_VARCHAR && eType!=MYSQL_TYPE_FLOAT ) { my_snprintf ( sError, sizeof(sError), "%s: column %d(%s) is of unsupported type (use int/bigint/timestamp/varchar/float)", name, i+1, table->field[i]->field_name ); break; } } if ( sError[0] ) break; // all good break; } // report and bail if ( sError[0] ) { my_error ( ER_CANT_CREATE_TABLE, MYF(0), sError, -1 ); SPH_RET(-1); } SPH_RET(0); } // show functions #if MYSQL_VERSION_ID<50100 #define SHOW_VAR_FUNC_BUFF_SIZE 1024 #endif CSphSEStats * sphinx_get_stats ( THD * thd, SHOW_VAR * out ) { #if MYSQL_VERSION_ID>50100 if ( sphinx_hton_ptr ) { CSphSEThreadData *pTls = (CSphSEThreadData *) *thd_ha_data ( thd, sphinx_hton_ptr ); if ( pTls && pTls->m_bStats ) return &pTls->m_tStats; } #else CSphSEThreadData *pTls = (CSphSEThreadData *) thd->ha_data[sphinx_hton.slot]; if ( pTls && pTls->m_bStats ) return &pTls->m_tStats; #endif out->type = SHOW_CHAR; out->value = ""; return 0; } int sphinx_showfunc_total ( THD * thd, SHOW_VAR 
* out, char * ) { CSphSEStats * pStats = sphinx_get_stats ( thd, out ); if ( pStats ) { out->type = SHOW_INT; out->value = (char *) &pStats->m_iMatchesTotal; } return 0; } int sphinx_showfunc_total_found ( THD * thd, SHOW_VAR * out, char * ) { CSphSEStats * pStats = sphinx_get_stats ( thd, out ); if ( pStats ) { out->type = SHOW_INT; out->value = (char *) &pStats->m_iMatchesFound; } return 0; } int sphinx_showfunc_time ( THD * thd, SHOW_VAR * out, char * ) { CSphSEStats * pStats = sphinx_get_stats ( thd, out ); if ( pStats ) { out->type = SHOW_INT; out->value = (char *) &pStats->m_iQueryMsec; } return 0; } int sphinx_showfunc_word_count ( THD * thd, SHOW_VAR * out, char * ) { CSphSEStats * pStats = sphinx_get_stats ( thd, out ); if ( pStats ) { out->type = SHOW_INT; out->value = (char *) &pStats->m_iWords; } return 0; } int sphinx_showfunc_words ( THD * thd, SHOW_VAR * out, char * sBuffer ) { #if MYSQL_VERSION_ID>50100 if ( sphinx_hton_ptr ) { CSphSEThreadData * pTls = (CSphSEThreadData *) *thd_ha_data ( thd, sphinx_hton_ptr ); #else { CSphSEThreadData * pTls = (CSphSEThreadData *) thd->ha_data[sphinx_hton.slot]; #endif if ( pTls && pTls->m_bStats ) { CSphSEStats * pStats = &pTls->m_tStats; if ( pStats && pStats->m_iWords ) { uint uBuffLen = 0; out->type = SHOW_CHAR; out->value = sBuffer; // the following is partially based on code in sphinx_show_status() sBuffer[0] = 0; for ( int i=0; im_iWords; i++ ) { CSphSEWordStats & tWord = pStats->m_dWords[i]; uBuffLen = my_snprintf ( sBuffer, SHOW_VAR_FUNC_BUFF_SIZE, "%s%s:%d:%d ", sBuffer, tWord.m_sWord, tWord.m_iDocs, tWord.m_iHits ); } if ( uBuffLen > 0 ) { // trim last space sBuffer [ --uBuffLen ] = 0; if ( pTls->m_pQueryCharset ) { // String::c_ptr() will nul-terminate the buffer. // // NOTE: It's not entirely clear whether this conversion is necessary at all. 
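// ------------------------------------------------------------------
// Editor's note (hypothetical usage example, not in the original
// source). The buffer built here feeds the sphinx_words status
// variable, so after a SphinxSE query the per-keyword stats can be
// inspected with something like
//
//   SHOW STATUS LIKE 'sphinx_%';
//
// alongside sphinx_total, sphinx_total_found, sphinx_time, and
// sphinx_word_count, which are registered further below.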
String sConvert; uint iErrors; sConvert.copy ( sBuffer, uBuffLen, pTls->m_pQueryCharset, system_charset_info, &iErrors ); memcpy ( sBuffer, sConvert.c_ptr(), sConvert.length() + 1 ); } } return 0; } } } out->type = SHOW_CHAR; out->value = ""; return 0; } int sphinx_showfunc_error ( THD * thd, SHOW_VAR * out, char * ) { CSphSEStats * pStats = sphinx_get_stats ( thd, out ); if ( pStats && pStats->m_bLastError ) { out->type = SHOW_CHAR; out->value = pStats->m_sLastMessage; } return 0; } #if MYSQL_VERSION_ID>50100 struct st_mysql_storage_engine sphinx_storage_engine = { MYSQL_HANDLERTON_INTERFACE_VERSION }; struct st_mysql_show_var sphinx_status_vars[] = { {"sphinx_total", (char *)sphinx_showfunc_total, SHOW_FUNC}, {"sphinx_total_found", (char *)sphinx_showfunc_total_found, SHOW_FUNC}, {"sphinx_time", (char *)sphinx_showfunc_time, SHOW_FUNC}, {"sphinx_word_count", (char *)sphinx_showfunc_word_count, SHOW_FUNC}, {"sphinx_words", (char *)sphinx_showfunc_words, SHOW_FUNC}, {"sphinx_error", (char *)sphinx_showfunc_error, SHOW_FUNC}, {0, 0, (enum_mysql_show_type)0} }; mysql_declare_plugin(sphinx) { MYSQL_STORAGE_ENGINE_PLUGIN, &sphinx_storage_engine, sphinx_hton_name, "Sphinx developers", sphinx_hton_comment, PLUGIN_LICENSE_GPL, sphinx_init_func, // Plugin Init sphinx_done_func, // Plugin Deinit 0x0001, // 0.1 sphinx_status_vars, NULL, NULL } mysql_declare_plugin_end; #endif // >50100 // // $Id: ha_sphinx.cc 3133 2012-03-01 13:47:52Z shodan $ // sphinx-2.0.4-release/mysqlse/sphinx.5.0.91.diff0000644000176700017710000003311711414706022020336 0ustar deogardeogardiff -r 319c65835581 CMakeLists.txt --- a/CMakeLists.txt Sun Jun 20 15:15:01 2010 +0400 +++ b/CMakeLists.txt Sun Jun 20 15:59:31 2010 +0400 @@ -70,6 +70,10 @@ ADD_DEFINITIONS(-DHAVE_INNOBASE_DB) ENDIF(WITH_INNOBASE_STORAGE_ENGINE) +IF(WITH_SPHINX_STORAGE_ENGINE) + ADD_DEFINITIONS(-DHAVE_SPHINX_DB) +ENDIF(WITH_SPHINX_STORAGE_ENGINE) + SET(localstatedir "C:\\mysql\\data") CONFIGURE_FILE(${CMAKE_SOURCE_DIR}/support-files/my-huge.cnf.sh ${CMAKE_SOURCE_DIR}/support-files/my-huge.ini @ONLY) diff -r 319c65835581 configure.in --- a/configure.in Sun Jun 20 15:15:01 2010 +0400 +++ b/configure.in Sun Jun 20 15:59:31 2010 +0400 @@ -60,6 +60,7 @@ sinclude(config/ac-macros/ha_berkeley.m4) sinclude(config/ac-macros/ha_blackhole.m4) sinclude(config/ac-macros/ha_example.m4) +sinclude(config/ac-macros/ha_sphinx.m4) sinclude(config/ac-macros/ha_federated.m4) sinclude(config/ac-macros/ha_innodb.m4) sinclude(config/ac-macros/ha_ndbcluster.m4) @@ -2696,6 +2697,7 @@ MYSQL_CHECK_BDB MYSQL_CHECK_INNODB MYSQL_CHECK_EXAMPLEDB +MYSQL_CHECK_SPHINXDB MYSQL_CHECK_ARCHIVEDB MYSQL_CHECK_CSVDB MYSQL_CHECK_BLACKHOLEDB diff -r 319c65835581 libmysqld/Makefile.am --- a/libmysqld/Makefile.am Sun Jun 20 15:15:01 2010 +0400 +++ b/libmysqld/Makefile.am Sun Jun 20 15:59:31 2010 +0400 @@ -29,6 +29,7 @@ -I$(top_builddir)/include -I$(top_srcdir)/include \ -I$(top_builddir)/sql -I$(top_srcdir)/sql \ -I$(top_srcdir)/sql/examples \ + -I$(top_srcdir)/sql/sphinx \ -I$(top_srcdir)/regex \ $(openssl_includes) @ZLIB_INCLUDES@ @@ -39,6 +40,7 @@ libmysqlsources = errmsg.c get_password.c libmysql.c client.c pack.c \ my_time.c sqlexamplessources = ha_example.cc ha_tina.cc +sqlsphinxsources = ha_sphinx.cc noinst_HEADERS = embedded_priv.h emb_qcache.h @@ -67,7 +69,7 @@ parse_file.cc sql_view.cc sql_trigger.cc my_decimal.cc \ ha_blackhole.cc ha_archive.cc my_user.c -libmysqld_int_a_SOURCES= $(libmysqld_sources) $(libmysqlsources) $(sqlsources) $(sqlexamplessources) +libmysqld_int_a_SOURCES= 
$(libmysqld_sources) $(libmysqlsources) $(sqlsources) $(sqlexamplessources) $(sqlsphinxsources) libmysqld_a_SOURCES= # automake misses these @@ -147,12 +149,16 @@ rm -f $$f; \ @LN_CP_F@ $(top_srcdir)/sql/examples/$$f $$f; \ done; \ + for f in $(sqlsphinxsources); do \ + rm -f $$f; \ + @LN_CP_F@ $(top_srcdir)/sql/sphinx/$$f $$f; \ + done; \ rm -f client_settings.h; \ @LN_CP_F@ $(top_srcdir)/libmysql/client_settings.h client_settings.h clean-local: - rm -f `echo $(sqlsources) $(libmysqlsources) $(sqlexamplessources) | sed "s;\.lo;.c;g"` \ + rm -f `echo $(sqlsources) $(libmysqlsources) $(sqlexamplessources) $(sqlsphinxsources) | sed "s;\.lo;.c;g"` \ $(top_srcdir)/linked_libmysqld_sources; \ rm -f client_settings.h diff -r 319c65835581 sql/CMakeLists.txt --- a/sql/CMakeLists.txt Sun Jun 20 15:15:01 2010 +0400 +++ b/sql/CMakeLists.txt Sun Jun 20 15:59:31 2010 +0400 @@ -50,6 +50,7 @@ filesort.cc gstream.cc ha_blackhole.cc ha_archive.cc ha_heap.cc ha_myisam.cc ha_myisammrg.cc ha_innodb.cc ha_federated.cc ha_berkeley.cc + sphinx/ha_sphinx.cc handler.cc hash_filo.cc hash_filo.h hostname.cc init.cc item.cc item_buff.cc item_cmpfunc.cc item_create.cc item_func.cc item_geofunc.cc item_row.cc diff -r 319c65835581 sql/Makefile.am --- a/sql/Makefile.am Sun Jun 20 15:15:01 2010 +0400 +++ b/sql/Makefile.am Sun Jun 20 15:59:31 2010 +0400 @@ -68,6 +68,7 @@ sql_array.h sql_cursor.h \ examples/ha_example.h ha_archive.h \ examples/ha_tina.h ha_blackhole.h \ + sphinx/ha_sphinx.h \ ha_federated.h mysqld_SOURCES = sql_lex.cc sql_handler.cc \ item.cc item_sum.cc item_buff.cc item_func.cc \ @@ -105,6 +106,7 @@ sp_cache.cc parse_file.cc sql_trigger.cc \ examples/ha_example.cc ha_archive.cc \ examples/ha_tina.cc ha_blackhole.cc \ + sphinx/ha_sphinx.cc \ ha_federated.cc gen_lex_hash_SOURCES = gen_lex_hash.cc @@ -175,6 +177,10 @@ udf_example_la_SOURCES= udf_example.c udf_example_la_LDFLAGS= -module -rpath $(pkglibdir) +pkglib_LTLIBRARIES = sphinx/sphinx.la +sphinx_sphinx_la_SOURCES = sphinx/snippets_udf.cc +sphinx_sphinx_la_LDFLAGS = -module + # Don't update the files from bitkeeper %::SCCS/s.% diff -r 319c65835581 sql/handler.cc --- a/sql/handler.cc Sun Jun 20 15:15:01 2010 +0400 +++ b/sql/handler.cc Sun Jun 20 15:59:31 2010 +0400 @@ -77,6 +77,15 @@ NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, HTON_NO_FLAGS }; #endif +#ifdef HAVE_SPHINX_DB +#include "sphinx/ha_sphinx.h" +extern handlerton sphinx_hton; +#else +handlerton sphinx_hton = { "SPHINX", SHOW_OPTION_NO, "SPHINX storage engine", + DB_TYPE_SPHINX_DB, NULL, 0, 0, NULL, NULL, + NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, + HTON_NO_FLAGS }; +#endif #ifdef HAVE_INNOBASE_DB #include "ha_innodb.h" extern handlerton innobase_hton; @@ -141,6 +150,7 @@ &example_hton, &archive_hton, &tina_hton, + &sphinx_hton, &ndbcluster_hton, &federated_hton, &myisammrg_hton, @@ -342,6 +352,12 @@ return new (alloc) ha_tina(table); return NULL; #endif +#ifdef HAVE_SPHINX_DB + case DB_TYPE_SPHINX_DB: + if (have_sphinx_db == SHOW_OPTION_YES) + return new (alloc) ha_sphinx(table); + return NULL; +#endif #ifdef HAVE_NDBCLUSTER_DB case DB_TYPE_NDBCLUSTER: if (have_ndbcluster == SHOW_OPTION_YES) diff -r 319c65835581 sql/handler.h --- a/sql/handler.h Sun Jun 20 15:15:01 2010 +0400 +++ b/sql/handler.h Sun Jun 20 15:59:31 2010 +0400 @@ -186,8 +186,9 @@ DB_TYPE_BERKELEY_DB, DB_TYPE_INNODB, DB_TYPE_GEMINI, DB_TYPE_NDBCLUSTER, DB_TYPE_EXAMPLE_DB, DB_TYPE_ARCHIVE_DB, DB_TYPE_CSV_DB, - DB_TYPE_FEDERATED_DB, + DB_TYPE_FEDERATED_DB, DB_TYPE_BLACKHOLE_DB, + 
DB_TYPE_SPHINX_DB, DB_TYPE_DEFAULT // Must be last }; diff -r 319c65835581 sql/mysql_priv.h --- a/sql/mysql_priv.h Sun Jun 20 15:15:01 2010 +0400 +++ b/sql/mysql_priv.h Sun Jun 20 15:59:31 2010 +0400 @@ -1462,6 +1462,12 @@ #else extern SHOW_COMP_OPTION have_csv_db; #endif +#ifdef HAVE_SPHINX_DB +extern handlerton sphinx_hton; +#define have_sphinx_db sphinx_hton.state +#else +extern SHOW_COMP_OPTION have_sphinx_db; +#endif #ifdef HAVE_FEDERATED_DB extern handlerton federated_hton; #define have_federated_db federated_hton.state diff -r 319c65835581 sql/mysqld.cc --- a/sql/mysqld.cc Sun Jun 20 15:15:01 2010 +0400 +++ b/sql/mysqld.cc Sun Jun 20 15:59:31 2010 +0400 @@ -36,6 +36,10 @@ #include #endif +#ifdef HAVE_SPHINX_DB +#include "sphinx/ha_sphinx.h" +#endif + #ifdef HAVE_INNOBASE_DB #define OPT_INNODB_DEFAULT 1 #else @@ -6721,6 +6725,13 @@ #ifdef COMMUNITY_SERVER {"Uptime_since_flush_status",(char*) 0, SHOW_FLUSHTIME}, #endif +#ifdef HAVE_SPHINX_DB + {"sphinx_total", (char *)sphinx_showfunc_total, SHOW_SPHINX_FUNC}, + {"sphinx_total_found", (char *)sphinx_showfunc_total_found, SHOW_SPHINX_FUNC}, + {"sphinx_time", (char *)sphinx_showfunc_time, SHOW_SPHINX_FUNC}, + {"sphinx_word_count", (char *)sphinx_showfunc_word_count, SHOW_SPHINX_FUNC}, + {"sphinx_words", (char *)sphinx_showfunc_words, SHOW_SPHINX_FUNC}, +#endif {NullS, NullS, SHOW_LONG} }; @@ -6964,6 +6975,11 @@ #else have_csv_db= SHOW_OPTION_NO; #endif +#ifdef HAVE_SPHINX_DB + have_sphinx_db= SHOW_OPTION_YES; +#else + have_sphinx_db= SHOW_OPTION_NO; +#endif #ifdef HAVE_NDBCLUSTER_DB have_ndbcluster=SHOW_OPTION_DISABLED; #else @@ -8087,6 +8103,7 @@ #undef have_example_db #undef have_archive_db #undef have_csv_db +#undef have_sphinx_db #undef have_federated_db #undef have_partition_db #undef have_blackhole_db @@ -8097,6 +8114,7 @@ SHOW_COMP_OPTION have_example_db= SHOW_OPTION_NO; SHOW_COMP_OPTION have_archive_db= SHOW_OPTION_NO; SHOW_COMP_OPTION have_csv_db= SHOW_OPTION_NO; +SHOW_COMP_OPTION have_sphinx_db= SHOW_OPTION_NO; SHOW_COMP_OPTION have_federated_db= SHOW_OPTION_NO; SHOW_COMP_OPTION have_partition_db= SHOW_OPTION_NO; SHOW_COMP_OPTION have_blackhole_db= SHOW_OPTION_NO; diff -r 319c65835581 sql/set_var.cc --- a/sql/set_var.cc Sun Jun 20 15:15:01 2010 +0400 +++ b/sql/set_var.cc Sun Jun 20 15:59:31 2010 +0400 @@ -913,6 +913,7 @@ {"have_profiling", (char*) &have_profiling, SHOW_HAVE}, {"have_crypt", (char*) &have_crypt, SHOW_HAVE}, {"have_csv", (char*) &have_csv_db, SHOW_HAVE}, + {"have_sphinx", (char*) &have_sphinx_db, SHOW_HAVE}, {"have_dynamic_loading", (char*) &have_dlopen, SHOW_HAVE}, {"have_example_engine", (char*) &have_example_db, SHOW_HAVE}, {"have_federated_engine", (char*) &have_federated_db, SHOW_HAVE}, diff -r 319c65835581 sql/sql_lex.h --- a/sql/sql_lex.h Sun Jun 20 15:15:01 2010 +0400 +++ b/sql/sql_lex.h Sun Jun 20 15:59:31 2010 +0400 @@ -57,6 +57,7 @@ SQLCOM_SHOW_DATABASES, SQLCOM_SHOW_TABLES, SQLCOM_SHOW_FIELDS, SQLCOM_SHOW_KEYS, SQLCOM_SHOW_VARIABLES, SQLCOM_SHOW_LOGS, SQLCOM_SHOW_STATUS, SQLCOM_SHOW_INNODB_STATUS, SQLCOM_SHOW_NDBCLUSTER_STATUS, SQLCOM_SHOW_MUTEX_STATUS, + SQLCOM_SHOW_SPHINX_STATUS, SQLCOM_SHOW_PROCESSLIST, SQLCOM_SHOW_MASTER_STAT, SQLCOM_SHOW_SLAVE_STAT, SQLCOM_SHOW_GRANTS, SQLCOM_SHOW_CREATE, SQLCOM_SHOW_CHARSETS, SQLCOM_SHOW_COLLATIONS, SQLCOM_SHOW_CREATE_DB, SQLCOM_SHOW_TABLE_STATUS, diff -r 319c65835581 sql/sql_parse.cc --- a/sql/sql_parse.cc Sun Jun 20 15:15:01 2010 +0400 +++ b/sql/sql_parse.cc Sun Jun 20 15:59:31 2010 +0400 @@ -24,6 +24,9 @@ #ifdef HAVE_INNOBASE_DB #include "ha_innodb.h" #endif 
+#ifdef HAVE_SPHINX_DB +#include "sphinx/ha_sphinx.h" +#endif #ifdef HAVE_NDBCLUSTER_DB #include "ha_ndbcluster.h" @@ -3166,6 +3169,15 @@ break; } #endif +#ifdef HAVE_SPHINX_DB + case SQLCOM_SHOW_SPHINX_STATUS: + { + if (check_global_access(thd, SUPER_ACL)) + goto error; + res = sphinx_show_status(thd); + break; + } +#endif #ifdef HAVE_REPLICATION case SQLCOM_LOAD_MASTER_TABLE: { diff -r 319c65835581 sql/sql_show.cc --- a/sql/sql_show.cc Sun Jun 20 15:15:01 2010 +0400 +++ b/sql/sql_show.cc Sun Jun 20 15:59:31 2010 +0400 @@ -1500,6 +1500,16 @@ value= (char*) var->value_ptr(thd, value_type, &null_lex_str); charset= var->charset(thd); } + #ifdef HAVE_SPHINX_DB + else if (show_type == SHOW_SPHINX_FUNC) + { + SHOW_VAR var; + ((int (*)(THD *, SHOW_VAR *, char *))value)(thd, &var, buff); + + value = var.value; + show_type = var.type; + } + #endif /* HAVE_SPHINX_DB */ pos= end= buff; switch (show_type) { diff -r 319c65835581 sql/sql_yacc.yy --- a/sql/sql_yacc.yy Sun Jun 20 15:15:01 2010 +0400 +++ b/sql/sql_yacc.yy Sun Jun 20 15:59:31 2010 +0400 @@ -8342,6 +8342,9 @@ case DB_TYPE_INNODB: Lex->sql_command = SQLCOM_SHOW_INNODB_STATUS; break; + case DB_TYPE_SPHINX_DB: + Lex->sql_command = SQLCOM_SHOW_SPHINX_STATUS; + break; default: my_error(ER_NOT_SUPPORTED_YET, MYF(0), "STATUS"); MYSQL_YYABORT; diff -r 319c65835581 sql/sql_yacc.cc --- a/sql/sql_yacc.cc Sun Jun 20 15:15:01 2010 +0400 +++ b/sql/sql_yacc.cc Sun Jun 20 15:59:31 2010 +0400 @@ -27003,6 +27003,9 @@ case DB_TYPE_INNODB: Lex->sql_command = SQLCOM_SHOW_INNODB_STATUS; break; + case DB_TYPE_SPHINX_DB: + Lex->sql_command = SQLCOM_SHOW_SPHINX_STATUS; + break; default: my_error(ER_NOT_SUPPORTED_YET, MYF(0), "STATUS"); MYSQL_YYABORT; diff -r 319c65835581 sql/structs.h --- a/sql/structs.h Sun Jun 20 15:15:01 2010 +0400 +++ b/sql/structs.h Sun Jun 20 15:59:31 2010 +0400 @@ -194,6 +194,9 @@ SHOW_SSL_CTX_SESS_TIMEOUTS, SHOW_SSL_CTX_SESS_CACHE_FULL, SHOW_SSL_GET_CIPHER_LIST, #endif /* HAVE_OPENSSL */ +#ifdef HAVE_SPHINX_DB + SHOW_SPHINX_FUNC, +#endif SHOW_NET_COMPRESSION, SHOW_RPL_STATUS, SHOW_SLAVE_RUNNING, SHOW_SLAVE_RETRIED_TRANS, SHOW_KEY_CACHE_LONG, SHOW_KEY_CACHE_CONST_LONG, SHOW_KEY_CACHE_LONGLONG, diff -r 319c65835581 win/configure.js --- a/win/configure.js Sun Jun 20 15:15:01 2010 +0400 +++ b/win/configure.js Sun Jun 20 15:59:31 2010 +0400 @@ -45,6 +45,7 @@ case "WITH_EXAMPLE_STORAGE_ENGINE": case "WITH_FEDERATED_STORAGE_ENGINE": case "WITH_INNOBASE_STORAGE_ENGINE": + case "WITH_SPHINX_STORAGE_ENGINE": case "__NT__": case "DISABLE_GRANT_OPTIONS": case "EMBED_MANIFESTS": --- mysql-5.0.67/config/ac-macros/ha_sphinx.m4 1970-01-01 10:00:00.000000000 +1000 +++ mysql-5.0.67-sphinx/config/ac-macros/ha_sphinx.m4 2009-02-14 09:15:48.000000000 +1000 @@ -0,0 +1,30 @@ +dnl --------------------------------------------------------------------------- +dnl Macro: MYSQL_CHECK_EXAMPLEDB +dnl Sets HAVE_SPHINX_DB if --with-sphinx-storage-engine is used +dnl --------------------------------------------------------------------------- +AC_DEFUN([MYSQL_CHECK_SPHINXDB], [ + AC_ARG_WITH([sphinx-storage-engine], + [ + --with-sphinx-storage-engine + Enable the Sphinx Storage Engine], + [sphinxdb="$withval"], + [sphinxdb=no]) + AC_MSG_CHECKING([for example storage engine]) + + case "$sphinxdb" in + yes ) + AC_DEFINE([HAVE_SPHINX_DB], [1], [Builds Sphinx Engine]) + AC_MSG_RESULT([yes]) + [sphinxdb=yes] + ;; + * ) + AC_MSG_RESULT([no]) + [sphinxdb=no] + ;; + esac + +]) +dnl --------------------------------------------------------------------------- +dnl END OF 
MYSQL_CHECK_EXAMPLE SECTION +dnl --------------------------------------------------------------------------- + sphinx-2.0.4-release/mysqlse/sphinx.5.0.37.diff0000644000176700017710000002734211145546726020357 0ustar deogardeogar--- mysql-5.0.67/config/ac-macros/ha_sphinx.m4 1970-01-01 10:00:00.000000000 +1000 +++ mysql-5.0.67-sphinx/config/ac-macros/ha_sphinx.m4 2009-02-14 09:15:48.000000000 +1000 @@ -0,0 +1,30 @@ +dnl --------------------------------------------------------------------------- +dnl Macro: MYSQL_CHECK_EXAMPLEDB +dnl Sets HAVE_SPHINX_DB if --with-sphinx-storage-engine is used +dnl --------------------------------------------------------------------------- +AC_DEFUN([MYSQL_CHECK_SPHINXDB], [ + AC_ARG_WITH([sphinx-storage-engine], + [ + --with-sphinx-storage-engine + Enable the Sphinx Storage Engine], + [sphinxdb="$withval"], + [sphinxdb=no]) + AC_MSG_CHECKING([for example storage engine]) + + case "$sphinxdb" in + yes ) + AC_DEFINE([HAVE_SPHINX_DB], [1], [Builds Sphinx Engine]) + AC_MSG_RESULT([yes]) + [sphinxdb=yes] + ;; + * ) + AC_MSG_RESULT([no]) + [sphinxdb=no] + ;; + esac + +]) +dnl --------------------------------------------------------------------------- +dnl END OF MYSQL_CHECK_EXAMPLE SECTION +dnl --------------------------------------------------------------------------- + --- mysql-5.0.67/configure.in 2008-08-04 23:19:07.000000000 +1100 +++ mysql-5.0.67-sphinx/configure.in 2009-02-14 09:15:48.000000000 +1000 @@ -58,6 +58,7 @@ sinclude(config/ac-macros/ha_berkeley.m4) sinclude(config/ac-macros/ha_blackhole.m4) sinclude(config/ac-macros/ha_example.m4) +sinclude(config/ac-macros/ha_sphinx.m4) sinclude(config/ac-macros/ha_federated.m4) sinclude(config/ac-macros/ha_innodb.m4) sinclude(config/ac-macros/ha_ndbcluster.m4) @@ -2625,6 +2626,7 @@ MYSQL_CHECK_BDB MYSQL_CHECK_INNODB MYSQL_CHECK_EXAMPLEDB +MYSQL_CHECK_SPHINXDB MYSQL_CHECK_ARCHIVEDB MYSQL_CHECK_CSVDB MYSQL_CHECK_BLACKHOLEDB --- mysql-5.0.67/libmysqld/Makefile.am 2008-08-04 23:19:18.000000000 +1100 +++ mysql-5.0.67-sphinx/libmysqld/Makefile.am 2009-02-14 09:15:48.000000000 +1000 @@ -29,6 +29,7 @@ -I$(top_builddir)/include -I$(top_srcdir)/include \ -I$(top_builddir)/sql -I$(top_srcdir)/sql \ -I$(top_srcdir)/sql/examples \ + -I$(top_srcdir)/sql/sphinx \ -I$(top_srcdir)/regex \ $(openssl_includes) @ZLIB_INCLUDES@ @@ -39,6 +40,7 @@ libmysqlsources = errmsg.c get_password.c libmysql.c client.c pack.c \ my_time.c sqlexamplessources = ha_example.cc ha_tina.cc +sqlsphinxsources = ha_sphinx.cc noinst_HEADERS = embedded_priv.h emb_qcache.h @@ -67,7 +69,7 @@ parse_file.cc sql_view.cc sql_trigger.cc my_decimal.cc \ ha_blackhole.cc ha_archive.cc my_user.c -libmysqld_int_a_SOURCES= $(libmysqld_sources) $(libmysqlsources) $(sqlsources) $(sqlexamplessources) +libmysqld_int_a_SOURCES= $(libmysqld_sources) $(libmysqlsources) $(sqlsources) $(sqlexamplessources) $(sqlsphinxsources) libmysqld_a_SOURCES= # automake misses these @@ -147,12 +149,16 @@ rm -f $$f; \ @LN_CP_F@ $(top_srcdir)/sql/examples/$$f $$f; \ done; \ + for f in $(sqlsphinxsources); do \ + rm -f $$f; \ + @LN_CP_F@ $(top_srcdir)/sql/sphinx/$$f $$f; \ + done; \ rm -f client_settings.h; \ @LN_CP_F@ $(top_srcdir)/libmysql/client_settings.h client_settings.h clean-local: - rm -f `echo $(sqlsources) $(libmysqlsources) $(sqlexamplessources) | sed "s;\.lo;.c;g"` \ + rm -f `echo $(sqlsources) $(libmysqlsources) $(sqlexamplessources) $(sqlsphinxsources) | sed "s;\.lo;.c;g"` \ $(top_srcdir)/linked_libmysqld_sources; \ rm -f client_settings.h --- mysql-5.0.67/sql/handler.cc 
2008-08-04 23:20:04.000000000 +1100 +++ mysql-5.0.67-sphinx/sql/handler.cc 2009-02-14 09:15:48.000000000 +1000 @@ -77,6 +77,15 @@ NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, HTON_NO_FLAGS }; #endif +#ifdef HAVE_SPHINX_DB +#include "sphinx/ha_sphinx.h" +extern handlerton sphinx_hton; +#else +handlerton sphinx_hton = { "SPHINX", SHOW_OPTION_NO, "SPHINX storage engine", + DB_TYPE_SPHINX_DB, NULL, 0, 0, NULL, NULL, + NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, + HTON_NO_FLAGS }; +#endif #ifdef HAVE_INNOBASE_DB #include "ha_innodb.h" extern handlerton innobase_hton; @@ -141,6 +150,7 @@ &example_hton, &archive_hton, &tina_hton, + &sphinx_hton, &ndbcluster_hton, &federated_hton, &myisammrg_hton, @@ -341,6 +351,12 @@ return new (alloc) ha_tina(table); return NULL; #endif +#ifdef HAVE_SPHINX_DB + case DB_TYPE_SPHINX_DB: + if (have_sphinx_db == SHOW_OPTION_YES) + return new (alloc) ha_sphinx(table); + return NULL; +#endif #ifdef HAVE_NDBCLUSTER_DB case DB_TYPE_NDBCLUSTER: if (have_ndbcluster == SHOW_OPTION_YES) --- mysql-5.0.67/sql/handler.h 2008-08-04 23:20:04.000000000 +1100 +++ mysql-5.0.67-sphinx/sql/handler.h 2009-02-14 09:15:48.000000000 +1000 @@ -186,8 +186,9 @@ DB_TYPE_BERKELEY_DB, DB_TYPE_INNODB, DB_TYPE_GEMINI, DB_TYPE_NDBCLUSTER, DB_TYPE_EXAMPLE_DB, DB_TYPE_ARCHIVE_DB, DB_TYPE_CSV_DB, - DB_TYPE_FEDERATED_DB, + DB_TYPE_FEDERATED_DB, DB_TYPE_BLACKHOLE_DB, + DB_TYPE_SPHINX_DB, DB_TYPE_DEFAULT // Must be last }; --- mysql-5.0.67/sql/Makefile.am 2008-08-04 23:20:02.000000000 +1100 +++ mysql-5.0.67-sphinx/sql/Makefile.am 2009-02-14 09:23:28.000000000 +1000 @@ -68,6 +68,7 @@ sql_array.h sql_cursor.h \ examples/ha_example.h ha_archive.h \ examples/ha_tina.h ha_blackhole.h \ + sphinx/ha_sphinx.h \ ha_federated.h mysqld_SOURCES = sql_lex.cc sql_handler.cc \ item.cc item_sum.cc item_buff.cc item_func.cc \ @@ -105,6 +106,7 @@ sp_cache.cc parse_file.cc sql_trigger.cc \ examples/ha_example.cc ha_archive.cc \ examples/ha_tina.cc ha_blackhole.cc \ + sphinx/ha_sphinx.cc \ ha_federated.cc gen_lex_hash_SOURCES = gen_lex_hash.cc @@ -174,6 +176,10 @@ udf_example_la_SOURCES= udf_example.c udf_example_la_LDFLAGS= -module -rpath $(pkglibdir) +pkglib_LTLIBRARIES = sphinx/sphinx.la +sphinx_sphinx_la_SOURCES = sphinx/snippets_udf.cc +sphinx_sphinx_la_LDFLAGS = -module + # Don't update the files from bitkeeper %::SCCS/s.% --- mysql-5.0.67/sql/mysqld.cc 2008-08-04 23:20:07.000000000 +1100 +++ mysql-5.0.67-sphinx/sql/mysqld.cc 2009-02-14 09:15:48.000000000 +1000 @@ -36,6 +36,10 @@ #include #endif +#ifdef HAVE_SPHINX_DB +#include "sphinx/ha_sphinx.h" +#endif + #ifdef HAVE_INNOBASE_DB #define OPT_INNODB_DEFAULT 1 #else @@ -6633,6 +6637,13 @@ {"Threads_running", (char*) &thread_running, SHOW_INT_CONST}, {"Uptime", (char*) 0, SHOW_STARTTIME}, {"Uptime_since_flush_status",(char*) 0, SHOW_FLUSHTIME}, +#ifdef HAVE_SPHINX_DB + {"sphinx_total", (char *)sphinx_showfunc_total, SHOW_SPHINX_FUNC}, + {"sphinx_total_found", (char *)sphinx_showfunc_total_found, SHOW_SPHINX_FUNC}, + {"sphinx_time", (char *)sphinx_showfunc_time, SHOW_SPHINX_FUNC}, + {"sphinx_word_count", (char *)sphinx_showfunc_word_count, SHOW_SPHINX_FUNC}, + {"sphinx_words", (char *)sphinx_showfunc_words, SHOW_SPHINX_FUNC}, +#endif {NullS, NullS, SHOW_LONG} }; @@ -6875,6 +6886,11 @@ #else have_csv_db= SHOW_OPTION_NO; #endif +#ifdef HAVE_SPHINX_DB + have_sphinx_db= SHOW_OPTION_YES; +#else + have_sphinx_db= SHOW_OPTION_NO; +#endif #ifdef HAVE_NDBCLUSTER_DB have_ndbcluster=SHOW_OPTION_DISABLED; #else @@ -7983,6 +7999,7 @@ 
#undef have_example_db #undef have_archive_db #undef have_csv_db +#undef have_sphinx_db #undef have_federated_db #undef have_partition_db #undef have_blackhole_db @@ -7993,6 +8010,7 @@ SHOW_COMP_OPTION have_example_db= SHOW_OPTION_NO; SHOW_COMP_OPTION have_archive_db= SHOW_OPTION_NO; SHOW_COMP_OPTION have_csv_db= SHOW_OPTION_NO; +SHOW_COMP_OPTION have_sphinx_db= SHOW_OPTION_NO; SHOW_COMP_OPTION have_federated_db= SHOW_OPTION_NO; SHOW_COMP_OPTION have_partition_db= SHOW_OPTION_NO; SHOW_COMP_OPTION have_blackhole_db= SHOW_OPTION_NO; --- mysql-5.0.67/sql/mysql_priv.h 2008-08-04 23:20:07.000000000 +1100 +++ mysql-5.0.67-sphinx/sql/mysql_priv.h 2009-02-14 09:15:48.000000000 +1000 @@ -1439,6 +1439,12 @@ #else extern SHOW_COMP_OPTION have_csv_db; #endif +#ifdef HAVE_SPHINX_DB +extern handlerton sphinx_hton; +#define have_sphinx_db sphinx_hton.state +#else +extern SHOW_COMP_OPTION have_sphinx_db; +#endif #ifdef HAVE_FEDERATED_DB extern handlerton federated_hton; #define have_federated_db federated_hton.state --- mysql-5.0.67/sql/set_var.cc 2008-08-04 23:20:08.000000000 +1100 +++ mysql-5.0.67-sphinx/sql/set_var.cc 2009-02-14 09:15:48.000000000 +1000 @@ -888,6 +888,7 @@ {"have_compress", (char*) &have_compress, SHOW_HAVE}, {"have_crypt", (char*) &have_crypt, SHOW_HAVE}, {"have_csv", (char*) &have_csv_db, SHOW_HAVE}, + {"have_sphinx", (char*) &have_sphinx_db, SHOW_HAVE}, {"have_dynamic_loading", (char*) &have_dlopen, SHOW_HAVE}, {"have_example_engine", (char*) &have_example_db, SHOW_HAVE}, {"have_federated_engine", (char*) &have_federated_db, SHOW_HAVE}, --- mysql-5.0.67/sql/sql_lex.h 2008-08-04 23:20:10.000000000 +1100 +++ mysql-5.0.67-sphinx/sql/sql_lex.h 2009-02-14 09:15:48.000000000 +1000 @@ -57,6 +57,7 @@ SQLCOM_SHOW_DATABASES, SQLCOM_SHOW_TABLES, SQLCOM_SHOW_FIELDS, SQLCOM_SHOW_KEYS, SQLCOM_SHOW_VARIABLES, SQLCOM_SHOW_LOGS, SQLCOM_SHOW_STATUS, SQLCOM_SHOW_INNODB_STATUS, SQLCOM_SHOW_NDBCLUSTER_STATUS, SQLCOM_SHOW_MUTEX_STATUS, + SQLCOM_SHOW_SPHINX_STATUS, SQLCOM_SHOW_PROCESSLIST, SQLCOM_SHOW_MASTER_STAT, SQLCOM_SHOW_SLAVE_STAT, SQLCOM_SHOW_GRANTS, SQLCOM_SHOW_CREATE, SQLCOM_SHOW_CHARSETS, SQLCOM_SHOW_COLLATIONS, SQLCOM_SHOW_CREATE_DB, SQLCOM_SHOW_TABLE_STATUS, --- mysql-5.0.67/sql/sql_parse.cc 2008-08-04 23:20:10.000000000 +1100 +++ mysql-5.0.67-sphinx/sql/sql_parse.cc 2009-02-14 09:15:48.000000000 +1000 @@ -24,6 +24,9 @@ #ifdef HAVE_INNOBASE_DB #include "ha_innodb.h" #endif +#ifdef HAVE_SPHINX_DB +#include "sphinx/ha_sphinx.h" +#endif #ifdef HAVE_NDBCLUSTER_DB #include "ha_ndbcluster.h" @@ -3006,6 +3009,15 @@ break; } #endif +#ifdef HAVE_SPHINX_DB + case SQLCOM_SHOW_SPHINX_STATUS: + { + if (check_global_access(thd, SUPER_ACL)) + goto error; + res = sphinx_show_status(thd); + break; + } +#endif #ifdef HAVE_REPLICATION case SQLCOM_LOAD_MASTER_TABLE: { --- mysql-5.0.67/sql/sql_yacc.yy 2008-08-04 23:20:12.000000000 +1100 +++ mysql-5.0.67-sphinx/sql/sql_yacc.yy 2009-02-14 09:15:48.000000000 +1000 @@ -7393,6 +7393,9 @@ case DB_TYPE_INNODB: Lex->sql_command = SQLCOM_SHOW_INNODB_STATUS; break; + case DB_TYPE_SPHINX_DB: + Lex->sql_command = SQLCOM_SHOW_SPHINX_STATUS; + break; default: my_error(ER_NOT_SUPPORTED_YET, MYF(0), "STATUS"); MYSQL_YYABORT; --- mysql-5.0.67/sql/structs.h 2008-08-04 23:20:12.000000000 +1100 +++ mysql-5.0.67-sphinx/sql/structs.h 2009-02-14 09:15:48.000000000 +1000 @@ -188,6 +188,9 @@ SHOW_SSL_CTX_SESS_TIMEOUTS, SHOW_SSL_CTX_SESS_CACHE_FULL, SHOW_SSL_GET_CIPHER_LIST, #endif /* HAVE_OPENSSL */ +#ifdef HAVE_SPHINX_DB + SHOW_SPHINX_FUNC, +#endif SHOW_NET_COMPRESSION, SHOW_RPL_STATUS, 
SHOW_SLAVE_RUNNING, SHOW_SLAVE_RETRIED_TRANS, SHOW_KEY_CACHE_LONG, SHOW_KEY_CACHE_CONST_LONG, SHOW_KEY_CACHE_LONGLONG, --- mysql-5.0.67/sql/sql_show.cc 2008-08-04 23:20:11.000000000 +1100 +++ mysql-5.0.67-sphinx/sql/sql_show.cc 2009-02-14 09:15:48.000000000 +1000 @@ -1473,6 +1473,16 @@ value= (char*) ((sys_var*) value)->value_ptr(thd, value_type, &null_lex_str); } + #ifdef HAVE_SPHINX_DB + else if (show_type == SHOW_SPHINX_FUNC) + { + SHOW_VAR var; + ((int (*)(THD *, SHOW_VAR *, char *))value)(thd, &var, buff); + + value = var.value; + show_type = var.type; + } + #endif /* HAVE_SPHINX_DB */ pos= end= buff; switch (show_type) { sphinx-2.0.4-release/mysqlse/plug.in0000644000176700017710000000037210441500053016731 0ustar deogardeogarMYSQL_STORAGE_ENGINE(sphinx,, [Sphinx Storage Engine], [Sphinx Storage Engines], [max,max-no-ndb]) MYSQL_PLUGIN_DIRECTORY(sphinx, [storage/sphinx]) MYSQL_PLUGIN_STATIC(sphinx, [libsphinx.a]) MYSQL_PLUGIN_DYNAMIC(sphinx, [ha_sphinx.la]) sphinx-2.0.4-release/mysqlse/CMakeLists.txt0000644000176700017710000000117111624275006020203 0ustar deogardeogarSET(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -DSAFEMALLOC -DSAFE_MUTEX") SET(CMAKE_C_FLAGS_DEBUG "${CMAKE_C_FLAGS_DEBUG} -DSAFEMALLOC -DSAFE_MUTEX") ADD_DEFINITIONS(-DMYSQL_SERVER) INCLUDE_DIRECTORIES(${CMAKE_SOURCE_DIR}/include ${CMAKE_SOURCE_DIR}/sql ${CMAKE_SOURCE_DIR}/extra/yassl/include ${CMAKE_SOURCE_DIR}/regex) SET(SPHINX_SOURCES ha_sphinx.cc) IF(MYSQL_VERSION_ID LESS 50515) ADD_LIBRARY(sphinx ha_sphinx.cc) ELSE() SET(SPHINX_PLUGIN_DYNAMIC "ha_sphinx") MYSQL_ADD_PLUGIN(sphinx ${SPHINX_SOURCES} STORAGE_ENGINE MODULE_ONLY LINK_LIBRARIES mysys) ENDIF() sphinx-2.0.4-release/mysqlse/snippets_udf.cc0000644000176700017710000004446111711621267020467 0ustar deogardeogar// // $Id: snippets_udf.cc 3087 2012-01-30 23:07:35Z shodan $ // // // Copyright (c) 2001-2012, Andrew Aksyonoff // Copyright (c) 2008-2012, Sphinx Technologies Inc // All rights reserved // // This program is free software; you can redistribute it and/or modify // it under the terms of the GNU General Public License. You should have // received a copy of the GPL license along with this program; if you // did not, you can find it at http://www.gnu.org/ // #include #include #include #include #include #include #if MYSQL_VERSION_ID>50100 #include "mysql_priv.h" #include #else #include "../mysql_priv.h" #endif #include #include #if MYSQL_VERSION_ID>=50120 typedef uchar byte; #endif /// partially copy-pasted stuff that should be moved elsewhere #if UNALIGNED_RAM_ACCESS /// pass-through wrapper template < typename T > inline T sphUnalignedRead ( const T & tRef ) { return tRef; } /// pass-through wrapper template < typename T > void sphUnalignedWrite ( void * pPtr, const T & tVal ) { *(T*)pPtr = tVal; } #else /// unaligned read wrapper for some architectures (eg. SPARC) template < typename T > inline T sphUnalignedRead ( const T & tRef ) { T uTmp; byte * pSrc = (byte *) &tRef; byte * pDst = (byte *) &uTmp; for ( int i=0; i<(int)sizeof(T); i++ ) *pDst++ = *pSrc++; return uTmp; } /// unaligned write wrapper for some architectures (eg. 
SPARC) template < typename T > void sphUnalignedWrite ( void * pPtr, const T & tVal ) { byte * pDst = (byte *) pPtr; byte * pSrc = (byte *) &tVal; for ( int i=0; i<(int)sizeof(T); i++ ) *pDst++ = *pSrc++; } #endif #define SPHINXSE_MAX_ALLOC (16*1024*1024) #define SafeDelete(_arg) { if ( _arg ) delete ( _arg ); (_arg) = NULL; } #define SafeDeleteArray(_arg) { if ( _arg ) delete [] ( _arg ); (_arg) = NULL; } #define Min(a,b) ((a)<(b)?(a):(b)) typedef unsigned int DWORD; inline DWORD sphF2DW ( float f ) { union { float f; uint32 d; } u; u.f = f; return u.d; } static char * sphDup ( const char * sSrc, int iLen=-1 ) { if ( !sSrc ) return NULL; if ( iLen<0 ) iLen = strlen(sSrc); char * sRes = new char [ 1+iLen ]; memcpy ( sRes, sSrc, iLen ); sRes[iLen] = '\0'; return sRes; } static inline void sphShowErrno ( const char * sCall ) { char sError[256]; snprintf ( sError, sizeof(sError), "%s() failed: [%d] %s", sCall, errno, strerror(errno) ); my_error ( ER_QUERY_ON_FOREIGN_DATA_SOURCE, MYF(0), sError ); } static const bool sphReportErrors = true; static bool sphSend ( int iFd, const char * pBuffer, int iSize, bool bReportErrors = false ) { assert ( pBuffer ); assert ( iSize > 0 ); const int iResult = send ( iFd, pBuffer, iSize, 0 ); if ( iResult!=iSize ) { if ( bReportErrors ) sphShowErrno("send"); return false; } return true; } static bool sphRecv ( int iFd, char * pBuffer, int iSize, bool bReportErrors = false ) { assert ( pBuffer ); assert ( iSize > 0 ); while ( iSize ) { const int iResult = recv ( iFd, pBuffer, iSize, 0 ); if ( iResult > 0 ) { iSize -= iResult; pBuffer += iSize; } else if ( iResult==0 ) { if ( bReportErrors ) my_error ( ER_CONNECT_TO_FOREIGN_DATA_SOURCE, MYF(0), "recv() failed: disconnected" ); return false; } else { if ( bReportErrors ) sphShowErrno("recv"); return false; } } return true; } enum { SPHINX_SEARCHD_PROTO = 1, SEARCHD_COMMAND_EXCERPT = 1, VER_COMMAND_EXCERPT = 0x103, }; /// known answers enum { SEARCHD_OK = 0, ///< general success, command-specific reply follows SEARCHD_ERROR = 1, ///< general failure, error message follows SEARCHD_RETRY = 2, ///< temporary failure, error message follows, client should retry later SEARCHD_WARNING = 3 ///< general success, warning message and command-specific reply follow }; #define SPHINXSE_DEFAULT_SCHEME "sphinx" #define SPHINXSE_DEFAULT_HOST "127.0.0.1" #define SPHINXSE_DEFAULT_PORT 9312 #define SPHINXSE_DEFAULT_INDEX "*" class CSphBuffer { private: bool m_bOverrun; int m_iSize; int m_iLeft; char * m_pBuffer; char * m_pCurrent; public: explicit CSphBuffer ( const int iSize ) : m_bOverrun ( false ) , m_iSize ( iSize ) , m_iLeft ( iSize ) { assert ( iSize > 0 ); m_pBuffer = new char[iSize]; m_pCurrent = m_pBuffer; } ~CSphBuffer () { SafeDeleteArray ( m_pBuffer ); } const char * Ptr() const { return m_pBuffer; } bool Finalize() { return !( m_bOverrun || m_iLeft!=0 || ( m_pCurrent - m_pBuffer )!=m_iSize ); } void SendBytes ( const void * pBytes, int iBytes ); void SendWord ( short int v ) { v = ntohs(v); SendBytes ( &v, sizeof(v) ); } // NOLINT void SendInt ( int v ) { v = ntohl(v); SendBytes ( &v, sizeof(v) ); } void SendDword ( DWORD v ) { v = ntohl(v) ;SendBytes ( &v, sizeof(v) ); } void SendUint64 ( ulonglong v ) { SendDword ( uint ( v>>32 ) ); SendDword ( uint ( v&0xFFFFFFFFUL ) ); } void SendString ( const char * v ) { SendString ( v, strlen(v) ); } void SendString ( const char * v, int iLen ) { SendDword(iLen); SendBytes ( v, iLen ); } void SendFloat ( float v ) { SendDword ( sphF2DW(v) ); } }; void CSphBuffer::SendBytes ( 
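/*
 * A minimal sketch of how the CSphBuffer helper and the searchd protocol
 * constants above fit together. Illustrative only -- the names tReq and
 * iBodyLen are not from this file; the real request is assembled in
 * sphinx_snippets() further below:
 *
 *   CSphBuffer tReq ( 8+iBodyLen );             // total size: 8-byte header plus body
 *   tReq.SendWord ( SEARCHD_COMMAND_EXCERPT );  // 16-bit command id
 *   tReq.SendWord ( VER_COMMAND_EXCERPT );      // 16-bit command version
 *   tReq.SendDword ( iBodyLen );                // 32-bit body length
 *   // ... body: flags, index, query, options, document(s) ...
 *   if ( tReq.Finalize() )                      // true only if exactly 8+iBodyLen bytes were written
 *       sphSend ( iSocket, tReq.Ptr(), 8+iBodyLen, sphReportErrors );
 */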
const void * pBytes, int iBytes ) { if ( m_iLeft < iBytes ) { m_bOverrun = true; return; } memcpy ( m_pCurrent, pBytes, iBytes ); m_pCurrent += iBytes; m_iLeft -= iBytes; } struct CSphUrl { char * m_sBuffer; char * m_sFormatted; char * m_sScheme; char * m_sHost; char * m_sIndex; int m_iPort; CSphUrl() : m_sBuffer ( NULL ) , m_sFormatted ( NULL ) , m_sScheme ( SPHINXSE_DEFAULT_SCHEME ) , m_sHost ( SPHINXSE_DEFAULT_HOST ) , m_sIndex ( SPHINXSE_DEFAULT_INDEX ) , m_iPort ( SPHINXSE_DEFAULT_PORT ) {} ~CSphUrl() { SafeDeleteArray ( m_sFormatted ); SafeDeleteArray ( m_sBuffer ); } bool Parse ( const char * sUrl, int iLen ); int Connect(); const char * Format(); }; const char * CSphUrl::Format() { if ( !m_sFormatted ) { int iSize = 15 + strlen(m_sHost) + strlen(m_sIndex); m_sFormatted = new char [ iSize ]; if ( m_iPort ) snprintf ( m_sFormatted, iSize, "inet://%s:%d/%s", m_sHost, m_iPort, m_sIndex ); else snprintf ( m_sFormatted, iSize, "unix://%s/%s", m_sHost, m_sIndex ); } return m_sFormatted; } // the following scheme variants are recognized // // inet://host/index // inet://host:port/index // unix://unix/domain/socket:index // unix://unix/domain/socket bool CSphUrl::Parse ( const char * sUrl, int iLen ) { bool bOk = true; while ( iLen ) { bOk = false; m_sBuffer = sphDup ( sUrl, iLen ); m_sScheme = m_sBuffer; m_sHost = strstr ( m_sBuffer, "://" ); if ( !m_sHost ) break; m_sHost[0] = '\0'; m_sHost += 2; if ( !strcmp ( m_sScheme, "unix" ) ) { // unix-domain socket m_iPort = 0; if (!( m_sIndex = strrchr ( m_sHost, ':' ) )) m_sIndex = SPHINXSE_DEFAULT_INDEX; else { *m_sIndex++ = '\0'; if ( !*m_sIndex ) m_sIndex = SPHINXSE_DEFAULT_INDEX; } bOk = true; break; } if ( strcmp ( m_sScheme, "sphinx" )!=0 && strcmp ( m_sScheme, "inet" )!=0 ) break; // inet m_sHost++; char * sPort = strchr ( m_sHost, ':' ); if ( sPort ) { *sPort++ = '\0'; if ( *sPort ) { m_sIndex = strchr ( sPort, '/' ); if ( m_sIndex ) *m_sIndex++ = '\0'; else m_sIndex = SPHINXSE_DEFAULT_INDEX; m_iPort = atoi(sPort); if ( !m_iPort ) m_iPort = SPHINXSE_DEFAULT_PORT; } } else { m_sIndex = strchr ( m_sHost, '/' ); if ( m_sIndex ) *m_sIndex++ = '\0'; else m_sIndex = SPHINXSE_DEFAULT_INDEX; } bOk = true; break; } return bOk; } int CSphUrl::Connect() { struct sockaddr_in sin; #ifndef __WIN__ struct sockaddr_un saun; #endif int iDomain = 0; int iSockaddrSize = 0; struct sockaddr * pSockaddr = NULL; in_addr_t ip_addr; if ( m_iPort ) { iDomain = AF_INET; iSockaddrSize = sizeof(sin); pSockaddr = (struct sockaddr *) &sin; memset ( &sin, 0, sizeof(sin) ); sin.sin_family = AF_INET; sin.sin_port = htons ( m_iPort ); // resolve address if ( (int)( ip_addr = inet_addr ( m_sHost ) )!=(int)INADDR_NONE ) memcpy ( &sin.sin_addr, &ip_addr, sizeof(ip_addr) ); else { int tmp_errno; struct hostent tmp_hostent, *hp; char buff2 [ GETHOSTBYNAME_BUFF_SIZE ]; hp = my_gethostbyname_r ( m_sHost, &tmp_hostent, buff2, sizeof(buff2), &tmp_errno ); if ( !hp ) { my_gethostbyname_r_free(); char sError[256]; snprintf ( sError, sizeof(sError), "failed to resolve searchd host (name=%s)", m_sHost ); my_error ( ER_CONNECT_TO_FOREIGN_DATA_SOURCE, MYF(0), sError ); return -1; } memcpy ( &sin.sin_addr, hp->h_addr, Min ( sizeof(sin.sin_addr), (size_t)hp->h_length ) ); my_gethostbyname_r_free(); } } else { #ifndef __WIN__ iDomain = AF_UNIX; iSockaddrSize = sizeof(saun); pSockaddr = (struct sockaddr *) &saun; memset ( &saun, 0, sizeof(saun) ); saun.sun_family = AF_UNIX; strncpy ( saun.sun_path, m_sHost, sizeof(saun.sun_path)-1 ); #else my_error ( ER_CONNECT_TO_FOREIGN_DATA_SOURCE, 
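/*
 * For reference, connection strings accepted by CSphUrl::Parse() above map to
 * fields roughly as follows. This is a sketch derived from the parsing code,
 * not an exhaustive list; "sphinx://" is treated as a synonym for "inet://":
 *
 *   "inet://192.168.0.1:9312/test1"     -> TCP host 192.168.0.1, port 9312, index "test1"
 *   "inet://192.168.0.1"                -> TCP host 192.168.0.1, default port 9312, default index "*"
 *   "unix://var/run/searchd.sock:test1" -> unix socket /var/run/searchd.sock, index "test1"
 *
 * With no connection string at all, the defaults above apply:
 * inet, 127.0.0.1:9312, index "*".
 */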
MYF(0), "Unix-domain sockets are not supported on Windows" ); return -1; #endif } // connect to searchd and exchange versions uint uServerVersion; uint uClientVersion = htonl ( SPHINX_SEARCHD_PROTO ); int iSocket = -1; char * pError = NULL; do { iSocket = socket ( iDomain, SOCK_STREAM, 0 ); if ( iSocket==-1 ) { pError = "Failed to create client socket"; break; } if ( connect ( iSocket, pSockaddr, iSockaddrSize )==-1 ) { pError = "Failed to connect to searchd"; break; } if ( !sphRecv ( iSocket, (char *)&uServerVersion, sizeof(uServerVersion) ) ) { pError = "Failed to receive searchd version"; break; } if ( !sphSend ( iSocket, (char *)&uClientVersion, sizeof(uClientVersion) ) ) { pError = "Failed to send client version"; break; } } while(0); // fixme: compare versions? if ( pError ) { char sError[1024]; snprintf ( sError, sizeof(sError), "%s [%d] %s", Format(), errno, strerror(errno) ); my_error ( ER_CONNECT_TO_FOREIGN_DATA_SOURCE, MYF(0), sError ); if ( iSocket!=-1 ) close ( iSocket ); return -1; } return iSocket; } struct CSphResponse { char * m_pBuffer; char * m_pBody; CSphResponse () : m_pBuffer ( NULL ) , m_pBody ( NULL ) {} explicit CSphResponse ( DWORD uSize ) : m_pBody ( NULL ) { m_pBuffer = new char[uSize]; } ~CSphResponse () { SafeDeleteArray ( m_pBuffer ); } static CSphResponse * Read ( int iSocket, int iClientVersion ); }; CSphResponse * CSphResponse::Read ( int iSocket, int iClientVersion ) { char sHeader[8]; if ( !sphRecv ( iSocket, sHeader, sizeof(sHeader) ) ) return NULL; int iStatus = ntohs ( sphUnalignedRead ( *(short int *) &sHeader[0] ) ); int iVersion = ntohs ( sphUnalignedRead ( *(short int *) &sHeader[2] ) ); DWORD uLength = ntohl ( sphUnalignedRead ( *(DWORD *) &sHeader[4] ) ); if ( iVersionm_pBuffer, uLength ) ) { SafeDelete ( pResponse ); return NULL; } pResponse->m_pBody = pResponse->m_pBuffer; if ( iStatus!=SEARCHD_OK ) { DWORD uSize = ntohl ( *(DWORD *)pResponse->m_pBuffer ); if ( iStatus==SEARCHD_WARNING ) { pResponse->m_pBody += uSize; // fixme: report the warning somehow } else { char * sMessage = sphDup ( pResponse->m_pBuffer + sizeof(DWORD), uSize ); my_error ( ER_QUERY_ON_FOREIGN_DATA_SOURCE, MYF(0), sMessage ); SafeDeleteArray ( sMessage ); SafeDelete ( pResponse ); return NULL; } } return pResponse; } return NULL; } /// udf extern "C" { my_bool sphinx_snippets_init ( UDF_INIT * pUDF, UDF_ARGS * pArgs, char * sMessage ); void sphinx_snippets_deinit ( UDF_INIT * pUDF ); char * sphinx_snippets ( UDF_INIT * pUDF, UDF_ARGS * pArgs, char * sResult, unsigned long * pLength, char * pIsNull, char * sError ); }; #define MAX_MESSAGE_LENGTH 255 #define MAX_RESULT_LENGTH 255 struct CSphSnippets { CSphUrl m_tUrl; CSphResponse * m_pResponse; int m_iBeforeMatch; int m_iAfterMatch; int m_iChunkSeparator; int m_iStripMode; int m_iPassageBoundary; int m_iLimit; int m_iLimitWords; int m_iLimitPassages; int m_iAround; int m_iPassageId; int m_iFlags; CSphSnippets() : m_pResponse(NULL) , m_iBeforeMatch(0) , m_iAfterMatch(0) , m_iChunkSeparator(0) , m_iStripMode(0) , m_iPassageBoundary(0) // defaults , m_iLimit(256) , m_iLimitWords(0) , m_iLimitPassages(0) , m_iAround(5) , m_iPassageId(1) , m_iFlags(1) { } ~CSphSnippets() { SafeDelete ( m_pResponse ); } }; #define KEYWORD(NAME) else if ( strncmp ( NAME, pArgs->attributes[i], pArgs->attribute_lengths[i] )==0 ) #define CHECK_TYPE(TYPE) \ if ( pArgs->arg_type[i]!=TYPE ) \ { \ snprintf ( sMessage, MAX_MESSAGE_LENGTH, \ "%.*s argument must be a string", \ (int)pArgs->attribute_lengths[i], \ pArgs->attributes[i] ); \ bFail = true; \ 
break; \ } \ if ( TYPE==STRING_RESULT && !pArgs->args[i] ) \ { \ snprintf ( sMessage, MAX_MESSAGE_LENGTH, \ "%.*s argument must be constant (and not NULL)", \ (int)pArgs->attribute_lengths[i], \ pArgs->attributes[i] ); \ bFail = true; \ break; \ } #define STRING CHECK_TYPE(STRING_RESULT) #define INT CHECK_TYPE(INT_RESULT); int iValue = *(long long *)pArgs->args[i] my_bool sphinx_snippets_init ( UDF_INIT * pUDF, UDF_ARGS * pArgs, char * sMessage ) { if ( pArgs->arg_count < 3 ) { strncpy ( sMessage, "insufficient arguments", MAX_MESSAGE_LENGTH ); return 1; } bool bFail = false; CSphSnippets * pOpts = new CSphSnippets; for ( uint i = 0; i < pArgs->arg_count; i++ ) { if ( i < 3 ) { if ( pArgs->arg_type[i]!=STRING_RESULT ) { strncpy ( sMessage, "first three arguments must be of string type", MAX_MESSAGE_LENGTH ); bFail = true; break; } } KEYWORD("sphinx") { STRING; if ( !pOpts->m_tUrl.Parse ( pArgs->args[i], pArgs->lengths[i] ) ) { strncpy ( sMessage, "failed to parse connection string", MAX_MESSAGE_LENGTH ); bFail = true; break; } } KEYWORD("before_match") { STRING; pOpts->m_iBeforeMatch = i; } KEYWORD("after_match") { STRING; pOpts->m_iAfterMatch = i; } KEYWORD("chunk_separator") { STRING; pOpts->m_iChunkSeparator = i; } KEYWORD("html_strip_mode") { STRING; pOpts->m_iStripMode = i; } KEYWORD("passage_boundary") { STRING; pOpts->m_iPassageBoundary = i; } KEYWORD("limit") { INT; pOpts->m_iLimit = iValue; } KEYWORD("limit_words") { INT; pOpts->m_iLimitWords = iValue; } KEYWORD("limit_passages") { INT; pOpts->m_iLimitPassages = iValue; } KEYWORD("around") { INT; pOpts->m_iAround = iValue; } KEYWORD("start_passage_id") { INT; pOpts->m_iPassageId = iValue; } KEYWORD("exact_phrase") { INT; if ( iValue ) pOpts->m_iFlags |= 2; } KEYWORD("single_passage") { INT; if ( iValue ) pOpts->m_iFlags |= 4; } KEYWORD("use_boundaries") { INT; if ( iValue ) pOpts->m_iFlags |= 8; } KEYWORD("weight_order") { INT; if ( iValue ) pOpts->m_iFlags |= 16; } KEYWORD("query_mode") { INT; if ( iValue ) pOpts->m_iFlags |= 32; } KEYWORD("force_all_words") { INT; if ( iValue ) pOpts->m_iFlags |= 64; } KEYWORD("load_files") { INT; if ( iValue ) pOpts->m_iFlags |= 128; } KEYWORD("allow_empty") { INT; if ( iValue ) pOpts->m_iFlags |= 256; } KEYWORD("emit_zones") { INT; if ( iValue ) pOpts->m_iFlags |= 512; } else { snprintf ( sMessage, MAX_MESSAGE_LENGTH, "unrecognized argument: %.*s", (int)pArgs->attribute_lengths[i], pArgs->attributes[i] ); bFail = true; break; } } if ( bFail ) { SafeDelete ( pOpts ); return 1; } pUDF->ptr = (char *)pOpts; return 0; } #undef STRING #undef INT #undef KEYWORD #undef CHECK_TYPE #define ARG(i) pArgs->args[i], pArgs->lengths[i] #define ARG_LEN(VAR, LEN) ( VAR ? 
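/*
 * Putting the option keywords above in context: from SQL the UDF takes the
 * document text, the index name, and the query as its first three (string)
 * arguments, and each option is passed as an extra argument named via AS so
 * that its name shows up in pArgs->attributes[]. A hedged usage sketch -- the
 * index name, option values, and the SONAME of the installed library are
 * illustrative assumptions, not taken from this file:
 *
 *   CREATE FUNCTION sphinx_snippets RETURNS STRING SONAME 'sphinx.so';
 *   SELECT sphinx_snippets ( 'hello world document text', 'test1', 'hello',
 *       'sphinx://127.0.0.1:9312' AS sphinx, '<b>' AS before_match,
 *       '</b>' AS after_match, 5 AS around, 1 AS exact_phrase );
 */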
pArgs->lengths[VAR] : LEN ) #define SEND_STRING(INDEX, DEFAULT) \ if ( INDEX ) \ tBuffer.SendString ( ARG(INDEX) ); \ else \ tBuffer.SendString ( DEFAULT, sizeof(DEFAULT) - 1 ); char * sphinx_snippets ( UDF_INIT * pUDF, UDF_ARGS * pArgs, char * sResult, unsigned long * pLength, char * pIsNull, char * pError ) { CSphSnippets * pOpts = (CSphSnippets *)pUDF->ptr; assert ( pOpts ); if ( !pArgs->args[0] || !pArgs->args[1] || !pArgs->args[2] ) { *pIsNull = 1; return sResult; } const int iSize = 68 + pArgs->lengths[1] + // index pArgs->lengths[2] + // words ARG_LEN ( pOpts->m_iBeforeMatch, 3 ) + ARG_LEN ( pOpts->m_iAfterMatch, 4 ) + ARG_LEN ( pOpts->m_iChunkSeparator, 5 ) + ARG_LEN ( pOpts->m_iStripMode, 5 ) + ARG_LEN ( pOpts->m_iPassageBoundary, 0 ) + 4 + pArgs->lengths[0]; // document CSphBuffer tBuffer(iSize); tBuffer.SendWord ( SEARCHD_COMMAND_EXCERPT ); tBuffer.SendWord ( VER_COMMAND_EXCERPT ); tBuffer.SendDword ( iSize - 8 ); tBuffer.SendDword ( 0 ); tBuffer.SendDword ( pOpts->m_iFlags ); tBuffer.SendString ( ARG(1) ); // index tBuffer.SendString ( ARG(2) ); // words SEND_STRING ( pOpts->m_iBeforeMatch, "" ); SEND_STRING ( pOpts->m_iAfterMatch, "" ); SEND_STRING ( pOpts->m_iChunkSeparator, " ... " ); tBuffer.SendInt ( pOpts->m_iLimit ); tBuffer.SendInt ( pOpts->m_iAround ); tBuffer.SendInt ( pOpts->m_iLimitPassages ); tBuffer.SendInt ( pOpts->m_iLimitWords ); tBuffer.SendInt ( pOpts->m_iPassageId ); SEND_STRING ( pOpts->m_iStripMode, "index" ); SEND_STRING ( pOpts->m_iPassageBoundary, "" ); // single document tBuffer.SendInt ( 1 ); tBuffer.SendString ( ARG(0) ); int iSocket = -1; do { if ( !tBuffer.Finalize() ) { my_error ( ER_QUERY_ON_FOREIGN_DATA_SOURCE, MYF(0), "INTERNAL ERROR: failed to build request" ); break; } iSocket = pOpts->m_tUrl.Connect(); if ( iSocket==-1 ) break; if ( !sphSend ( iSocket, tBuffer.Ptr(), iSize, sphReportErrors ) ) break; CSphResponse * pResponse = CSphResponse::Read ( iSocket, VER_COMMAND_EXCERPT ); if ( !pResponse ) break; close ( iSocket ); pOpts->m_pResponse = pResponse; *pLength = ntohl ( *(DWORD *)pResponse->m_pBody ); return pResponse->m_pBody + sizeof(DWORD); } while(0); if ( iSocket!=-1 ) close ( iSocket ); *pError = 1; return sResult; } #undef SEND_STRING #undef ARG_LEN #undef ARG void sphinx_snippets_deinit ( UDF_INIT * pUDF ) { CSphSnippets * pOpts = (CSphSnippets *)pUDF->ptr; SafeDelete ( pOpts ); } // // $Id: snippets_udf.cc 3087 2012-01-30 23:07:35Z shodan $ // sphinx-2.0.4-release/mysqlse/sphinx.5.0.27.diff0000644000176700017710000002467510540355225020352 0ustar deogardeogardiff -B -N -r -u mysql-5.0.22/config/ac-macros/ha_sphinx.m4 mysql-5.0.22.sx/config/ac-macros/ha_sphinx.m4 --- mysql-5.0.22/config/ac-macros/ha_sphinx.m4 1970-01-01 01:00:00.000000000 +0100 +++ mysql-5.0.22.sx/config/ac-macros/ha_sphinx.m4 2006-06-06 19:49:38.000000000 +0200 @@ -0,0 +1,30 @@ +dnl --------------------------------------------------------------------------- +dnl Macro: MYSQL_CHECK_EXAMPLEDB +dnl Sets HAVE_SPHINX_DB if --with-sphinx-storage-engine is used +dnl --------------------------------------------------------------------------- +AC_DEFUN([MYSQL_CHECK_SPHINXDB], [ + AC_ARG_WITH([sphinx-storage-engine], + [ + --with-sphinx-storage-engine + Enable the Sphinx Storage Engine], + [sphinxdb="$withval"], + [sphinxdb=no]) + AC_MSG_CHECKING([for example storage engine]) + + case "$sphinxdb" in + yes ) + AC_DEFINE([HAVE_SPHINX_DB], [1], [Builds Sphinx Engine]) + AC_MSG_RESULT([yes]) + [sphinxdb=yes] + ;; + * ) + AC_MSG_RESULT([no]) + [sphinxdb=no] + ;; + esac + +]) +dnl 
--------------------------------------------------------------------------- +dnl END OF MYSQL_CHECK_EXAMPLE SECTION +dnl --------------------------------------------------------------------------- + diff -B -N -r -u mysql-5.0.22/configure.in mysql-5.0.22.sx/configure.in --- mysql-5.0.22/configure.in 2006-05-25 10:56:45.000000000 +0200 +++ mysql-5.0.22.sx/configure.in 2006-06-06 19:49:38.000000000 +0200 @@ -41,6 +41,7 @@ sinclude(config/ac-macros/ha_berkeley.m4) sinclude(config/ac-macros/ha_blackhole.m4) sinclude(config/ac-macros/ha_example.m4) +sinclude(config/ac-macros/ha_sphinx.m4) sinclude(config/ac-macros/ha_federated.m4) sinclude(config/ac-macros/ha_innodb.m4) sinclude(config/ac-macros/ha_ndbcluster.m4) @@ -2450,6 +2451,7 @@ MYSQL_CHECK_BDB MYSQL_CHECK_INNODB MYSQL_CHECK_EXAMPLEDB +MYSQL_CHECK_SPHINXDB MYSQL_CHECK_ARCHIVEDB MYSQL_CHECK_CSVDB MYSQL_CHECK_BLACKHOLEDB diff -B -N -r -u mysql-5.0.22/libmysqld/Makefile.am mysql-5.0.22.sx/libmysqld/Makefile.am --- mysql-5.0.22/libmysqld/Makefile.am 2006-05-25 10:56:55.000000000 +0200 +++ mysql-5.0.22.sx/libmysqld/Makefile.am 2006-06-06 19:49:38.000000000 +0200 @@ -27,7 +27,7 @@ -DSHAREDIR="\"$(MYSQLSHAREdir)\"" INCLUDES= @bdb_includes@ \ -I$(top_builddir)/include -I$(top_srcdir)/include \ - -I$(top_srcdir)/sql -I$(top_srcdir)/sql/examples \ + -I$(top_srcdir)/sql -I$(top_srcdir)/sql/examples -I$(top_srcdir)/sql/sphinx \ -I$(top_srcdir)/regex \ $(openssl_includes) $(yassl_includes) @ZLIB_INCLUDES@ @@ -38,6 +38,7 @@ libmysqlsources = errmsg.c get_password.c libmysql.c client.c pack.c \ my_time.c sqlexamplessources = ha_example.cc ha_tina.cc +sqlsphinxsources = ha_sphinx.cc noinst_HEADERS = embedded_priv.h emb_qcache.h @@ -65,7 +66,7 @@ parse_file.cc sql_view.cc sql_trigger.cc my_decimal.cc \ ha_blackhole.cc ha_archive.cc my_user.c -libmysqld_int_a_SOURCES= $(libmysqld_sources) $(libmysqlsources) $(sqlsources) $(sqlexamplessources) +libmysqld_int_a_SOURCES= $(libmysqld_sources) $(libmysqlsources) $(sqlsources) $(sqlexamplessources) $(sqlsphinxsources) libmysqld_a_SOURCES= # automake misses these @@ -133,12 +134,16 @@ rm -f $$f; \ @LN_CP_F@ $(top_srcdir)/sql/examples/$$f $$f; \ done; \ + for f in $(sqlsphinxsources); do \ + rm -f $$f; \ + @LN_CP_F@ $(top_srcdir)/sql/sphinx/$$f $$f; \ + done; \ rm -f client_settings.h; \ @LN_CP_F@ $(top_srcdir)/libmysql/client_settings.h client_settings.h clean-local: - rm -f `echo $(sqlsources) $(libmysqlsources) $(sqlexamplessources) | sed "s;\.lo;.c;g"` \ + rm -f `echo $(sqlsources) $(libmysqlsources) $(sqlexamplessources) $(sqlsphinxsources) | sed "s;\.lo;.c;g"` \ $(top_srcdir)/linked_libmysqld_sources; \ rm -f client_settings.h diff -B -N -r -u mysql-5.0.22/sql/handler.cc mysql-5.0.22.sx/sql/handler.cc --- mysql-5.0.22/sql/handler.cc 2006-05-25 10:56:42.000000000 +0200 +++ mysql-5.0.22.sx/sql/handler.cc 2006-06-06 19:49:38.000000000 +0200 @@ -78,6 +78,15 @@ NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, HTON_NO_FLAGS }; #endif +#ifdef HAVE_SPHINX_DB +#include "sphinx/ha_sphinx.h" +extern handlerton sphinx_hton; +#else +handlerton sphinx_hton = { "SPHINX", SHOW_OPTION_NO, "SPHINX storage engine", + DB_TYPE_SPHINX_DB, NULL, 0, 0, NULL, NULL, + NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, + HTON_NO_FLAGS }; +#endif #ifdef HAVE_INNOBASE_DB #include "ha_innodb.h" extern handlerton innobase_hton; @@ -147,6 +156,7 @@ &example_hton, &archive_hton, &tina_hton, + &sphinx_hton, &ndbcluster_hton, &federated_hton, &myisammrg_hton, @@ -345,6 +355,12 @@ return new (alloc) 
ha_tina(table); return NULL; #endif +#ifdef HAVE_SPHINX_DB + case DB_TYPE_SPHINX_DB: + if (have_sphinx_db == SHOW_OPTION_YES) + return new (alloc) ha_sphinx(table); + return NULL; +#endif #ifdef HAVE_NDBCLUSTER_DB case DB_TYPE_NDBCLUSTER: if (have_ndbcluster == SHOW_OPTION_YES) diff -B -N -r -u mysql-5.0.22/sql/handler.h mysql-5.0.22.sx/sql/handler.h --- mysql-5.0.22/sql/handler.h 2006-05-25 10:56:55.000000000 +0200 +++ mysql-5.0.22.sx/sql/handler.h 2006-06-06 19:49:38.000000000 +0200 @@ -183,8 +183,9 @@ DB_TYPE_BERKELEY_DB, DB_TYPE_INNODB, DB_TYPE_GEMINI, DB_TYPE_NDBCLUSTER, DB_TYPE_EXAMPLE_DB, DB_TYPE_ARCHIVE_DB, DB_TYPE_CSV_DB, - DB_TYPE_FEDERATED_DB, + DB_TYPE_FEDERATED_DB, DB_TYPE_BLACKHOLE_DB, + DB_TYPE_SPHINX_DB, DB_TYPE_DEFAULT // Must be last }; diff -B -N -r -u mysql-5.0.22/sql/Makefile.am mysql-5.0.22.sx/sql/Makefile.am --- mysql-5.0.22/sql/Makefile.am 2006-05-25 10:56:41.000000000 +0200 +++ mysql-5.0.22.sx/sql/Makefile.am 2006-06-06 19:49:38.000000000 +0200 @@ -66,6 +66,7 @@ sql_array.h sql_cursor.h \ examples/ha_example.h ha_archive.h \ examples/ha_tina.h ha_blackhole.h \ + sphinx/ha_sphinx.h \ ha_federated.h mysqld_SOURCES = sql_lex.cc sql_handler.cc \ item.cc item_sum.cc item_buff.cc item_func.cc \ @@ -102,6 +103,7 @@ sp_cache.cc parse_file.cc sql_trigger.cc \ examples/ha_example.cc ha_archive.cc \ examples/ha_tina.cc ha_blackhole.cc \ + sphinx/ha_sphinx.cc \ ha_federated.cc gen_lex_hash_SOURCES = gen_lex_hash.cc diff -B -N -r -u mysql-5.0.22/sql/mysqld.cc mysql-5.0.22.sx/sql/mysqld.cc --- mysql-5.0.22/sql/mysqld.cc 2006-05-25 10:56:41.000000000 +0200 +++ mysql-5.0.22.sx/sql/mysqld.cc 2006-06-06 19:49:38.000000000 +0200 @@ -6420,6 +6420,11 @@ #else have_csv_db= SHOW_OPTION_NO; #endif +#ifdef HAVE_SPHINX_DB + have_sphinx_db= SHOW_OPTION_YES; +#else + have_sphinx_db= SHOW_OPTION_NO; +#endif #ifdef HAVE_NDBCLUSTER_DB have_ndbcluster=SHOW_OPTION_DISABLED; #else @@ -7457,6 +7462,7 @@ #undef have_example_db #undef have_archive_db #undef have_csv_db +#undef have_sphinx_db #undef have_federated_db #undef have_partition_db #undef have_blackhole_db @@ -7467,6 +7473,7 @@ SHOW_COMP_OPTION have_example_db= SHOW_OPTION_NO; SHOW_COMP_OPTION have_archive_db= SHOW_OPTION_NO; SHOW_COMP_OPTION have_csv_db= SHOW_OPTION_NO; +SHOW_COMP_OPTION have_sphinx_db= SHOW_OPTION_NO; SHOW_COMP_OPTION have_federated_db= SHOW_OPTION_NO; SHOW_COMP_OPTION have_partition_db= SHOW_OPTION_NO; SHOW_COMP_OPTION have_blackhole_db= SHOW_OPTION_NO; diff -B -N -r -u mysql-5.0.22/sql/mysql_priv.h mysql-5.0.22.sx/sql/mysql_priv.h --- mysql-5.0.22/sql/mysql_priv.h 2006-05-25 10:56:43.000000000 +0200 +++ mysql-5.0.22.sx/sql/mysql_priv.h 2006-06-06 19:49:38.000000000 +0200 @@ -1279,6 +1279,12 @@ #else extern SHOW_COMP_OPTION have_csv_db; #endif +#ifdef HAVE_SPHINX_DB +extern handlerton sphinx_hton; +#define have_sphinx_db sphinx_hton.state +#else +extern SHOW_COMP_OPTION have_sphinx_db; +#endif #ifdef HAVE_FEDERATED_DB extern handlerton federated_hton; #define have_federated_db federated_hton.state diff -B -N -r -u mysql-5.0.22/sql/set_var.cc mysql-5.0.22.sx/sql/set_var.cc --- mysql-5.0.22/sql/set_var.cc 2006-05-25 10:56:41.000000000 +0200 +++ mysql-5.0.22.sx/sql/set_var.cc 2006-06-06 19:49:38.000000000 +0200 @@ -864,6 +864,7 @@ {"have_compress", (char*) &have_compress, SHOW_HAVE}, {"have_crypt", (char*) &have_crypt, SHOW_HAVE}, {"have_csv", (char*) &have_csv_db, SHOW_HAVE}, + {"have_sphinx", (char*) &have_sphinx_db, SHOW_HAVE}, {"have_dynamic_loading", (char*) &have_dlopen, SHOW_HAVE}, {"have_example_engine", (char*) 
&have_example_db, SHOW_HAVE}, {"have_federated_engine", (char*) &have_federated_db, SHOW_HAVE}, diff -B -N -r -u mysql-5.0.22/sql/sql_lex.h mysql-5.0.22.sx/sql/sql_lex.h --- mysql-5.0.22/sql/sql_lex.h 2006-05-25 10:56:41.000000000 +0200 +++ mysql-5.0.22.sx/sql/sql_lex.h 2006-06-06 19:49:38.000000000 +0200 @@ -58,6 +58,7 @@ SQLCOM_SHOW_DATABASES, SQLCOM_SHOW_TABLES, SQLCOM_SHOW_FIELDS, SQLCOM_SHOW_KEYS, SQLCOM_SHOW_VARIABLES, SQLCOM_SHOW_LOGS, SQLCOM_SHOW_STATUS, SQLCOM_SHOW_INNODB_STATUS, SQLCOM_SHOW_NDBCLUSTER_STATUS, SQLCOM_SHOW_MUTEX_STATUS, + SQLCOM_SHOW_SPHINX_STATUS, SQLCOM_SHOW_PROCESSLIST, SQLCOM_SHOW_MASTER_STAT, SQLCOM_SHOW_SLAVE_STAT, SQLCOM_SHOW_GRANTS, SQLCOM_SHOW_CREATE, SQLCOM_SHOW_CHARSETS, SQLCOM_SHOW_COLLATIONS, SQLCOM_SHOW_CREATE_DB, SQLCOM_SHOW_TABLE_STATUS, diff -B -N -r -u mysql-5.0.22/sql/sql_parse.cc mysql-5.0.22.sx/sql/sql_parse.cc --- mysql-5.0.22/sql/sql_parse.cc 2006-05-25 10:56:41.000000000 +0200 +++ mysql-5.0.22.sx/sql/sql_parse.cc 2006-06-06 19:49:38.000000000 +0200 @@ -25,6 +25,9 @@ #ifdef HAVE_INNOBASE_DB #include "ha_innodb.h" #endif +#ifdef HAVE_SPHINX_DB +#include "sphinx/ha_sphinx.h" +#endif #ifdef HAVE_NDBCLUSTER_DB #include "ha_ndbcluster.h" @@ -2722,6 +2725,15 @@ break; } #endif +#ifdef HAVE_SPHINX_DB + case SQLCOM_SHOW_SPHINX_STATUS: + { + if (check_global_access(thd, SUPER_ACL)) + goto error; + res = sphinx_show_status(thd); + break; + } +#endif #ifdef HAVE_REPLICATION case SQLCOM_LOAD_MASTER_TABLE: { diff -B -N -r -u mysql-5.0.22/sql/sql_yacc.yy mysql-5.0.22.sx/sql/sql_yacc.yy --- mysql-5.0.22/sql/sql_yacc.yy 2006-05-25 10:56:43.000000000 +0200 +++ mysql-5.0.22.sx/sql/sql_yacc.yy 2006-06-06 19:49:38.000000000 +0200 @@ -6584,6 +6584,9 @@ case DB_TYPE_INNODB: Lex->sql_command = SQLCOM_SHOW_INNODB_STATUS; break; + case DB_TYPE_SPHINX_DB: + Lex->sql_command = SQLCOM_SHOW_SPHINX_STATUS; + break; default: my_error(ER_NOT_SUPPORTED_YET, MYF(0), "STATUS"); YYABORT; sphinx-2.0.4-release/mysqlse/make-patch.sh0000755000176700017710000000076311060272446020021 0ustar deogardeogar#!/bin/sh OUT=$1 ORIG=$2 NEW=$3 if [ ! 
\( "$1" -a "$2" -a "$3" \) ]; then echo "$0 " exit 1 fi FILES=' /config/ac-macros/ha_sphinx.m4 /configure.in /libmysqld/Makefile.am /sql/handler.cc /sql/handler.h /sql/Makefile.am /sql/mysqld.cc /sql/mysql_priv.h /sql/set_var.cc /sql/sql_lex.h /sql/sql_parse.cc /sql/sql_yacc.yy /sql/structs.h /sql/sql_show.cc ' rm -f $OUT if [ -e $OUT ]; then exit 1 fi for name in $FILES; do diff -BNru "$ORIG$name" "$NEW$name" >> $OUT done sphinx-2.0.4-release/mysqlse/sphinx.5.0.22.diff0000644000176700017710000002467210441500053020331 0ustar deogardeogardiff -B -N -r -u mysql-5.0.22/config/ac-macros/ha_sphinx.m4 mysql-5.0.22.sx/config/ac-macros/ha_sphinx.m4 --- mysql-5.0.22/config/ac-macros/ha_sphinx.m4 1970-01-01 01:00:00.000000000 +0100 +++ mysql-5.0.22.sx/config/ac-macros/ha_sphinx.m4 2006-06-06 19:49:38.000000000 +0200 @@ -0,0 +1,30 @@ +dnl --------------------------------------------------------------------------- +dnl Macro: MYSQL_CHECK_EXAMPLEDB +dnl Sets HAVE_SPHINX_DB if --with-sphinx-storage-engine is used +dnl --------------------------------------------------------------------------- +AC_DEFUN([MYSQL_CHECK_SPHINXDB], [ + AC_ARG_WITH([sphinx-storage-engine], + [ + --with-sphinx-storage-engine + Enable the Sphinx Storage Engine], + [sphinxdb="$withval"], + [sphinxdb=no]) + AC_MSG_CHECKING([for example storage engine]) + + case "$sphinxdb" in + yes ) + AC_DEFINE([HAVE_SPHINX_DB], [1], [Builds Sphinx Engine]) + AC_MSG_RESULT([yes]) + [sphinxdb=yes] + ;; + * ) + AC_MSG_RESULT([no]) + [sphinxdb=no] + ;; + esac + +]) +dnl --------------------------------------------------------------------------- +dnl END OF MYSQL_CHECK_EXAMPLE SECTION +dnl --------------------------------------------------------------------------- + diff -B -N -r -u mysql-5.0.22/configure.in mysql-5.0.22.sx/configure.in --- mysql-5.0.22/configure.in 2006-05-25 10:56:45.000000000 +0200 +++ mysql-5.0.22.sx/configure.in 2006-06-06 19:49:38.000000000 +0200 @@ -41,6 +41,7 @@ sinclude(config/ac-macros/ha_berkeley.m4) sinclude(config/ac-macros/ha_blackhole.m4) sinclude(config/ac-macros/ha_example.m4) +sinclude(config/ac-macros/ha_sphinx.m4) sinclude(config/ac-macros/ha_federated.m4) sinclude(config/ac-macros/ha_innodb.m4) sinclude(config/ac-macros/ha_ndbcluster.m4) @@ -2450,6 +2451,7 @@ MYSQL_CHECK_BDB MYSQL_CHECK_INNODB MYSQL_CHECK_EXAMPLEDB +MYSQL_CHECK_SPHINXDB MYSQL_CHECK_ARCHIVEDB MYSQL_CHECK_CSVDB MYSQL_CHECK_BLACKHOLEDB diff -B -N -r -u mysql-5.0.22/libmysqld/Makefile.am mysql-5.0.22.sx/libmysqld/Makefile.am --- mysql-5.0.22/libmysqld/Makefile.am 2006-05-25 10:56:55.000000000 +0200 +++ mysql-5.0.22.sx/libmysqld/Makefile.am 2006-06-06 19:49:38.000000000 +0200 @@ -27,7 +27,7 @@ -DSHAREDIR="\"$(MYSQLSHAREdir)\"" INCLUDES= @bdb_includes@ \ -I$(top_builddir)/include -I$(top_srcdir)/include \ - -I$(top_srcdir)/sql -I$(top_srcdir)/sql/examples \ + -I$(top_srcdir)/sql -I$(top_srcdir)/sql/examples -I$(top_srcdir)/sql/sphinx \ -I$(top_srcdir)/regex \ $(openssl_includes) $(yassl_includes) @ZLIB_INCLUDES@ @@ -38,6 +38,7 @@ libmysqlsources = errmsg.c get_password.c libmysql.c client.c pack.c \ my_time.c sqlexamplessources = ha_example.cc ha_tina.cc +sqlsphinxsources = ha_sphinx.cc noinst_HEADERS = embedded_priv.h emb_qcache.h @@ -65,7 +66,7 @@ parse_file.cc sql_view.cc sql_trigger.cc my_decimal.cc \ ha_blackhole.cc ha_archive.cc my_user.c -libmysqld_int_a_SOURCES= $(libmysqld_sources) $(libmysqlsources) $(sqlsources) $(sqlexamplessources) +libmysqld_int_a_SOURCES= $(libmysqld_sources) $(libmysqlsources) $(sqlsources) $(sqlexamplessources) 
$(sqlsphinxsources) libmysqld_a_SOURCES= # automake misses these @@ -133,12 +134,16 @@ rm -f $$f; \ @LN_CP_F@ $(top_srcdir)/sql/examples/$$f $$f; \ done; \ + for f in $(sqlsphinxsources); do \ + rm -f $$f; \ + @LN_CP_F@ $(top_srcdir)/sql/sphinx/$$f $$f; \ + done; \ rm -f client_settings.h; \ @LN_CP_F@ $(top_srcdir)/libmysql/client_settings.h client_settings.h clean-local: - rm -f `echo $(sqlsources) $(libmysqlsources) $(sqlexamplessources) | sed "s;\.lo;.c;g"` \ + rm -f `echo $(sqlsources) $(libmysqlsources) $(sqlexamplessources) $(sqlsphinxsources) | sed "s;\.lo;.c;g"` \ $(top_srcdir)/linked_libmysqld_sources; \ rm -f client_settings.h diff -B -N -r -u mysql-5.0.22/sql/handler.cc mysql-5.0.22.sx/sql/handler.cc --- mysql-5.0.22/sql/handler.cc 2006-05-25 10:56:42.000000000 +0200 +++ mysql-5.0.22.sx/sql/handler.cc 2006-06-06 19:49:38.000000000 +0200 @@ -78,6 +78,15 @@ NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, HTON_NO_FLAGS }; #endif +#ifdef HAVE_SPHINX_DB +#include "sphinx/ha_sphinx.h" +extern handlerton sphinx_hton; +#else +handlerton sphinx_hton = { "SPHINX", SHOW_OPTION_NO, "SPHINX storage engine", + DB_TYPE_SPHINX_DB, NULL, 0, 0, NULL, NULL, + NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, + HTON_NO_FLAGS }; +#endif #ifdef HAVE_INNOBASE_DB #include "ha_innodb.h" extern handlerton innobase_hton; @@ -147,6 +156,7 @@ &example_hton, &archive_hton, &tina_hton, + &sphinx_hton, &ndbcluster_hton, &federated_hton, &myisammrg_hton, @@ -345,6 +355,12 @@ return new (alloc) ha_tina(table); return NULL; #endif +#ifdef HAVE_SPHINX_DB + case DB_TYPE_SPHINX_DB: + if (have_sphinx_db == SHOW_OPTION_YES) + return new (alloc) ha_sphinx(table); + return NULL; +#endif #ifdef HAVE_NDBCLUSTER_DB case DB_TYPE_NDBCLUSTER: if (have_ndbcluster == SHOW_OPTION_YES) diff -B -N -r -u mysql-5.0.22/sql/handler.h mysql-5.0.22.sx/sql/handler.h --- mysql-5.0.22/sql/handler.h 2006-05-25 10:56:55.000000000 +0200 +++ mysql-5.0.22.sx/sql/handler.h 2006-06-06 19:49:38.000000000 +0200 @@ -183,8 +183,9 @@ DB_TYPE_BERKELEY_DB, DB_TYPE_INNODB, DB_TYPE_GEMINI, DB_TYPE_NDBCLUSTER, DB_TYPE_EXAMPLE_DB, DB_TYPE_ARCHIVE_DB, DB_TYPE_CSV_DB, - DB_TYPE_FEDERATED_DB, + DB_TYPE_FEDERATED_DB, DB_TYPE_BLACKHOLE_DB, + DB_TYPE_SPHINX_DB, DB_TYPE_DEFAULT // Must be last }; diff -B -N -r -u mysql-5.0.22/sql/Makefile.am mysql-5.0.22.sx/sql/Makefile.am --- mysql-5.0.22/sql/Makefile.am 2006-05-25 10:56:41.000000000 +0200 +++ mysql-5.0.22.sx/sql/Makefile.am 2006-06-06 19:49:38.000000000 +0200 @@ -66,6 +66,7 @@ sql_array.h sql_cursor.h \ examples/ha_example.h ha_archive.h \ examples/ha_tina.h ha_blackhole.h \ + sphinx/ha_sphinx.h \ ha_federated.h mysqld_SOURCES = sql_lex.cc sql_handler.cc \ item.cc item_sum.cc item_buff.cc item_func.cc \ @@ -102,6 +103,7 @@ sp_cache.cc parse_file.cc sql_trigger.cc \ examples/ha_example.cc ha_archive.cc \ examples/ha_tina.cc ha_blackhole.cc \ + sphinx/ha_sphinx.cc \ ha_federated.cc gen_lex_hash_SOURCES = gen_lex_hash.cc diff -B -N -r -u mysql-5.0.22/sql/mysqld.cc mysql-5.0.22.sx/sql/mysqld.cc --- mysql-5.0.22/sql/mysqld.cc 2006-05-25 10:56:41.000000000 +0200 +++ mysql-5.0.22.sx/sql/mysqld.cc 2006-06-06 19:49:38.000000000 +0200 @@ -6420,6 +6420,11 @@ #else have_csv_db= SHOW_OPTION_NO; #endif +#ifdef HAVE_SPHINX_DB + have_sphinx_db= SHOW_OPTION_YES; +#else + have_sphinx_db= SHOW_OPTION_NO; +#endif #ifdef HAVE_NDBCLUSTER_DB have_ndbcluster=SHOW_OPTION_DISABLED; #else @@ -7457,6 +7462,7 @@ #undef have_example_db #undef have_archive_db #undef have_csv_db +#undef have_sphinx_db #undef 
have_federated_db #undef have_partition_db #undef have_blackhole_db @@ -7467,6 +7473,7 @@ SHOW_COMP_OPTION have_example_db= SHOW_OPTION_NO; SHOW_COMP_OPTION have_archive_db= SHOW_OPTION_NO; SHOW_COMP_OPTION have_csv_db= SHOW_OPTION_NO; +SHOW_COMP_OPTION have_sphinx_db= SHOW_OPTION_NO; SHOW_COMP_OPTION have_federated_db= SHOW_OPTION_NO; SHOW_COMP_OPTION have_partition_db= SHOW_OPTION_NO; SHOW_COMP_OPTION have_blackhole_db= SHOW_OPTION_NO; diff -B -N -r -u mysql-5.0.22/sql/mysql_priv.h mysql-5.0.22.sx/sql/mysql_priv.h --- mysql-5.0.22/sql/mysql_priv.h 2006-05-25 10:56:43.000000000 +0200 +++ mysql-5.0.22.sx/sql/mysql_priv.h 2006-06-06 19:49:38.000000000 +0200 @@ -1279,6 +1279,12 @@ #else extern SHOW_COMP_OPTION have_csv_db; #endif +#ifdef HAVE_SPHINX_DB +extern handlerton sphinx_hton; +#define have_sphinx_db sphinx_hton.state +#else +extern SHOW_COMP_OPTION have_sphinx_db; +#endif #ifdef HAVE_FEDERATED_DB extern handlerton federated_hton; #define have_federated_db federated_hton.state diff -B -N -r -u mysql-5.0.22/sql/set_var.cc mysql-5.0.22.sx/sql/set_var.cc --- mysql-5.0.22/sql/set_var.cc 2006-05-25 10:56:41.000000000 +0200 +++ mysql-5.0.22.sx/sql/set_var.cc 2006-06-06 19:49:38.000000000 +0200 @@ -809,6 +809,7 @@ {"have_compress", (char*) &have_compress, SHOW_HAVE}, {"have_crypt", (char*) &have_crypt, SHOW_HAVE}, {"have_csv", (char*) &have_csv_db, SHOW_HAVE}, + {"have_sphinx", (char*) &have_sphinx_db, SHOW_HAVE}, {"have_example_engine", (char*) &have_example_db, SHOW_HAVE}, {"have_federated_engine", (char*) &have_federated_db, SHOW_HAVE}, {"have_geometry", (char*) &have_geometry, SHOW_HAVE}, diff -B -N -r -u mysql-5.0.22/sql/sql_lex.h mysql-5.0.22.sx/sql/sql_lex.h --- mysql-5.0.22/sql/sql_lex.h 2006-05-25 10:56:41.000000000 +0200 +++ mysql-5.0.22.sx/sql/sql_lex.h 2006-06-06 19:49:38.000000000 +0200 @@ -58,6 +58,7 @@ SQLCOM_SHOW_DATABASES, SQLCOM_SHOW_TABLES, SQLCOM_SHOW_FIELDS, SQLCOM_SHOW_KEYS, SQLCOM_SHOW_VARIABLES, SQLCOM_SHOW_LOGS, SQLCOM_SHOW_STATUS, SQLCOM_SHOW_INNODB_STATUS, SQLCOM_SHOW_NDBCLUSTER_STATUS, SQLCOM_SHOW_MUTEX_STATUS, + SQLCOM_SHOW_SPHINX_STATUS, SQLCOM_SHOW_PROCESSLIST, SQLCOM_SHOW_MASTER_STAT, SQLCOM_SHOW_SLAVE_STAT, SQLCOM_SHOW_GRANTS, SQLCOM_SHOW_CREATE, SQLCOM_SHOW_CHARSETS, SQLCOM_SHOW_COLLATIONS, SQLCOM_SHOW_CREATE_DB, SQLCOM_SHOW_TABLE_STATUS, diff -B -N -r -u mysql-5.0.22/sql/sql_parse.cc mysql-5.0.22.sx/sql/sql_parse.cc --- mysql-5.0.22/sql/sql_parse.cc 2006-05-25 10:56:41.000000000 +0200 +++ mysql-5.0.22.sx/sql/sql_parse.cc 2006-06-06 19:49:38.000000000 +0200 @@ -25,6 +25,9 @@ #ifdef HAVE_INNOBASE_DB #include "ha_innodb.h" #endif +#ifdef HAVE_SPHINX_DB +#include "sphinx/ha_sphinx.h" +#endif #ifdef HAVE_NDBCLUSTER_DB #include "ha_ndbcluster.h" @@ -2722,6 +2725,15 @@ break; } #endif +#ifdef HAVE_SPHINX_DB + case SQLCOM_SHOW_SPHINX_STATUS: + { + if (check_global_access(thd, SUPER_ACL)) + goto error; + res = sphinx_show_status(thd); + break; + } +#endif #ifdef HAVE_REPLICATION case SQLCOM_LOAD_MASTER_TABLE: { diff -B -N -r -u mysql-5.0.22/sql/sql_yacc.yy mysql-5.0.22.sx/sql/sql_yacc.yy --- mysql-5.0.22/sql/sql_yacc.yy 2006-05-25 10:56:43.000000000 +0200 +++ mysql-5.0.22.sx/sql/sql_yacc.yy 2006-06-06 19:49:38.000000000 +0200 @@ -6584,6 +6584,9 @@ case DB_TYPE_INNODB: Lex->sql_command = SQLCOM_SHOW_INNODB_STATUS; break; + case DB_TYPE_SPHINX_DB: + Lex->sql_command = SQLCOM_SHOW_SPHINX_STATUS; + break; default: my_error(ER_NOT_SUPPORTED_YET, MYF(0), "STATUS"); YYABORT; sphinx-2.0.4-release/contrib/0000755000176700017710000000000011724063141015402 5ustar 
deogardeogarsphinx-2.0.4-release/contrib/scripts/0000755000176700017710000000000011724063141017071 5ustar deogardeogarsphinx-2.0.4-release/contrib/scripts/searchd0000644000176700017710000000230510540327474020434 0ustar deogardeogar#!/bin/bash # # Init file for searchd # # chkconfig: 2345 55 25 # # description: searchd # # USE "chkconfig --add searchd" to configure Sphinx searchd service # # by Vladimir Fedorkov Mar 1, 2006, info@astellar.com # public domain SUDO_USER=searchd BASE_PATH=/release/search PID_FILE=$BASE_PATH/searchd.pid CONFIG_FILE=$BASE_PATH/sphinx.conf EXEC_PATH=$BASE_PATH LOG_PATH=$EXEC_PATH RETVAL=0 prog="searchd" do_config() { mkdir -p $EXEC_PATH mkdir $EXEC_PATH/data mkdir -p $LOG_PATH chown -R $SUDO_USER $EXEC_PATH chown -R $SUDO_USER $EXEC_PATH/$CONFIG_FILE chown -R $SUDO_USER $LOG_PATH chmod 600 $EXEC_PATH/$CONFIG_FILE chmod u+rwx $EXEC_PATH/* chmod -R u+rw,go-rwx $EXEC_PATH/data chmod -R u+rw,go-rwx $LOG_PATH } do_start() { echo "Starting $prog" sudo -u $SUDO_USER $EXEC_PATH/$prog --config $CONFIG_FILE RETVAL=$? echo return $RETVAL } do_stop() { echo "Stopping $prog" if [ -e $PID_FILE ] ; then kill -15 `cat $PID_FILE` sleep 5 if [ -e $PID_FILE ] ; then kill -9 `cat $PID_FILE` fi fi RETVAL=$? echo return $RETVAL } case $* in config) do_config ;; start) do_start ;; stop) do_stop ;; *) echo "usage: $0 {start|stop|config}" >&2 exit 1 ;; esac exit $RETVAL sphinx-2.0.4-release/contrib/scripts/cachecleanup.sh0000644000176700017710000000105710540327474022052 0ustar deogardeogar#!/bin/bash # cache directory cleanup script example # # removes all old cached files with mtime older than index mtime # # there MUST be your ACTUAL index names and FULL PATHS to indexfiles indexnames=( test1 test2 ) indexfiles=( /usr/local/sphinx/test1.spd /benchmarks/work/test/test2.spd ) cachedir=/tmp/cache for element in $(seq 0 $((${#indexnames[@]} - 1))) do echo "processing index ${indexnames[$element]}" find "$cachedir/${indexnames[$element]}" \( ! -newer "${indexfiles[$element]}" \) -type f -print0 | xargs -0 -r rm -f done sphinx-2.0.4-release/contrib/README0000644000176700017710000000101610540330163016254 0ustar deogardeogarUser contributions ------------------- This directory contains user-contributed Sphinx stuff - native API ports to different programming languages, useful scripts, not-yet-official patches, etc. All materials within this directory are copyright their respective authors, including (but not limited to): * Perl API is copyright (c) 2006, Len Kranendonk http://www.ilance.nl/sphinx_perl_api * Ruby API is copyright (c) 2006, Dmytro Shteflyuk http://kpumuk.info Many thanks to everyone who contributed! --eof-- sphinx-2.0.4-release/COPYING0000644000176700017710000004312610230545767015015 0ustar deogardeogar GNU GENERAL PUBLIC LICENSE Version 2, June 1991 Copyright (C) 1989, 1991 Free Software Foundation, Inc. 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. Preamble The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users. This General Public License applies to most of the Free Software Foundation's software and to any other program whose authors commit to using it. 
(Some other Free Software Foundation software is covered by the GNU Library General Public License instead.) You can apply it to your programs, too. When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs; and that you know you can do these things. To protect your rights, we need to make restrictions that forbid anyone to deny you these rights or to ask you to surrender the rights. These restrictions translate to certain responsibilities for you if you distribute copies of the software, or if you modify it. For example, if you distribute copies of such a program, whether gratis or for a fee, you must give the recipients all the rights that you have. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. We protect your rights with two steps: (1) copyright the software, and (2) offer you this license which gives you legal permission to copy, distribute and/or modify the software. Also, for each author's protection and ours, we want to make certain that everyone understands that there is no warranty for this free software. If the software is modified by someone else and passed on, we want its recipients to know that what they have is not the original, so that any problems introduced by others will not reflect on the original authors' reputations. Finally, any free program is threatened constantly by software patents. We wish to avoid the danger that redistributors of a free program will individually obtain patent licenses, in effect making the program proprietary. To prevent this, we have made it clear that any patent must be licensed for everyone's free use or not licensed at all. The precise terms and conditions for copying, distribution and modification follow. GNU GENERAL PUBLIC LICENSE TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 0. This License applies to any program or other work which contains a notice placed by the copyright holder saying it may be distributed under the terms of this General Public License. The "Program", below, refers to any such program or work, and a "work based on the Program" means either the Program or any derivative work under copyright law: that is to say, a work containing the Program or a portion of it, either verbatim or with modifications and/or translated into another language. (Hereinafter, translation is included without limitation in the term "modification".) Each licensee is addressed as "you". Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running the Program is not restricted, and the output from the Program is covered only if its contents constitute a work based on the Program (independent of having been made by running the Program). Whether that is true depends on what the Program does. 1. 
You may copy and distribute verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and give any other recipients of the Program a copy of this License along with the Program. You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee. 2. You may modify your copy or copies of the Program or any portion of it, thus forming a work based on the Program, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions: a) You must cause the modified files to carry prominent notices stating that you changed the files and the date of any change. b) You must cause any work that you distribute or publish, that in whole or in part contains or is derived from the Program or any part thereof, to be licensed as a whole at no charge to all third parties under the terms of this License. c) If the modified program normally reads commands interactively when run, you must cause it, when started running for such interactive use in the most ordinary way, to print or display an announcement including an appropriate copyright notice and a notice that there is no warranty (or else, saying that you provide a warranty) and that users may redistribute the program under these conditions, and telling the user how to view a copy of this License. (Exception: if the Program itself is interactive but does not normally print such an announcement, your work based on the Program is not required to print an announcement.) These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Program, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Program, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it. Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Program. In addition, mere aggregation of another work not based on the Program with the Program (or with a work based on the Program) on a volume of a storage or distribution medium does not bring the other work under the scope of this License. 3. 
You may copy and distribute the Program (or a work based on it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you also do one of the following: a) Accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or, b) Accompany it with a written offer, valid for at least three years, to give any third party, for a charge no more than your cost of physically performing source distribution, a complete machine-readable copy of the corresponding source code, to be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or, c) Accompany it with the information you received as to the offer to distribute corresponding source code. (This alternative is allowed only for noncommercial distribution and only if you received the program in object code or executable form with such an offer, in accord with Subsection b above.) The source code for a work means the preferred form of the work for making modifications to it. For an executable work, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the executable. However, as a special exception, the source code distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable. If distribution of executable or object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place counts as distribution of the source code, even though third parties are not compelled to copy the source along with the object code. 4. You may not copy, modify, sublicense, or distribute the Program except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense or distribute the Program is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance. 5. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Program or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Program (or any work based on the Program), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Program or works based on it. 6. Each time you redistribute the Program (or any work based on the Program), the recipient automatically receives a license from the original licensor to copy, distribute or modify the Program subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties to this License. 7. 
If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Program at all. For example, if a patent license would not permit royalty-free redistribution of the Program by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Program. If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply and the section as a whole is intended to apply in other circumstances. It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system, which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice. This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License. 8. If the distribution and/or use of the Program is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Program under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License. 9. The Free Software Foundation may publish revised and/or new versions of the General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of this License, you may choose any version ever published by the Free Software Foundation. 10. If you wish to incorporate parts of the Program into other free programs whose distribution conditions are different, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally. NO WARRANTY 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. 
EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. END OF TERMS AND CONDITIONS How to Apply These Terms to Your New Programs If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. Copyright (C) 19yy This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA Also add information on how to contact you by electronic and paper mail. If the program is interactive, make it output a short notice like this when it starts in an interactive mode: Gnomovision version 69, Copyright (C) 19yy name of author Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. This is free software, and you are welcome to redistribute it under certain conditions; type `show c' for details. The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, the commands you use may be called something other than `show w' and `show c'; they could even be mouse-clicks or menu items--whatever suits your program. You should also get your employer (if you work as a programmer) or your school, if any, to sign a "copyright disclaimer" for the program, if necessary. Here is a sample; alter the names: Yoyodyne, Inc., hereby disclaims all copyright interest in the program `Gnomovision' (which makes passes at compilers) written by James Hacker. , 1 April 1989 Ty Coon, President of Vice This General Public License does not permit incorporating your program into proprietary programs. 
If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Library General Public License instead of this License. sphinx-2.0.4-release/sphinx.conf.in0000644000176700017710000006217311553046454016550 0ustar deogardeogar# # Sphinx configuration file sample # # WARNING! While this sample file mentions all available options, # it contains (very) short helper descriptions only. Please refer to # doc/sphinx.html for details. # ############################################################################# ## data source definition ############################################################################# source src1 { # data source type. mandatory, no default value # known types are mysql, pgsql, mssql, xmlpipe, xmlpipe2, odbc type = mysql ##################################################################### ## SQL settings (for 'mysql' and 'pgsql' types) ##################################################################### # some straightforward parameters for SQL source types sql_host = localhost sql_user = test sql_pass = sql_db = test sql_port = 3306 # optional, default is 3306 # UNIX socket name # optional, default is empty (reuse client library defaults) # usually '/var/lib/mysql/mysql.sock' on Linux # usually '/tmp/mysql.sock' on FreeBSD # # sql_sock = /tmp/mysql.sock # MySQL specific client connection flags # optional, default is 0 # # mysql_connect_flags = 32 # enable compression # MySQL specific SSL certificate settings # optional, defaults are empty # # mysql_ssl_cert = /etc/ssl/client-cert.pem # mysql_ssl_key = /etc/ssl/client-key.pem # mysql_ssl_ca = /etc/ssl/cacert.pem # MS SQL specific Windows authentication mode flag # MUST be in sync with charset_type index-level setting # optional, default is 0 # # mssql_winauth = 1 # use currently logged on user credentials # MS SQL specific Unicode indexing flag # optional, default is 0 (request SBCS data) # # mssql_unicode = 1 # request Unicode data from server # ODBC specific DSN (data source name) # mandatory for odbc source type, no default value # # odbc_dsn = DBQ=C:\data;DefaultDir=C:\data;Driver={Microsoft Text Driver (*.txt; *.csv)}; # sql_query = SELECT id, data FROM documents.csv # ODBC and MS SQL specific, per-column buffer sizes # optional, default is auto-detect # # sql_column_buffers = content=12M, comments=1M # pre-query, executed before the main fetch query # multi-value, optional, default is empty list of queries # # sql_query_pre = SET NAMES utf8 # sql_query_pre = SET SESSION query_cache_type=OFF # main document fetch query # mandatory, integer document ID field MUST be the first selected column sql_query = \ SELECT id, group_id, UNIX_TIMESTAMP(date_added) AS date_added, title, content \ FROM documents # joined/payload field fetch query # joined fields let you avoid (slow) JOIN and GROUP_CONCAT # payload fields let you attach custom per-keyword values (eg. for ranking) # # syntax is FIELD-NAME 'from' ( 'query' | 'payload-query' ); QUERY # joined field QUERY should return 2 columns (docid, text) # payload field QUERY should return 3 columns (docid, keyword, weight) # # REQUIRES that query results are in ascending document ID order! 
# multi-value, optional, default is empty list of queries # # sql_joined_field = tags from query; SELECT docid, CONCAT('tag',tagid) FROM tags ORDER BY docid ASC # sql_joined_field = wtags from payload-query; SELECT docid, tag, tagweight FROM tags ORDER BY docid ASC # file based field declaration # # content of this field is treated as a file name # and the file gets loaded and indexed in place of a field # # max file size is limited by max_file_field_buffer indexer setting # file IO errors are non-fatal and get reported as warnings # # sql_file_field = content_file_path # range query setup, query that must return min and max ID values # optional, default is empty # # sql_query will need to reference $start and $end boundaries # if using ranged query: # # sql_query = \ # SELECT doc.id, doc.id AS group, doc.title, doc.data \ # FROM documents doc \ # WHERE id>=$start AND id<=$end # # sql_query_range = SELECT MIN(id),MAX(id) FROM documents # range query step # optional, default is 1024 # # sql_range_step = 1000 # unsigned integer attribute declaration # multi-value (an arbitrary number of attributes is allowed), optional # optional bit size can be specified, default is 32 # # sql_attr_uint = author_id # sql_attr_uint = forum_id:9 # 9 bits for forum_id sql_attr_uint = group_id # boolean attribute declaration # multi-value (an arbitrary number of attributes is allowed), optional # equivalent to sql_attr_uint with 1-bit size # # sql_attr_bool = is_deleted # bigint attribute declaration # multi-value (an arbitrary number of attributes is allowed), optional # declares a signed (unlike uint!) 64-bit attribute # # sql_attr_bigint = my_bigint_id # UNIX timestamp attribute declaration # multi-value (an arbitrary number of attributes is allowed), optional # similar to integer, but can also be used in date functions # # sql_attr_timestamp = posted_ts # sql_attr_timestamp = last_edited_ts sql_attr_timestamp = date_added # string ordinal attribute declaration # multi-value (an arbitrary number of attributes is allowed), optional # sorts strings (bytewise), and stores their indexes in the sorted list # sorting by this attr is equivalent to sorting by the original strings # # sql_attr_str2ordinal = author_name # floating point attribute declaration # multi-value (an arbitrary number of attributes is allowed), optional # values are stored in single precision, 32-bit IEEE 754 format # # sql_attr_float = lat_radians # sql_attr_float = long_radians # multi-valued attribute (MVA) attribute declaration # multi-value (an arbitrary number of attributes is allowed), optional # MVA values are variable length lists of unsigned 32-bit integers # # syntax is ATTR-TYPE ATTR-NAME 'from' SOURCE-TYPE [;QUERY] [;RANGE-QUERY] # ATTR-TYPE is 'uint' or 'timestamp' # SOURCE-TYPE is 'field', 'query', or 'ranged-query' # QUERY is SQL query used to fetch all ( docid, attrvalue ) pairs # RANGE-QUERY is SQL query used to fetch min and max ID values, similar to 'sql_query_range' # # sql_attr_multi = uint tag from query; SELECT docid, tagid FROM tags # sql_attr_multi = uint tag from ranged-query; \ # SELECT docid, tagid FROM tags WHERE id>=$start AND id<=$end; \ # SELECT MIN(docid), MAX(docid) FROM tags # string attribute declaration # multi-value (an arbitrary number of these is allowed), optional # lets you store and retrieve strings # # sql_attr_string = stitle # wordcount attribute declaration # multi-value (an arbitrary number of these is allowed), optional # lets you count the words at indexing time # # sql_attr_str2wordcount = 
stitle # combined field plus attribute declaration (from a single column) # stores column as an attribute, but also indexes it as a full-text field # # sql_field_string = author # sql_field_str2wordcount = title # post-query, executed on sql_query completion # optional, default is empty # # sql_query_post = # post-index-query, executed on successful indexing completion # optional, default is empty # $maxid expands to max document ID actually fetched from DB # # sql_query_post_index = REPLACE INTO counters ( id, val ) \ # VALUES ( 'max_indexed_id', $maxid ) # ranged query throttling, in milliseconds # optional, default is 0 which means no delay # enforces given delay before each query step sql_ranged_throttle = 0 # document info query, ONLY for CLI search (ie. testing and debugging) # optional, default is empty # must contain $id macro and must fetch the document by that id sql_query_info = SELECT * FROM documents WHERE id=$id # kill-list query, fetches the document IDs for kill-list # k-list will suppress matches from preceding indexes in the same query # optional, default is empty # # sql_query_killlist = SELECT id FROM documents WHERE edited>=@last_reindex # columns to unpack on indexer side when indexing # multi-value, optional, default is empty list # # unpack_zlib = zlib_column # unpack_mysqlcompress = compressed_column # unpack_mysqlcompress = compressed_column_2 # maximum unpacked length allowed in MySQL COMPRESS() unpacker # optional, default is 16M # # unpack_mysqlcompress_maxsize = 16M ##################################################################### ## xmlpipe2 settings ##################################################################### # type = xmlpipe # shell command to invoke xmlpipe stream producer # mandatory # # xmlpipe_command = cat @CONFDIR@/test.xml # xmlpipe2 field declaration # multi-value, optional, default is empty # # xmlpipe_field = subject # xmlpipe_field = content # xmlpipe2 attribute declaration # multi-value, optional, default is empty # all xmlpipe_attr_XXX options are fully similar to sql_attr_XXX # # xmlpipe_attr_timestamp = published # xmlpipe_attr_uint = author_id # perform UTF-8 validation, and filter out incorrect codes # avoids XML parser choking on non-UTF-8 documents # optional, default is 0 # # xmlpipe_fixup_utf8 = 1 } # inherited source example # # all the parameters are copied from the parent source, # and may then be overridden in this source definition source src1throttled : src1 { sql_ranged_throttle = 100 } ############################################################################# ## index definition ############################################################################# # local index example # # this is an index which is stored locally in the filesystem # # all indexing-time options (such as morphology and charsets) # are configured per local index index test1 { # index type # optional, default is 'plain' # known values are 'plain', 'distributed', and 'rt' (see samples below) # type = plain # document source(s) to index # multi-value, mandatory # document IDs must be globally unique across all sources source = src1 # index files path and file name, without extension # mandatory, path must be writable, extensions will be auto-appended path = @CONFDIR@/data/test1 # document attribute values (docinfo) storage mode # optional, default is 'extern' # known values are 'none', 'extern' and 'inline' docinfo = extern # memory locking for cached data (.spa and .spi), to prevent swapping # optional, default is 0 (do not mlock) # requires 
searchd to be run from root mlock = 0 # a list of morphology preprocessors to apply # optional, default is empty # # builtin preprocessors are 'none', 'stem_en', 'stem_ru', 'stem_enru', # 'soundex', and 'metaphone'; additional preprocessors available from # libstemmer are 'libstemmer_XXX', where XXX is algorithm code # (see libstemmer_c/libstemmer/modules.txt) # # morphology = stem_en, stem_ru, soundex # morphology = libstemmer_german # morphology = libstemmer_sv morphology = none # minimum word length at which to enable stemming # optional, default is 1 (stem everything) # # min_stemming_len = 1 # stopword files list (space separated) # optional, default is empty # contents are plain text, charset_table and stemming are both applied # # stopwords = @CONFDIR@/data/stopwords.txt # wordforms file, in "mapfrom > mapto" plain text format # optional, default is empty # # wordforms = @CONFDIR@/data/wordforms.txt # tokenizing exceptions file # optional, default is empty # # plain text, case sensitive, space insensitive in map-from part # one "Map Several Words => ToASingleOne" entry per line # # exceptions = @CONFDIR@/data/exceptions.txt # minimum indexed word length # default is 1 (index everything) min_word_len = 1 # charset encoding type # optional, default is 'sbcs' # known types are 'sbcs' (Single Byte CharSet) and 'utf-8' charset_type = sbcs # charset definition and case folding rules "table" # optional, default value depends on charset_type # # defaults are configured to include English and Russian characters only # you need to change the table to include additional ones # this behavior MAY change in future versions # # 'sbcs' default value is # charset_table = 0..9, A..Z->a..z, _, a..z, U+A8->U+B8, U+B8, U+C0..U+DF->U+E0..U+FF, U+E0..U+FF # # 'utf-8' default value is # charset_table = 0..9, A..Z->a..z, _, a..z, U+410..U+42F->U+430..U+44F, U+430..U+44F # ignored characters list # optional, default value is empty # # ignore_chars = U+00AD # minimum word prefix length to index # optional, default is 0 (do not index prefixes) # # min_prefix_len = 0 # minimum word infix length to index # optional, default is 0 (do not index infixes) # # min_infix_len = 0 # list of fields to limit prefix/infix indexing to # optional, default value is empty (index all fields in prefix/infix mode) # # prefix_fields = filename # infix_fields = url, domain # enable star-syntax (wildcards) when searching prefix/infix indexes # search-time only, does not affect indexing, can be 0 or 1 # optional, default is 0 (do not use wildcard syntax) # # enable_star = 1 # expand keywords with exact forms and/or stars when searching fit indexes # search-time only, does not affect indexing, can be 0 or 1 # optional, default is 0 (do not expand keywords) # # expand_keywords = 1 # n-gram length to index, for CJK indexing # only supports 0 and 1 for now, other lengths to be implemented # optional, default is 0 (disable n-grams) # # ngram_len = 1 # n-gram characters list, for CJK indexing # optional, default is empty # # ngram_chars = U+3000..U+2FA1F # phrase boundary characters list # optional, default is empty # # phrase_boundary = ., ?, !, U+2026 # horizontal ellipsis # phrase boundary word position increment # optional, default is 0 # # phrase_boundary_step = 100 # blended characters list # blended chars are indexed both as separators and valid characters # for instance, AT&T will results in 3 tokens ("at", "t", and "at&t") # optional, default is empty # # blend_chars = +, &, U+23 # blended token indexing mode # a comma separated 
list of blended token indexing variants # known variants are trim_none, trim_head, trim_tail, trim_both, skip_pure # optional, default is trim_none # # blend_mode = trim_tail, skip_pure # whether to strip HTML tags from incoming documents # known values are 0 (do not strip) and 1 (do strip) # optional, default is 0 html_strip = 0 # what HTML attributes to index if stripping HTML # optional, default is empty (do not index anything) # # html_index_attrs = img=alt,title; a=title; # what HTML elements contents to strip # optional, default is empty (do not strip element contents) # # html_remove_elements = style, script # whether to preopen index data files on startup # optional, default is 0 (do not preopen), searchd-only # # preopen = 1 # whether to keep dictionary (.spi) on disk, or cache it in RAM # optional, default is 0 (cache in RAM), searchd-only # # ondisk_dict = 1 # whether to enable in-place inversion (2x less disk, 90-95% speed) # optional, default is 0 (use separate temporary files), indexer-only # # inplace_enable = 1 # in-place fine-tuning options # optional, defaults are listed below # # inplace_hit_gap = 0 # preallocated hitlist gap size # inplace_docinfo_gap = 0 # preallocated docinfo gap size # inplace_reloc_factor = 0.1 # relocation buffer size within arena # inplace_write_factor = 0.1 # write buffer size within arena # whether to index original keywords along with stemmed versions # enables "=exactform" operator to work # optional, default is 0 # # index_exact_words = 1 # position increment on overshort (less that min_word_len) words # optional, allowed values are 0 and 1, default is 1 # # overshort_step = 1 # position increment on stopword # optional, allowed values are 0 and 1, default is 1 # # stopword_step = 1 # hitless words list # positions for these keywords will not be stored in the index # optional, allowed values are 'all', or a list file name # # hitless_words = all # hitless_words = hitless.txt # detect and index sentence and paragraph boundaries # required for the SENTENCE and PARAGRAPH operators to work # optional, allowed values are 0 and 1, default is 0 # # index_sp = 1 # index zones, delimited by HTML/XML tags # a comma separated list of tags and wildcards # required for the ZONE operator to work # optional, default is empty string (do not index zones) # # index_zones = title, h*, th } # inherited index example # # all the parameters are copied from the parent index, # and may then be overridden in this index definition index test1stemmed : test1 { path = @CONFDIR@/data/test1stemmed morphology = stem_en } # distributed index example # # this is a virtual index which can NOT be directly indexed, # and only contains references to other local and/or remote indexes index dist1 { # 'distributed' index type MUST be specified type = distributed # local index to be searched # there can be many local indexes configured local = test1 local = test1stemmed # remote agent # multiple remote agents may be specified # syntax for TCP connections is 'hostname:port:index1,[index2[,...]]' # syntax for local UNIX connections is '/path/to/socket:index1,[index2[,...]]' agent = localhost:9313:remote1 agent = localhost:9314:remote2,remote3 # agent = /var/run/searchd.sock:remote4 # blackhole remote agent, for debugging/testing # network errors and search results will be ignored # # agent_blackhole = testbox:9312:testindex1,testindex2 # remote agent connection timeout, milliseconds # optional, default is 1000 ms, ie. 
1 sec agent_connect_timeout = 1000 # remote agent query timeout, milliseconds # optional, default is 3000 ms, ie. 3 sec agent_query_timeout = 3000 } # realtime index example # # you can run INSERT, REPLACE, and DELETE on this index on the fly # using MySQL protocol (see 'listen' directive below) index rt { # 'rt' index type must be specified to use RT index type = rt # index files path and file name, without extension # mandatory, path must be writable, extensions will be auto-appended path = @CONFDIR@/data/rt # RAM chunk size limit # RT index will keep at most this much data in RAM, then flush to disk # optional, default is 32M # # rt_mem_limit = 512M # full-text field declaration # multi-value, mandatory rt_field = title rt_field = content # unsigned integer attribute declaration # multi-value (an arbitrary number of attributes is allowed), optional # declares an unsigned 32-bit attribute rt_attr_uint = gid # RT indexes currently support the following attribute types: # uint, bigint, float, timestamp, string # # rt_attr_bigint = guid # rt_attr_float = gpa # rt_attr_timestamp = ts_added # rt_attr_string = author } ############################################################################# ## indexer settings ############################################################################# indexer { # memory limit, in bytes, kiloytes (16384K) or megabytes (256M) # optional, default is 32M, max is 2047M, recommended is 256M to 1024M mem_limit = 32M # maximum IO calls per second (for I/O throttling) # optional, default is 0 (unlimited) # # max_iops = 40 # maximum IO call size, bytes (for I/O throttling) # optional, default is 0 (unlimited) # # max_iosize = 1048576 # maximum xmlpipe2 field length, bytes # optional, default is 2M # # max_xmlpipe2_field = 4M # write buffer size, bytes # several (currently up to 4) buffers will be allocated # write buffers are allocated in addition to mem_limit # optional, default is 1M # # write_buffer = 1M # maximum file field adaptive buffer size # optional, default is 8M, minimum is 1M # # max_file_field_buffer = 32M } ############################################################################# ## searchd settings ############################################################################# searchd { # [hostname:]port[:protocol], or /unix/socket/path to listen on # known protocols are 'sphinx' (SphinxAPI) and 'mysql41' (SphinxQL) # # multi-value, multiple listen points are allowed # optional, defaults are 9312:sphinx and 9306:mysql41, as below # # listen = 127.0.0.1 # listen = 192.168.0.1:9312 # listen = 9312 # listen = /var/run/searchd.sock listen = 9312 listen = 9306:mysql41 # log file, searchd run info is logged here # optional, default is 'searchd.log' log = @CONFDIR@/log/searchd.log # query log file, all search queries are logged here # optional, default is empty (do not log queries) query_log = @CONFDIR@/log/query.log # client read timeout, seconds # optional, default is 5 read_timeout = 5 # request timeout, seconds # optional, default is 5 minutes client_timeout = 300 # maximum amount of children to fork (concurrent searches to run) # optional, default is 0 (unlimited) max_children = 30 # PID file, searchd process ID file name # mandatory pid_file = @CONFDIR@/log/searchd.pid # max amount of matches the daemon ever keeps in RAM, per-index # WARNING, THERE'S ALSO PER-QUERY LIMIT, SEE SetLimits() API CALL # default is 1000 (just like Google) max_matches = 1000 # seamless rotate, prevents rotate stalls if precaching huge datasets # optional, default is 1 
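# # a minimal rotation sketch (the config path and the 'test1' index name below are illustrative, adjust them to your setup): # rebuild an index with the --rotate switch while searchd is running, e.g. # indexer --config /usr/local/sphinx/etc/sphinx.conf --rotate test1 # indexer then signals the running searchd, which swaps in the newly built index files; # with seamless_rotate enabled the swap happens without stalling incoming queries #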
seamless_rotate = 1 # whether to forcibly preopen all indexes on startup # optional, default is 1 (preopen everything) preopen_indexes = 1 # whether to unlink .old index copies on successful rotation. # optional, default is 1 (do unlink) unlink_old = 1 # attribute updates periodic flush timeout, seconds # updates will be automatically dumped to disk this frequently # optional, default is 0 (disable periodic flush) # # attr_flush_period = 900 # instance-wide ondisk_dict defaults (per-index value takes precedence) # optional, default is 0 (precache all dictionaries in RAM) # # ondisk_dict_default = 1 # MVA updates pool size # shared between all instances of searchd, disables attr flushes! # optional, default size is 1M mva_updates_pool = 1M # max allowed network packet size # limits both query packets from clients, and responses from agents # optional, default size is 8M max_packet_size = 8M # crash log path # searchd will (try to) log crashed query to 'crash_log_path.PID' file # optional, default is empty (do not create crash logs) # # crash_log_path = @CONFDIR@/log/crash # max allowed per-query filter count # optional, default is 256 max_filters = 256 # max allowed per-filter values count # optional, default is 4096 max_filter_values = 4096 # socket listen queue length # optional, default is 5 # # listen_backlog = 5 # per-keyword read buffer size # optional, default is 256K # # read_buffer = 256K # unhinted read size (currently used when reading hits) # optional, default is 32K # # read_unhinted = 32K # max allowed per-batch query count (aka multi-query count) # optional, default is 32 max_batch_queries = 32 # max common subtree document cache size, per-query # optional, default is 0 (disable subtree optimization) # # subtree_docs_cache = 4M # max common subtree hit cache size, per-query # optional, default is 0 (disable subtree optimization) # # subtree_hits_cache = 8M # multi-processing mode (MPM) # known values are none, fork, prefork, and threads # optional, default is fork # workers = threads # for RT to work # max threads to create for searching local parts of a distributed index # optional, default is 0, which means disable multi-threaded searching # should work with all MPMs (ie.
does NOT require workers=threads) # # dist_threads = 4 # binlog files path; use empty string to disable binlog # optional, default is build-time configured data directory # # binlog_path = # disable logging # binlog_path = @CONFDIR@/data # binlog.001 etc will be created there # binlog flush/sync mode # 0 means flush and sync every second # 1 means flush and sync every transaction # 2 means flush every transaction, sync every second # optional, default is 2 # # binlog_flush = 2 # binlog per-file size limit # optional, default is 128M, 0 means no limit # # binlog_max_log_size = 256M # per-thread stack size, only affects workers=threads mode # optional, default is 64K # # thread_stack = 128K # per-keyword expansion limit (for dict=keywords prefix searches) # optional, default is 0 (no limit) # # expansion_limit = 1000 # RT RAM chunks flush period # optional, default is 0 (no periodic flush) # # rt_flush_period = 900 # query log file format # optional, known values are plain and sphinxql, default is plain # # query_log_format = sphinxql # version string returned to MySQL network protocol clients # optional, default is empty (use Sphinx version) # # mysql_version_string = 5.0.37 # trusted plugin directory # optional, default is empty (disable UDFs) # # plugin_dir = /usr/local/sphinx/lib # default server-wide collation # optional, default is libc_ci # # collation_server = utf8_general_ci # server-wide locale for libc based collations # optional, default is C # # collation_libc_locale = ru_RU.UTF-8 # threaded server watchdog (only used in workers=threads mode) # optional, values are 0 and 1, default is 1 (watchdog on) # # watchdog = 1 # SphinxQL compatibility mode (legacy columns and their names) # optional, default is 0 (SQL compliant syntax and result sets) # # compat_sphinxql_magics = 1 } # --eof-- sphinx-2.0.4-release/sphinx08.sln0000644000176700017710000001652211566163456016165 0ustar deogardeogarMicrosoft Visual Studio Solution File, Format Version 10.00 # Visual Studio 2008 Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "indexer", "win\indexer08.vcproj", "{405619C7-CC22-4FB8-9237-B196CB897355}" ProjectSection(ProjectDependencies) = postProject {6A1685DE-0265-4243-965F-96CB53EBBCA6} = {6A1685DE-0265-4243-965F-96CB53EBBCA6} EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "libsphinx", "win\libsphinx08.vcproj", "{6A1685DE-0265-4243-965F-96CB53EBBCA6}" EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "search", "win\search08.vcproj", "{571DC41A-2665-476C-ABED-3899324E19AB}" ProjectSection(ProjectDependencies) = postProject {6A1685DE-0265-4243-965F-96CB53EBBCA6} = {6A1685DE-0265-4243-965F-96CB53EBBCA6} EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "searchd", "win\searchd08.vcproj", "{0BBD34CB-5891-477F-B665-3D7C9FC22A02}" ProjectSection(ProjectDependencies) = postProject {6A1685DE-0265-4243-965F-96CB53EBBCA6} = {6A1685DE-0265-4243-965F-96CB53EBBCA6} EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "spelldump", "win\spelldump08.vcproj", "{AE5236EB-62AC-4AD9-81A3-F4BDC85D3876}" ProjectSection(ProjectDependencies) = postProject {6A1685DE-0265-4243-965F-96CB53EBBCA6} = {6A1685DE-0265-4243-965F-96CB53EBBCA6} EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "indextool", "win\indextool08.vcproj", "{6A78A67D-A743-4594-858A-A4F1C536A8C1}" ProjectSection(ProjectDependencies) = postProject {6A1685DE-0265-4243-965F-96CB53EBBCA6} = 
{6A1685DE-0265-4243-965F-96CB53EBBCA6} EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "testrt", "win\testrt08.vcproj", "{651CEFE5-9BDB-4003-9D35-8D05FA2C06D7}" ProjectSection(ProjectDependencies) = postProject {6A1685DE-0265-4243-965F-96CB53EBBCA6} = {6A1685DE-0265-4243-965F-96CB53EBBCA6} EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "tests", "win\tests08.vcproj", "{B47166A1-4827-4D80-97E3-743BDE61146F}" ProjectSection(ProjectDependencies) = postProject {6A1685DE-0265-4243-965F-96CB53EBBCA6} = {6A1685DE-0265-4243-965F-96CB53EBBCA6} EndProjectSection EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Win32 = Debug|Win32 Debug|x64 = Debug|x64 Release|Win32 = Release|Win32 Release|x64 = Release|x64 EndGlobalSection GlobalSection(ProjectConfigurationPlatforms) = postSolution {405619C7-CC22-4FB8-9237-B196CB897355}.Debug|Win32.ActiveCfg = Debug|Win32 {405619C7-CC22-4FB8-9237-B196CB897355}.Debug|Win32.Build.0 = Debug|Win32 {405619C7-CC22-4FB8-9237-B196CB897355}.Debug|x64.ActiveCfg = Debug|x64 {405619C7-CC22-4FB8-9237-B196CB897355}.Debug|x64.Build.0 = Debug|x64 {405619C7-CC22-4FB8-9237-B196CB897355}.Release|Win32.ActiveCfg = Release|Win32 {405619C7-CC22-4FB8-9237-B196CB897355}.Release|Win32.Build.0 = Release|Win32 {405619C7-CC22-4FB8-9237-B196CB897355}.Release|x64.ActiveCfg = Release|x64 {405619C7-CC22-4FB8-9237-B196CB897355}.Release|x64.Build.0 = Release|x64 {6A1685DE-0265-4243-965F-96CB53EBBCA6}.Debug|Win32.ActiveCfg = Debug|Win32 {6A1685DE-0265-4243-965F-96CB53EBBCA6}.Debug|Win32.Build.0 = Debug|Win32 {6A1685DE-0265-4243-965F-96CB53EBBCA6}.Debug|x64.ActiveCfg = Debug|x64 {6A1685DE-0265-4243-965F-96CB53EBBCA6}.Debug|x64.Build.0 = Debug|x64 {6A1685DE-0265-4243-965F-96CB53EBBCA6}.Release|Win32.ActiveCfg = Release|Win32 {6A1685DE-0265-4243-965F-96CB53EBBCA6}.Release|Win32.Build.0 = Release|Win32 {6A1685DE-0265-4243-965F-96CB53EBBCA6}.Release|x64.ActiveCfg = Release|x64 {6A1685DE-0265-4243-965F-96CB53EBBCA6}.Release|x64.Build.0 = Release|x64 {571DC41A-2665-476C-ABED-3899324E19AB}.Debug|Win32.ActiveCfg = Debug|Win32 {571DC41A-2665-476C-ABED-3899324E19AB}.Debug|Win32.Build.0 = Debug|Win32 {571DC41A-2665-476C-ABED-3899324E19AB}.Debug|x64.ActiveCfg = Debug|x64 {571DC41A-2665-476C-ABED-3899324E19AB}.Debug|x64.Build.0 = Debug|x64 {571DC41A-2665-476C-ABED-3899324E19AB}.Release|Win32.ActiveCfg = Release|Win32 {571DC41A-2665-476C-ABED-3899324E19AB}.Release|Win32.Build.0 = Release|Win32 {571DC41A-2665-476C-ABED-3899324E19AB}.Release|x64.ActiveCfg = Release|x64 {571DC41A-2665-476C-ABED-3899324E19AB}.Release|x64.Build.0 = Release|x64 {0BBD34CB-5891-477F-B665-3D7C9FC22A02}.Debug|Win32.ActiveCfg = Debug|Win32 {0BBD34CB-5891-477F-B665-3D7C9FC22A02}.Debug|Win32.Build.0 = Debug|Win32 {0BBD34CB-5891-477F-B665-3D7C9FC22A02}.Debug|x64.ActiveCfg = Debug|x64 {0BBD34CB-5891-477F-B665-3D7C9FC22A02}.Debug|x64.Build.0 = Debug|x64 {0BBD34CB-5891-477F-B665-3D7C9FC22A02}.Release|Win32.ActiveCfg = Release|Win32 {0BBD34CB-5891-477F-B665-3D7C9FC22A02}.Release|Win32.Build.0 = Release|Win32 {0BBD34CB-5891-477F-B665-3D7C9FC22A02}.Release|x64.ActiveCfg = Release|x64 {0BBD34CB-5891-477F-B665-3D7C9FC22A02}.Release|x64.Build.0 = Release|x64 {AE5236EB-62AC-4AD9-81A3-F4BDC85D3876}.Debug|Win32.ActiveCfg = Debug|Win32 {AE5236EB-62AC-4AD9-81A3-F4BDC85D3876}.Debug|Win32.Build.0 = Debug|Win32 {AE5236EB-62AC-4AD9-81A3-F4BDC85D3876}.Debug|x64.ActiveCfg = Debug|x64 {AE5236EB-62AC-4AD9-81A3-F4BDC85D3876}.Debug|x64.Build.0 = Debug|x64 
{AE5236EB-62AC-4AD9-81A3-F4BDC85D3876}.Release|Win32.ActiveCfg = Release|Win32 {AE5236EB-62AC-4AD9-81A3-F4BDC85D3876}.Release|Win32.Build.0 = Release|Win32 {AE5236EB-62AC-4AD9-81A3-F4BDC85D3876}.Release|x64.ActiveCfg = Release|x64 {AE5236EB-62AC-4AD9-81A3-F4BDC85D3876}.Release|x64.Build.0 = Release|x64 {6A78A67D-A743-4594-858A-A4F1C536A8C1}.Debug|Win32.ActiveCfg = Debug|Win32 {6A78A67D-A743-4594-858A-A4F1C536A8C1}.Debug|Win32.Build.0 = Debug|Win32 {6A78A67D-A743-4594-858A-A4F1C536A8C1}.Debug|x64.ActiveCfg = Debug|x64 {6A78A67D-A743-4594-858A-A4F1C536A8C1}.Debug|x64.Build.0 = Debug|x64 {6A78A67D-A743-4594-858A-A4F1C536A8C1}.Release|Win32.ActiveCfg = Release|Win32 {6A78A67D-A743-4594-858A-A4F1C536A8C1}.Release|Win32.Build.0 = Release|Win32 {6A78A67D-A743-4594-858A-A4F1C536A8C1}.Release|x64.ActiveCfg = Release|x64 {6A78A67D-A743-4594-858A-A4F1C536A8C1}.Release|x64.Build.0 = Release|x64 {651CEFE5-9BDB-4003-9D35-8D05FA2C06D7}.Debug|Win32.ActiveCfg = Debug|Win32 {651CEFE5-9BDB-4003-9D35-8D05FA2C06D7}.Debug|Win32.Build.0 = Debug|Win32 {651CEFE5-9BDB-4003-9D35-8D05FA2C06D7}.Debug|x64.ActiveCfg = Debug|x64 {651CEFE5-9BDB-4003-9D35-8D05FA2C06D7}.Debug|x64.Build.0 = Debug|x64 {651CEFE5-9BDB-4003-9D35-8D05FA2C06D7}.Release|Win32.ActiveCfg = Release|Win32 {651CEFE5-9BDB-4003-9D35-8D05FA2C06D7}.Release|Win32.Build.0 = Release|Win32 {651CEFE5-9BDB-4003-9D35-8D05FA2C06D7}.Release|x64.ActiveCfg = Release|x64 {651CEFE5-9BDB-4003-9D35-8D05FA2C06D7}.Release|x64.Build.0 = Release|x64 {B47166A1-4827-4D80-97E3-743BDE61146F}.Debug|Win32.ActiveCfg = Debug|Win32 {B47166A1-4827-4D80-97E3-743BDE61146F}.Debug|Win32.Build.0 = Debug|Win32 {B47166A1-4827-4D80-97E3-743BDE61146F}.Debug|x64.ActiveCfg = Debug|x64 {B47166A1-4827-4D80-97E3-743BDE61146F}.Debug|x64.Build.0 = Debug|x64 {B47166A1-4827-4D80-97E3-743BDE61146F}.Release|Win32.ActiveCfg = Release|Win32 {B47166A1-4827-4D80-97E3-743BDE61146F}.Release|Win32.Build.0 = Release|Win32 {B47166A1-4827-4D80-97E3-743BDE61146F}.Release|x64.ActiveCfg = Release|x64 {B47166A1-4827-4D80-97E3-743BDE61146F}.Release|x64.Build.0 = Release|x64 EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE EndGlobalSection EndGlobal sphinx-2.0.4-release/sphinx-min.conf.in0000644000176700017710000000214411723165371017320 0ustar deogardeogar# # Minimal Sphinx configuration sample (clean, simple, functional) # source src1 { type = mysql sql_host = localhost sql_user = test sql_pass = sql_db = test sql_port = 3306 # optional, default is 3306 sql_query = \ SELECT id, group_id, UNIX_TIMESTAMP(date_added) AS date_added, title, content \ FROM documents sql_attr_uint = group_id sql_attr_timestamp = date_added sql_query_info = SELECT * FROM documents WHERE id=$id } index test1 { source = src1 path = @CONFDIR@/data/test1 docinfo = extern charset_type = sbcs } index testrt { type = rt rt_mem_limit = 32M path = @CONFDIR@/data/testrt charset_type = utf-8 rt_field = title rt_field = content rt_attr_uint = gid } indexer { mem_limit = 32M } searchd { listen = 9312 listen = 9306:mysql41 log = @CONFDIR@/log/searchd.log query_log = @CONFDIR@/log/query.log read_timeout = 5 max_children = 30 pid_file = @CONFDIR@/log/searchd.pid max_matches = 1000 seamless_rotate = 1 preopen_indexes = 1 unlink_old = 1 workers = threads # for RT to work binlog_path = @CONFDIR@/data } sphinx-2.0.4-release/buildconf.sh0000755000176700017710000000011211104425007016233 0ustar deogardeogar#! 
/bin/sh autoheader \ && aclocal \ && automake --foreign \ && autoconf sphinx-2.0.4-release/configure0000755000176700017710000101131711723635623015666 0ustar deogardeogar#! /bin/sh # Guess values for system-dependent variables and create Makefiles. # Generated by GNU Autoconf 2.65 for sphinx 2.0.4. # # Report bugs to . # # # Copyright (C) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001, # 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 Free Software Foundation, # Inc. # # # This configure script is free software; the Free Software Foundation # gives unlimited permission to copy, distribute and modify it. ## -------------------- ## ## M4sh Initialization. ## ## -------------------- ## # Be more Bourne compatible DUALCASE=1; export DUALCASE # for MKS sh if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then : emulate sh NULLCMD=: # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which # is contrary to our usage. Disable this feature. alias -g '${1+"$@"}'='"$@"' setopt NO_GLOB_SUBST else case `(set -o) 2>/dev/null` in #( *posix*) : set -o posix ;; #( *) : ;; esac fi as_nl=' ' export as_nl # Printing a long string crashes Solaris 7 /usr/bin/printf. as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo # Prefer a ksh shell builtin over an external printf program on Solaris, # but without wasting forks for bash or zsh. if test -z "$BASH_VERSION$ZSH_VERSION" \ && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then as_echo='print -r --' as_echo_n='print -rn --' elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then as_echo='printf %s\n' as_echo_n='printf %s' else if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"' as_echo_n='/usr/ucb/echo -n' else as_echo_body='eval expr "X$1" : "X\\(.*\\)"' as_echo_n_body='eval arg=$1; case $arg in #( *"$as_nl"*) expr "X$arg" : "X\\(.*\\)$as_nl"; arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;; esac; expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl" ' export as_echo_n_body as_echo_n='sh -c $as_echo_n_body as_echo' fi export as_echo_body as_echo='sh -c $as_echo_body as_echo' fi # The user is always right. if test "${PATH_SEPARATOR+set}" != set; then PATH_SEPARATOR=: (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || PATH_SEPARATOR=';' } fi # IFS # We need space, tab and new line, in precisely that order. Quoting is # there to prevent editors from complaining about space-tab. # (If _AS_PATH_WALK were called with IFS unset, it would disable word # splitting by setting IFS to empty value.) IFS=" "" $as_nl" # Find who we are. Look in the path if we contain no directory separator. case $0 in #(( *[\\/]* ) as_myself=$0 ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break done IFS=$as_save_IFS ;; esac # We did not find ourselves, most probably we were run as `sh COMMAND' # in which case we are not to be found in the path. if test "x$as_myself" = x; then as_myself=$0 fi if test ! -f "$as_myself"; then $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 exit 1 fi # Unset variables that we do not need and which cause bugs (e.g. in # pre-3.0 UWIN ksh). 
But do not cause bugs in bash 2.01; the "|| exit 1" # suppresses any "Segmentation fault" message there. '((' could # trigger a bug in pdksh 5.2.14. for as_var in BASH_ENV ENV MAIL MAILPATH do eval test x\${$as_var+set} = xset \ && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || : done PS1='$ ' PS2='> ' PS4='+ ' # NLS nuisances. LC_ALL=C export LC_ALL LANGUAGE=C export LANGUAGE # CDPATH. (unset CDPATH) >/dev/null 2>&1 && unset CDPATH if test "x$CONFIG_SHELL" = x; then as_bourne_compatible="if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then : emulate sh NULLCMD=: # Pre-4.2 versions of Zsh do word splitting on \${1+\"\$@\"}, which # is contrary to our usage. Disable this feature. alias -g '\${1+\"\$@\"}'='\"\$@\"' setopt NO_GLOB_SUBST else case \`(set -o) 2>/dev/null\` in #( *posix*) : set -o posix ;; #( *) : ;; esac fi " as_required="as_fn_return () { (exit \$1); } as_fn_success () { as_fn_return 0; } as_fn_failure () { as_fn_return 1; } as_fn_ret_success () { return 0; } as_fn_ret_failure () { return 1; } exitcode=0 as_fn_success || { exitcode=1; echo as_fn_success failed.; } as_fn_failure && { exitcode=1; echo as_fn_failure succeeded.; } as_fn_ret_success || { exitcode=1; echo as_fn_ret_success failed.; } as_fn_ret_failure && { exitcode=1; echo as_fn_ret_failure succeeded.; } if ( set x; as_fn_ret_success y && test x = \"\$1\" ); then : else exitcode=1; echo positional parameters were not saved. fi test x\$exitcode = x0 || exit 1" as_suggested=" as_lineno_1=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_1a=\$LINENO as_lineno_2=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_2a=\$LINENO eval 'test \"x\$as_lineno_1'\$as_run'\" != \"x\$as_lineno_2'\$as_run'\" && test \"x\`expr \$as_lineno_1'\$as_run' + 1\`\" = \"x\$as_lineno_2'\$as_run'\"' || exit 1 test \$(( 1 + 1 )) = 2 || exit 1" if (eval "$as_required") 2>/dev/null; then : as_have_required=yes else as_have_required=no fi if test x$as_have_required = xyes && (eval "$as_suggested") 2>/dev/null; then : else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR as_found=false for as_dir in /bin$PATH_SEPARATOR/usr/bin$PATH_SEPARATOR$PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. as_found=: case $as_dir in #( /*) for as_base in sh bash ksh sh5; do # Try only shells that exist, to save several forks. as_shell=$as_dir/$as_base if { test -f "$as_shell" || test -f "$as_shell.exe"; } && { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$as_shell"; } 2>/dev/null; then : CONFIG_SHELL=$as_shell as_have_required=yes if { $as_echo "$as_bourne_compatible""$as_suggested" | as_run=a "$as_shell"; } 2>/dev/null; then : break 2 fi fi done;; esac as_found=false done $as_found || { if { test -f "$SHELL" || test -f "$SHELL.exe"; } && { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$SHELL"; } 2>/dev/null; then : CONFIG_SHELL=$SHELL as_have_required=yes fi; } IFS=$as_save_IFS if test "x$CONFIG_SHELL" != x; then : # We cannot yet assume a decent shell, so we have to provide a # neutralization value for shells without unset; and this also # works around shells that cannot unset nonexistent variables. BASH_ENV=/dev/null ENV=/dev/null (unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV export CONFIG_SHELL exec "$CONFIG_SHELL" "$as_myself" ${1+"$@"} fi if test x$as_have_required = xno; then : $as_echo "$0: This script requires a shell more modern than all" $as_echo "$0: the shells that I found on your system." 
if test x${ZSH_VERSION+set} = xset ; then $as_echo "$0: In particular, zsh $ZSH_VERSION has bugs and should" $as_echo "$0: be upgraded to zsh 4.3.4 or later." else $as_echo "$0: Please tell bug-autoconf@gnu.org and $0: shodan(at)sphinxsearch.com about your system, including any $0: error possibly output before this message. Then install $0: a modern shell, or manually run the script under such a $0: shell if you do have one." fi exit 1 fi fi fi SHELL=${CONFIG_SHELL-/bin/sh} export SHELL # Unset more variables known to interfere with behavior of common tools. CLICOLOR_FORCE= GREP_OPTIONS= unset CLICOLOR_FORCE GREP_OPTIONS ## --------------------- ## ## M4sh Shell Functions. ## ## --------------------- ## # as_fn_unset VAR # --------------- # Portably unset VAR. as_fn_unset () { { eval $1=; unset $1;} } as_unset=as_fn_unset # as_fn_set_status STATUS # ----------------------- # Set $? to STATUS, without forking. as_fn_set_status () { return $1 } # as_fn_set_status # as_fn_exit STATUS # ----------------- # Exit the shell with STATUS, even in a "trap 0" or "set -e" context. as_fn_exit () { set +e as_fn_set_status $1 exit $1 } # as_fn_exit # as_fn_mkdir_p # ------------- # Create "$as_dir" as a directory, including parents if necessary. as_fn_mkdir_p () { case $as_dir in #( -*) as_dir=./$as_dir;; esac test -d "$as_dir" || eval $as_mkdir_p || { as_dirs= while :; do case $as_dir in #( *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'( *) as_qdir=$as_dir;; esac as_dirs="'$as_qdir' $as_dirs" as_dir=`$as_dirname -- "$as_dir" || $as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$as_dir" : 'X\(//\)[^/]' \| \ X"$as_dir" : 'X\(//\)$' \| \ X"$as_dir" : 'X\(/\)' \| . 2>/dev/null || $as_echo X"$as_dir" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q } /^X\(\/\/\)[^/].*/{ s//\1/ q } /^X\(\/\/\)$/{ s//\1/ q } /^X\(\/\).*/{ s//\1/ q } s/.*/./; q'` test -d "$as_dir" && break done test -z "$as_dirs" || eval "mkdir $as_dirs" } || test -d "$as_dir" || as_fn_error "cannot create directory $as_dir" } # as_fn_mkdir_p # as_fn_append VAR VALUE # ---------------------- # Append the text in VALUE to the end of the definition contained in VAR. Take # advantage of any shell optimizations that allow amortized linear growth over # repeated appends, instead of the typical quadratic growth present in naive # implementations. if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then : eval 'as_fn_append () { eval $1+=\$2 }' else as_fn_append () { eval $1=\$$1\$2 } fi # as_fn_append # as_fn_arith ARG... # ------------------ # Perform arithmetic evaluation on the ARGs, and store the result in the # global $as_val. Take advantage of shells that can avoid forks. The arguments # must be portable across $(()) and expr. if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then : eval 'as_fn_arith () { as_val=$(( $* )) }' else as_fn_arith () { as_val=`expr "$@" || test $? -eq 1` } fi # as_fn_arith # as_fn_error ERROR [LINENO LOG_FD] # --------------------------------- # Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are # provided, also output the error to LOG_FD, referencing LINENO. Then exit the # script with status $?, using 1 if that was 0. 
as_fn_error () { as_status=$?; test $as_status -eq 0 && as_status=1 if test "$3"; then as_lineno=${as_lineno-"$2"} as_lineno_stack=as_lineno_stack=$as_lineno_stack $as_echo "$as_me:${as_lineno-$LINENO}: error: $1" >&$3 fi $as_echo "$as_me: error: $1" >&2 as_fn_exit $as_status } # as_fn_error if expr a : '\(a\)' >/dev/null 2>&1 && test "X`expr 00001 : '.*\(...\)'`" = X001; then as_expr=expr else as_expr=false fi if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then as_basename=basename else as_basename=false fi if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then as_dirname=dirname else as_dirname=false fi as_me=`$as_basename -- "$0" || $as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ X"$0" : 'X\(//\)$' \| \ X"$0" : 'X\(/\)' \| . 2>/dev/null || $as_echo X/"$0" | sed '/^.*\/\([^/][^/]*\)\/*$/{ s//\1/ q } /^X\/\(\/\/\)$/{ s//\1/ q } /^X\/\(\/\).*/{ s//\1/ q } s/.*/./; q'` # Avoid depending upon Character Ranges. as_cr_letters='abcdefghijklmnopqrstuvwxyz' as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' as_cr_Letters=$as_cr_letters$as_cr_LETTERS as_cr_digits='0123456789' as_cr_alnum=$as_cr_Letters$as_cr_digits as_lineno_1=$LINENO as_lineno_1a=$LINENO as_lineno_2=$LINENO as_lineno_2a=$LINENO eval 'test "x$as_lineno_1'$as_run'" != "x$as_lineno_2'$as_run'" && test "x`expr $as_lineno_1'$as_run' + 1`" = "x$as_lineno_2'$as_run'"' || { # Blame Lee E. McMahon (1931-1989) for sed's syntax. :-) sed -n ' p /[$]LINENO/= ' <$as_myself | sed ' s/[$]LINENO.*/&-/ t lineno b :lineno N :loop s/[$]LINENO\([^'$as_cr_alnum'_].*\n\)\(.*\)/\2\1\2/ t loop s/-\n.*// ' >$as_me.lineno && chmod +x "$as_me.lineno" || { $as_echo "$as_me: error: cannot create $as_me.lineno; rerun with a POSIX shell" >&2; as_fn_exit 1; } # Don't try to exec as it changes $[0], causing all sort of problems # (the dirname of $[0] is not the place where we might find the # original and so on. Autoconf is especially sensitive to this). . "./$as_me.lineno" # Exit status is that of the last command. exit } ECHO_C= ECHO_N= ECHO_T= case `echo -n x` in #((((( -n*) case `echo 'xy\c'` in *c*) ECHO_T=' ';; # ECHO_T is single tab character. xy) ECHO_C='\c';; *) echo `echo ksh88 bug on AIX 6.1` > /dev/null ECHO_T=' ';; esac;; *) ECHO_N='-n';; esac rm -f conf$$ conf$$.exe conf$$.file if test -d conf$$.dir; then rm -f conf$$.dir/conf$$.file else rm -f conf$$.dir mkdir conf$$.dir 2>/dev/null fi if (echo >conf$$.file) 2>/dev/null; then if ln -s conf$$.file conf$$ 2>/dev/null; then as_ln_s='ln -s' # ... but there are two gotchas: # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. # In both cases, we have to default to `cp -p'. ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe || as_ln_s='cp -p' elif ln conf$$.file conf$$ 2>/dev/null; then as_ln_s=ln else as_ln_s='cp -p' fi else as_ln_s='cp -p' fi rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file rmdir conf$$.dir 2>/dev/null if mkdir -p . 2>/dev/null; then as_mkdir_p='mkdir -p "$as_dir"' else test -d ./-p && rmdir ./-p as_mkdir_p=false fi if test -x / >/dev/null 2>&1; then as_test_x='test -x' else if ls -dL / >/dev/null 2>&1; then as_ls_L_option=L else as_ls_L_option= fi as_test_x=' eval sh -c '\'' if test -d "$1"; then test -d "$1/."; else case $1 in #( -*)set "./$1";; esac; case `ls -ld'$as_ls_L_option' "$1" 2>/dev/null` in #(( ???[sx]*):;;*)false;;esac;fi '\'' sh ' fi as_executable_p=$as_test_x # Sed expression to map a string onto a valid CPP name. 
as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" # Sed expression to map a string onto a valid variable name. as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" test -n "$DJDIR" || exec 7<&0 &1 # Name of the host. # hostname on some systems (SVR3.2, Linux) returns a bogus exit status, # so uname gets run too. ac_hostname=`(hostname || uname -n) 2>/dev/null | sed 1q` # # Initializations. # ac_default_prefix=/usr/local ac_clean_files= ac_config_libobj_dir=. LIBOBJS= cross_compiling=no subdirs= MFLAGS= MAKEFLAGS= # Identity of this package. PACKAGE_NAME='sphinx' PACKAGE_TARNAME='sphinx' PACKAGE_VERSION='2.0.4' PACKAGE_STRING='sphinx 2.0.4' PACKAGE_BUGREPORT='shodan(at)sphinxsearch.com' PACKAGE_URL='' ac_unique_file="src/searchd.cpp" # Factoring default headers for most tests. ac_includes_default="\ #include #ifdef HAVE_SYS_TYPES_H # include #endif #ifdef HAVE_SYS_STAT_H # include #endif #ifdef STDC_HEADERS # include # include #else # ifdef HAVE_STDLIB_H # include # endif #endif #ifdef HAVE_STRING_H # if !defined STDC_HEADERS && defined HAVE_MEMORY_H # include # endif # include #endif #ifdef HAVE_STRINGS_H # include #endif #ifdef HAVE_INTTYPES_H # include #endif #ifdef HAVE_STDINT_H # include #endif #ifdef HAVE_UNISTD_H # include #endif" ac_subst_vars='am__EXEEXT_FALSE am__EXEEXT_TRUE LTLIBOBJS CONFDIR USE_LIBSTEMMER_FALSE USE_LIBSTEMMER_TRUE USE_PGSQL_FALSE USE_PGSQL_TRUE PGSQL_CFLAGS PGSQL_LIBS pgconfig USE_MYSQL_FALSE USE_MYSQL_TRUE MYSQL_CFLAGS MYSQL_LIBS LIBRT LIBOBJS EGREP GREP CPP RANLIB am__fastdepCXX_FALSE am__fastdepCXX_TRUE CXXDEPMODE ac_ct_CXX CXXFLAGS CXX am__fastdepCC_FALSE am__fastdepCC_TRUE CCDEPMODE AMDEPBACKSLASH AMDEP_FALSE AMDEP_TRUE am__quote am__include DEPDIR OBJEXT EXEEXT ac_ct_CC CPPFLAGS LDFLAGS CFLAGS CC MAINT MAINTAINER_MODE_FALSE MAINTAINER_MODE_TRUE am__untar am__tar AMTAR am__leading_dot SET_MAKE AWK mkdir_p MKDIR_P INSTALL_STRIP_PROGRAM STRIP install_sh MAKEINFO AUTOHEADER AUTOMAKE AUTOCONF ACLOCAL VERSION PACKAGE CYGPATH_W am__isrc INSTALL_DATA INSTALL_SCRIPT INSTALL_PROGRAM target_alias host_alias build_alias LIBS ECHO_T ECHO_N ECHO_C DEFS mandir localedir libdir psdir pdfdir dvidir htmldir infodir docdir oldincludedir includedir localstatedir sharedstatedir sysconfdir datadir datarootdir libexecdir sbindir bindir program_transform_name prefix exec_prefix PACKAGE_URL PACKAGE_BUGREPORT PACKAGE_STRING PACKAGE_VERSION PACKAGE_TARNAME PACKAGE_NAME PATH_SEPARATOR SHELL' ac_subst_files='' ac_user_opts=' enable_option_checking enable_maintainer_mode with_debug enable_dependency_tracking with_mysql with_mysql_includes with_mysql_libs with_static_mysql with_pgsql with_pgsql_includes with_pgsql_libs enable_id64 with_libstemmer with_iconv with_unixodbc with_syslog ' ac_precious_vars='build_alias host_alias target_alias CC CFLAGS LDFLAGS LIBS CPPFLAGS CXX CXXFLAGS CCC CPP' # Initialize some variables set by options. ac_init_help= ac_init_version=false ac_unrecognized_opts= ac_unrecognized_sep= # The variables have the same names as the options, with # dashes changed to underlines. cache_file=/dev/null exec_prefix=NONE no_create= no_recursion= prefix=NONE program_prefix=NONE program_suffix=NONE program_transform_name=s,x,x, silent= site= srcdir= verbose= x_includes=NONE x_libraries=NONE # Installation directory options. # These are left unexpanded so users can "make install exec_prefix=/foo" # and all the variables that are supposed to be based on exec_prefix # by default will actually change. 
# Use braces instead of parens because sh, perl, etc. also accept them. # (The list follows the same order as the GNU Coding Standards.) bindir='${exec_prefix}/bin' sbindir='${exec_prefix}/sbin' libexecdir='${exec_prefix}/libexec' datarootdir='${prefix}/share' datadir='${datarootdir}' sysconfdir='${prefix}/etc' sharedstatedir='${prefix}/com' localstatedir='${prefix}/var' includedir='${prefix}/include' oldincludedir='/usr/include' docdir='${datarootdir}/doc/${PACKAGE_TARNAME}' infodir='${datarootdir}/info' htmldir='${docdir}' dvidir='${docdir}' pdfdir='${docdir}' psdir='${docdir}' libdir='${exec_prefix}/lib' localedir='${datarootdir}/locale' mandir='${datarootdir}/man' ac_prev= ac_dashdash= for ac_option do # If the previous option needs an argument, assign it. if test -n "$ac_prev"; then eval $ac_prev=\$ac_option ac_prev= continue fi case $ac_option in *=*) ac_optarg=`expr "X$ac_option" : '[^=]*=\(.*\)'` ;; *) ac_optarg=yes ;; esac # Accept the important Cygnus configure options, so we can diagnose typos. case $ac_dashdash$ac_option in --) ac_dashdash=yes ;; -bindir | --bindir | --bindi | --bind | --bin | --bi) ac_prev=bindir ;; -bindir=* | --bindir=* | --bindi=* | --bind=* | --bin=* | --bi=*) bindir=$ac_optarg ;; -build | --build | --buil | --bui | --bu) ac_prev=build_alias ;; -build=* | --build=* | --buil=* | --bui=* | --bu=*) build_alias=$ac_optarg ;; -cache-file | --cache-file | --cache-fil | --cache-fi \ | --cache-f | --cache- | --cache | --cach | --cac | --ca | --c) ac_prev=cache_file ;; -cache-file=* | --cache-file=* | --cache-fil=* | --cache-fi=* \ | --cache-f=* | --cache-=* | --cache=* | --cach=* | --cac=* | --ca=* | --c=*) cache_file=$ac_optarg ;; --config-cache | -C) cache_file=config.cache ;; -datadir | --datadir | --datadi | --datad) ac_prev=datadir ;; -datadir=* | --datadir=* | --datadi=* | --datad=*) datadir=$ac_optarg ;; -datarootdir | --datarootdir | --datarootdi | --datarootd | --dataroot \ | --dataroo | --dataro | --datar) ac_prev=datarootdir ;; -datarootdir=* | --datarootdir=* | --datarootdi=* | --datarootd=* \ | --dataroot=* | --dataroo=* | --dataro=* | --datar=*) datarootdir=$ac_optarg ;; -disable-* | --disable-*) ac_useropt=`expr "x$ac_option" : 'x-*disable-\(.*\)'` # Reject names that are not valid shell variable names. expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && as_fn_error "invalid feature name: $ac_useropt" ac_useropt_orig=$ac_useropt ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` case $ac_user_opts in *" "enable_$ac_useropt" "*) ;; *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--disable-$ac_useropt_orig" ac_unrecognized_sep=', ';; esac eval enable_$ac_useropt=no ;; -docdir | --docdir | --docdi | --doc | --do) ac_prev=docdir ;; -docdir=* | --docdir=* | --docdi=* | --doc=* | --do=*) docdir=$ac_optarg ;; -dvidir | --dvidir | --dvidi | --dvid | --dvi | --dv) ac_prev=dvidir ;; -dvidir=* | --dvidir=* | --dvidi=* | --dvid=* | --dvi=* | --dv=*) dvidir=$ac_optarg ;; -enable-* | --enable-*) ac_useropt=`expr "x$ac_option" : 'x-*enable-\([^=]*\)'` # Reject names that are not valid shell variable names. 
expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && as_fn_error "invalid feature name: $ac_useropt" ac_useropt_orig=$ac_useropt ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` case $ac_user_opts in *" "enable_$ac_useropt" "*) ;; *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--enable-$ac_useropt_orig" ac_unrecognized_sep=', ';; esac eval enable_$ac_useropt=\$ac_optarg ;; -exec-prefix | --exec_prefix | --exec-prefix | --exec-prefi \ | --exec-pref | --exec-pre | --exec-pr | --exec-p | --exec- \ | --exec | --exe | --ex) ac_prev=exec_prefix ;; -exec-prefix=* | --exec_prefix=* | --exec-prefix=* | --exec-prefi=* \ | --exec-pref=* | --exec-pre=* | --exec-pr=* | --exec-p=* | --exec-=* \ | --exec=* | --exe=* | --ex=*) exec_prefix=$ac_optarg ;; -gas | --gas | --ga | --g) # Obsolete; use --with-gas. with_gas=yes ;; -help | --help | --hel | --he | -h) ac_init_help=long ;; -help=r* | --help=r* | --hel=r* | --he=r* | -hr*) ac_init_help=recursive ;; -help=s* | --help=s* | --hel=s* | --he=s* | -hs*) ac_init_help=short ;; -host | --host | --hos | --ho) ac_prev=host_alias ;; -host=* | --host=* | --hos=* | --ho=*) host_alias=$ac_optarg ;; -htmldir | --htmldir | --htmldi | --htmld | --html | --htm | --ht) ac_prev=htmldir ;; -htmldir=* | --htmldir=* | --htmldi=* | --htmld=* | --html=* | --htm=* \ | --ht=*) htmldir=$ac_optarg ;; -includedir | --includedir | --includedi | --included | --include \ | --includ | --inclu | --incl | --inc) ac_prev=includedir ;; -includedir=* | --includedir=* | --includedi=* | --included=* | --include=* \ | --includ=* | --inclu=* | --incl=* | --inc=*) includedir=$ac_optarg ;; -infodir | --infodir | --infodi | --infod | --info | --inf) ac_prev=infodir ;; -infodir=* | --infodir=* | --infodi=* | --infod=* | --info=* | --inf=*) infodir=$ac_optarg ;; -libdir | --libdir | --libdi | --libd) ac_prev=libdir ;; -libdir=* | --libdir=* | --libdi=* | --libd=*) libdir=$ac_optarg ;; -libexecdir | --libexecdir | --libexecdi | --libexecd | --libexec \ | --libexe | --libex | --libe) ac_prev=libexecdir ;; -libexecdir=* | --libexecdir=* | --libexecdi=* | --libexecd=* | --libexec=* \ | --libexe=* | --libex=* | --libe=*) libexecdir=$ac_optarg ;; -localedir | --localedir | --localedi | --localed | --locale) ac_prev=localedir ;; -localedir=* | --localedir=* | --localedi=* | --localed=* | --locale=*) localedir=$ac_optarg ;; -localstatedir | --localstatedir | --localstatedi | --localstated \ | --localstate | --localstat | --localsta | --localst | --locals) ac_prev=localstatedir ;; -localstatedir=* | --localstatedir=* | --localstatedi=* | --localstated=* \ | --localstate=* | --localstat=* | --localsta=* | --localst=* | --locals=*) localstatedir=$ac_optarg ;; -mandir | --mandir | --mandi | --mand | --man | --ma | --m) ac_prev=mandir ;; -mandir=* | --mandir=* | --mandi=* | --mand=* | --man=* | --ma=* | --m=*) mandir=$ac_optarg ;; -nfp | --nfp | --nf) # Obsolete; use --without-fp. 
with_fp=no ;; -no-create | --no-create | --no-creat | --no-crea | --no-cre \ | --no-cr | --no-c | -n) no_create=yes ;; -no-recursion | --no-recursion | --no-recursio | --no-recursi \ | --no-recurs | --no-recur | --no-recu | --no-rec | --no-re | --no-r) no_recursion=yes ;; -oldincludedir | --oldincludedir | --oldincludedi | --oldincluded \ | --oldinclude | --oldinclud | --oldinclu | --oldincl | --oldinc \ | --oldin | --oldi | --old | --ol | --o) ac_prev=oldincludedir ;; -oldincludedir=* | --oldincludedir=* | --oldincludedi=* | --oldincluded=* \ | --oldinclude=* | --oldinclud=* | --oldinclu=* | --oldincl=* | --oldinc=* \ | --oldin=* | --oldi=* | --old=* | --ol=* | --o=*) oldincludedir=$ac_optarg ;; -prefix | --prefix | --prefi | --pref | --pre | --pr | --p) ac_prev=prefix ;; -prefix=* | --prefix=* | --prefi=* | --pref=* | --pre=* | --pr=* | --p=*) prefix=$ac_optarg ;; -program-prefix | --program-prefix | --program-prefi | --program-pref \ | --program-pre | --program-pr | --program-p) ac_prev=program_prefix ;; -program-prefix=* | --program-prefix=* | --program-prefi=* \ | --program-pref=* | --program-pre=* | --program-pr=* | --program-p=*) program_prefix=$ac_optarg ;; -program-suffix | --program-suffix | --program-suffi | --program-suff \ | --program-suf | --program-su | --program-s) ac_prev=program_suffix ;; -program-suffix=* | --program-suffix=* | --program-suffi=* \ | --program-suff=* | --program-suf=* | --program-su=* | --program-s=*) program_suffix=$ac_optarg ;; -program-transform-name | --program-transform-name \ | --program-transform-nam | --program-transform-na \ | --program-transform-n | --program-transform- \ | --program-transform | --program-transfor \ | --program-transfo | --program-transf \ | --program-trans | --program-tran \ | --progr-tra | --program-tr | --program-t) ac_prev=program_transform_name ;; -program-transform-name=* | --program-transform-name=* \ | --program-transform-nam=* | --program-transform-na=* \ | --program-transform-n=* | --program-transform-=* \ | --program-transform=* | --program-transfor=* \ | --program-transfo=* | --program-transf=* \ | --program-trans=* | --program-tran=* \ | --progr-tra=* | --program-tr=* | --program-t=*) program_transform_name=$ac_optarg ;; -pdfdir | --pdfdir | --pdfdi | --pdfd | --pdf | --pd) ac_prev=pdfdir ;; -pdfdir=* | --pdfdir=* | --pdfdi=* | --pdfd=* | --pdf=* | --pd=*) pdfdir=$ac_optarg ;; -psdir | --psdir | --psdi | --psd | --ps) ac_prev=psdir ;; -psdir=* | --psdir=* | --psdi=* | --psd=* | --ps=*) psdir=$ac_optarg ;; -q | -quiet | --quiet | --quie | --qui | --qu | --q \ | -silent | --silent | --silen | --sile | --sil) silent=yes ;; -sbindir | --sbindir | --sbindi | --sbind | --sbin | --sbi | --sb) ac_prev=sbindir ;; -sbindir=* | --sbindir=* | --sbindi=* | --sbind=* | --sbin=* \ | --sbi=* | --sb=*) sbindir=$ac_optarg ;; -sharedstatedir | --sharedstatedir | --sharedstatedi \ | --sharedstated | --sharedstate | --sharedstat | --sharedsta \ | --sharedst | --shareds | --shared | --share | --shar \ | --sha | --sh) ac_prev=sharedstatedir ;; -sharedstatedir=* | --sharedstatedir=* | --sharedstatedi=* \ | --sharedstated=* | --sharedstate=* | --sharedstat=* | --sharedsta=* \ | --sharedst=* | --shareds=* | --shared=* | --share=* | --shar=* \ | --sha=* | --sh=*) sharedstatedir=$ac_optarg ;; -site | --site | --sit) ac_prev=site ;; -site=* | --site=* | --sit=*) site=$ac_optarg ;; -srcdir | --srcdir | --srcdi | --srcd | --src | --sr) ac_prev=srcdir ;; -srcdir=* | --srcdir=* | --srcdi=* | --srcd=* | --src=* | --sr=*) srcdir=$ac_optarg ;; 
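# --------------------------------------------------------------------------
# Illustrative sketch (comments only, so the generated script's behaviour is
# unchanged): each arm of this case statement normalizes one command-line
# option into a plain shell variable.  For a hypothetical invocation such as
#
#   ./configure --prefix=/usr/local/sphinx --with-mysql-includes=/opt/mysql/include
#
# the `-prefix=*' arm above assigns prefix=/usr/local/sphinx directly, while
# the generic `-with-*' arm below rewrites dashes to underscores
# (sed 's/[-+.]/_/g') and evals with_mysql_includes=/opt/mysql/include.
# The paths shown here are examples only, not defaults.
# --------------------------------------------------------------------------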
-sysconfdir | --sysconfdir | --sysconfdi | --sysconfd | --sysconf \ | --syscon | --sysco | --sysc | --sys | --sy) ac_prev=sysconfdir ;; -sysconfdir=* | --sysconfdir=* | --sysconfdi=* | --sysconfd=* | --sysconf=* \ | --syscon=* | --sysco=* | --sysc=* | --sys=* | --sy=*) sysconfdir=$ac_optarg ;; -target | --target | --targe | --targ | --tar | --ta | --t) ac_prev=target_alias ;; -target=* | --target=* | --targe=* | --targ=* | --tar=* | --ta=* | --t=*) target_alias=$ac_optarg ;; -v | -verbose | --verbose | --verbos | --verbo | --verb) verbose=yes ;; -version | --version | --versio | --versi | --vers | -V) ac_init_version=: ;; -with-* | --with-*) ac_useropt=`expr "x$ac_option" : 'x-*with-\([^=]*\)'` # Reject names that are not valid shell variable names. expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && as_fn_error "invalid package name: $ac_useropt" ac_useropt_orig=$ac_useropt ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` case $ac_user_opts in *" "with_$ac_useropt" "*) ;; *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--with-$ac_useropt_orig" ac_unrecognized_sep=', ';; esac eval with_$ac_useropt=\$ac_optarg ;; -without-* | --without-*) ac_useropt=`expr "x$ac_option" : 'x-*without-\(.*\)'` # Reject names that are not valid shell variable names. expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && as_fn_error "invalid package name: $ac_useropt" ac_useropt_orig=$ac_useropt ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` case $ac_user_opts in *" "with_$ac_useropt" "*) ;; *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--without-$ac_useropt_orig" ac_unrecognized_sep=', ';; esac eval with_$ac_useropt=no ;; --x) # Obsolete; use --with-x. with_x=yes ;; -x-includes | --x-includes | --x-include | --x-includ | --x-inclu \ | --x-incl | --x-inc | --x-in | --x-i) ac_prev=x_includes ;; -x-includes=* | --x-includes=* | --x-include=* | --x-includ=* | --x-inclu=* \ | --x-incl=* | --x-inc=* | --x-in=* | --x-i=*) x_includes=$ac_optarg ;; -x-libraries | --x-libraries | --x-librarie | --x-librari \ | --x-librar | --x-libra | --x-libr | --x-lib | --x-li | --x-l) ac_prev=x_libraries ;; -x-libraries=* | --x-libraries=* | --x-librarie=* | --x-librari=* \ | --x-librar=* | --x-libra=* | --x-libr=* | --x-lib=* | --x-li=* | --x-l=*) x_libraries=$ac_optarg ;; -*) as_fn_error "unrecognized option: \`$ac_option' Try \`$0 --help' for more information." ;; *=*) ac_envvar=`expr "x$ac_option" : 'x\([^=]*\)='` # Reject names that are not valid shell variable names. case $ac_envvar in #( '' | [0-9]* | *[!_$as_cr_alnum]* ) as_fn_error "invalid variable name: \`$ac_envvar'" ;; esac eval $ac_envvar=\$ac_optarg export $ac_envvar ;; *) # FIXME: should be removed in autoconf 3.0. $as_echo "$as_me: WARNING: you should use --build, --host, --target" >&2 expr "x$ac_option" : ".*[^-._$as_cr_alnum]" >/dev/null && $as_echo "$as_me: WARNING: invalid host type: $ac_option" >&2 : ${build_alias=$ac_option} ${host_alias=$ac_option} ${target_alias=$ac_option} ;; esac done if test -n "$ac_prev"; then ac_option=--`echo $ac_prev | sed 's/_/-/g'` as_fn_error "missing argument to $ac_option" fi if test -n "$ac_unrecognized_opts"; then case $enable_option_checking in no) ;; fatal) as_fn_error "unrecognized options: $ac_unrecognized_opts" ;; *) $as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2 ;; esac fi # Check all directory arguments for consistency. 
for ac_var in exec_prefix prefix bindir sbindir libexecdir datarootdir \ datadir sysconfdir sharedstatedir localstatedir includedir \ oldincludedir docdir infodir htmldir dvidir pdfdir psdir \ libdir localedir mandir do eval ac_val=\$$ac_var # Remove trailing slashes. case $ac_val in */ ) ac_val=`expr "X$ac_val" : 'X\(.*[^/]\)' \| "X$ac_val" : 'X\(.*\)'` eval $ac_var=\$ac_val;; esac # Be sure to have absolute directory names. case $ac_val in [\\/$]* | ?:[\\/]* ) continue;; NONE | '' ) case $ac_var in *prefix ) continue;; esac;; esac as_fn_error "expected an absolute directory name for --$ac_var: $ac_val" done # There might be people who depend on the old broken behavior: `$host' # used to hold the argument of --host etc. # FIXME: To remove some day. build=$build_alias host=$host_alias target=$target_alias # FIXME: To remove some day. if test "x$host_alias" != x; then if test "x$build_alias" = x; then cross_compiling=maybe $as_echo "$as_me: WARNING: If you wanted to set the --build type, don't use --host. If a cross compiler is detected then cross compile mode will be used." >&2 elif test "x$build_alias" != "x$host_alias"; then cross_compiling=yes fi fi ac_tool_prefix= test -n "$host_alias" && ac_tool_prefix=$host_alias- test "$silent" = yes && exec 6>/dev/null ac_pwd=`pwd` && test -n "$ac_pwd" && ac_ls_di=`ls -di .` && ac_pwd_ls_di=`cd "$ac_pwd" && ls -di .` || as_fn_error "working directory cannot be determined" test "X$ac_ls_di" = "X$ac_pwd_ls_di" || as_fn_error "pwd does not report name of working directory" # Find the source files, if location was not specified. if test -z "$srcdir"; then ac_srcdir_defaulted=yes # Try the directory containing this script, then the parent directory. ac_confdir=`$as_dirname -- "$as_myself" || $as_expr X"$as_myself" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$as_myself" : 'X\(//\)[^/]' \| \ X"$as_myself" : 'X\(//\)$' \| \ X"$as_myself" : 'X\(/\)' \| . 2>/dev/null || $as_echo X"$as_myself" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q } /^X\(\/\/\)[^/].*/{ s//\1/ q } /^X\(\/\/\)$/{ s//\1/ q } /^X\(\/\).*/{ s//\1/ q } s/.*/./; q'` srcdir=$ac_confdir if test ! -r "$srcdir/$ac_unique_file"; then srcdir=.. fi else ac_srcdir_defaulted=no fi if test ! -r "$srcdir/$ac_unique_file"; then test "$ac_srcdir_defaulted" = yes && srcdir="$ac_confdir or .." as_fn_error "cannot find sources ($ac_unique_file) in $srcdir" fi ac_msg="sources are in $srcdir, but \`cd $srcdir' does not work" ac_abs_confdir=`( cd "$srcdir" && test -r "./$ac_unique_file" || as_fn_error "$ac_msg" pwd)` # When building in place, set srcdir=. if test "$ac_abs_confdir" = "$ac_pwd"; then srcdir=. fi # Remove unnecessary trailing slashes from srcdir. # Double slashes in file names in object file debugging info # mess up M-x gdb in Emacs. case $srcdir in */) srcdir=`expr "X$srcdir" : 'X\(.*[^/]\)' \| "X$srcdir" : 'X\(.*\)'`;; esac for ac_var in $ac_precious_vars; do eval ac_env_${ac_var}_set=\${${ac_var}+set} eval ac_env_${ac_var}_value=\$${ac_var} eval ac_cv_env_${ac_var}_set=\${${ac_var}+set} eval ac_cv_env_${ac_var}_value=\$${ac_var} done # # Report the --help message. # if test "$ac_init_help" = "long"; then # Omit some internal or obsolete options to make the list less imposing. # This message is too long to be a string in the A/UX 3.1 sh. cat <<_ACEOF \`configure' configures sphinx 2.0.4 to adapt to many kinds of systems. Usage: $0 [OPTION]... [VAR=VALUE]... To assign environment variables (e.g., CC, CFLAGS...), specify them as VAR=VALUE. See below for descriptions of some of the useful variables. 
Defaults for the options are specified in brackets. Configuration: -h, --help display this help and exit --help=short display options specific to this package --help=recursive display the short help of all the included packages -V, --version display version information and exit -q, --quiet, --silent do not print \`checking...' messages --cache-file=FILE cache test results in FILE [disabled] -C, --config-cache alias for \`--cache-file=config.cache' -n, --no-create do not create output files --srcdir=DIR find the sources in DIR [configure dir or \`..'] Installation directories: --prefix=PREFIX install architecture-independent files in PREFIX [$ac_default_prefix] --exec-prefix=EPREFIX install architecture-dependent files in EPREFIX [PREFIX] By default, \`make install' will install all the files in \`$ac_default_prefix/bin', \`$ac_default_prefix/lib' etc. You can specify an installation prefix other than \`$ac_default_prefix' using \`--prefix', for instance \`--prefix=\$HOME'. For better control, use the options below. Fine tuning of the installation directories: --bindir=DIR user executables [EPREFIX/bin] --sbindir=DIR system admin executables [EPREFIX/sbin] --libexecdir=DIR program executables [EPREFIX/libexec] --sysconfdir=DIR read-only single-machine data [PREFIX/etc] --sharedstatedir=DIR modifiable architecture-independent data [PREFIX/com] --localstatedir=DIR modifiable single-machine data [PREFIX/var] --libdir=DIR object code libraries [EPREFIX/lib] --includedir=DIR C header files [PREFIX/include] --oldincludedir=DIR C header files for non-gcc [/usr/include] --datarootdir=DIR read-only arch.-independent data root [PREFIX/share] --datadir=DIR read-only architecture-independent data [DATAROOTDIR] --infodir=DIR info documentation [DATAROOTDIR/info] --localedir=DIR locale-dependent data [DATAROOTDIR/locale] --mandir=DIR man documentation [DATAROOTDIR/man] --docdir=DIR documentation root [DATAROOTDIR/doc/sphinx] --htmldir=DIR html documentation [DOCDIR] --dvidir=DIR dvi documentation [DOCDIR] --pdfdir=DIR pdf documentation [DOCDIR] --psdir=DIR ps documentation [DOCDIR] _ACEOF cat <<\_ACEOF Program names: --program-prefix=PREFIX prepend PREFIX to installed program names --program-suffix=SUFFIX append SUFFIX to installed program names --program-transform-name=PROGRAM run sed PROGRAM on installed program names _ACEOF fi if test -n "$ac_init_help"; then case $ac_init_help in short | recursive ) echo "Configuration of sphinx 2.0.4:";; esac cat <<\_ACEOF Optional Features: --disable-option-checking ignore unrecognized --enable/--with options --disable-FEATURE do not include FEATURE (same as --enable-FEATURE=no) --enable-FEATURE[=ARG] include FEATURE [ARG=yes] --enable-maintainer-mode enable make rules and dependencies not useful (and sometimes confusing) to the casual installer --disable-dependency-tracking speeds up one-time build --enable-dependency-tracking do not reject slow dependency extractors --enable-id64 use 64-bit document and word IDs (default is no) Optional Packages: --with-PACKAGE[=ARG] use PACKAGE [ARG=yes] --without-PACKAGE do not use PACKAGE (same as --with-PACKAGE=no) --with-debug compile slower debug version (default is disabled) --with-mysql compile with MySQL support (default is enabled) --with-mysql-includes path to MySQL header files --with-mysql-libs path to MySQL libraries --with-static-mysql link statically with MySQL library (default is no) --with-pgsql compile with PostgreSQL support (default is disabled) --with-pgsql-includes path to PostgreSQL header files 
--with-pgsql-libs path to PostgreSQL libraries --with-libstemmer compile with libstemmer support (default is disabled) --with-iconv compile with iconv support (default is autodetect) --with-unixodbc compile with UnixODBC support (default is autodetect) --with-syslog compile with possibility to use syslog for logging (default is no) Some influential environment variables: CC C compiler command CFLAGS C compiler flags LDFLAGS linker flags, e.g. -L if you have libraries in a nonstandard directory LIBS libraries to pass to the linker, e.g. -l CPPFLAGS (Objective) C/C++ preprocessor flags, e.g. -I if you have headers in a nonstandard directory CXX C++ compiler command CXXFLAGS C++ compiler flags CPP C preprocessor Use these variables to override the choices made by `configure' or to help it to find libraries and programs with nonstandard names/locations. Report bugs to . _ACEOF ac_status=$? fi if test "$ac_init_help" = "recursive"; then # If there are subdirs, report their specific --help. for ac_dir in : $ac_subdirs_all; do test "x$ac_dir" = x: && continue test -d "$ac_dir" || { cd "$srcdir" && ac_pwd=`pwd` && srcdir=. && test -d "$ac_dir"; } || continue ac_builddir=. case "$ac_dir" in .) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;; *) ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'` # A ".." for each directory in $ac_dir_suffix. ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'` case $ac_top_builddir_sub in "") ac_top_builddir_sub=. ac_top_build_prefix= ;; *) ac_top_build_prefix=$ac_top_builddir_sub/ ;; esac ;; esac ac_abs_top_builddir=$ac_pwd ac_abs_builddir=$ac_pwd$ac_dir_suffix # for backward compatibility: ac_top_builddir=$ac_top_build_prefix case $srcdir in .) # We are building in place. ac_srcdir=. ac_top_srcdir=$ac_top_builddir_sub ac_abs_top_srcdir=$ac_pwd ;; [\\/]* | ?:[\\/]* ) # Absolute name. ac_srcdir=$srcdir$ac_dir_suffix; ac_top_srcdir=$srcdir ac_abs_top_srcdir=$srcdir ;; *) # Relative name. ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix ac_top_srcdir=$ac_top_build_prefix$srcdir ac_abs_top_srcdir=$ac_pwd/$srcdir ;; esac ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix cd "$ac_dir" || { ac_status=$?; continue; } # Check for guested configure. if test -f "$ac_srcdir/configure.gnu"; then echo && $SHELL "$ac_srcdir/configure.gnu" --help=recursive elif test -f "$ac_srcdir/configure"; then echo && $SHELL "$ac_srcdir/configure" --help=recursive else $as_echo "$as_me: WARNING: no configuration information is in $ac_dir" >&2 fi || ac_status=$? cd "$ac_pwd" || { ac_status=$?; break; } done fi test -n "$ac_init_help" && exit $ac_status if $ac_init_version; then cat <<\_ACEOF sphinx configure 2.0.4 generated by GNU Autoconf 2.65 Copyright (C) 2009 Free Software Foundation, Inc. This configure script is free software; the Free Software Foundation gives unlimited permission to copy, distribute and modify it. _ACEOF exit fi ## ------------------------ ## ## Autoconf initialization. ## ## ------------------------ ## # ac_fn_c_try_compile LINENO # -------------------------- # Try to compile conftest.$ac_ext, and return whether this succeeded. ac_fn_c_try_compile () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack rm -f conftest.$ac_objext if { { ac_try="$ac_compile" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_compile") 2>conftest.err ac_status=$? 
if test -s conftest.err; then grep -v '^ *+' conftest.err >conftest.er1 cat conftest.er1 >&5 mv -f conftest.er1 conftest.err fi $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } && { test -z "$ac_c_werror_flag" || test ! -s conftest.err } && test -s conftest.$ac_objext; then : ac_retval=0 else $as_echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_retval=1 fi eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;} as_fn_set_status $ac_retval } # ac_fn_c_try_compile # ac_fn_cxx_try_compile LINENO # ---------------------------- # Try to compile conftest.$ac_ext, and return whether this succeeded. ac_fn_cxx_try_compile () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack rm -f conftest.$ac_objext if { { ac_try="$ac_compile" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_compile") 2>conftest.err ac_status=$? if test -s conftest.err; then grep -v '^ *+' conftest.err >conftest.er1 cat conftest.er1 >&5 mv -f conftest.er1 conftest.err fi $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } && { test -z "$ac_cxx_werror_flag" || test ! -s conftest.err } && test -s conftest.$ac_objext; then : ac_retval=0 else $as_echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_retval=1 fi eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;} as_fn_set_status $ac_retval } # ac_fn_cxx_try_compile # ac_fn_c_try_cpp LINENO # ---------------------- # Try to preprocess conftest.$ac_ext, and return whether this succeeded. ac_fn_c_try_cpp () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack if { { ac_try="$ac_cpp conftest.$ac_ext" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_cpp conftest.$ac_ext") 2>conftest.err ac_status=$? if test -s conftest.err; then grep -v '^ *+' conftest.err >conftest.er1 cat conftest.er1 >&5 mv -f conftest.er1 conftest.err fi $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } >/dev/null && { test -z "$ac_c_preproc_warn_flag$ac_c_werror_flag" || test ! -s conftest.err }; then : ac_retval=0 else $as_echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_retval=1 fi eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;} as_fn_set_status $ac_retval } # ac_fn_c_try_cpp # ac_fn_c_try_run LINENO # ---------------------- # Try to link conftest.$ac_ext, and return whether this succeeded. Assumes # that executables *can* be run. ac_fn_c_try_run () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack if { { ac_try="$ac_link" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_link") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 test $ac_status = 0; } && { ac_try='./conftest$ac_exeext' { { case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_try") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; }; then : ac_retval=0 else $as_echo "$as_me: program exited with status $ac_status" >&5 $as_echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_retval=$ac_status fi rm -rf conftest.dSYM conftest_ipa8_conftest.oo eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;} as_fn_set_status $ac_retval } # ac_fn_c_try_run # ac_fn_c_check_header_mongrel LINENO HEADER VAR INCLUDES # ------------------------------------------------------- # Tests whether HEADER exists, giving a warning if it cannot be compiled using # the include files in INCLUDES and setting the cache variable VAR # accordingly. ac_fn_c_check_header_mongrel () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack if { as_var=$3; eval "test \"\${$as_var+set}\" = set"; }; then : { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 $as_echo_n "checking for $2... " >&6; } if { as_var=$3; eval "test \"\${$as_var+set}\" = set"; }; then : $as_echo_n "(cached) " >&6 fi eval ac_res=\$$3 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } else # Is the header compilable? { $as_echo "$as_me:${as_lineno-$LINENO}: checking $2 usability" >&5 $as_echo_n "checking $2 usability... " >&6; } cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $4 #include <$2> _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_header_compiler=yes else ac_header_compiler=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_header_compiler" >&5 $as_echo "$ac_header_compiler" >&6; } # Is the header present? { $as_echo "$as_me:${as_lineno-$LINENO}: checking $2 presence" >&5 $as_echo_n "checking $2 presence... " >&6; } cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include <$2> _ACEOF if ac_fn_c_try_cpp "$LINENO"; then : ac_header_preproc=yes else ac_header_preproc=no fi rm -f conftest.err conftest.$ac_ext { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_header_preproc" >&5 $as_echo "$ac_header_preproc" >&6; } # So? What about this header? case $ac_header_compiler:$ac_header_preproc:$ac_c_preproc_warn_flag in #(( yes:no: ) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: accepted by the compiler, rejected by the preprocessor!" >&5 $as_echo "$as_me: WARNING: $2: accepted by the compiler, rejected by the preprocessor!" >&2;} { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: proceeding with the compiler's result" >&5 $as_echo "$as_me: WARNING: $2: proceeding with the compiler's result" >&2;} ;; no:yes:* ) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: present but cannot be compiled" >&5 $as_echo "$as_me: WARNING: $2: present but cannot be compiled" >&2;} { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: check for missing prerequisite headers?" >&5 $as_echo "$as_me: WARNING: $2: check for missing prerequisite headers?" 
>&2;} { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: see the Autoconf documentation" >&5 $as_echo "$as_me: WARNING: $2: see the Autoconf documentation" >&2;} { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: section \"Present But Cannot Be Compiled\"" >&5 $as_echo "$as_me: WARNING: $2: section \"Present But Cannot Be Compiled\"" >&2;} { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: proceeding with the compiler's result" >&5 $as_echo "$as_me: WARNING: $2: proceeding with the compiler's result" >&2;} ( cat <<\_ASBOX ## ----------------------------------------- ## ## Report this to shodan(at)sphinxsearch.com ## ## ----------------------------------------- ## _ASBOX ) | sed "s/^/$as_me: WARNING: /" >&2 ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 $as_echo_n "checking for $2... " >&6; } if { as_var=$3; eval "test \"\${$as_var+set}\" = set"; }; then : $as_echo_n "(cached) " >&6 else eval "$3=\$ac_header_compiler" fi eval ac_res=\$$3 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } fi eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;} } # ac_fn_c_check_header_mongrel # ac_fn_c_check_header_compile LINENO HEADER VAR INCLUDES # ------------------------------------------------------- # Tests whether HEADER exists and can be compiled using the include files in # INCLUDES, setting the cache variable VAR accordingly. ac_fn_c_check_header_compile () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 $as_echo_n "checking for $2... " >&6; } if { as_var=$3; eval "test \"\${$as_var+set}\" = set"; }; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $4 #include <$2> _ACEOF if ac_fn_c_try_compile "$LINENO"; then : eval "$3=yes" else eval "$3=no" fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi eval ac_res=\$$3 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;} } # ac_fn_c_check_header_compile # ac_fn_c_check_type LINENO TYPE VAR INCLUDES # ------------------------------------------- # Tests whether TYPE exists after having included INCLUDES, setting cache # variable VAR accordingly. ac_fn_c_check_type () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 $as_echo_n "checking for $2... " >&6; } if { as_var=$3; eval "test \"\${$as_var+set}\" = set"; }; then : $as_echo_n "(cached) " >&6 else eval "$3=no" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $4 int main () { if (sizeof ($2)) return 0; ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $4 int main () { if (sizeof (($2))) return 0; ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : else eval "$3=yes" fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi eval ac_res=\$$3 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;} } # ac_fn_c_check_type # ac_fn_c_try_link LINENO # ----------------------- # Try to link conftest.$ac_ext, and return whether this succeeded. 
ac_fn_c_try_link () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack rm -f conftest.$ac_objext conftest$ac_exeext if { { ac_try="$ac_link" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_link") 2>conftest.err ac_status=$? if test -s conftest.err; then grep -v '^ *+' conftest.err >conftest.er1 cat conftest.er1 >&5 mv -f conftest.er1 conftest.err fi $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } && { test -z "$ac_c_werror_flag" || test ! -s conftest.err } && test -s conftest$ac_exeext && { test "$cross_compiling" = yes || $as_test_x conftest$ac_exeext }; then : ac_retval=0 else $as_echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_retval=1 fi # Delete the IPA/IPO (Inter Procedural Analysis/Optimization) information # created by the PGI compiler (conftest_ipa8_conftest.oo), as it would # interfere with the next link command; also delete a directory that is # left behind by Apple's compiler. We do this before executing the actions. rm -rf conftest.dSYM conftest_ipa8_conftest.oo eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;} as_fn_set_status $ac_retval } # ac_fn_c_try_link # ac_fn_c_check_func LINENO FUNC VAR # ---------------------------------- # Tests whether FUNC exists, setting the cache variable VAR accordingly ac_fn_c_check_func () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 $as_echo_n "checking for $2... " >&6; } if { as_var=$3; eval "test \"\${$as_var+set}\" = set"; }; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Define $2 to an innocuous variant, in case declares $2. For example, HP-UX 11i declares gettimeofday. */ #define $2 innocuous_$2 /* System header to define __stub macros and hopefully few prototypes, which can conflict with char $2 (); below. Prefer to if __STDC__ is defined, since exists even on freestanding compilers. */ #ifdef __STDC__ # include #else # include #endif #undef $2 /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char $2 (); /* The GNU C library defines this for functions which it implements to always fail with ENOSYS. Some functions are actually named something starting with __ and the normal name is an alias. */ #if defined __stub_$2 || defined __stub___$2 choke me #endif int main () { return $2 (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$3=yes" else eval "$3=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext fi eval ac_res=\$$3 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;} } # ac_fn_c_check_func # ac_fn_cxx_try_run LINENO # ------------------------ # Try to link conftest.$ac_ext, and return whether this succeeded. Assumes # that executables *can* be run. 
ac_fn_cxx_try_run () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack if { { ac_try="$ac_link" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_link") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } && { ac_try='./conftest$ac_exeext' { { case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_try") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; }; then : ac_retval=0 else $as_echo "$as_me: program exited with status $ac_status" >&5 $as_echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_retval=$ac_status fi rm -rf conftest.dSYM conftest_ipa8_conftest.oo eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;} as_fn_set_status $ac_retval } # ac_fn_cxx_try_run cat >config.log <<_ACEOF This file contains any messages produced by compilers while running configure, to aid debugging if configure makes a mistake. It was created by sphinx $as_me 2.0.4, which was generated by GNU Autoconf 2.65. Invocation command line was $ $0 $@ _ACEOF exec 5>>config.log { cat <<_ASUNAME ## --------- ## ## Platform. ## ## --------- ## hostname = `(hostname || uname -n) 2>/dev/null | sed 1q` uname -m = `(uname -m) 2>/dev/null || echo unknown` uname -r = `(uname -r) 2>/dev/null || echo unknown` uname -s = `(uname -s) 2>/dev/null || echo unknown` uname -v = `(uname -v) 2>/dev/null || echo unknown` /usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null || echo unknown` /bin/uname -X = `(/bin/uname -X) 2>/dev/null || echo unknown` /bin/arch = `(/bin/arch) 2>/dev/null || echo unknown` /usr/bin/arch -k = `(/usr/bin/arch -k) 2>/dev/null || echo unknown` /usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null || echo unknown` /usr/bin/hostinfo = `(/usr/bin/hostinfo) 2>/dev/null || echo unknown` /bin/machine = `(/bin/machine) 2>/dev/null || echo unknown` /usr/bin/oslevel = `(/usr/bin/oslevel) 2>/dev/null || echo unknown` /bin/universe = `(/bin/universe) 2>/dev/null || echo unknown` _ASUNAME as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. $as_echo "PATH: $as_dir" done IFS=$as_save_IFS } >&5 cat >&5 <<_ACEOF ## ----------- ## ## Core tests. ## ## ----------- ## _ACEOF # Keep a trace of the command line. # Strip out --no-create and --no-recursion so they do not pile up. # Strip out --silent because we don't want to record it for future runs. # Also quote any args containing shell meta-characters. # Make two passes to allow for proper duplicate-argument suppression. ac_configure_args= ac_configure_args0= ac_configure_args1= ac_must_keep_next=false for ac_pass in 1 2 do for ac_arg do case $ac_arg in -no-create | --no-c* | -n | -no-recursion | --no-r*) continue ;; -q | -quiet | --quiet | --quie | --qui | --qu | --q \ | -silent | --silent | --silen | --sile | --sil) continue ;; *\'*) ac_arg=`$as_echo "$ac_arg" | sed "s/'/'\\\\\\\\''/g"` ;; esac case $ac_pass in 1) as_fn_append ac_configure_args0 " '$ac_arg'" ;; 2) as_fn_append ac_configure_args1 " '$ac_arg'" if test $ac_must_keep_next = true; then ac_must_keep_next=false # Got value, back to normal. 
else case $ac_arg in *=* | --config-cache | -C | -disable-* | --disable-* \ | -enable-* | --enable-* | -gas | --g* | -nfp | --nf* \ | -q | -quiet | --q* | -silent | --sil* | -v | -verb* \ | -with-* | --with-* | -without-* | --without-* | --x) case "$ac_configure_args0 " in "$ac_configure_args1"*" '$ac_arg' "* ) continue ;; esac ;; -* ) ac_must_keep_next=true ;; esac fi as_fn_append ac_configure_args " '$ac_arg'" ;; esac done done { ac_configure_args0=; unset ac_configure_args0;} { ac_configure_args1=; unset ac_configure_args1;} # When interrupted or exit'd, cleanup temporary files, and complete # config.log. We remove comments because anyway the quotes in there # would cause problems or look ugly. # WARNING: Use '\'' to represent an apostrophe within the trap. # WARNING: Do not start the trap code with a newline, due to a FreeBSD 4.0 bug. trap 'exit_status=$? # Save into config.log some information that might help in debugging. { echo cat <<\_ASBOX ## ---------------- ## ## Cache variables. ## ## ---------------- ## _ASBOX echo # The following way of writing the cache mishandles newlines in values, ( for ac_var in `(set) 2>&1 | sed -n '\''s/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'\''`; do eval ac_val=\$$ac_var case $ac_val in #( *${as_nl}*) case $ac_var in #( *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5 $as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;; esac case $ac_var in #( _ | IFS | as_nl) ;; #( BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #( *) { eval $ac_var=; unset $ac_var;} ;; esac ;; esac done (set) 2>&1 | case $as_nl`(ac_space='\'' '\''; set) 2>&1` in #( *${as_nl}ac_space=\ *) sed -n \ "s/'\''/'\''\\\\'\'''\''/g; s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\''\\2'\''/p" ;; #( *) sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p" ;; esac | sort ) echo cat <<\_ASBOX ## ----------------- ## ## Output variables. ## ## ----------------- ## _ASBOX echo for ac_var in $ac_subst_vars do eval ac_val=\$$ac_var case $ac_val in *\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;; esac $as_echo "$ac_var='\''$ac_val'\''" done | sort echo if test -n "$ac_subst_files"; then cat <<\_ASBOX ## ------------------- ## ## File substitutions. ## ## ------------------- ## _ASBOX echo for ac_var in $ac_subst_files do eval ac_val=\$$ac_var case $ac_val in *\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;; esac $as_echo "$ac_var='\''$ac_val'\''" done | sort echo fi if test -s confdefs.h; then cat <<\_ASBOX ## ----------- ## ## confdefs.h. ## ## ----------- ## _ASBOX echo cat confdefs.h echo fi test "$ac_signal" != 0 && $as_echo "$as_me: caught signal $ac_signal" $as_echo "$as_me: exit $exit_status" } >&5 rm -f core *.core core.conftest.* && rm -f -r conftest* confdefs* conf$$* $ac_clean_files && exit $exit_status ' 0 for ac_signal in 1 2 13 15; do trap 'ac_signal='$ac_signal'; as_fn_exit 1' $ac_signal done ac_signal=0 # confdefs.h avoids OS command line length limits that DEFS can exceed. rm -f -r conftest* confdefs.h $as_echo "/* confdefs.h */" > confdefs.h # Predefined preprocessor variables. 
cat >>confdefs.h <<_ACEOF #define PACKAGE_NAME "$PACKAGE_NAME" _ACEOF cat >>confdefs.h <<_ACEOF #define PACKAGE_TARNAME "$PACKAGE_TARNAME" _ACEOF cat >>confdefs.h <<_ACEOF #define PACKAGE_VERSION "$PACKAGE_VERSION" _ACEOF cat >>confdefs.h <<_ACEOF #define PACKAGE_STRING "$PACKAGE_STRING" _ACEOF cat >>confdefs.h <<_ACEOF #define PACKAGE_BUGREPORT "$PACKAGE_BUGREPORT" _ACEOF cat >>confdefs.h <<_ACEOF #define PACKAGE_URL "$PACKAGE_URL" _ACEOF # Let the site file select an alternate cache file if it wants to. # Prefer an explicitly selected file to automatically selected ones. ac_site_file1=NONE ac_site_file2=NONE if test -n "$CONFIG_SITE"; then ac_site_file1=$CONFIG_SITE elif test "x$prefix" != xNONE; then ac_site_file1=$prefix/share/config.site ac_site_file2=$prefix/etc/config.site else ac_site_file1=$ac_default_prefix/share/config.site ac_site_file2=$ac_default_prefix/etc/config.site fi for ac_site_file in "$ac_site_file1" "$ac_site_file2" do test "x$ac_site_file" = xNONE && continue if test /dev/null != "$ac_site_file" && test -r "$ac_site_file"; then { $as_echo "$as_me:${as_lineno-$LINENO}: loading site script $ac_site_file" >&5 $as_echo "$as_me: loading site script $ac_site_file" >&6;} sed 's/^/| /' "$ac_site_file" >&5 . "$ac_site_file" fi done if test -r "$cache_file"; then # Some versions of bash will fail to source /dev/null (special files # actually), so we avoid doing that. DJGPP emulates it as a regular file. if test /dev/null != "$cache_file" && test -f "$cache_file"; then { $as_echo "$as_me:${as_lineno-$LINENO}: loading cache $cache_file" >&5 $as_echo "$as_me: loading cache $cache_file" >&6;} case $cache_file in [\\/]* | ?:[\\/]* ) . "$cache_file";; *) . "./$cache_file";; esac fi else { $as_echo "$as_me:${as_lineno-$LINENO}: creating cache $cache_file" >&5 $as_echo "$as_me: creating cache $cache_file" >&6;} >$cache_file fi # Check that the precious variables saved in the cache have kept the same # value. ac_cache_corrupted=false for ac_var in $ac_precious_vars; do eval ac_old_set=\$ac_cv_env_${ac_var}_set eval ac_new_set=\$ac_env_${ac_var}_set eval ac_old_val=\$ac_cv_env_${ac_var}_value eval ac_new_val=\$ac_env_${ac_var}_value case $ac_old_set,$ac_new_set in set,) { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&5 $as_echo "$as_me: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&2;} ac_cache_corrupted=: ;; ,set) { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was not set in the previous run" >&5 $as_echo "$as_me: error: \`$ac_var' was not set in the previous run" >&2;} ac_cache_corrupted=: ;; ,);; *) if test "x$ac_old_val" != "x$ac_new_val"; then # differences in whitespace do not lead to failure. 
ac_old_val_w=`echo x $ac_old_val` ac_new_val_w=`echo x $ac_new_val` if test "$ac_old_val_w" != "$ac_new_val_w"; then { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' has changed since the previous run:" >&5 $as_echo "$as_me: error: \`$ac_var' has changed since the previous run:" >&2;} ac_cache_corrupted=: else { $as_echo "$as_me:${as_lineno-$LINENO}: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&5 $as_echo "$as_me: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&2;} eval $ac_var=\$ac_old_val fi { $as_echo "$as_me:${as_lineno-$LINENO}: former value: \`$ac_old_val'" >&5 $as_echo "$as_me: former value: \`$ac_old_val'" >&2;} { $as_echo "$as_me:${as_lineno-$LINENO}: current value: \`$ac_new_val'" >&5 $as_echo "$as_me: current value: \`$ac_new_val'" >&2;} fi;; esac # Pass precious variables to config.status. if test "$ac_new_set" = set; then case $ac_new_val in *\'*) ac_arg=$ac_var=`$as_echo "$ac_new_val" | sed "s/'/'\\\\\\\\''/g"` ;; *) ac_arg=$ac_var=$ac_new_val ;; esac case " $ac_configure_args " in *" '$ac_arg' "*) ;; # Avoid dups. Use of quotes ensures accuracy. *) as_fn_append ac_configure_args " '$ac_arg'" ;; esac fi done if $ac_cache_corrupted; then { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} { $as_echo "$as_me:${as_lineno-$LINENO}: error: changes in the environment can compromise the build" >&5 $as_echo "$as_me: error: changes in the environment can compromise the build" >&2;} as_fn_error "run \`make distclean' and/or \`rm $cache_file' and start over" "$LINENO" 5 fi ## -------------------- ## ## Main body of script. ## ## -------------------- ## ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu { $as_echo "$as_me:${as_lineno-$LINENO}: result: " >&5 $as_echo "" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: result: checking build environment" >&5 $as_echo "checking build environment" >&6; } TMP=`echo checking build environment | sed -e sX.X-Xg` { $as_echo "$as_me:${as_lineno-$LINENO}: result: $TMP" >&5 $as_echo "$TMP" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: result: " >&5 $as_echo "" >&6; } ac_aux_dir= for ac_dir in config "$srcdir"/config; do for ac_t in install-sh install.sh shtool; do if test -f "$ac_dir/$ac_t"; then ac_aux_dir=$ac_dir ac_install_sh="$ac_aux_dir/$ac_t -c" break 2 fi done done if test -z "$ac_aux_dir"; then as_fn_error "cannot find install-sh, install.sh, or shtool in config \"$srcdir\"/config" "$LINENO" 5 fi # These three variables are undocumented and unsupported, # and are intended to be withdrawn in a future Autoconf release. # They can cause serious problems if a builder's source tree is in a directory # whose full name contains unusual characters. ac_config_guess="$SHELL $ac_aux_dir/config.guess" # Please don't use this var. ac_config_sub="$SHELL $ac_aux_dir/config.sub" # Please don't use this var. ac_configure="$SHELL $ac_aux_dir/configure" # Please don't use this var. am__api_version='2.0.4' # Find a good install program. We prefer a C program (faster), # so one script is as good as another. 
But avoid the broken or # incompatible versions: # SysV /etc/install, /usr/sbin/install # SunOS /usr/etc/install # IRIX /sbin/install # AIX /bin/install # AmigaOS /C/install, which installs bootblocks on floppy discs # AIX 4 /usr/bin/installbsd, which doesn't work without a -g flag # AFS /usr/afsws/bin/install, which mishandles nonexistent args # SVR4 /usr/ucb/install, which tries to use the nonexistent group "staff" # OS/2's system install, which has a completely different semantic # ./install, which can be erroneously created by make from ./install.sh. # Reject install programs that cannot install multiple files. { $as_echo "$as_me:${as_lineno-$LINENO}: checking for a BSD-compatible install" >&5 $as_echo_n "checking for a BSD-compatible install... " >&6; } if test -z "$INSTALL"; then if test "${ac_cv_path_install+set}" = set; then : $as_echo_n "(cached) " >&6 else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. # Account for people who put trailing slashes in PATH elements. case $as_dir/ in #(( ./ | .// | /[cC]/* | \ /etc/* | /usr/sbin/* | /usr/etc/* | /sbin/* | /usr/afsws/bin/* | \ ?:[\\/]os2[\\/]install[\\/]* | ?:[\\/]OS2[\\/]INSTALL[\\/]* | \ /usr/ucb/* ) ;; *) # OSF1 and SCO ODT 3.0 have their own names for install. # Don't use installbsd from OSF since it installs stuff as root # by default. for ac_prog in ginstall scoinst install; do for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_prog$ac_exec_ext" && $as_test_x "$as_dir/$ac_prog$ac_exec_ext"; }; then if test $ac_prog = install && grep dspmsg "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then # AIX install. It has an incompatible calling convention. : elif test $ac_prog = install && grep pwplus "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then # program-specific install script used by HP pwplus--don't use. : else rm -rf conftest.one conftest.two conftest.dir echo one > conftest.one echo two > conftest.two mkdir conftest.dir if "$as_dir/$ac_prog$ac_exec_ext" -c conftest.one conftest.two "`pwd`/conftest.dir" && test -s conftest.one && test -s conftest.two && test -s conftest.dir/conftest.one && test -s conftest.dir/conftest.two then ac_cv_path_install="$as_dir/$ac_prog$ac_exec_ext -c" break 3 fi fi fi done done ;; esac done IFS=$as_save_IFS rm -rf conftest.one conftest.two conftest.dir fi if test "${ac_cv_path_install+set}" = set; then INSTALL=$ac_cv_path_install else # As a last resort, use the slow shell script. Don't cache a # value for INSTALL within a source directory, because that will # break other packages using the cache if that directory is # removed, or if the value is a relative name. INSTALL=$ac_install_sh fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $INSTALL" >&5 $as_echo "$INSTALL" >&6; } # Use test -z because SunOS4 sh mishandles braces in ${var-val}. # It thinks the first close brace ends the variable substitution. test -z "$INSTALL_PROGRAM" && INSTALL_PROGRAM='${INSTALL}' test -z "$INSTALL_SCRIPT" && INSTALL_SCRIPT='${INSTALL}' test -z "$INSTALL_DATA" && INSTALL_DATA='${INSTALL} -m 644' { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether build environment is sane" >&5 $as_echo_n "checking whether build environment is sane... " >&6; } # Just in case sleep 1 echo timestamp > conftest.file # Reject unsafe characters in $srcdir or the absolute working directory # name. Accept space and tab only in the latter. 
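# An illustrative sketch of the cached PATH-walk idiom used above for `install'
# (and below for strip, awk, gcc and friends): split $PATH on $PATH_SEPARATOR,
# test each candidate, and remember the winner in an ac_cv_*-style variable so
# a later run can skip the search.  sphx_find_prog and ac_cv_path_demo are
# placeholder names, not part of the generated checks.
sphx_find_prog ()
{
  if test "${ac_cv_path_demo+set}" = set; then :; else
    as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
    for as_dir in $PATH; do
      IFS=$as_save_IFS
      test -z "$as_dir" && as_dir=.
      if test -f "$as_dir/$1" && test -x "$as_dir/$1"; then
        ac_cv_path_demo=$as_dir/$1
        break
      fi
    done
    IFS=$as_save_IFS
  fi
}
# Usage: sphx_find_prog ginstall; echo "${ac_cv_path_demo-not found}"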
am_lf=' ' case `pwd` in *[\\\"\#\$\&\'\`$am_lf]*) as_fn_error "unsafe absolute working directory name" "$LINENO" 5;; esac case $srcdir in *[\\\"\#\$\&\'\`$am_lf\ \ ]*) as_fn_error "unsafe srcdir value: \`$srcdir'" "$LINENO" 5;; esac # Do `set' in a subshell so we don't clobber the current shell's # arguments. Must try -L first in case configure is actually a # symlink; some systems play weird games with the mod time of symlinks # (eg FreeBSD returns the mod time of the symlink's containing # directory). if ( set X `ls -Lt "$srcdir/configure" conftest.file 2> /dev/null` if test "$*" = "X"; then # -L didn't work. set X `ls -t "$srcdir/configure" conftest.file` fi rm -f conftest.file if test "$*" != "X $srcdir/configure conftest.file" \ && test "$*" != "X conftest.file $srcdir/configure"; then # If neither matched, then we have a broken ls. This can happen # if, for instance, CONFIG_SHELL is bash and it inherits a # broken ls alias from the environment. This has actually # happened. Such a system could not be considered "sane". as_fn_error "ls -t appears to fail. Make sure there is not a broken alias in your environment" "$LINENO" 5 fi test "$2" = conftest.file ) then # Ok. : else as_fn_error "newly created file is older than distributed files! Check your system clock" "$LINENO" 5 fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } test "$program_prefix" != NONE && program_transform_name="s&^&$program_prefix&;$program_transform_name" # Use a double $ so make ignores it. test "$program_suffix" != NONE && program_transform_name="s&\$&$program_suffix&;$program_transform_name" # Double any \ or $. # By default was `s,x,x', remove it if useless. ac_script='s/[\\$]/&&/g;s/;s,x,x,$//' program_transform_name=`$as_echo "$program_transform_name" | sed "$ac_script"` # expand $ac_aux_dir to an absolute path am_aux_dir=`cd $ac_aux_dir && pwd` if test x"${MISSING+set}" != xset; then case $am_aux_dir in *\ * | *\ *) MISSING="\${SHELL} \"$am_aux_dir/missing\"" ;; *) MISSING="\${SHELL} $am_aux_dir/missing" ;; esac fi # Use eval to expand $SHELL if eval "$MISSING --run true"; then am_missing_run="$MISSING --run " else am_missing_run= { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: \`missing' script is too old or missing" >&5 $as_echo "$as_me: WARNING: \`missing' script is too old or missing" >&2;} fi if test x"${install_sh}" != xset; then case $am_aux_dir in *\ * | *\ *) install_sh="\${SHELL} '$am_aux_dir/install-sh'" ;; *) install_sh="\${SHELL} $am_aux_dir/install-sh" esac fi # Installed binaries are usually stripped using `strip' when the user # run `make install-strip'. However `strip' might not be the right # tool to use in cross-compilation environments, therefore Automake # will honor the `STRIP' environment variable to overrule this program. if test "$cross_compiling" != no; then if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args. set dummy ${ac_tool_prefix}strip; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_STRIP+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$STRIP"; then ac_cv_prog_STRIP="$STRIP" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_STRIP="${ac_tool_prefix}strip" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi STRIP=$ac_cv_prog_STRIP if test -n "$STRIP"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $STRIP" >&5 $as_echo "$STRIP" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_STRIP"; then ac_ct_STRIP=$STRIP # Extract the first word of "strip", so it can be a program name with args. set dummy strip; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_ac_ct_STRIP+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_STRIP"; then ac_cv_prog_ac_ct_STRIP="$ac_ct_STRIP" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_ac_ct_STRIP="strip" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_STRIP=$ac_cv_prog_ac_ct_STRIP if test -n "$ac_ct_STRIP"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_STRIP" >&5 $as_echo "$ac_ct_STRIP" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_STRIP" = x; then STRIP=":" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac STRIP=$ac_ct_STRIP fi else STRIP="$ac_cv_prog_STRIP" fi fi INSTALL_STRIP_PROGRAM="\$(install_sh) -c -s" { $as_echo "$as_me:${as_lineno-$LINENO}: checking for a thread-safe mkdir -p" >&5 $as_echo_n "checking for a thread-safe mkdir -p... " >&6; } if test -z "$MKDIR_P"; then if test "${ac_cv_path_mkdir+set}" = set; then : $as_echo_n "(cached) " >&6 else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH$PATH_SEPARATOR/opt/sfw/bin do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_prog in mkdir gmkdir; do for ac_exec_ext in '' $ac_executable_extensions; do { test -f "$as_dir/$ac_prog$ac_exec_ext" && $as_test_x "$as_dir/$ac_prog$ac_exec_ext"; } || continue case `"$as_dir/$ac_prog$ac_exec_ext" --version 2>&1` in #( 'mkdir (GNU coreutils) '* | \ 'mkdir (coreutils) '* | \ 'mkdir (fileutils) '4.1*) ac_cv_path_mkdir=$as_dir/$ac_prog$ac_exec_ext break 3;; esac done done done IFS=$as_save_IFS fi test -d ./--version && rmdir ./--version if test "${ac_cv_path_mkdir+set}" = set; then MKDIR_P="$ac_cv_path_mkdir -p" else # As a last resort, use the slow shell script. Don't cache a # value for MKDIR_P within a source directory, because that will # break other packages using the cache if that directory is # removed, or if the value is a relative name. 
MKDIR_P="$ac_install_sh -d" fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MKDIR_P" >&5 $as_echo "$MKDIR_P" >&6; } mkdir_p="$MKDIR_P" case $mkdir_p in [\\/$]* | ?:[\\/]*) ;; */*) mkdir_p="\$(top_builddir)/$mkdir_p" ;; esac for ac_prog in gawk mawk nawk awk do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_AWK+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$AWK"; then ac_cv_prog_AWK="$AWK" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_AWK="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi AWK=$ac_cv_prog_AWK if test -n "$AWK"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $AWK" >&5 $as_echo "$AWK" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$AWK" && break done { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ${MAKE-make} sets \$(MAKE)" >&5 $as_echo_n "checking whether ${MAKE-make} sets \$(MAKE)... " >&6; } set x ${MAKE-make} ac_make=`$as_echo "$2" | sed 's/+/p/g; s/[^a-zA-Z0-9_]/_/g'` if { as_var=ac_cv_prog_make_${ac_make}_set; eval "test \"\${$as_var+set}\" = set"; }; then : $as_echo_n "(cached) " >&6 else cat >conftest.make <<\_ACEOF SHELL = /bin/sh all: @echo '@@@%%%=$(MAKE)=@@@%%%' _ACEOF # GNU make sometimes prints "make[1]: Entering...", which would confuse us. case `${MAKE-make} -f conftest.make 2>/dev/null` in *@@@%%%=?*=@@@%%%*) eval ac_cv_prog_make_${ac_make}_set=yes;; *) eval ac_cv_prog_make_${ac_make}_set=no;; esac rm -f conftest.make fi if eval test \$ac_cv_prog_make_${ac_make}_set = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } SET_MAKE= else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } SET_MAKE="MAKE=${MAKE-make}" fi rm -rf .tst 2>/dev/null mkdir .tst 2>/dev/null if test -d .tst; then am__leading_dot=. else am__leading_dot=_ fi rmdir .tst 2>/dev/null if test "`cd $srcdir && pwd`" != "`pwd`"; then # Use -I$(srcdir) only when $(srcdir) != ., so that make's output # is not polluted with repeated "-I." am__isrc=' -I$(srcdir)' # test to see if srcdir already configured if test -f $srcdir/config.status; then as_fn_error "source directory already configured; run \"make distclean\" there first" "$LINENO" 5 fi fi # test whether we have cygpath if test -z "$CYGPATH_W"; then if (cygpath --version) >/dev/null 2>/dev/null; then CYGPATH_W='cygpath -w' else CYGPATH_W=echo fi fi # Define the identity of the package. PACKAGE='sphinx' VERSION='2.0.4' cat >>confdefs.h <<_ACEOF #define PACKAGE "$PACKAGE" _ACEOF cat >>confdefs.h <<_ACEOF #define VERSION "$VERSION" _ACEOF # Some tools Automake needs. ACLOCAL=${ACLOCAL-"${am_missing_run}aclocal-${am__api_version}"} AUTOCONF=${AUTOCONF-"${am_missing_run}autoconf"} AUTOMAKE=${AUTOMAKE-"${am_missing_run}automake-${am__api_version}"} AUTOHEADER=${AUTOHEADER-"${am_missing_run}autoheader"} MAKEINFO=${MAKEINFO-"${am_missing_run}makeinfo"} # We need awk for the "check" target. The system "awk" is bad on # some platforms. 
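# A minimal sketch of the hidden-directory probe above: if the file system will
# not accept a directory literally named `.tst', Automake-generated rules fall
# back to `_' as the prefix for their private directories (.deps vs _deps).
# sphx_leading_dot and leading_dot are placeholder names for illustration only.
sphx_leading_dot ()
{
  rm -rf .tst 2>/dev/null
  if mkdir .tst 2>/dev/null; then
    leading_dot=.
  else
    leading_dot=_
  fi
  rmdir .tst 2>/dev/null
}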
# Always define AMTAR for backward compatibility. AMTAR=${AMTAR-"${am_missing_run}tar"} am__tar='${AMTAR} chof - "$$tardir"'; am__untar='${AMTAR} xf -' { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to enable maintainer-specific portions of Makefiles" >&5 $as_echo_n "checking whether to enable maintainer-specific portions of Makefiles... " >&6; } # Check whether --enable-maintainer-mode was given. if test "${enable_maintainer_mode+set}" = set; then : enableval=$enable_maintainer_mode; USE_MAINTAINER_MODE=$enableval else USE_MAINTAINER_MODE=no fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $USE_MAINTAINER_MODE" >&5 $as_echo "$USE_MAINTAINER_MODE" >&6; } if test $USE_MAINTAINER_MODE = yes; then MAINTAINER_MODE_TRUE= MAINTAINER_MODE_FALSE='#' else MAINTAINER_MODE_TRUE='#' MAINTAINER_MODE_FALSE= fi MAINT=$MAINTAINER_MODE_TRUE ac_config_headers="$ac_config_headers config/config.h" # hack to locate expat/iconv in /usr/local on BSD systems CPPFLAGS="$CPPFLAGS -I/usr/local/include" LIBS="$LIBS -L/usr/local/lib" { $as_echo "$as_me:${as_lineno-$LINENO}: result: " >&5 $as_echo "" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: result: checking for compiler programs" >&5 $as_echo "checking for compiler programs" >&6; } TMP=`echo checking for compiler programs | sed -e sX.X-Xg` { $as_echo "$as_me:${as_lineno-$LINENO}: result: $TMP" >&5 $as_echo "$TMP" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: result: " >&5 $as_echo "" >&6; } # Check whether --with-debug was given. if test "${with_debug+set}" = set; then : withval=$with_debug; ac_cv_use_debug=$withval else ac_cv_use_debug=no fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to compile debug version" >&5 $as_echo_n "checking whether to compile debug version... " >&6; } if test x$ac_cv_use_debug != xno; then SPHINX_CFLAGS="-Wall -g -D_FILE_OFFSET_BITS=64" SPHINX_INJECT_FLAGS="-D_FILE_OFFSET_BITS=64" { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } else SPHINX_CFLAGS="-Wall -g -D_FILE_OFFSET_BITS=64 -O3 -DNDEBUG" SPHINX_INJECT_FLAGS="-D_FILE_OFFSET_BITS=64 -DNDEBUG" { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test x$ac_env_CFLAGS_set != xset; then CFLAGS=$SPHINX_CFLAGS else CFLAGS="$CFLAGS $SPHINX_INJECT_FLAGS" fi if test x$ac_env_CXXFLAGS_set != xset; then CXXFLAGS=$SPHINX_CFLAGS else CXXFLAGS="$CXXFLAGS $SPHINX_INJECT_FLAGS" fi ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}gcc", so it can be a program name with args. set dummy ${ac_tool_prefix}gcc; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_CC+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$CC"; then ac_cv_prog_CC="$CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_CC="${ac_tool_prefix}gcc" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi CC=$ac_cv_prog_CC if test -n "$CC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 $as_echo "$CC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_CC"; then ac_ct_CC=$CC # Extract the first word of "gcc", so it can be a program name with args. set dummy gcc; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_ac_ct_CC+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_CC"; then ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_ac_ct_CC="gcc" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_CC=$ac_cv_prog_ac_ct_CC if test -n "$ac_ct_CC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5 $as_echo "$ac_ct_CC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_CC" = x; then CC="" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac CC=$ac_ct_CC fi else CC="$ac_cv_prog_CC" fi if test -z "$CC"; then if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}cc", so it can be a program name with args. set dummy ${ac_tool_prefix}cc; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_CC+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$CC"; then ac_cv_prog_CC="$CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_CC="${ac_tool_prefix}cc" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi CC=$ac_cv_prog_CC if test -n "$CC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 $as_echo "$CC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi fi if test -z "$CC"; then # Extract the first word of "cc", so it can be a program name with args. set dummy cc; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_CC+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$CC"; then ac_cv_prog_CC="$CC" # Let the user override the test. 
else ac_prog_rejected=no as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then if test "$as_dir/$ac_word$ac_exec_ext" = "/usr/ucb/cc"; then ac_prog_rejected=yes continue fi ac_cv_prog_CC="cc" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS if test $ac_prog_rejected = yes; then # We found a bogon in the path, so make sure we never use it. set dummy $ac_cv_prog_CC shift if test $# != 0; then # We chose a different compiler from the bogus one. # However, it has the same basename, so the bogon will be chosen # first if we set CC to just the basename; use the full file name. shift ac_cv_prog_CC="$as_dir/$ac_word${1+' '}$@" fi fi fi fi CC=$ac_cv_prog_CC if test -n "$CC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 $as_echo "$CC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$CC"; then if test -n "$ac_tool_prefix"; then for ac_prog in cl.exe do # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. set dummy $ac_tool_prefix$ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_CC+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$CC"; then ac_cv_prog_CC="$CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_CC="$ac_tool_prefix$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi CC=$ac_cv_prog_CC if test -n "$CC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 $as_echo "$CC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$CC" && break done fi if test -z "$CC"; then ac_ct_CC=$CC for ac_prog in cl.exe do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_ac_ct_CC+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_CC"; then ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_ac_ct_CC="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_CC=$ac_cv_prog_ac_ct_CC if test -n "$ac_ct_CC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5 $as_echo "$ac_ct_CC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$ac_ct_CC" && break done if test "x$ac_ct_CC" = x; then CC="" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac CC=$ac_ct_CC fi fi fi test -z "$CC" && { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error "no acceptable C compiler found in \$PATH See \`config.log' for more details." "$LINENO" 5; } # Provide some information about the compiler. $as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler version" >&5 set X $ac_compile ac_compiler=$2 for ac_option in --version -v -V -qversion; do { { ac_try="$ac_compiler $ac_option >&5" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_compiler $ac_option >&5") 2>conftest.err ac_status=$? if test -s conftest.err; then sed '10a\ ... rest of stderr output deleted ... 10q' conftest.err >conftest.er1 cat conftest.er1 >&5 fi rm -f conftest.er1 conftest.err $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } done cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF ac_clean_files_save=$ac_clean_files ac_clean_files="$ac_clean_files a.out a.out.dSYM a.exe b.out" # Try to create an executable without -o first, disregard a.out. # It will help us diagnose broken compilers, and finding out an intuition # of exeext. { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the C compiler works" >&5 $as_echo_n "checking whether the C compiler works... " >&6; } ac_link_default=`$as_echo "$ac_link" | sed 's/ -o *conftest[^ ]*//'` # The possible output files: ac_files="a.out conftest.exe conftest a.exe a_out.exe b.out conftest.*" ac_rmfiles= for ac_file in $ac_files do case $ac_file in *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;; * ) ac_rmfiles="$ac_rmfiles $ac_file";; esac done rm -f $ac_rmfiles if { { ac_try="$ac_link_default" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_link_default") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then : # Autoconf-2.13 could set the ac_cv_exeext variable to `no'. # So ignore a value of `no', otherwise this would lead to `EXEEXT = no' # in a Makefile. We should not override ac_cv_exeext if it was cached, # so that the user can short-circuit this test for compilers unknown to # Autoconf. 
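# A hedged sketch of the "C compiler works" probe set up above: compile and
# link an empty main and look only at the exit status; the real check must also
# guess the executable suffix from whatever file the link produced.
# sphx_cc_works is a placeholder name and assumes $CC/$CFLAGS as set earlier.
sphx_cc_works ()
{
  cat > conftest.c <<'EOF'
int main (void) { return 0; }
EOF
  if ${CC-cc} $CFLAGS $CPPFLAGS $LDFLAGS -o conftest conftest.c >/dev/null 2>&1; then
    cc_works=yes
  else
    cc_works=no
  fi
  rm -f conftest.c conftest conftest.exe
}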
for ac_file in $ac_files '' do test -f "$ac_file" || continue case $ac_file in *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;; [ab].out ) # We found the default executable, but exeext='' is most # certainly right. break;; *.* ) if test "${ac_cv_exeext+set}" = set && test "$ac_cv_exeext" != no; then :; else ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'` fi # We set ac_cv_exeext here because the later test for it is not # safe: cross compilers may not add the suffix if given an `-o' # argument, so we may need to know it at that point already. # Even if this section looks crufty: it has the advantage of # actually working. break;; * ) break;; esac done test "$ac_cv_exeext" = no && ac_cv_exeext= else ac_file='' fi if test -z "$ac_file"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } $as_echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} { as_fn_set_status 77 as_fn_error "C compiler cannot create executables See \`config.log' for more details." "$LINENO" 5; }; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler default output file name" >&5 $as_echo_n "checking for C compiler default output file name... " >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_file" >&5 $as_echo "$ac_file" >&6; } ac_exeext=$ac_cv_exeext rm -f -r a.out a.out.dSYM a.exe conftest$ac_cv_exeext b.out ac_clean_files=$ac_clean_files_save { $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of executables" >&5 $as_echo_n "checking for suffix of executables... " >&6; } if { { ac_try="$ac_link" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_link") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then : # If both `conftest.exe' and `conftest' are `present' (well, observable) # catch `conftest.exe'. For instance with Cygwin, `ls conftest' will # work properly (i.e., refer to `conftest.exe'), while it won't with # `rm'. for ac_file in conftest.exe conftest conftest.*; do test -f "$ac_file" || continue case $ac_file in *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;; *.* ) ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'` break;; * ) break;; esac done else { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error "cannot compute suffix of executables: cannot compile and link See \`config.log' for more details." "$LINENO" 5; } fi rm -f conftest conftest$ac_cv_exeext { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_exeext" >&5 $as_echo "$ac_cv_exeext" >&6; } rm -f conftest.$ac_ext EXEEXT=$ac_cv_exeext ac_exeext=$EXEEXT cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include <stdio.h> int main () { FILE *f = fopen ("conftest.out", "w"); return ferror (f) || fclose (f) != 0; ; return 0; } _ACEOF ac_clean_files="$ac_clean_files conftest.out" # Check that the compiler produces executables we can run. If not, either # the compiler is broken, or we cross compile. 
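# The suffix detection above boils down to two expr matches; this small sketch
# isolates them.  sphx_exeext_of and sphx_objext_of are placeholder helpers.
sphx_exeext_of ()
{
  expr "$1" : '[^.]*\(\..*\)'   # conftest.exe -> .exe
}
sphx_objext_of ()
{
  expr "$1" : '.*\.\(.*\)'      # conftest.o -> o, conftest.obj -> obj
}
# Usage: EXEEXT=`sphx_exeext_of conftest.exe`  OBJEXT=`sphx_objext_of conftest.o`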
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are cross compiling" >&5 $as_echo_n "checking whether we are cross compiling... " >&6; } if test "$cross_compiling" != yes; then { { ac_try="$ac_link" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_link") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } if { ac_try='./conftest$ac_cv_exeext' { { case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_try") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; }; then cross_compiling=no else if test "$cross_compiling" = maybe; then cross_compiling=yes else { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error "cannot run C compiled programs. If you meant to cross compile, use \`--host'. See \`config.log' for more details." "$LINENO" 5; } fi fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $cross_compiling" >&5 $as_echo "$cross_compiling" >&6; } rm -f conftest.$ac_ext conftest$ac_cv_exeext conftest.out ac_clean_files=$ac_clean_files_save { $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of object files" >&5 $as_echo_n "checking for suffix of object files... " >&6; } if test "${ac_cv_objext+set}" = set; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF rm -f conftest.o conftest.obj if { { ac_try="$ac_compile" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_compile") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then : for ac_file in conftest.o conftest.obj conftest.*; do test -f "$ac_file" || continue; case $ac_file in *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM ) ;; *) ac_cv_objext=`expr "$ac_file" : '.*\.\(.*\)'` break;; esac done else $as_echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error "cannot compute suffix of object files: cannot compile See \`config.log' for more details." "$LINENO" 5; } fi rm -f conftest.$ac_cv_objext conftest.$ac_ext fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_objext" >&5 $as_echo "$ac_cv_objext" >&6; } OBJEXT=$ac_cv_objext ac_objext=$OBJEXT { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU C compiler" >&5 $as_echo_n "checking whether we are using the GNU C compiler... " >&6; } if test "${ac_cv_c_compiler_gnu+set}" = set; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ int main () { #ifndef __GNUC__ choke me #endif ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_compiler_gnu=yes else ac_compiler_gnu=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ac_cv_c_compiler_gnu=$ac_compiler_gnu fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_compiler_gnu" >&5 $as_echo "$ac_cv_c_compiler_gnu" >&6; } if test $ac_compiler_gnu = yes; then GCC=yes else GCC= fi ac_test_CFLAGS=${CFLAGS+set} ac_save_CFLAGS=$CFLAGS { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC accepts -g" >&5 $as_echo_n "checking whether $CC accepts -g... " >&6; } if test "${ac_cv_prog_cc_g+set}" = set; then : $as_echo_n "(cached) " >&6 else ac_save_c_werror_flag=$ac_c_werror_flag ac_c_werror_flag=yes ac_cv_prog_cc_g=no CFLAGS="-g" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_cv_prog_cc_g=yes else CFLAGS="" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : else ac_c_werror_flag=$ac_save_c_werror_flag CFLAGS="-g" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_cv_prog_cc_g=yes fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ac_c_werror_flag=$ac_save_c_werror_flag fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_g" >&5 $as_echo "$ac_cv_prog_cc_g" >&6; } if test "$ac_test_CFLAGS" = set; then CFLAGS=$ac_save_CFLAGS elif test $ac_cv_prog_cc_g = yes; then if test "$GCC" = yes; then CFLAGS="-g -O2" else CFLAGS="-g" fi else if test "$GCC" = yes; then CFLAGS="-O2" else CFLAGS= fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $CC option to accept ISO C89" >&5 $as_echo_n "checking for $CC option to accept ISO C89... " >&6; } if test "${ac_cv_prog_cc_c89+set}" = set; then : $as_echo_n "(cached) " >&6 else ac_cv_prog_cc_c89=no ac_save_CC=$CC cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include <stdarg.h> #include <stdio.h> #include <sys/types.h> #include <sys/stat.h> /* Most of the following tests are stolen from RCS 5.7's src/conf.sh. */ struct buf { int x; }; FILE * (*rcsopen) (struct buf *, struct stat *, int); static char *e (p, i) char **p; int i; { return p[i]; } static char *f (char * (*g) (char **, int), char **p, ...) { char *s; va_list v; va_start (v,p); s = g (p, va_arg (v,int)); va_end (v); return s; } /* OSF 4.0 Compaq cc is some sort of almost-ANSI by default. It has function prototypes and stuff, but not '\xHH' hex character constants. These don't provoke an error unfortunately, instead are silently treated as 'x'. The following induces an error, until -std is added to get proper ANSI mode. Curiously '\x00'!='x' always comes out true, for an array size at least. It's necessary to write '\x00'==0 to get something that's true only with -std. */ int osf4_cc_array ['\x00' == 0 ? 1 : -1]; /* IBM C 6 for AIX is almost-ANSI by default, but it replaces macro parameters inside strings and character constants. */ #define FOO(x) 'x' int xlc6_cc_array[FOO(a) == 'x' ? 
1 : -1]; int test (int i, double x); struct s1 {int (*f) (int a);}; struct s2 {int (*f) (double a);}; int pairnames (int, char **, FILE *(*)(struct buf *, struct stat *, int), int, int); int argc; char **argv; int main () { return f (e, argv, 0) != argv[0] || f (e, argv, 1) != argv[1]; ; return 0; } _ACEOF for ac_arg in '' -qlanglvl=extc89 -qlanglvl=ansi -std \ -Ae "-Aa -D_HPUX_SOURCE" "-Xc -D__EXTENSIONS__" do CC="$ac_save_CC $ac_arg" if ac_fn_c_try_compile "$LINENO"; then : ac_cv_prog_cc_c89=$ac_arg fi rm -f core conftest.err conftest.$ac_objext test "x$ac_cv_prog_cc_c89" != "xno" && break done rm -f conftest.$ac_ext CC=$ac_save_CC fi # AC_CACHE_VAL case "x$ac_cv_prog_cc_c89" in x) { $as_echo "$as_me:${as_lineno-$LINENO}: result: none needed" >&5 $as_echo "none needed" >&6; } ;; xno) { $as_echo "$as_me:${as_lineno-$LINENO}: result: unsupported" >&5 $as_echo "unsupported" >&6; } ;; *) CC="$CC $ac_cv_prog_cc_c89" { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_c89" >&5 $as_echo "$ac_cv_prog_cc_c89" >&6; } ;; esac if test "x$ac_cv_prog_cc_c89" != xno; then : fi ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu DEPDIR="${am__leading_dot}deps" ac_config_commands="$ac_config_commands depfiles" am_make=${MAKE-make} cat > confinc << 'END' am__doit: @echo this is the am__doit target .PHONY: am__doit END # If we don't find an include directive, just comment out the code. { $as_echo "$as_me:${as_lineno-$LINENO}: checking for style of include used by $am_make" >&5 $as_echo_n "checking for style of include used by $am_make... " >&6; } am__include="#" am__quote= _am_result=none # First try GNU make style include. echo "include confinc" > confmf # Ignore all kinds of additional output from `make'. case `$am_make -s -f confmf 2> /dev/null` in #( *the\ am__doit\ target*) am__include=include am__quote= _am_result=GNU ;; esac # Now try BSD make style include. if test "$am__include" = "#"; then echo '.include "confinc"' > confmf case `$am_make -s -f confmf 2> /dev/null` in #( *the\ am__doit\ target*) am__include=.include am__quote="\"" _am_result=BSD ;; esac fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $_am_result" >&5 $as_echo "$_am_result" >&6; } rm -f confinc confmf # Check whether --enable-dependency-tracking was given. if test "${enable_dependency_tracking+set}" = set; then : enableval=$enable_dependency_tracking; fi if test "x$enable_dependency_tracking" != xno; then am_depcomp="$ac_aux_dir/depcomp" AMDEPBACKSLASH='\' fi if test "x$enable_dependency_tracking" != xno; then AMDEP_TRUE= AMDEP_FALSE='#' else AMDEP_TRUE='#' AMDEP_FALSE= fi depcc="$CC" am_compiler_list= { $as_echo "$as_me:${as_lineno-$LINENO}: checking dependency style of $depcc" >&5 $as_echo_n "checking dependency style of $depcc... " >&6; } if test "${am_cv_CC_dependencies_compiler_type+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then # We make a subdir and do the tests there. Otherwise we can end up # making bogus files that we don't know about and never remove. For # instance it was reported that on HP-UX the gcc test will end up # making a dummy file named `D' -- because `-MD' means `put the output # in D'. mkdir conftest.dir # Copy depcomp to subdir because otherwise we won't find it if we're # using a relative directory. 
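# A sketch of the include-style probe above, under the same assumptions: GNU
# make understands `include confinc', BSD make wants `.include "confinc"', and
# whichever form makes the am__doit target fire is what the generated
# Makefiles will use.  sphx_probe_include is a placeholder name.
sphx_probe_include ()
{
  printf 'am__doit:\n\t@echo this is the am__doit target\n.PHONY: am__doit\n' > confinc
  echo 'include confinc' > confmf
  case `${MAKE-make} -s -f confmf 2>/dev/null` in
    *the\ am__doit\ target*) am__include=include ;;
    *) am__include='#' ;;
  esac
  rm -f confinc confmf
}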
cp "$am_depcomp" conftest.dir cd conftest.dir # We will build objects and dependencies in a subdirectory because # it helps to detect inapplicable dependency modes. For instance # both Tru64's cc and ICC support -MD to output dependencies as a # side effect of compilation, but ICC will put the dependencies in # the current directory while Tru64 will put them in the object # directory. mkdir sub am_cv_CC_dependencies_compiler_type=none if test "$am_compiler_list" = ""; then am_compiler_list=`sed -n 's/^#*\([a-zA-Z0-9]*\))$/\1/p' < ./depcomp` fi am__universal=false case " $depcc " in #( *\ -arch\ *\ -arch\ *) am__universal=true ;; esac for depmode in $am_compiler_list; do # Setup a source with many dependencies, because some compilers # like to wrap large dependency lists on column 80 (with \), and # we should not choose a depcomp mode which is confused by this. # # We need to recreate these files for each test, as the compiler may # overwrite some of them when testing with obscure command lines. # This happens at least with the AIX C compiler. : > sub/conftest.c for i in 1 2 3 4 5 6; do echo '#include "conftst'$i'.h"' >> sub/conftest.c # Using `: > sub/conftst$i.h' creates only sub/conftst1.h with # Solaris 8's {/usr,}/bin/sh. touch sub/conftst$i.h done echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf # We check with `-c' and `-o' for the sake of the "dashmstdout" # mode. It turns out that the SunPro C++ compiler does not properly # handle `-M -o', and we need to detect this. Also, some Intel # versions had trouble with output in subdirs am__obj=sub/conftest.${OBJEXT-o} am__minus_obj="-o $am__obj" case $depmode in gcc) # This depmode causes a compiler race in universal mode. test "$am__universal" = false || continue ;; nosideeffect) # after this tag, mechanisms are not by side-effect, so they'll # only be used when explicitly requested if test "x$enable_dependency_tracking" = xyes; then continue else break fi ;; msvisualcpp | msvcmsys) # This compiler won't grok `-c -o', but also, the minuso test has # not run yet. These depmodes are late enough in the game, and # so weak that their functioning should not be impacted. am__obj=conftest.${OBJEXT-o} am__minus_obj= ;; none) break ;; esac if depmode=$depmode \ source=sub/conftest.c object=$am__obj \ depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \ $SHELL ./depcomp $depcc -c $am__minus_obj sub/conftest.c \ >/dev/null 2>conftest.err && grep sub/conftst1.h sub/conftest.Po > /dev/null 2>&1 && grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 && grep $am__obj sub/conftest.Po > /dev/null 2>&1 && ${MAKE-make} -s -f confmf > /dev/null 2>&1; then # icc doesn't choke on unknown options, it will just issue warnings # or remarks (even with -Werror). So we grep stderr for any message # that says an option was ignored or not supported. # When given -MP, icc 7.0 and 7.1 complain thusly: # icc: Command line warning: ignoring option '-M'; no argument required # The diagnosis changed in icc 8.0: # icc: Command line remark: option '-MP' not supported if (grep 'ignoring option' conftest.err || grep 'not supported' conftest.err) >/dev/null 2>&1; then :; else am_cv_CC_dependencies_compiler_type=$depmode break fi fi done cd .. 
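# How a candidate depmode is exercised, in isolation: the mode and the
# source/object/depfile names travel through the environment, ./depcomp re-runs
# the compiler, and the probe counts as working only if the generated
# sub/conftest.Po really lists the included headers.  sphx_try_depmode is a
# placeholder wrapper around the same commands used above.
sphx_try_depmode ()
{
  depmode=$1 source=sub/conftest.c object=sub/conftest.o \
  depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \
    $SHELL ./depcomp ${CC-cc} -c -o sub/conftest.o sub/conftest.c \
      >/dev/null 2>conftest.err &&
    grep sub/conftst6.h sub/conftest.Po >/dev/null 2>&1
}
# Usage: sphx_try_depmode gcc3 && echo "gcc3 dependency tracking works"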
rm -rf conftest.dir else am_cv_CC_dependencies_compiler_type=none fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_CC_dependencies_compiler_type" >&5 $as_echo "$am_cv_CC_dependencies_compiler_type" >&6; } CCDEPMODE=depmode=$am_cv_CC_dependencies_compiler_type if test "x$enable_dependency_tracking" != xno \ && test "$am_cv_CC_dependencies_compiler_type" = gcc3; then am__fastdepCC_TRUE= am__fastdepCC_FALSE='#' else am__fastdepCC_TRUE='#' am__fastdepCC_FALSE= fi ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu if test -z "$CXX"; then if test -n "$CCC"; then CXX=$CCC else if test -n "$ac_tool_prefix"; then for ac_prog in g++ c++ gpp aCC CC cxx cc++ cl.exe FCC KCC RCC xlC_r xlC do # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. set dummy $ac_tool_prefix$ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_CXX+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$CXX"; then ac_cv_prog_CXX="$CXX" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_CXX="$ac_tool_prefix$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi CXX=$ac_cv_prog_CXX if test -n "$CXX"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CXX" >&5 $as_echo "$CXX" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$CXX" && break done fi if test -z "$CXX"; then ac_ct_CXX=$CXX for ac_prog in g++ c++ gpp aCC CC cxx cc++ cl.exe FCC KCC RCC xlC_r xlC do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_ac_ct_CXX+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_CXX"; then ac_cv_prog_ac_ct_CXX="$ac_ct_CXX" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_ac_ct_CXX="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_CXX=$ac_cv_prog_ac_ct_CXX if test -n "$ac_ct_CXX"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CXX" >&5 $as_echo "$ac_ct_CXX" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$ac_ct_CXX" && break done if test "x$ac_ct_CXX" = x; then CXX="g++" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac CXX=$ac_ct_CXX fi fi fi fi # Provide some information about the compiler. $as_echo "$as_me:${as_lineno-$LINENO}: checking for C++ compiler version" >&5 set X $ac_compile ac_compiler=$2 for ac_option in --version -v -V -qversion; do { { ac_try="$ac_compiler $ac_option >&5" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_compiler $ac_option >&5") 2>conftest.err ac_status=$? if test -s conftest.err; then sed '10a\ ... rest of stderr output deleted ... 10q' conftest.err >conftest.er1 cat conftest.er1 >&5 fi rm -f conftest.er1 conftest.err $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } done { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU C++ compiler" >&5 $as_echo_n "checking whether we are using the GNU C++ compiler... " >&6; } if test "${ac_cv_cxx_compiler_gnu+set}" = set; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { #ifndef __GNUC__ choke me #endif ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_compiler_gnu=yes else ac_compiler_gnu=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ac_cv_cxx_compiler_gnu=$ac_compiler_gnu fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_cxx_compiler_gnu" >&5 $as_echo "$ac_cv_cxx_compiler_gnu" >&6; } if test $ac_compiler_gnu = yes; then GXX=yes else GXX= fi ac_test_CXXFLAGS=${CXXFLAGS+set} ac_save_CXXFLAGS=$CXXFLAGS { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CXX accepts -g" >&5 $as_echo_n "checking whether $CXX accepts -g... " >&6; } if test "${ac_cv_prog_cxx_g+set}" = set; then : $as_echo_n "(cached) " >&6 else ac_save_cxx_werror_flag=$ac_cxx_werror_flag ac_cxx_werror_flag=yes ac_cv_prog_cxx_g=no CXXFLAGS="-g" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_cv_prog_cxx_g=yes else CXXFLAGS="" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : else ac_cxx_werror_flag=$ac_save_cxx_werror_flag CXXFLAGS="-g" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_cv_prog_cxx_g=yes fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ac_cxx_werror_flag=$ac_save_cxx_werror_flag fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cxx_g" >&5 $as_echo "$ac_cv_prog_cxx_g" >&6; } if test "$ac_test_CXXFLAGS" = set; then CXXFLAGS=$ac_save_CXXFLAGS elif test $ac_cv_prog_cxx_g = yes; then if test "$GXX" = yes; then CXXFLAGS="-g -O2" else CXXFLAGS="-g" fi else if test "$GXX" = yes; then CXXFLAGS="-O2" else CXXFLAGS= fi fi ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu depcc="$CXX" am_compiler_list= { $as_echo "$as_me:${as_lineno-$LINENO}: checking dependency style of $depcc" >&5 $as_echo_n "checking dependency style of $depcc... " >&6; } if test "${am_cv_CXX_dependencies_compiler_type+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then # We make a subdir and do the tests there. Otherwise we can end up # making bogus files that we don't know about and never remove. For # instance it was reported that on HP-UX the gcc test will end up # making a dummy file named `D' -- because `-MD' means `put the output # in D'. mkdir conftest.dir # Copy depcomp to subdir because otherwise we won't find it if we're # using a relative directory. cp "$am_depcomp" conftest.dir cd conftest.dir # We will build objects and dependencies in a subdirectory because # it helps to detect inapplicable dependency modes. For instance # both Tru64's cc and ICC support -MD to output dependencies as a # side effect of compilation, but ICC will put the dependencies in # the current directory while Tru64 will put them in the object # directory. mkdir sub am_cv_CXX_dependencies_compiler_type=none if test "$am_compiler_list" = ""; then am_compiler_list=`sed -n 's/^#*\([a-zA-Z0-9]*\))$/\1/p' < ./depcomp` fi am__universal=false case " $depcc " in #( *\ -arch\ *\ -arch\ *) am__universal=true ;; esac for depmode in $am_compiler_list; do # Setup a source with many dependencies, because some compilers # like to wrap large dependency lists on column 80 (with \), and # we should not choose a depcomp mode which is confused by this. # # We need to recreate these files for each test, as the compiler may # overwrite some of them when testing with obscure command lines. # This happens at least with the AIX C compiler. : > sub/conftest.c for i in 1 2 3 4 5 6; do echo '#include "conftst'$i'.h"' >> sub/conftest.c # Using `: > sub/conftst$i.h' creates only sub/conftst1.h with # Solaris 8's {/usr,}/bin/sh. touch sub/conftst$i.h done echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf # We check with `-c' and `-o' for the sake of the "dashmstdout" # mode. It turns out that the SunPro C++ compiler does not properly # handle `-M -o', and we need to detect this. Also, some Intel # versions had trouble with output in subdirs am__obj=sub/conftest.${OBJEXT-o} am__minus_obj="-o $am__obj" case $depmode in gcc) # This depmode causes a compiler race in universal mode. 
test "$am__universal" = false || continue ;; nosideeffect) # after this tag, mechanisms are not by side-effect, so they'll # only be used when explicitly requested if test "x$enable_dependency_tracking" = xyes; then continue else break fi ;; msvisualcpp | msvcmsys) # This compiler won't grok `-c -o', but also, the minuso test has # not run yet. These depmodes are late enough in the game, and # so weak that their functioning should not be impacted. am__obj=conftest.${OBJEXT-o} am__minus_obj= ;; none) break ;; esac if depmode=$depmode \ source=sub/conftest.c object=$am__obj \ depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \ $SHELL ./depcomp $depcc -c $am__minus_obj sub/conftest.c \ >/dev/null 2>conftest.err && grep sub/conftst1.h sub/conftest.Po > /dev/null 2>&1 && grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 && grep $am__obj sub/conftest.Po > /dev/null 2>&1 && ${MAKE-make} -s -f confmf > /dev/null 2>&1; then # icc doesn't choke on unknown options, it will just issue warnings # or remarks (even with -Werror). So we grep stderr for any message # that says an option was ignored or not supported. # When given -MP, icc 7.0 and 7.1 complain thusly: # icc: Command line warning: ignoring option '-M'; no argument required # The diagnosis changed in icc 8.0: # icc: Command line remark: option '-MP' not supported if (grep 'ignoring option' conftest.err || grep 'not supported' conftest.err) >/dev/null 2>&1; then :; else am_cv_CXX_dependencies_compiler_type=$depmode break fi fi done cd .. rm -rf conftest.dir else am_cv_CXX_dependencies_compiler_type=none fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_CXX_dependencies_compiler_type" >&5 $as_echo "$am_cv_CXX_dependencies_compiler_type" >&6; } CXXDEPMODE=depmode=$am_cv_CXX_dependencies_compiler_type if test "x$enable_dependency_tracking" != xno \ && test "$am_cv_CXX_dependencies_compiler_type" = gcc3; then am__fastdepCXX_TRUE= am__fastdepCXX_FALSE='#' else am__fastdepCXX_TRUE='#' am__fastdepCXX_FALSE= fi if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}ranlib", so it can be a program name with args. set dummy ${ac_tool_prefix}ranlib; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_RANLIB+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$RANLIB"; then ac_cv_prog_RANLIB="$RANLIB" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_RANLIB="${ac_tool_prefix}ranlib" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi RANLIB=$ac_cv_prog_RANLIB if test -n "$RANLIB"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $RANLIB" >&5 $as_echo "$RANLIB" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_RANLIB"; then ac_ct_RANLIB=$RANLIB # Extract the first word of "ranlib", so it can be a program name with args. set dummy ranlib; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... 
" >&6; } if test "${ac_cv_prog_ac_ct_RANLIB+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_RANLIB"; then ac_cv_prog_ac_ct_RANLIB="$ac_ct_RANLIB" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_ac_ct_RANLIB="ranlib" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_RANLIB=$ac_cv_prog_ac_ct_RANLIB if test -n "$ac_ct_RANLIB"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_RANLIB" >&5 $as_echo "$ac_ct_RANLIB" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_RANLIB" = x; then RANLIB=":" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac RANLIB=$ac_ct_RANLIB fi else RANLIB="$ac_cv_prog_RANLIB" fi cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #ifdef __GNUC__ #if __GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ > 3) void main() {} #else syntax error #endif #endif _ACEOF if ac_fn_c_try_compile "$LINENO"; then : else as_fn_error "Gcc version error. Minspec is 3.4" "$LINENO" 5 fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext cat >>confdefs.h <<_ACEOF #define COMPILER "$CC `$CC -dumpversion`" _ACEOF cat >>confdefs.h <<_ACEOF #define OS_UNAME "`uname -a`" _ACEOF { $as_echo "$as_me:${as_lineno-$LINENO}: result: " >&5 $as_echo "" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: result: checking for header files" >&5 $as_echo "checking for header files" >&6; } TMP=`echo checking for header files | sed -e sX.X-Xg` { $as_echo "$as_me:${as_lineno-$LINENO}: result: $TMP" >&5 $as_echo "$TMP" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: result: " >&5 $as_echo "" >&6; } # Checks for header files. ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to run the C preprocessor" >&5 $as_echo_n "checking how to run the C preprocessor... " >&6; } # On Suns, sometimes $CPP names a directory. if test -n "$CPP" && test -d "$CPP"; then CPP= fi if test -z "$CPP"; then if test "${ac_cv_prog_CPP+set}" = set; then : $as_echo_n "(cached) " >&6 else # Double quotes because CPP needs to be expanded for CPP in "$CC -E" "$CC -E -traditional-cpp" "/lib/cpp" do ac_preproc_ok=false for ac_c_preproc_warn_flag in '' yes do # Use a header file that comes with gcc, so configuring glibc # with a fresh cross-compiler works. # Prefer to if __STDC__ is defined, since # exists even on freestanding compilers. # On the NeXT, cc -E runs the code through the compiler's parser, # not just through cpp. "Syntax error" is here to catch this case. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #ifdef __STDC__ # include #else # include #endif Syntax error _ACEOF if ac_fn_c_try_cpp "$LINENO"; then : else # Broken: fails on valid input. continue fi rm -f conftest.err conftest.$ac_ext # OK, works on sane cases. 
Now check whether nonexistent headers # can be detected and how. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include _ACEOF if ac_fn_c_try_cpp "$LINENO"; then : # Broken: success on invalid input. continue else # Passes both tests. ac_preproc_ok=: break fi rm -f conftest.err conftest.$ac_ext done # Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. rm -f conftest.err conftest.$ac_ext if $ac_preproc_ok; then : break fi done ac_cv_prog_CPP=$CPP fi CPP=$ac_cv_prog_CPP else ac_cv_prog_CPP=$CPP fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CPP" >&5 $as_echo "$CPP" >&6; } ac_preproc_ok=false for ac_c_preproc_warn_flag in '' yes do # Use a header file that comes with gcc, so configuring glibc # with a fresh cross-compiler works. # Prefer to if __STDC__ is defined, since # exists even on freestanding compilers. # On the NeXT, cc -E runs the code through the compiler's parser, # not just through cpp. "Syntax error" is here to catch this case. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #ifdef __STDC__ # include #else # include #endif Syntax error _ACEOF if ac_fn_c_try_cpp "$LINENO"; then : else # Broken: fails on valid input. continue fi rm -f conftest.err conftest.$ac_ext # OK, works on sane cases. Now check whether nonexistent headers # can be detected and how. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include _ACEOF if ac_fn_c_try_cpp "$LINENO"; then : # Broken: success on invalid input. continue else # Passes both tests. ac_preproc_ok=: break fi rm -f conftest.err conftest.$ac_ext done # Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. rm -f conftest.err conftest.$ac_ext if $ac_preproc_ok; then : else { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error "C preprocessor \"$CPP\" fails sanity check See \`config.log' for more details." "$LINENO" 5; } fi ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu { $as_echo "$as_me:${as_lineno-$LINENO}: checking for grep that handles long lines and -e" >&5 $as_echo_n "checking for grep that handles long lines and -e... " >&6; } if test "${ac_cv_path_GREP+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -z "$GREP"; then ac_path_GREP_found=false # Loop through the user's path and test for each of PROGNAME-LIST as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_prog in grep ggrep; do for ac_exec_ext in '' $ac_executable_extensions; do ac_path_GREP="$as_dir/$ac_prog$ac_exec_ext" { test -f "$ac_path_GREP" && $as_test_x "$ac_path_GREP"; } || continue # Check for GNU ac_path_GREP and select it if it is found. 
# Check for GNU $ac_path_GREP case `"$ac_path_GREP" --version 2>&1` in *GNU*) ac_cv_path_GREP="$ac_path_GREP" ac_path_GREP_found=:;; *) ac_count=0 $as_echo_n 0123456789 >"conftest.in" while : do cat "conftest.in" "conftest.in" >"conftest.tmp" mv "conftest.tmp" "conftest.in" cp "conftest.in" "conftest.nl" $as_echo 'GREP' >> "conftest.nl" "$ac_path_GREP" -e 'GREP$' -e '-(cannot match)-' < "conftest.nl" >"conftest.out" 2>/dev/null || break diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break as_fn_arith $ac_count + 1 && ac_count=$as_val if test $ac_count -gt ${ac_path_GREP_max-0}; then # Best one so far, save it but keep looking for a better one ac_cv_path_GREP="$ac_path_GREP" ac_path_GREP_max=$ac_count fi # 10*(2^10) chars as input seems more than enough test $ac_count -gt 10 && break done rm -f conftest.in conftest.tmp conftest.nl conftest.out;; esac $ac_path_GREP_found && break 3 done done done IFS=$as_save_IFS if test -z "$ac_cv_path_GREP"; then as_fn_error "no acceptable grep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5 fi else ac_cv_path_GREP=$GREP fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_GREP" >&5 $as_echo "$ac_cv_path_GREP" >&6; } GREP="$ac_cv_path_GREP" { $as_echo "$as_me:${as_lineno-$LINENO}: checking for egrep" >&5 $as_echo_n "checking for egrep... " >&6; } if test "${ac_cv_path_EGREP+set}" = set; then : $as_echo_n "(cached) " >&6 else if echo a | $GREP -E '(a|b)' >/dev/null 2>&1 then ac_cv_path_EGREP="$GREP -E" else if test -z "$EGREP"; then ac_path_EGREP_found=false # Loop through the user's path and test for each of PROGNAME-LIST as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_prog in egrep; do for ac_exec_ext in '' $ac_executable_extensions; do ac_path_EGREP="$as_dir/$ac_prog$ac_exec_ext" { test -f "$ac_path_EGREP" && $as_test_x "$ac_path_EGREP"; } || continue # Check for GNU ac_path_EGREP and select it if it is found. # Check for GNU $ac_path_EGREP case `"$ac_path_EGREP" --version 2>&1` in *GNU*) ac_cv_path_EGREP="$ac_path_EGREP" ac_path_EGREP_found=:;; *) ac_count=0 $as_echo_n 0123456789 >"conftest.in" while : do cat "conftest.in" "conftest.in" >"conftest.tmp" mv "conftest.tmp" "conftest.in" cp "conftest.in" "conftest.nl" $as_echo 'EGREP' >> "conftest.nl" "$ac_path_EGREP" 'EGREP$' < "conftest.nl" >"conftest.out" 2>/dev/null || break diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break as_fn_arith $ac_count + 1 && ac_count=$as_val if test $ac_count -gt ${ac_path_EGREP_max-0}; then # Best one so far, save it but keep looking for a better one ac_cv_path_EGREP="$ac_path_EGREP" ac_path_EGREP_max=$ac_count fi # 10*(2^10) chars as input seems more than enough test $ac_count -gt 10 && break done rm -f conftest.in conftest.tmp conftest.nl conftest.out;; esac $ac_path_EGREP_found && break 3 done done done IFS=$as_save_IFS if test -z "$ac_cv_path_EGREP"; then as_fn_error "no acceptable egrep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5 fi else ac_cv_path_EGREP=$EGREP fi fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_EGREP" >&5 $as_echo "$ac_cv_path_EGREP" >&6; } EGREP="$ac_cv_path_EGREP" { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ANSI C header files" >&5 $as_echo_n "checking for ANSI C header files... " >&6; } if test "${ac_cv_header_stdc+set}" = set; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ #include #include #include #include int main () { ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_cv_header_stdc=yes else ac_cv_header_stdc=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext if test $ac_cv_header_stdc = yes; then # SunOS 4.x string.h does not declare mem*, contrary to ANSI. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include _ACEOF if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | $EGREP "memchr" >/dev/null 2>&1; then : else ac_cv_header_stdc=no fi rm -f conftest* fi if test $ac_cv_header_stdc = yes; then # ISC 2.0.2 stdlib.h does not declare free, contrary to ANSI. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include _ACEOF if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | $EGREP "free" >/dev/null 2>&1; then : else ac_cv_header_stdc=no fi rm -f conftest* fi if test $ac_cv_header_stdc = yes; then # /bin/cc in Irix-4.0.5 gets non-ANSI ctype macros unless using -ansi. if test "$cross_compiling" = yes; then : : else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #include #if ((' ' & 0x0FF) == 0x020) # define ISLOWER(c) ('a' <= (c) && (c) <= 'z') # define TOUPPER(c) (ISLOWER(c) ? 'A' + ((c) - 'a') : (c)) #else # define ISLOWER(c) \ (('a' <= (c) && (c) <= 'i') \ || ('j' <= (c) && (c) <= 'r') \ || ('s' <= (c) && (c) <= 'z')) # define TOUPPER(c) (ISLOWER(c) ? ((c) | 0x40) : (c)) #endif #define XOR(e, f) (((e) && !(f)) || (!(e) && (f))) int main () { int i; for (i = 0; i < 256; i++) if (XOR (islower (i), ISLOWER (i)) || toupper (i) != TOUPPER (i)) return 2; return 0; } _ACEOF if ac_fn_c_try_run "$LINENO"; then : else ac_cv_header_stdc=no fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_header_stdc" >&5 $as_echo "$ac_cv_header_stdc" >&6; } if test $ac_cv_header_stdc = yes; then $as_echo "#define STDC_HEADERS 1" >>confdefs.h fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for sys/wait.h that is POSIX.1 compatible" >&5 $as_echo_n "checking for sys/wait.h that is POSIX.1 compatible... " >&6; } if test "${ac_cv_header_sys_wait_h+set}" = set; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #include #ifndef WEXITSTATUS # define WEXITSTATUS(stat_val) ((unsigned int) (stat_val) >> 8) #endif #ifndef WIFEXITED # define WIFEXITED(stat_val) (((stat_val) & 255) == 0) #endif int main () { int s; wait (&s); s = WIFEXITED (s) ? WEXITSTATUS (s) : 1; ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_cv_header_sys_wait_h=yes else ac_cv_header_sys_wait_h=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_header_sys_wait_h" >&5 $as_echo "$ac_cv_header_sys_wait_h" >&6; } if test $ac_cv_header_sys_wait_h = yes; then $as_echo "#define HAVE_SYS_WAIT_H 1" >>confdefs.h fi # On IRIX 5.3, sys/types and inttypes.h are conflicting. 
for ac_header in sys/types.h sys/stat.h stdlib.h string.h memory.h strings.h \ inttypes.h stdint.h unistd.h do : as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh` ac_fn_c_check_header_compile "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default " eval as_val=\$$as_ac_Header if test "x$as_val" = x""yes; then : cat >>confdefs.h <<_ACEOF #define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1 _ACEOF fi done for ac_header in fcntl.h limits.h netdb.h netinet/in.h stdlib.h string.h sys/file.h sys/socket.h sys/time.h unistd.h pthread.h execinfo.h do : as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh` ac_fn_c_check_header_mongrel "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default" eval as_val=\$$as_ac_Header if test "x$as_val" = x""yes; then : cat >>confdefs.h <<_ACEOF #define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1 _ACEOF fi done ac_fn_c_check_header_mongrel "$LINENO" "expat.h" "ac_cv_header_expat_h" "$ac_includes_default" if test "x$ac_cv_header_expat_h" = x""yes; then : have_expat_h=yes else have_expat_h=no fi ac_fn_c_check_header_mongrel "$LINENO" "iconv.h" "ac_cv_header_iconv_h" "$ac_includes_default" if test "x$ac_cv_header_iconv_h" = x""yes; then : have_iconv_h=yes else have_iconv_h=no fi ac_fn_c_check_header_mongrel "$LINENO" "zlib.h" "ac_cv_header_zlib_h" "$ac_includes_default" if test "x$ac_cv_header_zlib_h" = x""yes; then : have_zlib_h=yes else have_zlib_h=no fi ac_fn_c_check_header_mongrel "$LINENO" "sql.h" "ac_cv_header_sql_h" "$ac_includes_default" if test "x$ac_cv_header_sql_h" = x""yes; then : have_sql_h=yes else have_sql_h=no fi ac_fn_c_check_header_mongrel "$LINENO" "syslog.h" "ac_cv_header_syslog_h" "$ac_includes_default" if test "x$ac_cv_header_syslog_h" = x""yes; then : have_syslog_h=yes else have_syslog_h=no fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: " >&5 $as_echo "" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: result: checking for types" >&5 $as_echo "checking for types" >&6; } TMP=`echo checking for types | sed -e sX.X-Xg` { $as_echo "$as_me:${as_lineno-$LINENO}: result: $TMP" >&5 $as_echo "$TMP" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: result: " >&5 $as_echo "" >&6; } # Checks for typedefs, structures, and compiler characteristics. { $as_echo "$as_me:${as_lineno-$LINENO}: checking for stdbool.h that conforms to C99" >&5 $as_echo_n "checking for stdbool.h that conforms to C99... " >&6; } if test "${ac_cv_header_stdbool_h+set}" = set; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #ifndef bool "error: bool is not defined" #endif #ifndef false "error: false is not defined" #endif #if false "error: false is not 0" #endif #ifndef true "error: true is not defined" #endif #if true != 1 "error: true is not 1" #endif #ifndef __bool_true_false_are_defined "error: __bool_true_false_are_defined is not defined" #endif struct s { _Bool s: 1; _Bool t; } s; char a[true == 1 ? 1 : -1]; char b[false == 0 ? 1 : -1]; char c[__bool_true_false_are_defined == 1 ? 1 : -1]; char d[(bool) 0.5 == true ? 1 : -1]; bool e = &s; char f[(_Bool) 0.0 == false ? 1 : -1]; char g[true]; char h[sizeof (_Bool)]; char i[sizeof s.t]; enum { j = false, k = true, l = false * true, m = true * 256 }; /* The following fails for HP aC++/ANSI C B3910B A.05.55 [Dec 04 2003]. */ _Bool n[m]; char o[sizeof n == m * sizeof n[0] ? 1 : -1]; char p[-1 - (_Bool) 0 < 0 && -1 - (bool) 0 < 0 ? 
1 : -1]; # if defined __xlc__ || defined __GNUC__ /* Catch a bug in IBM AIX xlc compiler version 6.0.0.0 reported by James Lemley on 2005-10-05; see http://lists.gnu.org/archive/html/bug-coreutils/2005-10/msg00086.html This test is not quite right, since xlc is allowed to reject this program, as the initializer for xlcbug is not one of the forms that C requires support for. However, doing the test right would require a runtime test, and that would make cross-compilation harder. Let us hope that IBM fixes the xlc bug, and also adds support for this kind of constant expression. In the meantime, this test will reject xlc, which is OK, since our stdbool.h substitute should suffice. We also test this with GCC, where it should work, to detect more quickly whether someone messes up the test in the future. */ char digs[] = "0123456789"; int xlcbug = 1 / (&(digs + 5)[-2 + (bool) 1] == &digs[4] ? 1 : -1); # endif /* Catch a bug in an HP-UX C compiler. See http://gcc.gnu.org/ml/gcc-patches/2003-12/msg02303.html http://lists.gnu.org/archive/html/bug-coreutils/2005-11/msg00161.html */ _Bool q = true; _Bool *pq = &q; int main () { *pq |= q; *pq |= ! q; /* Refer to every declared value, to avoid compiler optimizations. */ return (!a + !b + !c + !d + !e + !f + !g + !h + !i + !!j + !k + !!l + !m + !n + !o + !p + !q + !pq); ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_cv_header_stdbool_h=yes else ac_cv_header_stdbool_h=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_header_stdbool_h" >&5 $as_echo "$ac_cv_header_stdbool_h" >&6; } ac_fn_c_check_type "$LINENO" "_Bool" "ac_cv_type__Bool" "$ac_includes_default" if test "x$ac_cv_type__Bool" = x""yes; then : cat >>confdefs.h <<_ACEOF #define HAVE__BOOL 1 _ACEOF fi if test $ac_cv_header_stdbool_h = yes; then $as_echo "#define HAVE_STDBOOL_H 1" >>confdefs.h fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for an ANSI C-conforming const" >&5 $as_echo_n "checking for an ANSI C-conforming const... " >&6; } if test "${ac_cv_c_const+set}" = set; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { /* FIXME: Include the comments suggested by Paul. */ #ifndef __cplusplus /* Ultrix mips cc rejects this. */ typedef int charset[2]; const charset cs; /* SunOS 4.1.1 cc rejects this. */ char const *const *pcpcc; char **ppc; /* NEC SVR4.0.2 mips cc rejects this. */ struct point {int x, y;}; static struct point const zero = {0,0}; /* AIX XL C 1.02.0.0 rejects this. It does not let you subtract one const X* pointer from another in an arm of an if-expression whose if-part is not a constant expression */ const char *g = "string"; pcpcc = &g + (g ? g-g : 0); /* HPUX 7.0 cc rejects these. */ ++pcpcc; ppc = (char**) pcpcc; pcpcc = (char const *const *) ppc; { /* SCO 3.2v4 cc rejects this. */ char *t; char const *s = 0 ? (char *) 0 : (char const *) 0; *t++ = 0; if (s) return 0; } { /* Someone thinks the Sun supposedly-ANSI compiler will reject this. */ int x[] = {25, 17}; const int *foo = &x[0]; ++foo; } { /* Sun SC1.0 ANSI compiler rejects this -- but not the above. */ typedef const int *iptr; iptr p = 0; ++p; } { /* AIX XL C 1.02.0.0 rejects this saying "k.c", line 2.27: 1506-025 (S) Operand must be a modifiable lvalue. 
*/ struct s { int j; const int *ap[3]; }; struct s *b; b->j = 5; } { /* ULTRIX-32 V3.1 (Rev 9) vcc rejects this */ const int foo = 10; if (!foo) return 0; } return !cs[0] && !zero.x; #endif ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_cv_c_const=yes else ac_cv_c_const=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_const" >&5 $as_echo "$ac_cv_c_const" >&6; } if test $ac_cv_c_const = no; then $as_echo "#define const /**/" >>confdefs.h fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for inline" >&5 $as_echo_n "checking for inline... " >&6; } if test "${ac_cv_c_inline+set}" = set; then : $as_echo_n "(cached) " >&6 else ac_cv_c_inline=no for ac_kw in inline __inline__ __inline; do cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #ifndef __cplusplus typedef int foo_t; static $ac_kw foo_t static_foo () {return 0; } $ac_kw foo_t foo () {return 0; } #endif _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_cv_c_inline=$ac_kw fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext test "$ac_cv_c_inline" != no && break done fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_inline" >&5 $as_echo "$ac_cv_c_inline" >&6; } case $ac_cv_c_inline in inline | yes) ;; *) case $ac_cv_c_inline in no) ac_val=;; *) ac_val=$ac_cv_c_inline;; esac cat >>confdefs.h <<_ACEOF #ifndef __cplusplus #define inline $ac_val #endif _ACEOF ;; esac ac_fn_c_check_type "$LINENO" "off_t" "ac_cv_type_off_t" "$ac_includes_default" if test "x$ac_cv_type_off_t" = x""yes; then : else cat >>confdefs.h <<_ACEOF #define off_t long int _ACEOF fi ac_fn_c_check_type "$LINENO" "size_t" "ac_cv_type_size_t" "$ac_includes_default" if test "x$ac_cv_type_size_t" = x""yes; then : else cat >>confdefs.h <<_ACEOF #define size_t unsigned int _ACEOF fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether time.h and sys/time.h may both be included" >&5 $as_echo_n "checking whether time.h and sys/time.h may both be included... " >&6; } if test "${ac_cv_header_time+set}" = set; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #include #include int main () { if ((struct tm *) 0) return 0; ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_cv_header_time=yes else ac_cv_header_time=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_header_time" >&5 $as_echo "$ac_cv_header_time" >&6; } if test $ac_cv_header_time = yes; then $as_echo "#define TIME_WITH_SYS_TIME 1" >>confdefs.h fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: " >&5 $as_echo "" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: result: checking for library functions" >&5 $as_echo "checking for library functions" >&6; } TMP=`echo checking for library functions | sed -e sX.X-Xg` { $as_echo "$as_me:${as_lineno-$LINENO}: result: $TMP" >&5 $as_echo "$TMP" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: result: " >&5 $as_echo "" >&6; } # Checks for library functions. 
ac_fn_c_check_type "$LINENO" "pid_t" "ac_cv_type_pid_t" "$ac_includes_default" if test "x$ac_cv_type_pid_t" = x""yes; then : else cat >>confdefs.h <<_ACEOF #define pid_t int _ACEOF fi for ac_header in vfork.h do : ac_fn_c_check_header_mongrel "$LINENO" "vfork.h" "ac_cv_header_vfork_h" "$ac_includes_default" if test "x$ac_cv_header_vfork_h" = x""yes; then : cat >>confdefs.h <<_ACEOF #define HAVE_VFORK_H 1 _ACEOF fi done for ac_func in fork vfork do : as_ac_var=`$as_echo "ac_cv_func_$ac_func" | $as_tr_sh` ac_fn_c_check_func "$LINENO" "$ac_func" "$as_ac_var" eval as_val=\$$as_ac_var if test "x$as_val" = x""yes; then : cat >>confdefs.h <<_ACEOF #define `$as_echo "HAVE_$ac_func" | $as_tr_cpp` 1 _ACEOF fi done if test "x$ac_cv_func_fork" = xyes; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking for working fork" >&5 $as_echo_n "checking for working fork... " >&6; } if test "${ac_cv_func_fork_works+set}" = set; then : $as_echo_n "(cached) " >&6 else if test "$cross_compiling" = yes; then : ac_cv_func_fork_works=cross else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $ac_includes_default int main () { /* By Ruediger Kuhlmann. */ return fork () < 0; ; return 0; } _ACEOF if ac_fn_c_try_run "$LINENO"; then : ac_cv_func_fork_works=yes else ac_cv_func_fork_works=no fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_func_fork_works" >&5 $as_echo "$ac_cv_func_fork_works" >&6; } else ac_cv_func_fork_works=$ac_cv_func_fork fi if test "x$ac_cv_func_fork_works" = xcross; then case $host in *-*-amigaos* | *-*-msdosdjgpp*) # Override, as these systems have only a dummy fork() stub ac_cv_func_fork_works=no ;; *) ac_cv_func_fork_works=yes ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: result $ac_cv_func_fork_works guessed because of cross compilation" >&5 $as_echo "$as_me: WARNING: result $ac_cv_func_fork_works guessed because of cross compilation" >&2;} fi ac_cv_func_vfork_works=$ac_cv_func_vfork if test "x$ac_cv_func_vfork" = xyes; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking for working vfork" >&5 $as_echo_n "checking for working vfork... " >&6; } if test "${ac_cv_func_vfork_works+set}" = set; then : $as_echo_n "(cached) " >&6 else if test "$cross_compiling" = yes; then : ac_cv_func_vfork_works=cross else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Thanks to Paul Eggert for this test. */ $ac_includes_default #include #ifdef HAVE_VFORK_H # include #endif /* On some sparc systems, changes by the child to local and incoming argument registers are propagated back to the parent. The compiler is told about this with #include , but some compilers (e.g. gcc -O) don't grok . Test for this by using a static variable whose address is put into a register that is clobbered by the vfork. */ static void #ifdef __cplusplus sparc_address_test (int arg) # else sparc_address_test (arg) int arg; #endif { static pid_t child; if (!child) { child = vfork (); if (child < 0) { perror ("vfork"); _exit(2); } if (!child) { arg = getpid(); write(-1, "", 0); _exit (arg); } } } int main () { pid_t parent = getpid (); pid_t child; sparc_address_test (0); child = vfork (); if (child == 0) { /* Here is another test for sparc vfork register problems. This test uses lots of local variables, at least as many local variables as main has allocated so far including compiler temporaries. 
4 locals are enough for gcc 1.40.3 on a Solaris 4.1.3 sparc, but we use 8 to be safe. A buggy compiler should reuse the register of parent for one of the local variables, since it will think that parent can't possibly be used any more in this routine. Assigning to the local variable will thus munge parent in the parent process. */ pid_t p = getpid(), p1 = getpid(), p2 = getpid(), p3 = getpid(), p4 = getpid(), p5 = getpid(), p6 = getpid(), p7 = getpid(); /* Convince the compiler that p..p7 are live; otherwise, it might use the same hardware register for all 8 local variables. */ if (p != p1 || p != p2 || p != p3 || p != p4 || p != p5 || p != p6 || p != p7) _exit(1); /* On some systems (e.g. IRIX 3.3), vfork doesn't separate parent from child file descriptors. If the child closes a descriptor before it execs or exits, this munges the parent's descriptor as well. Test for this by closing stdout in the child. */ _exit(close(fileno(stdout)) != 0); } else { int status; struct stat st; while (wait(&status) != child) ; return ( /* Was there some problem with vforking? */ child < 0 /* Did the child fail? (This shouldn't happen.) */ || status /* Did the vfork/compiler bug occur? */ || parent != getpid() /* Did the file descriptor bug occur? */ || fstat(fileno(stdout), &st) != 0 ); } } _ACEOF if ac_fn_c_try_run "$LINENO"; then : ac_cv_func_vfork_works=yes else ac_cv_func_vfork_works=no fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_func_vfork_works" >&5 $as_echo "$ac_cv_func_vfork_works" >&6; } fi; if test "x$ac_cv_func_fork_works" = xcross; then ac_cv_func_vfork_works=$ac_cv_func_vfork { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: result $ac_cv_func_vfork_works guessed because of cross compilation" >&5 $as_echo "$as_me: WARNING: result $ac_cv_func_vfork_works guessed because of cross compilation" >&2;} fi if test "x$ac_cv_func_vfork_works" = xyes; then $as_echo "#define HAVE_WORKING_VFORK 1" >>confdefs.h else $as_echo "#define vfork fork" >>confdefs.h fi if test "x$ac_cv_func_fork_works" = xyes; then $as_echo "#define HAVE_WORKING_FORK 1" >>confdefs.h fi for ac_header in stdlib.h do : ac_fn_c_check_header_mongrel "$LINENO" "stdlib.h" "ac_cv_header_stdlib_h" "$ac_includes_default" if test "x$ac_cv_header_stdlib_h" = x""yes; then : cat >>confdefs.h <<_ACEOF #define HAVE_STDLIB_H 1 _ACEOF fi done { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GNU libc compatible malloc" >&5 $as_echo_n "checking for GNU libc compatible malloc... " >&6; } if test "${ac_cv_func_malloc_0_nonnull+set}" = set; then : $as_echo_n "(cached) " >&6 else if test "$cross_compiling" = yes; then : ac_cv_func_malloc_0_nonnull=no else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #if defined STDC_HEADERS || defined HAVE_STDLIB_H # include #else char *malloc (); #endif int main () { return ! 
malloc (0); ; return 0; } _ACEOF if ac_fn_c_try_run "$LINENO"; then : ac_cv_func_malloc_0_nonnull=yes else ac_cv_func_malloc_0_nonnull=no fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_func_malloc_0_nonnull" >&5 $as_echo "$ac_cv_func_malloc_0_nonnull" >&6; } if test $ac_cv_func_malloc_0_nonnull = yes; then : $as_echo "#define HAVE_MALLOC 1" >>confdefs.h else $as_echo "#define HAVE_MALLOC 0" >>confdefs.h case " $LIBOBJS " in *" malloc.$ac_objext "* ) ;; *) LIBOBJS="$LIBOBJS malloc.$ac_objext" ;; esac $as_echo "#define malloc rpl_malloc" >>confdefs.h fi for ac_header in stdlib.h do : ac_fn_c_check_header_mongrel "$LINENO" "stdlib.h" "ac_cv_header_stdlib_h" "$ac_includes_default" if test "x$ac_cv_header_stdlib_h" = x""yes; then : cat >>confdefs.h <<_ACEOF #define HAVE_STDLIB_H 1 _ACEOF fi done { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GNU libc compatible realloc" >&5 $as_echo_n "checking for GNU libc compatible realloc... " >&6; } if test "${ac_cv_func_realloc_0_nonnull+set}" = set; then : $as_echo_n "(cached) " >&6 else if test "$cross_compiling" = yes; then : ac_cv_func_realloc_0_nonnull=no else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #if defined STDC_HEADERS || defined HAVE_STDLIB_H # include #else char *realloc (); #endif int main () { return ! realloc (0, 0); ; return 0; } _ACEOF if ac_fn_c_try_run "$LINENO"; then : ac_cv_func_realloc_0_nonnull=yes else ac_cv_func_realloc_0_nonnull=no fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_func_realloc_0_nonnull" >&5 $as_echo "$ac_cv_func_realloc_0_nonnull" >&6; } if test $ac_cv_func_realloc_0_nonnull = yes; then : $as_echo "#define HAVE_REALLOC 1" >>confdefs.h else $as_echo "#define HAVE_REALLOC 0" >>confdefs.h case " $LIBOBJS " in *" realloc.$ac_objext "* ) ;; *) LIBOBJS="$LIBOBJS realloc.$ac_objext" ;; esac $as_echo "#define realloc rpl_realloc" >>confdefs.h fi for ac_header in sys/select.h sys/socket.h do : as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh` ac_fn_c_check_header_mongrel "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default" eval as_val=\$$as_ac_Header if test "x$as_val" = x""yes; then : cat >>confdefs.h <<_ACEOF #define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1 _ACEOF fi done { $as_echo "$as_me:${as_lineno-$LINENO}: checking types of arguments for select" >&5 $as_echo_n "checking types of arguments for select... " >&6; } if test "${ac_cv_func_select_args+set}" = set; then : $as_echo_n "(cached) " >&6 else for ac_arg234 in 'fd_set *' 'int *' 'void *'; do for ac_arg1 in 'int' 'size_t' 'unsigned long int' 'unsigned int'; do for ac_arg5 in 'struct timeval *' 'const struct timeval *'; do cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $ac_includes_default #ifdef HAVE_SYS_SELECT_H # include #endif #ifdef HAVE_SYS_SOCKET_H # include #endif int main () { extern int select ($ac_arg1, $ac_arg234, $ac_arg234, $ac_arg234, $ac_arg5); ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_cv_func_select_args="$ac_arg1,$ac_arg234,$ac_arg5"; break 3 fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext done done done # Provide a safe default value. 
: ${ac_cv_func_select_args='int,int *,struct timeval *'} fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_func_select_args" >&5 $as_echo "$ac_cv_func_select_args" >&6; } ac_save_IFS=$IFS; IFS=',' set dummy `echo "$ac_cv_func_select_args" | sed 's/\*/\*/g'` IFS=$ac_save_IFS shift cat >>confdefs.h <<_ACEOF #define SELECT_TYPE_ARG1 $1 _ACEOF cat >>confdefs.h <<_ACEOF #define SELECT_TYPE_ARG234 ($2) _ACEOF cat >>confdefs.h <<_ACEOF #define SELECT_TYPE_ARG5 ($3) _ACEOF rm -f conftest* { $as_echo "$as_me:${as_lineno-$LINENO}: checking return type of signal handlers" >&5 $as_echo_n "checking return type of signal handlers... " >&6; } if test "${ac_cv_type_signal+set}" = set; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #include int main () { return *(signal (0, 0)) (0) == 1; ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_cv_type_signal=int else ac_cv_type_signal=void fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_type_signal" >&5 $as_echo "$ac_cv_type_signal" >&6; } cat >>confdefs.h <<_ACEOF #define RETSIGTYPE $ac_cv_type_signal _ACEOF { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether lstat correctly handles trailing slash" >&5 $as_echo_n "checking whether lstat correctly handles trailing slash... " >&6; } if test "${ac_cv_func_lstat_dereferences_slashed_symlink+set}" = set; then : $as_echo_n "(cached) " >&6 else rm -f conftest.sym conftest.file echo >conftest.file if test "$as_ln_s" = "ln -s" && ln -s conftest.file conftest.sym; then if test "$cross_compiling" = yes; then : ac_cv_func_lstat_dereferences_slashed_symlink=no else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $ac_includes_default int main () { struct stat sbuf; /* Linux will dereference the symlink and fail, as required by POSIX. That is better in the sense that it means we will not have to compile and use the lstat wrapper. */ return lstat ("conftest.sym/", &sbuf) == 0; ; return 0; } _ACEOF if ac_fn_c_try_run "$LINENO"; then : ac_cv_func_lstat_dereferences_slashed_symlink=yes else ac_cv_func_lstat_dereferences_slashed_symlink=no fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi else # If the `ln -s' command failed, then we probably don't even # have an lstat function. ac_cv_func_lstat_dereferences_slashed_symlink=no fi rm -f conftest.sym conftest.file fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_func_lstat_dereferences_slashed_symlink" >&5 $as_echo "$ac_cv_func_lstat_dereferences_slashed_symlink" >&6; } test $ac_cv_func_lstat_dereferences_slashed_symlink = yes && cat >>confdefs.h <<_ACEOF #define LSTAT_FOLLOWS_SLASHED_SYMLINK 1 _ACEOF if test "x$ac_cv_func_lstat_dereferences_slashed_symlink" = xno; then case " $LIBOBJS " in *" lstat.$ac_objext "* ) ;; *) LIBOBJS="$LIBOBJS lstat.$ac_objext" ;; esac fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether stat accepts an empty string" >&5 $as_echo_n "checking whether stat accepts an empty string... " >&6; } if test "${ac_cv_func_stat_empty_string_bug+set}" = set; then : $as_echo_n "(cached) " >&6 else if test "$cross_compiling" = yes; then : ac_cv_func_stat_empty_string_bug=yes else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ $ac_includes_default int main () { struct stat sbuf; return stat ("", &sbuf) == 0; ; return 0; } _ACEOF if ac_fn_c_try_run "$LINENO"; then : ac_cv_func_stat_empty_string_bug=no else ac_cv_func_stat_empty_string_bug=yes fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_func_stat_empty_string_bug" >&5 $as_echo "$ac_cv_func_stat_empty_string_bug" >&6; } if test $ac_cv_func_stat_empty_string_bug = yes; then case " $LIBOBJS " in *" stat.$ac_objext "* ) ;; *) LIBOBJS="$LIBOBJS stat.$ac_objext" ;; esac cat >>confdefs.h <<_ACEOF #define HAVE_STAT_EMPTY_STRING_BUG 1 _ACEOF fi for ac_func in vprintf do : ac_fn_c_check_func "$LINENO" "vprintf" "ac_cv_func_vprintf" if test "x$ac_cv_func_vprintf" = x""yes; then : cat >>confdefs.h <<_ACEOF #define HAVE_VPRINTF 1 _ACEOF ac_fn_c_check_func "$LINENO" "_doprnt" "ac_cv_func__doprnt" if test "x$ac_cv_func__doprnt" = x""yes; then : $as_echo "#define HAVE_DOPRNT 1" >>confdefs.h fi fi done { $as_echo "$as_me:${as_lineno-$LINENO}: checking for library containing setsockopt" >&5 $as_echo_n "checking for library containing setsockopt... " >&6; } if test "${ac_cv_search_setsockopt+set}" = set; then : $as_echo_n "(cached) " >&6 else ac_func_search_save_LIBS=$LIBS cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char setsockopt (); int main () { return setsockopt (); ; return 0; } _ACEOF for ac_lib in '' socket; do if test -z "$ac_lib"; then ac_res="none required" else ac_res=-l$ac_lib LIBS="-l$ac_lib $ac_func_search_save_LIBS" fi if ac_fn_c_try_link "$LINENO"; then : ac_cv_search_setsockopt=$ac_res fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext if test "${ac_cv_search_setsockopt+set}" = set; then : break fi done if test "${ac_cv_search_setsockopt+set}" = set; then : else ac_cv_search_setsockopt=no fi rm conftest.$ac_ext LIBS=$ac_func_search_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_search_setsockopt" >&5 $as_echo "$ac_cv_search_setsockopt" >&6; } ac_res=$ac_cv_search_setsockopt if test "$ac_res" != no; then : test "$ac_res" = "none required" || LIBS="$ac_res $LIBS" fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for library containing gethostbyname" >&5 $as_echo_n "checking for library containing gethostbyname... " >&6; } if test "${ac_cv_search_gethostbyname+set}" = set; then : $as_echo_n "(cached) " >&6 else ac_func_search_save_LIBS=$LIBS cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char gethostbyname (); int main () { return gethostbyname (); ; return 0; } _ACEOF for ac_lib in '' nsl socket resolv; do if test -z "$ac_lib"; then ac_res="none required" else ac_res=-l$ac_lib LIBS="-l$ac_lib $ac_func_search_save_LIBS" fi if ac_fn_c_try_link "$LINENO"; then : ac_cv_search_gethostbyname=$ac_res fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext if test "${ac_cv_search_gethostbyname+set}" = set; then : break fi done if test "${ac_cv_search_gethostbyname+set}" = set; then : else ac_cv_search_gethostbyname=no fi rm conftest.$ac_ext LIBS=$ac_func_search_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_search_gethostbyname" >&5 $as_echo "$ac_cv_search_gethostbyname" >&6; } ac_res=$ac_cv_search_gethostbyname if test "$ac_res" != no; then : test "$ac_res" = "none required" || LIBS="$ac_res $LIBS" fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for library containing XML_Parse" >&5 $as_echo_n "checking for library containing XML_Parse... " >&6; } if test "${ac_cv_search_XML_Parse+set}" = set; then : $as_echo_n "(cached) " >&6 else ac_func_search_save_LIBS=$LIBS cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char XML_Parse (); int main () { return XML_Parse (); ; return 0; } _ACEOF for ac_lib in '' expat; do if test -z "$ac_lib"; then ac_res="none required" else ac_res=-l$ac_lib LIBS="-l$ac_lib $ac_func_search_save_LIBS" fi if ac_fn_c_try_link "$LINENO"; then : ac_cv_search_XML_Parse=$ac_res fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext if test "${ac_cv_search_XML_Parse+set}" = set; then : break fi done if test "${ac_cv_search_XML_Parse+set}" = set; then : else ac_cv_search_XML_Parse=no fi rm conftest.$ac_ext LIBS=$ac_func_search_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_search_XML_Parse" >&5 $as_echo "$ac_cv_search_XML_Parse" >&6; } ac_res=$ac_cv_search_XML_Parse if test "$ac_res" != no; then : test "$ac_res" = "none required" || LIBS="$ac_res $LIBS" have_libexpat=yes else have_libexpat=no fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for library containing iconv" >&5 $as_echo_n "checking for library containing iconv... " >&6; } if test "${ac_cv_search_iconv+set}" = set; then : $as_echo_n "(cached) " >&6 else ac_func_search_save_LIBS=$LIBS cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char iconv (); int main () { return iconv (); ; return 0; } _ACEOF for ac_lib in '' iconv; do if test -z "$ac_lib"; then ac_res="none required" else ac_res=-l$ac_lib LIBS="-l$ac_lib $ac_func_search_save_LIBS" fi if ac_fn_c_try_link "$LINENO"; then : ac_cv_search_iconv=$ac_res fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext if test "${ac_cv_search_iconv+set}" = set; then : break fi done if test "${ac_cv_search_iconv+set}" = set; then : else ac_cv_search_iconv=no fi rm conftest.$ac_ext LIBS=$ac_func_search_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_search_iconv" >&5 $as_echo "$ac_cv_search_iconv" >&6; } ac_res=$ac_cv_search_iconv if test "$ac_res" != no; then : test "$ac_res" = "none required" || LIBS="$ac_res $LIBS" have_libiconv=yes else have_libiconv=no fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for library containing inflate" >&5 $as_echo_n "checking for library containing inflate... " >&6; } if test "${ac_cv_search_inflate+set}" = set; then : $as_echo_n "(cached) " >&6 else ac_func_search_save_LIBS=$LIBS cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char inflate (); int main () { return inflate (); ; return 0; } _ACEOF for ac_lib in '' z; do if test -z "$ac_lib"; then ac_res="none required" else ac_res=-l$ac_lib LIBS="-l$ac_lib $ac_func_search_save_LIBS" fi if ac_fn_c_try_link "$LINENO"; then : ac_cv_search_inflate=$ac_res fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext if test "${ac_cv_search_inflate+set}" = set; then : break fi done if test "${ac_cv_search_inflate+set}" = set; then : else ac_cv_search_inflate=no fi rm conftest.$ac_ext LIBS=$ac_func_search_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_search_inflate" >&5 $as_echo "$ac_cv_search_inflate" >&6; } ac_res=$ac_cv_search_inflate if test "$ac_res" != no; then : test "$ac_res" = "none required" || LIBS="$ac_res $LIBS" have_lz=yes else have_lz=no fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for library containing logf" >&5 $as_echo_n "checking for library containing logf... " >&6; } if test "${ac_cv_search_logf+set}" = set; then : $as_echo_n "(cached) " >&6 else ac_func_search_save_LIBS=$LIBS cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char logf (); int main () { return logf (); ; return 0; } _ACEOF for ac_lib in '' m; do if test -z "$ac_lib"; then ac_res="none required" else ac_res=-l$ac_lib LIBS="-l$ac_lib $ac_func_search_save_LIBS" fi if ac_fn_c_try_link "$LINENO"; then : ac_cv_search_logf=$ac_res fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext if test "${ac_cv_search_logf+set}" = set; then : break fi done if test "${ac_cv_search_logf+set}" = set; then : else ac_cv_search_logf=no fi rm conftest.$ac_ext LIBS=$ac_func_search_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_search_logf" >&5 $as_echo "$ac_cv_search_logf" >&6; } ac_res=$ac_cv_search_logf if test "$ac_res" != no; then : test "$ac_res" = "none required" || LIBS="$ac_res $LIBS" fi for ac_func in dup2 gethostbyname gettimeofday memmove memset select socket strcasecmp strchr strerror strncasecmp strstr strtol logf pread do : as_ac_var=`$as_echo "ac_cv_func_$ac_func" | $as_tr_sh` ac_fn_c_check_func "$LINENO" "$ac_func" "$as_ac_var" eval as_val=\$$as_ac_var if test "x$as_val" = x""yes; then : cat >>confdefs.h <<_ACEOF #define `$as_echo "HAVE_$ac_func" | $as_tr_cpp` 1 _ACEOF fi done for ac_func in backtrace backtrace_symbols do : as_ac_var=`$as_echo "ac_cv_func_$ac_func" | $as_tr_sh` ac_fn_c_check_func "$LINENO" "$ac_func" "$as_ac_var" eval as_val=\$$as_ac_var if test "x$as_val" = x""yes; then : cat >>confdefs.h <<_ACEOF #define `$as_echo "HAVE_$ac_func" | $as_tr_cpp` 1 _ACEOF fi done # most systems require the program be linked with librt library to use # the function clock_gettime my_save_LIBS="$LIBS" LIBS="" { $as_echo "$as_me:${as_lineno-$LINENO}: checking for clock_gettime in -lrt" >&5 $as_echo_n "checking for clock_gettime in -lrt... " >&6; } if test "${ac_cv_lib_rt_clock_gettime+set}" = set; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lrt $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char clock_gettime (); int main () { return clock_gettime (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_rt_clock_gettime=yes else ac_cv_lib_rt_clock_gettime=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_rt_clock_gettime" >&5 $as_echo "$ac_cv_lib_rt_clock_gettime" >&6; } if test "x$ac_cv_lib_rt_clock_gettime" = x""yes; then : cat >>confdefs.h <<_ACEOF #define HAVE_LIBRT 1 _ACEOF LIBS="-lrt $LIBS" fi LIBRT=$LIBS LIBS="$my_save_LIBS" LIBS="$LIBS $LIBRT" for ac_func in clock_gettime do : ac_fn_c_check_func "$LINENO" "clock_gettime" "ac_cv_func_clock_gettime" if test "x$ac_cv_func_clock_gettime" = x""yes; then : cat >>confdefs.h <<_ACEOF #define HAVE_CLOCK_GETTIME 1 _ACEOF fi done { $as_echo "$as_me:${as_lineno-$LINENO}: checking for LOCK_EX in sys/file.h" >&5 $as_echo_n "checking for LOCK_EX in sys/file.h... " >&6; } if test "${ac_cv_define_LOCK_EX+set}" = set; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ #include #ifdef LOCK_EX YES_IS_DEFINED #endif _ACEOF if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | $EGREP "YES_IS_DEFINED" >/dev/null 2>&1; then : ac_cv_define_LOCK_EX=yes else ac_cv_define_LOCK_EX=no fi rm -f conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_define_LOCK_EX" >&5 $as_echo "$ac_cv_define_LOCK_EX" >&6; } if test "$ac_cv_define_LOCK_EX" = "yes"; then $as_echo "#define HAVE_LOCK_EX 1" >>confdefs.h fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for F_SETLKW in fcntl.h" >&5 $as_echo_n "checking for F_SETLKW in fcntl.h... " >&6; } if test "${ac_cv_define_F_SETLKW+set}" = set; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #ifdef F_SETLKW YES_IS_DEFINED #endif _ACEOF if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | $EGREP "YES_IS_DEFINED" >/dev/null 2>&1; then : ac_cv_define_F_SETLKW=yes else ac_cv_define_F_SETLKW=no fi rm -f conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_define_F_SETLKW" >&5 $as_echo "$ac_cv_define_F_SETLKW" >&6; } if test "$ac_cv_define_F_SETLKW" = "yes"; then $as_echo "#define HAVE_F_SETLKW 1" >>confdefs.h fi # check for dlopen # FIXME! technically, only needed in searchd # but as UDF manager is curently in libsphinx, we link everything { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -ldl" >&5 $as_echo_n "checking for dlopen in -ldl... " >&6; } if test "${ac_cv_lib_dl_dlopen+set}" = set; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-ldl $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char dlopen (); int main () { return dlopen (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_dl_dlopen=yes else ac_cv_lib_dl_dlopen=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dl_dlopen" >&5 $as_echo "$ac_cv_lib_dl_dlopen" >&6; } if test "x$ac_cv_lib_dl_dlopen" = x""yes; then : cat >>confdefs.h <<_ACEOF #define HAVE_LIBDL 1 _ACEOF LIBS="-ldl $LIBS" fi for ac_func in dlopen dlerror do : as_ac_var=`$as_echo "ac_cv_func_$ac_func" | $as_tr_sh` ac_fn_c_check_func "$LINENO" "$ac_func" "$as_ac_var" eval as_val=\$$as_ac_var if test "x$as_val" = x""yes; then : cat >>confdefs.h <<_ACEOF #define `$as_echo "HAVE_$ac_func" | $as_tr_cpp` 1 _ACEOF fi done { $as_echo "$as_me:${as_lineno-$LINENO}: result: " >&5 $as_echo "" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: result: configuring Sphinx" >&5 $as_echo "configuring Sphinx" >&6; } TMP=`echo configuring Sphinx | sed -e sX.X-Xg` { $as_echo "$as_me:${as_lineno-$LINENO}: result: $TMP" >&5 $as_echo "$TMP" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: result: " >&5 $as_echo "" >&6; } # check for pthreads pthread_prog=" #include #include void * thread_routine ( void * data ) { return data; } int main () { pthread_t thd; pthread_mutexattr_t mattr; pthread_once_t once_init = PTHREAD_ONCE_INIT; int data = 1; pthread_mutexattr_init ( &mattr ); return pthread_create ( &thd, NULL, thread_routine, &data ); } " # check for needed cflags { $as_echo "$as_me:${as_lineno-$LINENO}: checking for CFLAGS needed for pthreads" >&5 $as_echo_n "checking for CFLAGS needed for pthreads... 
" >&6; } if test "${sphinx_cv_pthreads_cflags+set}" = set; then : $as_echo_n "(cached) " >&6 else save_cflags=$CFLAGS for flag in none -kthread -pthread -pthreads -mt -mthreads -Kthread -threads; do CFLAGS=$save_cflags test "x$flag" != "xnone" && CFLAGS="$CFLAGS $flag" if test x$cross_compiling = xno ; then if test "$cross_compiling" = yes; then : pthreads_try_run=no else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $pthread_prog _ACEOF if ac_fn_c_try_run "$LINENO"; then : pthreads_try_run=yes else pthreads_try_run=no fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi if test x$pthreads_try_run = xyes ; then sphinx_cv_pthreads_cflags="$flag" break fi else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $pthread_prog _ACEOF if ac_fn_c_try_compile "$LINENO"; then : pthreads_try_compile=yes else pthreads_try_compile=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext if test x$pthreads_try_compile = xyes ; then sphinx_cv_pthreads_cflags="$flag" break fi fi done CFLAGS=$save_cflags fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $sphinx_cv_pthreads_cflags" >&5 $as_echo "$sphinx_cv_pthreads_cflags" >&6; } if test -n "$sphinx_cv_pthreads_cflags"; then have_pthreads=yes if test "x$sphinx_cv_pthreads_cflags" != "xnone"; then CPPFLAGS="$CPPFLAGS $sphinx_cv_pthreads_cflags" fi fi # check for needed libs { $as_echo "$as_me:${as_lineno-$LINENO}: checking for LIBS needed for pthreads" >&5 $as_echo_n "checking for LIBS needed for pthreads... " >&6; } if test "${sphinx_cv_pthreads_libs+set}" = set; then : $as_echo_n "(cached) " >&6 else save_libs=$LIBS for lib in -lpthread -lpthreads -lc_r; do LIBS="$save_libs $lib" if test x$cross_compiling = xno ; then if test "$cross_compiling" = yes; then : pthreads_try_run=no else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $pthread_prog _ACEOF if ac_fn_c_try_run "$LINENO"; then : pthreads_try_run=yes else pthreads_try_run=no fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi if test x$pthreads_try_run = xyes ; then sphinx_cv_pthreads_libs=$lib break fi else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $pthread_prog _ACEOF if ac_fn_c_try_link "$LINENO"; then : pthreads_try_link=yes else pthreads_try_link=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext if test x$pthreads_try_link = xyes ; then sphinx_cv_pthreads_libs=$lib break fi fi done LIBS=$save_libs fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $sphinx_cv_pthreads_libs" >&5 $as_echo "$sphinx_cv_pthreads_libs" >&6; } if test -n "$sphinx_cv_pthreads_libs"; then have_pthreads=yes LIBS="$LIBS $sphinx_cv_pthreads_libs" fi # final check { $as_echo "$as_me:${as_lineno-$LINENO}: checking for pthreads" >&5 $as_echo_n "checking for pthreads... 
" >&6; } if test x$have_pthreads = xyes; then if test x$cross_compiling = xno; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: found" >&5 $as_echo "found" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: assumed as found (cross-compiling)" >&5 $as_echo "assumed as found (cross-compiling)" >&6; } fi else as_fn_error "no working pthreads library found" "$LINENO" 5 fi for ac_func in pthread_mutex_timedlock do : ac_fn_c_check_func "$LINENO" "pthread_mutex_timedlock" "ac_cv_func_pthread_mutex_timedlock" if test "x$ac_cv_func_pthread_mutex_timedlock" = x""yes; then : cat >>confdefs.h <<_ACEOF #define HAVE_PTHREAD_MUTEX_TIMEDLOCK 1 _ACEOF fi done # check if we should compile with MySQL support # Check whether --with-mysql was given. if test "${with_mysql+set}" = set; then : withval=$with_mysql; ac_cv_use_mysql=$withval else ac_cv_use_mysql=yes fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to compile with MySQL support" >&5 $as_echo_n "checking whether to compile with MySQL support... " >&6; } if test x$ac_cv_use_mysql != xno; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } mysqlconfig_locations="mysql_config /usr/bin/mysql_config /usr/local/bin/mysql_config /usr/local/mysql/bin/mysql_config /opt/mysql/bin/mysql_config /usr/pkg/bin/mysql_config" user_mysql_includes= user_mysql_libs= # check explicit MySQL root for mysql_config, include, lib if test x$ac_cv_use_mysql != xyes -a x$ac_cv_use_mysql != xno then mysqlroot=`echo $ac_cv_use_mysql | sed -e 's+/$++'` if test -x "$mysqlroot/bin/mysql_config" then # if there's mysql_config, that's the best route mysqlconfig_locations="$mysqlroot/bin/mysql_config" elif test -d "$mysqlroot/include" -a -d "$mysqlroot/lib" then # explicit root; do not check well-known paths mysqlconfig_locations= # includes if test -d "$mysqlroot/include/mysql" then user_mysql_includes="$mysqlroot/include/mysql" else user_mysql_includes="$mysqlroot/include" fi # libs if test -d "$mysqlroot/lib/mysql" then user_mysql_libs="$mysqlroot/lib/mysql" else user_mysql_libs="$mysqlroot/lib" fi else as_fn_error "invalid MySQL root directory '$mysqlroot'; neither bin/mysql_config, nor include/ and lib/ were found there" "$LINENO" 5 fi fi # try running mysql_config { $as_echo "$as_me:${as_lineno-$LINENO}: checking for mysql_config" >&5 $as_echo_n "checking for mysql_config... " >&6; } for mysqlconfig in $mysqlconfig_locations do if test -n "$mysqlconfig" then MYSQL_CFLAGS=`${mysqlconfig} --cflags 2>/dev/null` MYSQL_LIBS=`${mysqlconfig} --libs 2>/dev/null` if test $? 
-eq 0 then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $mysqlconfig" >&5 $as_echo "$mysqlconfig" >&6; } mysqlconfig= break else MYSQL_CFLAGS= MYSQL_LIBS= fi fi done if test -n "$mysqlconfig" then mysqlconfig_used= { $as_echo "$as_me:${as_lineno-$LINENO}: result: not found" >&5 $as_echo "not found" >&6; } else mysqlconfig_used=yes fi # if there's nothing from mysql_config, check well-known include paths # explicit overrides will be applied later if test -z "$MYSQL_CFLAGS" then for CANDIDATE in "$user_mysql_includes" "/usr/local/mysql/include" "/usr/local/mysql/include/mysql" \ "/usr/include/mysql" do if test -n "$CANDIDATE" -a -r "$CANDIDATE/mysql.h" then MYSQL_CFLAGS="-I$CANDIDATE" break fi done fi # if there's nothing from mysql_config, check well-known library paths # explicit overrides will be applied later if test -z "$MYSQL_LIBS" then for CANDIDATE in "$user_mysql_libs" "/usr/lib64/mysql" \ "/usr/local/mysql/lib/mysql" "/usr/local/mysql/lib" \ "/usr/local/lib/mysql" "/usr/lib/mysql" \ "/opt/mysql/lib/mysql" "/usr/pkg/lib/mysql" do if test -n "$CANDIDATE" -a -d "$CANDIDATE" then MYSQL_LIBS="-L$CANDIDATE -lmysqlclient -lz" break fi done fi # apply explicit include path overrides # Check whether --with-mysql-includes was given. if test "${with_mysql_includes+set}" = set; then : withval=$with_mysql_includes; ac_cv_mysql_includes=$withval fi if test -n "$ac_cv_mysql_includes" then MYSQL_CFLAGS="-I$ac_cv_mysql_includes" fi # apply explicit lib path overrides # Check whether --with-mysql-libs was given. if test "${with_mysql_libs+set}" = set; then : withval=$with_mysql_libs; ac_cv_mysql_libs=$withval fi if test -n "$ac_cv_mysql_libs" then # Trim trailing '.libs' if user passed it in --with-mysql-libs option ac_cv_mysql_libs=`echo ${ac_cv_mysql_libs} | sed -e 's/.libs$//' \ -e 's+.libs/$++'` MYSQL_LIBS="-L$ac_cv_mysql_libs -lmysqlclient -lz" fi # if we got options from mysqlconfig try to actually use them if test -n "$mysqlconfig_used" -a -n "$MYSQL_CFLAGS" -a -n "$MYSQL_LIBS" then _CFLAGS=$CFLAGS _LIBS=$LIBS CFLAGS="$CFLAGS $MYSQL_CFLAGS" LIBS="$LIBS $MYSQL_LIBS" ac_fn_c_check_func "$LINENO" "mysql_real_connect" "ac_cv_func_mysql_real_connect" if test "x$ac_cv_func_mysql_real_connect" = x""yes; then : else # if mysql binary was built using a different compiler and we # got options from mysql_config some of them might not work # with compiler we will be using # throw away everything that isn't one of -D -L -I -l and retry MYSQL_CFLAGS=`echo $MYSQL_CFLAGS | sed -e 's/-[^DLIl][^ ]*//g'` MYSQL_LIBS=`echo $MYSQL_LIBS | sed -e 's/-[^DLIl][^ ]*//g'` CFLAGS="$_CFLAGS $MYSQL_CFLAGS" LIBS="$_LIBS $MYSQL_LIBS" unset ac_cv_func_mysql_real_connect ac_fn_c_check_func "$LINENO" "mysql_real_connect" "ac_cv_func_mysql_real_connect" if test "x$ac_cv_func_mysql_real_connect" = x""yes; then : else # ... that didn't help # clear flags, the code below will complain MYSQL_CFLAGS= MYSQL_LIBS= fi fi CFLAGS=$_CFLAGS LIBS=$_LIBS fi # now that we did all we could, perform final checks { $as_echo "$as_me:${as_lineno-$LINENO}: checking MySQL include files" >&5 $as_echo_n "checking MySQL include files... " >&6; } if test -z "$MYSQL_CFLAGS" then as_fn_error "missing include files. ****************************************************************************** ERROR: cannot find MySQL include files. Check that you do have MySQL include files installed. The package name is typically 'mysql-devel'. 
If include files are installed on your system, but you are still getting this message, you should do one of the following: 1) either specify includes location explicitly, using --with-mysql-includes; 2) or specify MySQL installation root location explicitly, using --with-mysql; 3) or make sure that the path to 'mysql_config' program is listed in your PATH environment variable. To disable MySQL support, use --without-mysql option. ****************************************************************************** " "$LINENO" 5 else { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MYSQL_CFLAGS" >&5 $as_echo "$MYSQL_CFLAGS" >&6; } fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking MySQL libraries" >&5 $as_echo_n "checking MySQL libraries... " >&6; } if test -z "$MYSQL_LIBS" then as_fn_error "missing libraries. ****************************************************************************** ERROR: cannot find MySQL libraries. Check that you do have MySQL libraries installed. The package name is typically 'mysql-devel'. If libraries are installed on your system, but you are still getting this message, you should do one of the following: 1) either specify libraries location explicitly, using --with-mysql-libs; 2) or specify MySQL installation root location explicitly, using --with-mysql; 3) or make sure that the path to 'mysql_config' program is listed in your PATH environment variable. To disable MySQL support, use --without-mysql option. ****************************************************************************** " "$LINENO" 5 else { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MYSQL_LIBS" >&5 $as_echo "$MYSQL_LIBS" >&6; } fi $as_echo "#define USE_MYSQL 1" >>confdefs.h else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test x$ac_cv_use_mysql != xno; then USE_MYSQL_TRUE= USE_MYSQL_FALSE='#' else USE_MYSQL_TRUE='#' USE_MYSQL_FALSE= fi # check if we should statically link the mysql library # Check whether --with-static-mysql was given. if test "${with_static_mysql+set}" = set; then : withval=$with_static_mysql; ac_cv_use_static_mysql=$withval else ac_cv_use_static_mysql=no fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to link statically with MySQL support" >&5 $as_echo_n "checking whether to link statically with MySQL support... 
" >&6; } if test x$ac_cv_use_mysql != xno; then if test x$ac_cv_use_static_mysql != xno; then mysqlconfig_locations="mysql_config /usr/bin/mysql_config /usr/local/bin/mysql_config /usr/local/mysql/bin/mysql_config /opt/mysql/bin/mysql_config /usr/pkg/bin/mysql_config" user_mysql_includes= user_mysql_libs= # check explicit MySQL root for mysql_config, include, lib if test x$ac_cv_use_static_mysql != xyes -a x$ac_cv_use_static_mysql != xno then mysqlroot=`echo $ac_cv_use_static_mysql | sed -e 's+/$++'` if test -x "$mysqlroot/bin/mysql_config" then # if there's mysql_config, that's the best route mysqlconfig_locations="$mysqlroot/bin/mysql_config" elif test -d "$mysqlroot/include" -a -d "$mysqlroot/lib" then # explicit root; do not check well-known paths mysqlconfig_locations= # includes if test -d "$mysqlroot/include/mysql" then user_mysql_includes="$mysqlroot/include/mysql" else user_mysql_includes="$mysqlroot/include" fi # libs if test -d "$mysqlroot/lib/mysql" then user_mysql_libs="$mysqlroot/lib/mysql" else user_mysql_libs="$mysqlroot/lib" fi else as_fn_error "invalid MySQL root directory '$mysqlroot'; neither bin/mysql_config, nor include/ and lib/ were found there" "$LINENO" 5 fi fi # try running mysql_config { $as_echo "$as_me:${as_lineno-$LINENO}: checking for mysql_config" >&5 $as_echo_n "checking for mysql_config... " >&6; } for mysqlconfig in $mysqlconfig_locations do if test -n "$mysqlconfig" then MYSQL_CFLAGS=`${mysqlconfig} --cflags 2>/dev/null` MYSQL_LIBS=`${mysqlconfig} --libs 2>/dev/null` if test $? -eq 0 then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $mysqlconfig" >&5 $as_echo "$mysqlconfig" >&6; } mysqlconfig= break else MYSQL_CFLAGS= MYSQL_LIBS= fi fi done if test -n "$mysqlconfig" then mysqlconfig_used= { $as_echo "$as_me:${as_lineno-$LINENO}: result: not found" >&5 $as_echo "not found" >&6; } else mysqlconfig_used=yes fi # if there's nothing from mysql_config, check well-known include paths # explicit overrides will be applied later if test -z "$MYSQL_CFLAGS" then for CANDIDATE in "$user_mysql_includes" "/usr/local/mysql/include" "/usr/local/mysql/include/mysql" \ "/usr/include/mysql" do if test -n "$CANDIDATE" -a -r "$CANDIDATE/mysql.h" then MYSQL_CFLAGS="-I$CANDIDATE" break fi done fi # if there's nothing from mysql_config, check well-known library paths # explicit overrides will be applied later if test -z "$MYSQL_LIBS" then for CANDIDATE in "$user_mysql_libs" "/usr/lib64/mysql" \ "/usr/local/mysql/lib/mysql" "/usr/local/mysql/lib" \ "/usr/local/lib/mysql" "/usr/lib/mysql" \ "/opt/mysql/lib/mysql" "/usr/pkg/lib/mysql" do if test -n "$CANDIDATE" -a -d "$CANDIDATE" then MYSQL_LIBS="-L$CANDIDATE -lmysqlclient -lz" break fi done fi # apply explicit include path overrides # Check whether --with-mysql-includes was given. if test "${with_mysql_includes+set}" = set; then : withval=$with_mysql_includes; ac_cv_mysql_includes=$withval fi if test -n "$ac_cv_mysql_includes" then MYSQL_CFLAGS="-I$ac_cv_mysql_includes" fi # apply explicit lib path overrides # Check whether --with-mysql-libs was given. 
if test "${with_mysql_libs+set}" = set; then : withval=$with_mysql_libs; ac_cv_mysql_libs=$withval fi if test -n "$ac_cv_mysql_libs" then # Trim trailing '.libs' if user passed it in --with-mysql-libs option ac_cv_mysql_libs=`echo ${ac_cv_mysql_libs} | sed -e 's/.libs$//' \ -e 's+.libs/$++'` MYSQL_LIBS="-L$ac_cv_mysql_libs -lmysqlclient -lz" fi # if we got options from mysqlconfig try to actually use them if test -n "$mysqlconfig_used" -a -n "$MYSQL_CFLAGS" -a -n "$MYSQL_LIBS" then _CFLAGS=$CFLAGS _LIBS=$LIBS CFLAGS="$CFLAGS $MYSQL_CFLAGS" LIBS="$LIBS $MYSQL_LIBS" ac_fn_c_check_func "$LINENO" "mysql_real_connect" "ac_cv_func_mysql_real_connect" if test "x$ac_cv_func_mysql_real_connect" = x""yes; then : else # if mysql binary was built using a different compiler and we # got options from mysql_config some of them might not work # with compiler we will be using # throw away everything that isn't one of -D -L -I -l and retry MYSQL_CFLAGS=`echo $MYSQL_CFLAGS | sed -e 's/-[^DLIl][^ ]*//g'` MYSQL_LIBS=`echo $MYSQL_LIBS | sed -e 's/-[^DLIl][^ ]*//g'` CFLAGS="$_CFLAGS $MYSQL_CFLAGS" LIBS="$_LIBS $MYSQL_LIBS" unset ac_cv_func_mysql_real_connect ac_fn_c_check_func "$LINENO" "mysql_real_connect" "ac_cv_func_mysql_real_connect" if test "x$ac_cv_func_mysql_real_connect" = x""yes; then : else # ... that didn't help # clear flags, the code below will complain MYSQL_CFLAGS= MYSQL_LIBS= fi fi CFLAGS=$_CFLAGS LIBS=$_LIBS fi # now that we did all we could, perform final checks { $as_echo "$as_me:${as_lineno-$LINENO}: checking MySQL include files" >&5 $as_echo_n "checking MySQL include files... " >&6; } if test -z "$MYSQL_CFLAGS" then as_fn_error "missing include files. ****************************************************************************** ERROR: cannot find MySQL include files. Check that you do have MySQL include files installed. The package name is typically 'mysql-devel'. If include files are installed on your system, but you are still getting this message, you should do one of the following: 1) either specify includes location explicitly, using --with-mysql-includes; 2) or specify MySQL installation root location explicitly, using --with-mysql; 3) or make sure that the path to 'mysql_config' program is listed in your PATH environment variable. To disable MySQL support, use --without-mysql option. ****************************************************************************** " "$LINENO" 5 else { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MYSQL_CFLAGS" >&5 $as_echo "$MYSQL_CFLAGS" >&6; } fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking MySQL libraries" >&5 $as_echo_n "checking MySQL libraries... " >&6; } if test -z "$MYSQL_LIBS" then as_fn_error "missing libraries. ****************************************************************************** ERROR: cannot find MySQL libraries. Check that you do have MySQL libraries installed. The package name is typically 'mysql-devel'. If libraries are installed on your system, but you are still getting this message, you should do one of the following: 1) either specify libraries location explicitly, using --with-mysql-libs; 2) or specify MySQL installation root location explicitly, using --with-mysql; 3) or make sure that the path to 'mysql_config' program is listed in your PATH environment variable. To disable MySQL support, use --without-mysql option. 
****************************************************************************** " "$LINENO" 5 else { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MYSQL_LIBS" >&5 $as_echo "$MYSQL_LIBS" >&6; } fi MYSQL_LIBS=`echo $MYSQL_LIBS | sed -e 's/\-Bdynamic/\-Bstatic/g'` MYSQL_LIBS="-Wl,-Bstatic $MYSQL_LIBS -Wl,-Bdynamic" { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi # check if we should compile with PostgreSQL support # Check whether --with-pgsql was given. if test "${with_pgsql+set}" = set; then : withval=$with_pgsql; ac_cv_use_pgsql=$withval else ac_cv_use_pgsql=no fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to compile with PostgreSQL support" >&5 $as_echo_n "checking whether to compile with PostgreSQL support... " >&6; } if test x$ac_cv_use_pgsql != xno; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } # Check for custom includes path if test -z "$ac_cv_pgsql_includes" then # Check whether --with-pgsql-includes was given. if test "${with_pgsql_includes+set}" = set; then : withval=$with_pgsql_includes; ac_cv_pgsql_includes=$withval fi fi if test -n "$ac_cv_pgsql_includes" then { $as_echo "$as_me:${as_lineno-$LINENO}: checking PostgreSQL includes" >&5 $as_echo_n "checking PostgreSQL includes... " >&6; } if test "${ac_cv_pgsql_includes+set}" = set; then : $as_echo_n "(cached) " >&6 else ac_cv_pgsql_includes="" fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_pgsql_includes" >&5 $as_echo "$ac_cv_pgsql_includes" >&6; } PGSQL_CFLAGS="-I$ac_cv_pgsql_includes" fi # Check for custom library path if test -z "$ac_cv_pgsql_libs" then # Check whether --with-pgsql-libs was given. if test "${with_pgsql_libs+set}" = set; then : withval=$with_pgsql_libs; ac_cv_pgsql_libs=$withval fi fi if test -n "$ac_cv_pgsql_libs" then { $as_echo "$as_me:${as_lineno-$LINENO}: checking PostgreSQL libraries" >&5 $as_echo_n "checking PostgreSQL libraries... " >&6; } if test "${ac_cv_pgsql_libs+set}" = set; then : $as_echo_n "(cached) " >&6 else ac_cv_pgsql_libs="" fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_pgsql_libs" >&5 $as_echo "$ac_cv_pgsql_libs" >&6; } PGSQL_LIBS="-L$ac_cv_pgsql_libs -lpq" fi # If some path is missing, try to autodetermine with pgsql_config if test -z "$ac_cv_pgsql_includes" -o -z "$ac_cv_pgsql_libs" then if test -z "$pgconfig" then # Extract the first word of "pg_config", so it can be a program name with args. set dummy pg_config; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_path_pgconfig+set}" = set; then : $as_echo_n "(cached) " >&6 else case $pgconfig in [\\/]* | ?:[\\/]*) ac_cv_path_pgconfig="$pgconfig" # Let the user override the test with a path. ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
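# try pg_config with each known executable extension in this $PATH directory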
for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_path_pgconfig="$as_dir/$ac_word$ac_exec_ext" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS ;; esac fi pgconfig=$ac_cv_path_pgconfig if test -n "$pgconfig"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $pgconfig" >&5 $as_echo "$pgconfig" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$pgconfig" then as_fn_error "pg_config executable not found ******************************************************************************** ERROR: cannot find PostgreSQL libraries. If you want to compile with PostgreSQL support, you must either specify file locations explicitly using --with-pgsql-includes and --with-pgsql-libs options, or make sure the path to pg_config is listed in your PATH environment variable. If you want to disable PostgreSQL support, use --without-pgsql option. ******************************************************************************** " "$LINENO" 5 else if test -z "$ac_cv_pgsql_includes" then { $as_echo "$as_me:${as_lineno-$LINENO}: checking PostgreSQL C flags" >&5 $as_echo_n "checking PostgreSQL C flags... " >&6; } PGSQL_CFLAGS="-I`${pgconfig} --includedir`" { $as_echo "$as_me:${as_lineno-$LINENO}: result: $PGSQL_CFLAGS" >&5 $as_echo "$PGSQL_CFLAGS" >&6; } fi if test -z "$ac_cv_pgsql_libs" then { $as_echo "$as_me:${as_lineno-$LINENO}: checking PostgreSQL linker flags" >&5 $as_echo_n "checking PostgreSQL linker flags... " >&6; } PGSQL_LIBS="-L`${pgconfig} --libdir` -lpq" { $as_echo "$as_me:${as_lineno-$LINENO}: result: $PGSQL_LIBS" >&5 $as_echo "$PGSQL_LIBS" >&6; } fi fi fi $as_echo "#define USE_PGSQL 1" >>confdefs.h else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test x$ac_cv_use_pgsql != xno; then USE_PGSQL_TRUE= USE_PGSQL_FALSE='#' else USE_PGSQL_TRUE='#' USE_PGSQL_FALSE= fi # add macports include directory if (echo $MYSQL_LIBS | grep -q -- -L/opt/local/lib); then MYSQL_CFLAGS="$MYSQL_CFLAGS -I/opt/local/include" fi # we can now set preprocessor flags for both C and C++ compilers CPPFLAGS="$CPPFLAGS $MYSQL_CFLAGS $PGSQL_CFLAGS" { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to use 64-bit document/word IDs" >&5 $as_echo_n "checking whether to use 64-bit document/word IDs... " >&6; } sph_enable_id64=no # Check whether --enable-id64 was given. if test "${enable_id64+set}" = set; then : enableval=$enable_id64; sph_enable_id64=$enableval fi if test x$sph_enable_id64 != xno; then $as_echo "#define USE_64BIT 1" >>confdefs.h { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } else $as_echo "#define USE_64BIT 0" >>confdefs.h { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi # Check whether --with-libstemmer was given. if test "${with_libstemmer+set}" = set; then : withval=$with_libstemmer; ac_cv_use_libstemmer=$withval else ac_cv_use_libstemmer=no fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to compile with libstemmer support" >&5 $as_echo_n "checking whether to compile with libstemmer support... 
" >&6; } if test x$ac_cv_use_libstemmer != xno; then if test -d libstemmer_c && test -f libstemmer_c/include/libstemmer.h; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } $as_echo "#define USE_LIBSTEMMER 1" >>confdefs.h else as_fn_error "missing libstemmer sources from libstemmer_c. Please download the C version of libstemmer library from http://snowball.tartarus.org/ and extract its sources over libstemmer_c/ subdirectory in order to build Sphinx with libstemmer support. " "$LINENO" 5 fi else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } $as_echo "#define USE_LIBSTEMMER 0" >>confdefs.h fi if test x$ac_cv_use_libstemmer != xno; then USE_LIBSTEMMER_TRUE= USE_LIBSTEMMER_FALSE='#' else USE_LIBSTEMMER_TRUE='#' USE_LIBSTEMMER_FALSE= fi got_expat=0 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for libexpat" >&5 $as_echo_n "checking for libexpat... " >&6; } if test $have_expat_h = yes -a $have_libexpat = yes ; then $as_echo "#define USE_LIBEXPAT 1" >>confdefs.h { $as_echo "$as_me:${as_lineno-$LINENO}: result: found" >&5 $as_echo "found" >&6; } got_expat=1 else { $as_echo "$as_me:${as_lineno-$LINENO}: result: not found" >&5 $as_echo "not found" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: xmlpipe2 will NOT be available" >&5 $as_echo "$as_me: WARNING: xmlpipe2 will NOT be available" >&2;} fi # Check whether --with-iconv was given. if test "${with_iconv+set}" = set; then : withval=$with_iconv; ac_cv_use_iconv=$withval else ac_cv_use_iconv=yes fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for libiconv" >&5 $as_echo_n "checking for libiconv... " >&6; } if test $have_iconv_h = yes \ -a $have_libiconv = yes \ -a $got_expat -eq 1 \ -a $ac_cv_use_iconv != no ; \ then $as_echo "#define USE_LIBICONV 1" >>confdefs.h { $as_echo "$as_me:${as_lineno-$LINENO}: result: found" >&5 $as_echo "found" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: checking for iconv() arg types" >&5 $as_echo_n "checking for iconv() arg types... " >&6; } ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ #include <iconv.h> #include <stdio.h> int main () { const char * inbuf; iconv_t cd; iconv ( cd, &inbuf, NULL, NULL, NULL ); ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : iconv_inbuf_const=yes else iconv_inbuf_const=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu if test $iconv_inbuf_const = yes ; then $as_echo "#define ICONV_INBUF_CONST 1" >>confdefs.h { $as_echo "$as_me:${as_lineno-$LINENO}: result: const char **" >&5 $as_echo "const char **" >&6; } else $as_echo "#define ICONV_INBUF_CONST 0" >>confdefs.h { $as_echo "$as_me:${as_lineno-$LINENO}: result: char **" >&5 $as_echo "char **" >&6; } fi else if test $got_expat -eq 1 ; then if test $ac_cv_use_iconv = no ; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: disabled" >&5 $as_echo "disabled" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: not found" >&5 $as_echo "not found" >&6; } fi { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: xmlpipe2 will only support default encodings (latin-1, utf-8)" >&5 $as_echo "$as_me: WARNING: xmlpipe2 will only support default encodings (latin-1, utf-8)" >&2;} else { $as_echo "$as_me:${as_lineno-$LINENO}: result: not required" >&5 $as_echo "not required" >&6; } fi fi if test $have_zlib_h = yes -a $have_lz = yes ; then $as_echo "#define USE_ZLIB 1" >>confdefs.h fi # Check whether --with-unixodbc was given. if test "${with_unixodbc+set}" = set; then : withval=$with_unixodbc; ac_cv_use_unixodbc=$withval else ac_cv_use_unixodbc=yes fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for UnixODBC" >&5 $as_echo_n "checking for UnixODBC... " >&6; } if test $ac_cv_use_unixodbc != no ; then if test $have_sql_h = yes ; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking for library containing SQLConnect" >&5 $as_echo_n "checking for library containing SQLConnect... " >&6; } if test "${ac_cv_search_SQLConnect+set}" = set; then : $as_echo_n "(cached) " >&6 else ac_func_search_save_LIBS=$LIBS cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char SQLConnect (); int main () { return SQLConnect (); ; return 0; } _ACEOF for ac_lib in '' odbc iodbc; do if test -z "$ac_lib"; then ac_res="none required" else ac_res=-l$ac_lib LIBS="-l$ac_lib $ac_func_search_save_LIBS" fi if ac_fn_c_try_link "$LINENO"; then : ac_cv_search_SQLConnect=$ac_res fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext if test "${ac_cv_search_SQLConnect+set}" = set; then : break fi done if test "${ac_cv_search_SQLConnect+set}" = set; then : else ac_cv_search_SQLConnect=no fi rm conftest.$ac_ext LIBS=$ac_func_search_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_search_SQLConnect" >&5 $as_echo "$ac_cv_search_SQLConnect" >&6; } ac_res=$ac_cv_search_SQLConnect if test "$ac_res" != no; then : test "$ac_res" = "none required" || LIBS="$ac_res $LIBS" have_libodbc=yes else have_libodbc=no fi if test $have_libodbc = yes ; then $as_echo "#define USE_ODBC 1" >>confdefs.h { $as_echo "$as_me:${as_lineno-$LINENO}: result: found" >&5 $as_echo "found" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: not found" >&5 $as_echo "not found" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: ODBC source support will NOT be available" >&5 $as_echo "$as_me: WARNING: ODBC source support will NOT be available" >&2;} fi fi else { $as_echo "$as_me:${as_lineno-$LINENO}: result: disabled" >&5 $as_echo "disabled" >&6; } fi # Check whether --with-syslog was given. if test "${with_syslog+set}" = set; then : withval=$with_syslog; ac_cv_use_syslog=$withval else ac_cv_use_syslog=no fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Syslog" >&5 $as_echo_n "checking for Syslog... " >&6; } if test $ac_cv_use_syslog != no ; then if test $have_syslog_h = yes ; then $as_echo "#define USE_SYSLOG 1" >>confdefs.h { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi else { $as_echo "$as_me:${as_lineno-$LINENO}: result: disabled" >&5 $as_echo "disabled" >&6; } fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for unaligned RAM access" >&5 $as_echo_n "checking for unaligned RAM access... " >&6; } if test "${sphinx_cv_unaligned_ram_access+set}" = set; then : $as_echo_n "(cached) " >&6 else ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu if test "$cross_compiling" = yes; then : { $as_echo "$as_me:${as_lineno-$LINENO}: result: unknown (cross-compiling), assume no" >&5 $as_echo "unknown (cross-compiling), assume no" >&6; } sphinx_cv_unaligned_ram_access=no else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ #include <stdlib.h> #include <stdio.h> int main () { char * sBuf = new char [ 8*sizeof(int) ]; for ( int i=0; i<8*sizeof(int); i++ ) sBuf[i] = i; // check for crashes (SPARC) volatile int iRes = 0; for ( int i=0; i<(int)sizeof(int); i++ ) { int * pPtr = (int*)( sBuf+i ); iRes += *pPtr; } // check for correct values (ARM) iRes = *(int*)( sBuf+1 ); if (!( iRes==0x01020304 || iRes==0x04030201 )) return 1; // all seems ok return 0; ; return 0; } _ACEOF if ac_fn_cxx_try_run "$LINENO"; then : sphinx_cv_unaligned_ram_access=yes else sphinx_cv_unaligned_ram_access=no fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $sphinx_cv_unaligned_ram_access" >&5 $as_echo "$sphinx_cv_unaligned_ram_access" >&6; } if test x$sphinx_cv_unaligned_ram_access = xyes ; then $as_echo "#define UNALIGNED_RAM_ACCESS 1" >>confdefs.h else $as_echo "#define UNALIGNED_RAM_ACCESS 0" >>confdefs.h fi # check endianness { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether byte ordering is bigendian" >&5 $as_echo_n "checking whether byte ordering is bigendian... " >&6; } if test "${ac_cv_c_bigendian+set}" = set; then : $as_echo_n "(cached) " >&6 else ac_cv_c_bigendian=unknown # See if we're dealing with a universal compiler. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #ifndef __APPLE_CC__ not a universal capable compiler #endif typedef int dummy; _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : # Check for potential -arch flags. It is not universal unless # there are at least two -arch flags with different values. ac_arch= ac_prev= for ac_word in $CC $CFLAGS $CPPFLAGS $LDFLAGS; do if test -n "$ac_prev"; then case $ac_word in i?86 | x86_64 | ppc | ppc64) if test -z "$ac_arch" || test "$ac_arch" = "$ac_word"; then ac_arch=$ac_word else ac_cv_c_bigendian=universal break fi ;; esac ac_prev= elif test "x$ac_word" = "x-arch"; then ac_prev=arch fi done fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext if test $ac_cv_c_bigendian = unknown; then # See if sys/param.h defines the BYTE_ORDER macro. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include <sys/types.h> #include <sys/param.h> int main () { #if ! (defined BYTE_ORDER && defined BIG_ENDIAN \ && defined LITTLE_ENDIAN && BYTE_ORDER && BIG_ENDIAN \ && LITTLE_ENDIAN) bogus endian macros #endif ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : # It does; now see whether it defined to BIG_ENDIAN or not. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include <sys/types.h> #include <sys/param.h> int main () { #if BYTE_ORDER != BIG_ENDIAN not big endian #endif ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_cv_c_bigendian=yes else ac_cv_c_bigendian=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi if test $ac_cv_c_bigendian = unknown; then # See if <limits.h> defines _LITTLE_ENDIAN or _BIG_ENDIAN (e.g., Solaris). cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include <limits.h> int main () { #if ! (defined _LITTLE_ENDIAN || defined _BIG_ENDIAN) bogus endian macros #endif ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : # It does; now see whether it defined to _BIG_ENDIAN or not. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ #include <limits.h> int main () { #ifndef _BIG_ENDIAN not big endian #endif ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_cv_c_bigendian=yes else ac_cv_c_bigendian=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi if test $ac_cv_c_bigendian = unknown; then # Compile a test program. if test "$cross_compiling" = yes; then : # Try to guess by grepping values from an object file. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ short int ascii_mm[] = { 0x4249, 0x4765, 0x6E44, 0x6961, 0x6E53, 0x7953, 0 }; short int ascii_ii[] = { 0x694C, 0x5454, 0x656C, 0x6E45, 0x6944, 0x6E61, 0 }; int use_ascii (int i) { return ascii_mm[i] + ascii_ii[i]; } short int ebcdic_ii[] = { 0x89D3, 0xE3E3, 0x8593, 0x95C5, 0x89C4, 0x9581, 0 }; short int ebcdic_mm[] = { 0xC2C9, 0xC785, 0x95C4, 0x8981, 0x95E2, 0xA8E2, 0 }; int use_ebcdic (int i) { return ebcdic_mm[i] + ebcdic_ii[i]; } extern int foo; int main () { return use_ascii (foo) == use_ebcdic (foo); ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : if grep BIGenDianSyS conftest.$ac_objext >/dev/null; then ac_cv_c_bigendian=yes fi if grep LiTTleEnDian conftest.$ac_objext >/dev/null ; then if test "$ac_cv_c_bigendian" = unknown; then ac_cv_c_bigendian=no else # finding both strings is unlikely to happen, but who knows? ac_cv_c_bigendian=unknown fi fi fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $ac_includes_default int main () { /* Are we little or big endian? From Harbison&Steele. */ union { long int l; char c[sizeof (long int)]; } u; u.l = 1; return u.c[sizeof (long int) - 1] == 1; ; return 0; } _ACEOF if ac_fn_cxx_try_run "$LINENO"; then : ac_cv_c_bigendian=no else ac_cv_c_bigendian=yes fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_bigendian" >&5 $as_echo "$ac_cv_c_bigendian" >&6; } case $ac_cv_c_bigendian in #( yes) $as_echo "#define USE_LITTLE_ENDIAN 0" >>confdefs.h ;; #( no) $as_echo "#define USE_LITTLE_ENDIAN 1" >>confdefs.h ;; #( universal) as_fn_error "universal endianness not supported" "$LINENO" 5 ;; #( *) as_fn_error "unknown endianness not supported" "$LINENO" 5 ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: result: " >&5 $as_echo "" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: result: generating configuration files" >&5 $as_echo "generating configuration files" >&6; } TMP=`echo generating configuration files | sed -e sX.X-Xg` { $as_echo "$as_me:${as_lineno-$LINENO}: result: $TMP" >&5 $as_echo "$TMP" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: result: " >&5 $as_echo "" >&6; } if test "$prefix" = "NONE"; then my_op_prefix="/var" else my_op_prefix="$localstatedir" fi CONFDIR=`eval echo "${my_op_prefix}"` ac_config_files="$ac_config_files Makefile src/Makefile libstemmer_c/Makefile doc/Makefile sphinx.conf.dist:sphinx.conf.in sphinx-min.conf.dist:sphinx-min.conf.in" cat >confcache <<\_ACEOF # This file is a shell script that caches the results of configure # tests run on this system so they can be shared between configure # scripts and configure runs, see configure's option --config-cache. # It is not useful on other systems. If it contains results you don't # want to keep, you may remove or edit it. 
# # config.status only pays attention to the cache file if you give it # the --recheck option to rerun configure. # # `ac_cv_env_foo' variables (set or unset) will be overridden when # loading this file, other *unset* `ac_cv_foo' will be assigned the # following values. _ACEOF # The following way of writing the cache mishandles newlines in values, # but we know of no workaround that is simple, portable, and efficient. # So, we kill variables containing newlines. # Ultrix sh set writes to stderr and can't be redirected directly, # and sets the high bit in the cache file unless we assign to the vars. ( for ac_var in `(set) 2>&1 | sed -n 's/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'`; do eval ac_val=\$$ac_var case $ac_val in #( *${as_nl}*) case $ac_var in #( *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5 $as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;; esac case $ac_var in #( _ | IFS | as_nl) ;; #( BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #( *) { eval $ac_var=; unset $ac_var;} ;; esac ;; esac done (set) 2>&1 | case $as_nl`(ac_space=' '; set) 2>&1` in #( *${as_nl}ac_space=\ *) # `set' does not quote correctly, so add quotes: double-quote # substitution turns \\\\ into \\, and sed turns \\ into \. sed -n \ "s/'/'\\\\''/g; s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\\2'/p" ;; #( *) # `set' quotes correctly as required by POSIX, so do not add quotes. sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p" ;; esac | sort ) | sed ' /^ac_cv_env_/b end t clear :clear s/^\([^=]*\)=\(.*[{}].*\)$/test "${\1+set}" = set || &/ t end s/^\([^=]*\)=\(.*\)$/\1=${\1=\2}/ :end' >>confcache if diff "$cache_file" confcache >/dev/null 2>&1; then :; else if test -w "$cache_file"; then test "x$cache_file" != "x/dev/null" && { $as_echo "$as_me:${as_lineno-$LINENO}: updating cache $cache_file" >&5 $as_echo "$as_me: updating cache $cache_file" >&6;} cat confcache >$cache_file else { $as_echo "$as_me:${as_lineno-$LINENO}: not updating unwritable cache $cache_file" >&5 $as_echo "$as_me: not updating unwritable cache $cache_file" >&6;} fi fi rm -f confcache test "x$prefix" = xNONE && prefix=$ac_default_prefix # Let make expand exec_prefix. test "x$exec_prefix" = xNONE && exec_prefix='${prefix}' DEFS=-DHAVE_CONFIG_H ac_libobjs= ac_ltlibobjs= for ac_i in : $LIBOBJS; do test "x$ac_i" = x: && continue # 1. Remove the extension, and $U if already installed. ac_script='s/\$U\././;s/\.o$//;s/\.obj$//' ac_i=`$as_echo "$ac_i" | sed "$ac_script"` # 2. Prepend LIBOBJDIR. When used with automake>=1.10 LIBOBJDIR # will be set to the directory where LIBOBJS objects are built. as_fn_append ac_libobjs " \${LIBOBJDIR}$ac_i\$U.$ac_objext" as_fn_append ac_ltlibobjs " \${LIBOBJDIR}$ac_i"'$U.lo' done LIBOBJS=$ac_libobjs LTLIBOBJS=$ac_ltlibobjs if test -n "$EXEEXT"; then am__EXEEXT_TRUE= am__EXEEXT_FALSE='#' else am__EXEEXT_TRUE='#' am__EXEEXT_FALSE= fi if test -z "${MAINTAINER_MODE_TRUE}" && test -z "${MAINTAINER_MODE_FALSE}"; then as_fn_error "conditional \"MAINTAINER_MODE\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${AMDEP_TRUE}" && test -z "${AMDEP_FALSE}"; then as_fn_error "conditional \"AMDEP\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${am__fastdepCC_TRUE}" && test -z "${am__fastdepCC_FALSE}"; then as_fn_error "conditional \"am__fastdepCC\" was never defined. 
Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${am__fastdepCXX_TRUE}" && test -z "${am__fastdepCXX_FALSE}"; then as_fn_error "conditional \"am__fastdepCXX\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${USE_MYSQL_TRUE}" && test -z "${USE_MYSQL_FALSE}"; then as_fn_error "conditional \"USE_MYSQL\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${USE_PGSQL_TRUE}" && test -z "${USE_PGSQL_FALSE}"; then as_fn_error "conditional \"USE_PGSQL\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${USE_LIBSTEMMER_TRUE}" && test -z "${USE_LIBSTEMMER_FALSE}"; then as_fn_error "conditional \"USE_LIBSTEMMER\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi : ${CONFIG_STATUS=./config.status} ac_write_fail=0 ac_clean_files_save=$ac_clean_files ac_clean_files="$ac_clean_files $CONFIG_STATUS" { $as_echo "$as_me:${as_lineno-$LINENO}: creating $CONFIG_STATUS" >&5 $as_echo "$as_me: creating $CONFIG_STATUS" >&6;} as_write_fail=0 cat >$CONFIG_STATUS <<_ASEOF || as_write_fail=1 #! $SHELL # Generated by $as_me. # Run this file to recreate the current configuration. # Compiler output produced by configure, useful for debugging # configure, is in config.log if it exists. debug=false ac_cs_recheck=false ac_cs_silent=false SHELL=\${CONFIG_SHELL-$SHELL} export SHELL _ASEOF cat >>$CONFIG_STATUS <<\_ASEOF || as_write_fail=1 ## -------------------- ## ## M4sh Initialization. ## ## -------------------- ## # Be more Bourne compatible DUALCASE=1; export DUALCASE # for MKS sh if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then : emulate sh NULLCMD=: # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which # is contrary to our usage. Disable this feature. alias -g '${1+"$@"}'='"$@"' setopt NO_GLOB_SUBST else case `(set -o) 2>/dev/null` in #( *posix*) : set -o posix ;; #( *) : ;; esac fi as_nl=' ' export as_nl # Printing a long string crashes Solaris 7 /usr/bin/printf. as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo # Prefer a ksh shell builtin over an external printf program on Solaris, # but without wasting forks for bash or zsh. if test -z "$BASH_VERSION$ZSH_VERSION" \ && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then as_echo='print -r --' as_echo_n='print -rn --' elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then as_echo='printf %s\n' as_echo_n='printf %s' else if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"' as_echo_n='/usr/ucb/echo -n' else as_echo_body='eval expr "X$1" : "X\\(.*\\)"' as_echo_n_body='eval arg=$1; case $arg in #( *"$as_nl"*) expr "X$arg" : "X\\(.*\\)$as_nl"; arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;; esac; expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl" ' export as_echo_n_body as_echo_n='sh -c $as_echo_n_body as_echo' fi export as_echo_body as_echo='sh -c $as_echo_body as_echo' fi # The user is always right. 
if test "${PATH_SEPARATOR+set}" != set; then PATH_SEPARATOR=: (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || PATH_SEPARATOR=';' } fi # IFS # We need space, tab and new line, in precisely that order. Quoting is # there to prevent editors from complaining about space-tab. # (If _AS_PATH_WALK were called with IFS unset, it would disable word # splitting by setting IFS to empty value.) IFS=" "" $as_nl" # Find who we are. Look in the path if we contain no directory separator. case $0 in #(( *[\\/]* ) as_myself=$0 ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break done IFS=$as_save_IFS ;; esac # We did not find ourselves, most probably we were run as `sh COMMAND' # in which case we are not to be found in the path. if test "x$as_myself" = x; then as_myself=$0 fi if test ! -f "$as_myself"; then $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 exit 1 fi # Unset variables that we do not need and which cause bugs (e.g. in # pre-3.0 UWIN ksh). But do not cause bugs in bash 2.01; the "|| exit 1" # suppresses any "Segmentation fault" message there. '((' could # trigger a bug in pdksh 5.2.14. for as_var in BASH_ENV ENV MAIL MAILPATH do eval test x\${$as_var+set} = xset \ && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || : done PS1='$ ' PS2='> ' PS4='+ ' # NLS nuisances. LC_ALL=C export LC_ALL LANGUAGE=C export LANGUAGE # CDPATH. (unset CDPATH) >/dev/null 2>&1 && unset CDPATH # as_fn_error ERROR [LINENO LOG_FD] # --------------------------------- # Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are # provided, also output the error to LOG_FD, referencing LINENO. Then exit the # script with status $?, using 1 if that was 0. as_fn_error () { as_status=$?; test $as_status -eq 0 && as_status=1 if test "$3"; then as_lineno=${as_lineno-"$2"} as_lineno_stack=as_lineno_stack=$as_lineno_stack $as_echo "$as_me:${as_lineno-$LINENO}: error: $1" >&$3 fi $as_echo "$as_me: error: $1" >&2 as_fn_exit $as_status } # as_fn_error # as_fn_set_status STATUS # ----------------------- # Set $? to STATUS, without forking. as_fn_set_status () { return $1 } # as_fn_set_status # as_fn_exit STATUS # ----------------- # Exit the shell with STATUS, even in a "trap 0" or "set -e" context. as_fn_exit () { set +e as_fn_set_status $1 exit $1 } # as_fn_exit # as_fn_unset VAR # --------------- # Portably unset VAR. as_fn_unset () { { eval $1=; unset $1;} } as_unset=as_fn_unset # as_fn_append VAR VALUE # ---------------------- # Append the text in VALUE to the end of the definition contained in VAR. Take # advantage of any shell optimizations that allow amortized linear growth over # repeated appends, instead of the typical quadratic growth present in naive # implementations. if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then : eval 'as_fn_append () { eval $1+=\$2 }' else as_fn_append () { eval $1=\$$1\$2 } fi # as_fn_append # as_fn_arith ARG... # ------------------ # Perform arithmetic evaluation on the ARGs, and store the result in the # global $as_val. Take advantage of shells that can avoid forks. The arguments # must be portable across $(()) and expr. if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then : eval 'as_fn_arith () { as_val=$(( $* )) }' else as_fn_arith () { as_val=`expr "$@" || test $? 
-eq 1` } fi # as_fn_arith if expr a : '\(a\)' >/dev/null 2>&1 && test "X`expr 00001 : '.*\(...\)'`" = X001; then as_expr=expr else as_expr=false fi if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then as_basename=basename else as_basename=false fi if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then as_dirname=dirname else as_dirname=false fi as_me=`$as_basename -- "$0" || $as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ X"$0" : 'X\(//\)$' \| \ X"$0" : 'X\(/\)' \| . 2>/dev/null || $as_echo X/"$0" | sed '/^.*\/\([^/][^/]*\)\/*$/{ s//\1/ q } /^X\/\(\/\/\)$/{ s//\1/ q } /^X\/\(\/\).*/{ s//\1/ q } s/.*/./; q'` # Avoid depending upon Character Ranges. as_cr_letters='abcdefghijklmnopqrstuvwxyz' as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' as_cr_Letters=$as_cr_letters$as_cr_LETTERS as_cr_digits='0123456789' as_cr_alnum=$as_cr_Letters$as_cr_digits ECHO_C= ECHO_N= ECHO_T= case `echo -n x` in #((((( -n*) case `echo 'xy\c'` in *c*) ECHO_T=' ';; # ECHO_T is single tab character. xy) ECHO_C='\c';; *) echo `echo ksh88 bug on AIX 6.1` > /dev/null ECHO_T=' ';; esac;; *) ECHO_N='-n';; esac rm -f conf$$ conf$$.exe conf$$.file if test -d conf$$.dir; then rm -f conf$$.dir/conf$$.file else rm -f conf$$.dir mkdir conf$$.dir 2>/dev/null fi if (echo >conf$$.file) 2>/dev/null; then if ln -s conf$$.file conf$$ 2>/dev/null; then as_ln_s='ln -s' # ... but there are two gotchas: # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. # In both cases, we have to default to `cp -p'. ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe || as_ln_s='cp -p' elif ln conf$$.file conf$$ 2>/dev/null; then as_ln_s=ln else as_ln_s='cp -p' fi else as_ln_s='cp -p' fi rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file rmdir conf$$.dir 2>/dev/null # as_fn_mkdir_p # ------------- # Create "$as_dir" as a directory, including parents if necessary. as_fn_mkdir_p () { case $as_dir in #( -*) as_dir=./$as_dir;; esac test -d "$as_dir" || eval $as_mkdir_p || { as_dirs= while :; do case $as_dir in #( *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'( *) as_qdir=$as_dir;; esac as_dirs="'$as_qdir' $as_dirs" as_dir=`$as_dirname -- "$as_dir" || $as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$as_dir" : 'X\(//\)[^/]' \| \ X"$as_dir" : 'X\(//\)$' \| \ X"$as_dir" : 'X\(/\)' \| . 2>/dev/null || $as_echo X"$as_dir" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q } /^X\(\/\/\)[^/].*/{ s//\1/ q } /^X\(\/\/\)$/{ s//\1/ q } /^X\(\/\).*/{ s//\1/ q } s/.*/./; q'` test -d "$as_dir" && break done test -z "$as_dirs" || eval "mkdir $as_dirs" } || test -d "$as_dir" || as_fn_error "cannot create directory $as_dir" } # as_fn_mkdir_p if mkdir -p . 2>/dev/null; then as_mkdir_p='mkdir -p "$as_dir"' else test -d ./-p && rmdir ./-p as_mkdir_p=false fi if test -x / >/dev/null 2>&1; then as_test_x='test -x' else if ls -dL / >/dev/null 2>&1; then as_ls_L_option=L else as_ls_L_option= fi as_test_x=' eval sh -c '\'' if test -d "$1"; then test -d "$1/."; else case $1 in #( -*)set "./$1";; esac; case `ls -ld'$as_ls_L_option' "$1" 2>/dev/null` in #(( ???[sx]*):;;*)false;;esac;fi '\'' sh ' fi as_executable_p=$as_test_x # Sed expression to map a string onto a valid CPP name. as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" # Sed expression to map a string onto a valid variable name. 
as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" exec 6>&1 ## ----------------------------------- ## ## Main body of $CONFIG_STATUS script. ## ## ----------------------------------- ## _ASEOF test $as_write_fail = 0 && chmod +x $CONFIG_STATUS || ac_write_fail=1 cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 # Save the log message, to keep $0 and so on meaningful, and to # report actual input values of CONFIG_FILES etc. instead of their # values after options handling. ac_log=" This file was extended by sphinx $as_me 2.0.4, which was generated by GNU Autoconf 2.65. Invocation command line was CONFIG_FILES = $CONFIG_FILES CONFIG_HEADERS = $CONFIG_HEADERS CONFIG_LINKS = $CONFIG_LINKS CONFIG_COMMANDS = $CONFIG_COMMANDS $ $0 $@ on `(hostname || uname -n) 2>/dev/null | sed 1q` " _ACEOF case $ac_config_files in *" "*) set x $ac_config_files; shift; ac_config_files=$*;; esac case $ac_config_headers in *" "*) set x $ac_config_headers; shift; ac_config_headers=$*;; esac cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 # Files that config.status was made for. config_files="$ac_config_files" config_headers="$ac_config_headers" config_commands="$ac_config_commands" _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 ac_cs_usage="\ \`$as_me' instantiates files and other configuration actions from templates according to the current configuration. Unless the files and actions are specified as TAGs, all are instantiated by default. Usage: $0 [OPTION]... [TAG]... -h, --help print this help, then exit -V, --version print version number and configuration settings, then exit --config print configuration, then exit -q, --quiet, --silent do not print progress messages -d, --debug don't remove temporary files --recheck update $as_me by reconfiguring in the same conditions --file=FILE[:TEMPLATE] instantiate the configuration file FILE --header=FILE[:TEMPLATE] instantiate the configuration header FILE Configuration files: $config_files Configuration headers: $config_headers Configuration commands: $config_commands Report bugs to ." _ACEOF cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`" ac_cs_version="\\ sphinx config.status 2.0.4 configured by $0, generated by GNU Autoconf 2.65, with options \\"\$ac_cs_config\\" Copyright (C) 2009 Free Software Foundation, Inc. This config.status script is free software; the Free Software Foundation gives unlimited permission to copy, distribute and modify it." ac_pwd='$ac_pwd' srcdir='$srcdir' INSTALL='$INSTALL' MKDIR_P='$MKDIR_P' AWK='$AWK' test -n "\$AWK" || AWK=awk _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 # The default lists apply if the user does not specify any file. ac_need_defaults=: while test $# != 0 do case $1 in --*=*) ac_option=`expr "X$1" : 'X\([^=]*\)='` ac_optarg=`expr "X$1" : 'X[^=]*=\(.*\)'` ac_shift=: ;; *) ac_option=$1 ac_optarg=$2 ac_shift=shift ;; esac case $ac_option in # Handling of the options. 
-recheck | --recheck | --rechec | --reche | --rech | --rec | --re | --r) ac_cs_recheck=: ;; --version | --versio | --versi | --vers | --ver | --ve | --v | -V ) $as_echo "$ac_cs_version"; exit ;; --config | --confi | --conf | --con | --co | --c ) $as_echo "$ac_cs_config"; exit ;; --debug | --debu | --deb | --de | --d | -d ) debug=: ;; --file | --fil | --fi | --f ) $ac_shift case $ac_optarg in *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;; esac as_fn_append CONFIG_FILES " '$ac_optarg'" ac_need_defaults=false;; --header | --heade | --head | --hea ) $ac_shift case $ac_optarg in *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;; esac as_fn_append CONFIG_HEADERS " '$ac_optarg'" ac_need_defaults=false;; --he | --h) # Conflict between --help and --header as_fn_error "ambiguous option: \`$1' Try \`$0 --help' for more information.";; --help | --hel | -h ) $as_echo "$ac_cs_usage"; exit ;; -q | -quiet | --quiet | --quie | --qui | --qu | --q \ | -silent | --silent | --silen | --sile | --sil | --si | --s) ac_cs_silent=: ;; # This is an error. -*) as_fn_error "unrecognized option: \`$1' Try \`$0 --help' for more information." ;; *) as_fn_append ac_config_targets " $1" ac_need_defaults=false ;; esac shift done ac_configure_extra_args= if $ac_cs_silent; then exec 6>/dev/null ac_configure_extra_args="$ac_configure_extra_args --silent" fi _ACEOF cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 if \$ac_cs_recheck; then set X '$SHELL' '$0' $ac_configure_args \$ac_configure_extra_args --no-create --no-recursion shift \$as_echo "running CONFIG_SHELL=$SHELL \$*" >&6 CONFIG_SHELL='$SHELL' export CONFIG_SHELL exec "\$@" fi _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 exec 5>>config.log { echo sed 'h;s/./-/g;s/^.../## /;s/...$/ ##/;p;x;p;x' <<_ASBOX ## Running $as_me. ## _ASBOX $as_echo "$ac_log" } >&5 _ACEOF cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 # # INIT-COMMANDS # AMDEP_TRUE="$AMDEP_TRUE" ac_aux_dir="$ac_aux_dir" _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 # Handling of arguments. for ac_config_target in $ac_config_targets do case $ac_config_target in "config/config.h") CONFIG_HEADERS="$CONFIG_HEADERS config/config.h" ;; "depfiles") CONFIG_COMMANDS="$CONFIG_COMMANDS depfiles" ;; "Makefile") CONFIG_FILES="$CONFIG_FILES Makefile" ;; "src/Makefile") CONFIG_FILES="$CONFIG_FILES src/Makefile" ;; "libstemmer_c/Makefile") CONFIG_FILES="$CONFIG_FILES libstemmer_c/Makefile" ;; "doc/Makefile") CONFIG_FILES="$CONFIG_FILES doc/Makefile" ;; "sphinx.conf.dist") CONFIG_FILES="$CONFIG_FILES sphinx.conf.dist:sphinx.conf.in" ;; "sphinx-min.conf.dist") CONFIG_FILES="$CONFIG_FILES sphinx-min.conf.dist:sphinx-min.conf.in" ;; *) as_fn_error "invalid argument: \`$ac_config_target'" "$LINENO" 5;; esac done # If the user did not use the arguments to specify the items to instantiate, # then the envvar interface is used. Set only those that are not. # We use the long form for the default assignment because of an extremely # bizarre bug on SunOS 4.1.3. if $ac_need_defaults; then test "${CONFIG_FILES+set}" = set || CONFIG_FILES=$config_files test "${CONFIG_HEADERS+set}" = set || CONFIG_HEADERS=$config_headers test "${CONFIG_COMMANDS+set}" = set || CONFIG_COMMANDS=$config_commands fi # Have a temporary directory for convenience. Make it in the build tree # simply because there is no reason against having it here, and in addition, # creating and moving files from /tmp can sometimes cause problems. # Hook for its removal unless debugging. 
# Note that there is a small window in which the directory will not be cleaned: # after its creation but before its name has been assigned to `$tmp'. $debug || { tmp= trap 'exit_status=$? { test -z "$tmp" || test ! -d "$tmp" || rm -fr "$tmp"; } && exit $exit_status ' 0 trap 'as_fn_exit 1' 1 2 13 15 } # Create a (secure) tmp directory for tmp files. { tmp=`(umask 077 && mktemp -d "./confXXXXXX") 2>/dev/null` && test -n "$tmp" && test -d "$tmp" } || { tmp=./conf$$-$RANDOM (umask 077 && mkdir "$tmp") } || as_fn_error "cannot create a temporary directory in ." "$LINENO" 5 # Set up the scripts for CONFIG_FILES section. # No need to generate them if there are no CONFIG_FILES. # This happens for instance with `./config.status config.h'. if test -n "$CONFIG_FILES"; then ac_cr=`echo X | tr X '\015'` # On cygwin, bash can eat \r inside `` if the user requested igncr. # But we know of no other shell where ac_cr would be empty at this # point, so we can use a bashism as a fallback. if test "x$ac_cr" = x; then eval ac_cr=\$\'\\r\' fi ac_cs_awk_cr=`$AWK 'BEGIN { print "a\rb" }' /dev/null` if test "$ac_cs_awk_cr" = "a${ac_cr}b"; then ac_cs_awk_cr='\r' else ac_cs_awk_cr=$ac_cr fi echo 'BEGIN {' >"$tmp/subs1.awk" && _ACEOF { echo "cat >conf$$subs.awk <<_ACEOF" && echo "$ac_subst_vars" | sed 's/.*/&!$&$ac_delim/' && echo "_ACEOF" } >conf$$subs.sh || as_fn_error "could not make $CONFIG_STATUS" "$LINENO" 5 ac_delim_num=`echo "$ac_subst_vars" | grep -c '$'` ac_delim='%!_!# ' for ac_last_try in false false false false false :; do . ./conf$$subs.sh || as_fn_error "could not make $CONFIG_STATUS" "$LINENO" 5 ac_delim_n=`sed -n "s/.*$ac_delim\$/X/p" conf$$subs.awk | grep -c X` if test $ac_delim_n = $ac_delim_num; then break elif $ac_last_try; then as_fn_error "could not make $CONFIG_STATUS" "$LINENO" 5 else ac_delim="$ac_delim!$ac_delim _$ac_delim!! " fi done rm -f conf$$subs.sh cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 cat >>"\$tmp/subs1.awk" <<\\_ACAWK && _ACEOF sed -n ' h s/^/S["/; s/!.*/"]=/ p g s/^[^!]*!// :repl t repl s/'"$ac_delim"'$// t delim :nl h s/\(.\{148\}\)..*/\1/ t more1 s/["\\]/\\&/g; s/^/"/; s/$/\\n"\\/ p n b repl :more1 s/["\\]/\\&/g; s/^/"/; s/$/"\\/ p g s/.\{148\}// t nl :delim h s/\(.\{148\}\)..*/\1/ t more2 s/["\\]/\\&/g; s/^/"/; s/$/"/ p b :more2 s/["\\]/\\&/g; s/^/"/; s/$/"\\/ p g s/.\{148\}// t delim ' >$CONFIG_STATUS || ac_write_fail=1 rm -f conf$$subs.awk cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 _ACAWK cat >>"\$tmp/subs1.awk" <<_ACAWK && for (key in S) S_is_set[key] = 1 FS = "" } { line = $ 0 nfields = split(line, field, "@") substed = 0 len = length(field[1]) for (i = 2; i < nfields; i++) { key = field[i] keylen = length(key) if (S_is_set[key]) { value = S[key] line = substr(line, 1, len) "" value "" substr(line, len + keylen + 3) len += length(value) + length(field[++i]) substed = 1 } else len += 1 + keylen } print line } _ACAWK _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 if sed "s/$ac_cr//" < /dev/null > /dev/null 2>&1; then sed "s/$ac_cr\$//; s/$ac_cr/$ac_cs_awk_cr/g" else cat fi < "$tmp/subs1.awk" > "$tmp/subs.awk" \ || as_fn_error "could not setup config files machinery" "$LINENO" 5 _ACEOF # VPATH may cause trouble with some makes, so we remove $(srcdir), # ${srcdir} and @srcdir@ from VPATH if srcdir is ".", strip leading and # trailing colons and then remove the whole line if VPATH becomes empty # (actually we leave an empty line to preserve line numbers). 
if test "x$srcdir" = x.; then ac_vpsub='/^[ ]*VPATH[ ]*=/{ s/:*\$(srcdir):*/:/ s/:*\${srcdir}:*/:/ s/:*@srcdir@:*/:/ s/^\([^=]*=[ ]*\):*/\1/ s/:*$// s/^[^=]*=[ ]*$// }' fi cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 fi # test -n "$CONFIG_FILES" # Set up the scripts for CONFIG_HEADERS section. # No need to generate them if there are no CONFIG_HEADERS. # This happens for instance with `./config.status Makefile'. if test -n "$CONFIG_HEADERS"; then cat >"$tmp/defines.awk" <<\_ACAWK || BEGIN { _ACEOF # Transform confdefs.h into an awk script `defines.awk', embedded as # here-document in config.status, that substitutes the proper values into # config.h.in to produce config.h. # Create a delimiter string that does not exist in confdefs.h, to ease # handling of long lines. ac_delim='%!_!# ' for ac_last_try in false false :; do ac_t=`sed -n "/$ac_delim/p" confdefs.h` if test -z "$ac_t"; then break elif $ac_last_try; then as_fn_error "could not make $CONFIG_HEADERS" "$LINENO" 5 else ac_delim="$ac_delim!$ac_delim _$ac_delim!! " fi done # For the awk script, D is an array of macro values keyed by name, # likewise P contains macro parameters if any. Preserve backslash # newline sequences. ac_word_re=[_$as_cr_Letters][_$as_cr_alnum]* sed -n ' s/.\{148\}/&'"$ac_delim"'/g t rset :rset s/^[ ]*#[ ]*define[ ][ ]*/ / t def d :def s/\\$// t bsnl s/["\\]/\\&/g s/^ \('"$ac_word_re"'\)\(([^()]*)\)[ ]*\(.*\)/P["\1"]="\2"\ D["\1"]=" \3"/p s/^ \('"$ac_word_re"'\)[ ]*\(.*\)/D["\1"]=" \2"/p d :bsnl s/["\\]/\\&/g s/^ \('"$ac_word_re"'\)\(([^()]*)\)[ ]*\(.*\)/P["\1"]="\2"\ D["\1"]=" \3\\\\\\n"\\/p t cont s/^ \('"$ac_word_re"'\)[ ]*\(.*\)/D["\1"]=" \2\\\\\\n"\\/p t cont d :cont n s/.\{148\}/&'"$ac_delim"'/g t clear :clear s/\\$// t bsnlc s/["\\]/\\&/g; s/^/"/; s/$/"/p d :bsnlc s/["\\]/\\&/g; s/^/"/; s/$/\\\\\\n"\\/p b cont ' >$CONFIG_STATUS || ac_write_fail=1 cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 for (key in D) D_is_set[key] = 1 FS = "" } /^[\t ]*#[\t ]*(define|undef)[\t ]+$ac_word_re([\t (]|\$)/ { line = \$ 0 split(line, arg, " ") if (arg[1] == "#") { defundef = arg[2] mac1 = arg[3] } else { defundef = substr(arg[1], 2) mac1 = arg[2] } split(mac1, mac2, "(") #) macro = mac2[1] prefix = substr(line, 1, index(line, defundef) - 1) if (D_is_set[macro]) { # Preserve the white space surrounding the "#". print prefix "define", macro P[macro] D[macro] next } else { # Replace #undef with comments. This is necessary, for example, # in the case of _POSIX_SOURCE, which is predefined and required # on some systems where configure will not decide to define it. if (defundef == "undef") { print "/*", prefix defundef, macro, "*/" next } } } { print } _ACAWK _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 as_fn_error "could not setup config headers machinery" "$LINENO" 5 fi # test -n "$CONFIG_HEADERS" eval set X " :F $CONFIG_FILES :H $CONFIG_HEADERS :C $CONFIG_COMMANDS" shift for ac_tag do case $ac_tag in :[FHLC]) ac_mode=$ac_tag; continue;; esac case $ac_mode$ac_tag in :[FHL]*:*);; :L* | :C*:*) as_fn_error "invalid tag \`$ac_tag'" "$LINENO" 5;; :[FH]-) ac_tag=-:-;; :[FH]*) ac_tag=$ac_tag:$ac_tag.in;; esac ac_save_IFS=$IFS IFS=: set x $ac_tag IFS=$ac_save_IFS shift ac_file=$1 shift case $ac_mode in :L) ac_source=$1;; :[FH]) ac_file_inputs= for ac_f do case $ac_f in -) ac_f="$tmp/stdin";; *) # Look for the file first in the build tree, then in the source tree # (if the path is not absolute). The absolute path cannot be DOS-style, # because $ac_f cannot contain `:'. 
test -f "$ac_f" || case $ac_f in [\\/$]*) false;; *) test -f "$srcdir/$ac_f" && ac_f="$srcdir/$ac_f";; esac || as_fn_error "cannot find input file: \`$ac_f'" "$LINENO" 5;; esac case $ac_f in *\'*) ac_f=`$as_echo "$ac_f" | sed "s/'/'\\\\\\\\''/g"`;; esac as_fn_append ac_file_inputs " '$ac_f'" done # Let's still pretend it is `configure' which instantiates (i.e., don't # use $as_me), people would be surprised to read: # /* config.h. Generated by config.status. */ configure_input='Generated from '` $as_echo "$*" | sed 's|^[^:]*/||;s|:[^:]*/|, |g' `' by configure.' if test x"$ac_file" != x-; then configure_input="$ac_file. $configure_input" { $as_echo "$as_me:${as_lineno-$LINENO}: creating $ac_file" >&5 $as_echo "$as_me: creating $ac_file" >&6;} fi # Neutralize special characters interpreted by sed in replacement strings. case $configure_input in #( *\&* | *\|* | *\\* ) ac_sed_conf_input=`$as_echo "$configure_input" | sed 's/[\\\\&|]/\\\\&/g'`;; #( *) ac_sed_conf_input=$configure_input;; esac case $ac_tag in *:-:* | *:-) cat >"$tmp/stdin" \ || as_fn_error "could not create $ac_file" "$LINENO" 5 ;; esac ;; esac ac_dir=`$as_dirname -- "$ac_file" || $as_expr X"$ac_file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$ac_file" : 'X\(//\)[^/]' \| \ X"$ac_file" : 'X\(//\)$' \| \ X"$ac_file" : 'X\(/\)' \| . 2>/dev/null || $as_echo X"$ac_file" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q } /^X\(\/\/\)[^/].*/{ s//\1/ q } /^X\(\/\/\)$/{ s//\1/ q } /^X\(\/\).*/{ s//\1/ q } s/.*/./; q'` as_dir="$ac_dir"; as_fn_mkdir_p ac_builddir=. case "$ac_dir" in .) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;; *) ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'` # A ".." for each directory in $ac_dir_suffix. ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'` case $ac_top_builddir_sub in "") ac_top_builddir_sub=. ac_top_build_prefix= ;; *) ac_top_build_prefix=$ac_top_builddir_sub/ ;; esac ;; esac ac_abs_top_builddir=$ac_pwd ac_abs_builddir=$ac_pwd$ac_dir_suffix # for backward compatibility: ac_top_builddir=$ac_top_build_prefix case $srcdir in .) # We are building in place. ac_srcdir=. ac_top_srcdir=$ac_top_builddir_sub ac_abs_top_srcdir=$ac_pwd ;; [\\/]* | ?:[\\/]* ) # Absolute name. ac_srcdir=$srcdir$ac_dir_suffix; ac_top_srcdir=$srcdir ac_abs_top_srcdir=$srcdir ;; *) # Relative name. ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix ac_top_srcdir=$ac_top_build_prefix$srcdir ac_abs_top_srcdir=$ac_pwd/$srcdir ;; esac ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix case $ac_mode in :F) # # CONFIG_FILE # case $INSTALL in [\\/$]* | ?:[\\/]* ) ac_INSTALL=$INSTALL ;; *) ac_INSTALL=$ac_top_build_prefix$INSTALL ;; esac ac_MKDIR_P=$MKDIR_P case $MKDIR_P in [\\/$]* | ?:[\\/]* ) ;; */*) ac_MKDIR_P=$ac_top_build_prefix$MKDIR_P ;; esac _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 # If the template does not know about datarootdir, expand it. # FIXME: This hack should be removed a few years after 2.60. 
ac_datarootdir_hack=; ac_datarootdir_seen= ac_sed_dataroot=' /datarootdir/ { p q } /@datadir@/p /@docdir@/p /@infodir@/p /@localedir@/p /@mandir@/p' case `eval "sed -n \"\$ac_sed_dataroot\" $ac_file_inputs"` in *datarootdir*) ac_datarootdir_seen=yes;; *@datadir@*|*@docdir@*|*@infodir@*|*@localedir@*|*@mandir@*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&5 $as_echo "$as_me: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&2;} _ACEOF cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 ac_datarootdir_hack=' s&@datadir@&$datadir&g s&@docdir@&$docdir&g s&@infodir@&$infodir&g s&@localedir@&$localedir&g s&@mandir@&$mandir&g s&\\\${datarootdir}&$datarootdir&g' ;; esac _ACEOF # Neutralize VPATH when `$srcdir' = `.'. # Shell code in configure.ac might set extrasub. # FIXME: do we really want to maintain this feature? cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 ac_sed_extra="$ac_vpsub $extrasub _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 :t /@[a-zA-Z_][a-zA-Z_0-9]*@/!b s|@configure_input@|$ac_sed_conf_input|;t t s&@top_builddir@&$ac_top_builddir_sub&;t t s&@top_build_prefix@&$ac_top_build_prefix&;t t s&@srcdir@&$ac_srcdir&;t t s&@abs_srcdir@&$ac_abs_srcdir&;t t s&@top_srcdir@&$ac_top_srcdir&;t t s&@abs_top_srcdir@&$ac_abs_top_srcdir&;t t s&@builddir@&$ac_builddir&;t t s&@abs_builddir@&$ac_abs_builddir&;t t s&@abs_top_builddir@&$ac_abs_top_builddir&;t t s&@INSTALL@&$ac_INSTALL&;t t s&@MKDIR_P@&$ac_MKDIR_P&;t t $ac_datarootdir_hack " eval sed \"\$ac_sed_extra\" "$ac_file_inputs" | $AWK -f "$tmp/subs.awk" >$tmp/out \ || as_fn_error "could not create $ac_file" "$LINENO" 5 test -z "$ac_datarootdir_hack$ac_datarootdir_seen" && { ac_out=`sed -n '/\${datarootdir}/p' "$tmp/out"`; test -n "$ac_out"; } && { ac_out=`sed -n '/^[ ]*datarootdir[ ]*:*=/p' "$tmp/out"`; test -z "$ac_out"; } && { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file contains a reference to the variable \`datarootdir' which seems to be undefined. Please make sure it is defined." >&5 $as_echo "$as_me: WARNING: $ac_file contains a reference to the variable \`datarootdir' which seems to be undefined. Please make sure it is defined." >&2;} rm -f "$tmp/stdin" case $ac_file in -) cat "$tmp/out" && rm -f "$tmp/out";; *) rm -f "$ac_file" && mv "$tmp/out" "$ac_file";; esac \ || as_fn_error "could not create $ac_file" "$LINENO" 5 ;; :H) # # CONFIG_HEADER # if test x"$ac_file" != x-; then { $as_echo "/* $configure_input */" \ && eval '$AWK -f "$tmp/defines.awk"' "$ac_file_inputs" } >"$tmp/config.h" \ || as_fn_error "could not create $ac_file" "$LINENO" 5 if diff "$ac_file" "$tmp/config.h" >/dev/null 2>&1; then { $as_echo "$as_me:${as_lineno-$LINENO}: $ac_file is unchanged" >&5 $as_echo "$as_me: $ac_file is unchanged" >&6;} else rm -f "$ac_file" mv "$tmp/config.h" "$ac_file" \ || as_fn_error "could not create $ac_file" "$LINENO" 5 fi else $as_echo "/* $configure_input */" \ && eval '$AWK -f "$tmp/defines.awk"' "$ac_file_inputs" \ || as_fn_error "could not create -" "$LINENO" 5 fi # Compute "$ac_file"'s index in $config_headers. _am_arg="$ac_file" _am_stamp_count=1 for _am_header in $config_headers :; do case $_am_header in $_am_arg | $_am_arg:* ) break ;; * ) _am_stamp_count=`expr $_am_stamp_count + 1` ;; esac done echo "timestamp for $_am_arg" >`$as_dirname -- "$_am_arg" || $as_expr X"$_am_arg" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$_am_arg" : 'X\(//\)[^/]' \| \ X"$_am_arg" : 'X\(//\)$' \| \ X"$_am_arg" : 'X\(/\)' \| . 
2>/dev/null || $as_echo X"$_am_arg" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q } /^X\(\/\/\)[^/].*/{ s//\1/ q } /^X\(\/\/\)$/{ s//\1/ q } /^X\(\/\).*/{ s//\1/ q } s/.*/./; q'`/stamp-h$_am_stamp_count ;; :C) { $as_echo "$as_me:${as_lineno-$LINENO}: executing $ac_file commands" >&5 $as_echo "$as_me: executing $ac_file commands" >&6;} ;; esac case $ac_file$ac_mode in "depfiles":C) test x"$AMDEP_TRUE" != x"" || { # Autoconf 2.62 quotes --file arguments for eval, but not when files # are listed without --file. Let's play safe and only enable the eval # if we detect the quoting. case $CONFIG_FILES in *\'*) eval set x "$CONFIG_FILES" ;; *) set x $CONFIG_FILES ;; esac shift for mf do # Strip MF so we end up with the name of the file. mf=`echo "$mf" | sed -e 's/:.*$//'` # Check whether this is an Automake generated Makefile or not. # We used to match only the files named `Makefile.in', but # some people rename them; so instead we look at the file content. # Grep'ing the first line is not enough: some people post-process # each Makefile.in and add a new line on top of each file to say so. # Grep'ing the whole file is not good either: AIX grep has a line # limit of 2048, but all sed's we know have understand at least 4000. if sed -n 's,^#.*generated by automake.*,X,p' "$mf" | grep X >/dev/null 2>&1; then dirpart=`$as_dirname -- "$mf" || $as_expr X"$mf" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$mf" : 'X\(//\)[^/]' \| \ X"$mf" : 'X\(//\)$' \| \ X"$mf" : 'X\(/\)' \| . 2>/dev/null || $as_echo X"$mf" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q } /^X\(\/\/\)[^/].*/{ s//\1/ q } /^X\(\/\/\)$/{ s//\1/ q } /^X\(\/\).*/{ s//\1/ q } s/.*/./; q'` else continue fi # Extract the definition of DEPDIR, am__include, and am__quote # from the Makefile without running `make'. DEPDIR=`sed -n 's/^DEPDIR = //p' < "$mf"` test -z "$DEPDIR" && continue am__include=`sed -n 's/^am__include = //p' < "$mf"` test -z "am__include" && continue am__quote=`sed -n 's/^am__quote = //p' < "$mf"` # When using ansi2knr, U may be empty or an underscore; expand it U=`sed -n 's/^U = //p' < "$mf"` # Find all dependency output files, they are included files with # $(DEPDIR) in their names. We invoke sed twice because it is the # simplest approach to changing $(DEPDIR) to its actual value in the # expansion. for file in `sed -n " s/^$am__include $am__quote\(.*(DEPDIR).*\)$am__quote"'$/\1/p' <"$mf" | \ sed -e 's/\$(DEPDIR)/'"$DEPDIR"'/g' -e 's/\$U/'"$U"'/g'`; do # Make sure the directory exists. test -f "$dirpart/$file" && continue fdir=`$as_dirname -- "$file" || $as_expr X"$file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$file" : 'X\(//\)[^/]' \| \ X"$file" : 'X\(//\)$' \| \ X"$file" : 'X\(/\)' \| . 2>/dev/null || $as_echo X"$file" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q } /^X\(\/\/\)[^/].*/{ s//\1/ q } /^X\(\/\/\)$/{ s//\1/ q } /^X\(\/\).*/{ s//\1/ q } s/.*/./; q'` as_dir=$dirpart/$fdir; as_fn_mkdir_p # echo "creating $dirpart/$file" echo '# dummy' > "$dirpart/$file" done done } ;; esac done # for ac_tag as_fn_exit 0 _ACEOF ac_clean_files=$ac_clean_files_save test $ac_write_fail = 0 || as_fn_error "write failure creating $CONFIG_STATUS" "$LINENO" 5 # configure is writing to config.log, and then calls config.status. # config.status does its own redirection, appending to config.log. # Unfortunately, on DOS this fails, as config.log is still kept open # by configure, so config.status won't be able to write to it; its # output is simply discarded. 
So we exec the FD to /dev/null, # effectively closing config.log, so it can be properly (re)opened and # appended to by config.status. When coming back to configure, we # need to make the FD available again. if test "$no_create" != yes; then ac_cs_success=: ac_config_status_args= test "$silent" = yes && ac_config_status_args="$ac_config_status_args --quiet" exec 5>/dev/null $SHELL $CONFIG_STATUS $ac_config_status_args || ac_cs_success=false exec 5>>config.log # Use ||, not &&, to avoid exiting from the if with $? = 1, which # would make configure fail if this is the last instruction. $ac_cs_success || as_fn_exit $? fi if test -n "$ac_unrecognized_opts" && test "$enable_option_checking" != no; then { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: unrecognized options: $ac_unrecognized_opts" >&5 $as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2;} fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: " >&5 $as_echo "" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: result: configuration done" >&5 $as_echo "configuration done" >&6; } TMP=`echo configuration done | sed -e sX.X-Xg` { $as_echo "$as_me:${as_lineno-$LINENO}: result: $TMP" >&5 $as_echo "$TMP" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: result: " >&5 $as_echo "" >&6; } echo "You can now run 'make install' to build and install Sphinx binaries." echo "On a multi-core machine, try 'make -j4 install' to speed up the build." echo echo "Updates, articles, help forum, and commercial support, consulting, training," echo "and development services are available at http://sphinxsearch.com/" echo echo "Thank you for choosing Sphinx!" echo sphinx-2.0.4-release/api/0000755000176700017710000000000011724063141014513 5ustar deogardeogarsphinx-2.0.4-release/api/sphinxapi.php0000644000176700017710000013463611711621267017251 0ustar deogardeogar=8 ) { $v = (int)$v; return pack ( "NN", $v>>32, $v&0xFFFFFFFF ); } // x32, int if ( is_int($v) ) return pack ( "NN", $v < 0 ? 
-1 : 0, $v ); // x32, bcmath if ( function_exists("bcmul") ) { if ( bccomp ( $v, 0 ) == -1 ) $v = bcadd ( "18446744073709551616", $v ); $h = bcdiv ( $v, "4294967296", 0 ); $l = bcmod ( $v, "4294967296" ); return pack ( "NN", (float)$h, (float)$l ); // conversion to float is intentional; int would lose 31st bit } // x32, no-bcmath $p = max(0, strlen($v) - 13); $lo = abs((float)substr($v, $p)); $hi = abs((float)substr($v, 0, $p)); $m = $lo + $hi*1316134912.0; // (10 ^ 13) % (1 << 32) = 1316134912 $q = floor($m/4294967296.0); $l = $m - ($q*4294967296.0); $h = $hi*2328.0 + $q; // (10 ^ 13) / (1 << 32) = 2328 if ( $v<0 ) { if ( $l==0 ) $h = 4294967296.0 - $h; else { $h = 4294967295.0 - $h; $l = 4294967296.0 - $l; } } return pack ( "NN", $h, $l ); } /// pack 64-bit unsigned function sphPackU64 ( $v ) { assert ( is_numeric($v) ); // x64 if ( PHP_INT_SIZE>=8 ) { assert ( $v>=0 ); // x64, int if ( is_int($v) ) return pack ( "NN", $v>>32, $v&0xFFFFFFFF ); // x64, bcmath if ( function_exists("bcmul") ) { $h = bcdiv ( $v, 4294967296, 0 ); $l = bcmod ( $v, 4294967296 ); return pack ( "NN", $h, $l ); } // x64, no-bcmath $p = max ( 0, strlen($v) - 13 ); $lo = (int)substr ( $v, $p ); $hi = (int)substr ( $v, 0, $p ); $m = $lo + $hi*1316134912; $l = $m % 4294967296; $h = $hi*2328 + (int)($m/4294967296); return pack ( "NN", $h, $l ); } // x32, int if ( is_int($v) ) return pack ( "NN", 0, $v ); // x32, bcmath if ( function_exists("bcmul") ) { $h = bcdiv ( $v, "4294967296", 0 ); $l = bcmod ( $v, "4294967296" ); return pack ( "NN", (float)$h, (float)$l ); // conversion to float is intentional; int would lose 31st bit } // x32, no-bcmath $p = max(0, strlen($v) - 13); $lo = (float)substr($v, $p); $hi = (float)substr($v, 0, $p); $m = $lo + $hi*1316134912.0; $q = floor($m / 4294967296.0); $l = $m - ($q * 4294967296.0); $h = $hi*2328.0 + $q; return pack ( "NN", $h, $l ); } // unpack 64-bit unsigned function sphUnpackU64 ( $v ) { list ( $hi, $lo ) = array_values ( unpack ( "N*N*", $v ) ); if ( PHP_INT_SIZE>=8 ) { if ( $hi<0 ) $hi += (1<<32); // because php 5.2.2 to 5.2.5 is totally fucked up again if ( $lo<0 ) $lo += (1<<32); // x64, int if ( $hi<=2147483647 ) return ($hi<<32) + $lo; // x64, bcmath if ( function_exists("bcmul") ) return bcadd ( $lo, bcmul ( $hi, "4294967296" ) ); // x64, no-bcmath $C = 100000; $h = ((int)($hi / $C) << 32) + (int)($lo / $C); $l = (($hi % $C) << 32) + ($lo % $C); if ( $l>$C ) { $h += (int)($l / $C); $l = $l % $C; } if ( $h==0 ) return $l; return sprintf ( "%d%05d", $h, $l ); } // x32, int if ( $hi==0 ) { if ( $lo>0 ) return $lo; return sprintf ( "%u", $lo ); } $hi = sprintf ( "%u", $hi ); $lo = sprintf ( "%u", $lo ); // x32, bcmath if ( function_exists("bcmul") ) return bcadd ( $lo, bcmul ( $hi, "4294967296" ) ); // x32, no-bcmath $hi = (float)$hi; $lo = (float)$lo; $q = floor($hi/10000000.0); $r = $hi - $q*10000000.0; $m = $lo + $r*4967296.0; $mq = floor($m/10000000.0); $l = $m - $mq*10000000.0; $h = $q*4294967296.0 + $r*429.0 + $mq; $h = sprintf ( "%.0f", $h ); $l = sprintf ( "%07.0f", $l ); if ( $h=="0" ) return sprintf( "%.0f", (float)$l ); return $h . 
$l; } // unpack 64-bit signed function sphUnpackI64 ( $v ) { list ( $hi, $lo ) = array_values ( unpack ( "N*N*", $v ) ); // x64 if ( PHP_INT_SIZE>=8 ) { if ( $hi<0 ) $hi += (1<<32); // because php 5.2.2 to 5.2.5 is totally fucked up again if ( $lo<0 ) $lo += (1<<32); return ($hi<<32) + $lo; } // x32, int if ( $hi==0 ) { if ( $lo>0 ) return $lo; return sprintf ( "%u", $lo ); } // x32, int elseif ( $hi==-1 ) { if ( $lo<0 ) return $lo; return sprintf ( "%.0f", $lo - 4294967296.0 ); } $neg = ""; $c = 0; if ( $hi<0 ) { $hi = ~$hi; $lo = ~$lo; $c = 1; $neg = "-"; } $hi = sprintf ( "%u", $hi ); $lo = sprintf ( "%u", $lo ); // x32, bcmath if ( function_exists("bcmul") ) return $neg . bcadd ( bcadd ( $lo, bcmul ( $hi, "4294967296" ) ), $c ); // x32, no-bcmath $hi = (float)$hi; $lo = (float)$lo; $q = floor($hi/10000000.0); $r = $hi - $q*10000000.0; $m = $lo + $r*4967296.0; $mq = floor($m/10000000.0); $l = $m - $mq*10000000.0 + $c; $h = $q*4294967296.0 + $r*429.0 + $mq; if ( $l==10000000 ) { $l = 0; $h += 1; } $h = sprintf ( "%.0f", $h ); $l = sprintf ( "%07.0f", $l ); if ( $h=="0" ) return $neg . sprintf( "%.0f", (float)$l ); return $neg . $h . $l; } function sphFixUint ( $value ) { if ( PHP_INT_SIZE>=8 ) { // x64 route, workaround broken unpack() in 5.2.2+ if ( $value<0 ) $value += (1<<32); return $value; } else { // x32 route, workaround php signed/unsigned braindamage return sprintf ( "%u", $value ); } } /// sphinx searchd client class class SphinxClient { var $_host; ///< searchd host (default is "localhost") var $_port; ///< searchd port (default is 9312) var $_offset; ///< how many records to seek from result-set start (default is 0) var $_limit; ///< how many records to return from result-set starting at offset (default is 20) var $_mode; ///< query matching mode (default is SPH_MATCH_ALL) var $_weights; ///< per-field weights (default is 1 for all fields) var $_sort; ///< match sorting mode (default is SPH_SORT_RELEVANCE) var $_sortby; ///< attribute to sort by (defualt is "") var $_min_id; ///< min ID to match (default is 0, which means no limit) var $_max_id; ///< max ID to match (default is 0, which means no limit) var $_filters; ///< search filters var $_groupby; ///< group-by attribute name var $_groupfunc; ///< group-by function (to pre-process group-by attribute value with) var $_groupsort; ///< group-by sorting clause (to sort groups in result set with) var $_groupdistinct;///< group-by count-distinct attribute var $_maxmatches; ///< max matches to retrieve var $_cutoff; ///< cutoff to stop searching at (default is 0) var $_retrycount; ///< distributed retries count var $_retrydelay; ///< distributed retries delay var $_anchor; ///< geographical anchor point var $_indexweights; ///< per-index weights var $_ranker; ///< ranking mode (default is SPH_RANK_PROXIMITY_BM25) var $_rankexpr; ///< ranking mode expression (for SPH_RANK_EXPR) var $_maxquerytime; ///< max query time, milliseconds (default is 0, do not limit) var $_fieldweights; ///< per-field-name weights var $_overrides; ///< per-query attribute values overrides var $_select; ///< select-list (attributes or expressions, with optional aliases) var $_error; ///< last error message var $_warning; ///< last warning message var $_connerror; ///< connection error vs remote error flag var $_reqs; ///< requests array for multi-query var $_mbenc; ///< stored mbstring encoding var $_arrayresult; ///< whether $result["matches"] should be a hash or an array var $_timeout; ///< connect timeout 
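	// Minimal usage sketch (a sketch only; the searchd address localhost:9312
	// and the index name "test1" are assumptions for illustration, and the
	// SetMatchMode/SetLimits calls just restate the defaults listed above):
	//
	//   $cl = new SphinxClient ();
	//   $cl->SetServer ( "localhost", 9312 );
	//   $cl->SetMatchMode ( SPH_MATCH_EXTENDED2 );
	//   $cl->SetLimits ( 0, 20 );
	//   $res = $cl->Query ( "hello world", "test1" );
	//   if ( $res===false )
	//       print "Query failed: " . $cl->GetLastError() . ".\n";
	//   else
	//       print "Found " . $res["total_found"] . " matches in " . $res["time"] . " sec.\n";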
///////////////////////////////////////////////////////////////////////////// // common stuff ///////////////////////////////////////////////////////////////////////////// /// create a new client object and fill defaults function SphinxClient () { // per-client-object settings $this->_host = "localhost"; $this->_port = 9312; $this->_path = false; $this->_socket = false; // per-query settings $this->_offset = 0; $this->_limit = 20; $this->_mode = SPH_MATCH_ALL; $this->_weights = array (); $this->_sort = SPH_SORT_RELEVANCE; $this->_sortby = ""; $this->_min_id = 0; $this->_max_id = 0; $this->_filters = array (); $this->_groupby = ""; $this->_groupfunc = SPH_GROUPBY_DAY; $this->_groupsort = "@group desc"; $this->_groupdistinct= ""; $this->_maxmatches = 1000; $this->_cutoff = 0; $this->_retrycount = 0; $this->_retrydelay = 0; $this->_anchor = array (); $this->_indexweights= array (); $this->_ranker = SPH_RANK_PROXIMITY_BM25; $this->_rankexpr = ""; $this->_maxquerytime= 0; $this->_fieldweights= array(); $this->_overrides = array(); $this->_select = "*"; $this->_error = ""; // per-reply fields (for single-query case) $this->_warning = ""; $this->_connerror = false; $this->_reqs = array (); // requests storage (for multi-query case) $this->_mbenc = ""; $this->_arrayresult = false; $this->_timeout = 0; } function __destruct() { if ( $this->_socket !== false ) fclose ( $this->_socket ); } /// get last error message (string) function GetLastError () { return $this->_error; } /// get last warning message (string) function GetLastWarning () { return $this->_warning; } /// get last error flag (to tell network connection errors from searchd errors or broken responses) function IsConnectError() { return $this->_connerror; } /// set searchd host name (string) and port (integer) function SetServer ( $host, $port = 0 ) { assert ( is_string($host) ); if ( $host[0] == '/') { $this->_path = 'unix://' . 
$host; return; } if ( substr ( $host, 0, 7 )=="unix://" ) { $this->_path = $host; return; } assert ( is_int($port) ); $this->_host = $host; $this->_port = $port; $this->_path = ''; } /// set server connection timeout (0 to remove) function SetConnectTimeout ( $timeout ) { assert ( is_numeric($timeout) ); $this->_timeout = $timeout; } function _Send ( $handle, $data, $length ) { if ( feof($handle) || fwrite ( $handle, $data, $length ) !== $length ) { $this->_error = 'connection unexpectedly closed (timed out?)'; $this->_connerror = true; return false; } return true; } ///////////////////////////////////////////////////////////////////////////// /// enter mbstring workaround mode function _MBPush () { $this->_mbenc = ""; if ( ini_get ( "mbstring.func_overload" ) & 2 ) { $this->_mbenc = mb_internal_encoding(); mb_internal_encoding ( "latin1" ); } } /// leave mbstring workaround mode function _MBPop () { if ( $this->_mbenc ) mb_internal_encoding ( $this->_mbenc ); } /// connect to searchd server function _Connect () { if ( $this->_socket!==false ) { // we are in persistent connection mode, so we have a socket // however, need to check whether it's still alive if ( !@feof ( $this->_socket ) ) return $this->_socket; // force reopen $this->_socket = false; } $errno = 0; $errstr = ""; $this->_connerror = false; if ( $this->_path ) { $host = $this->_path; $port = 0; } else { $host = $this->_host; $port = $this->_port; } if ( $this->_timeout<=0 ) $fp = @fsockopen ( $host, $port, $errno, $errstr ); else $fp = @fsockopen ( $host, $port, $errno, $errstr, $this->_timeout ); if ( !$fp ) { if ( $this->_path ) $location = $this->_path; else $location = "{$this->_host}:{$this->_port}"; $errstr = trim ( $errstr ); $this->_error = "connection to $location failed (errno=$errno, msg=$errstr)"; $this->_connerror = true; return false; } // send my version // this is a subtle part. we must do it before (!) reading back from searchd. // because otherwise under some conditions (reported on FreeBSD for instance) // TCP stack could throttle write-write-read pattern because of Nagle. if ( !$this->_Send ( $fp, pack ( "N", 1 ), 4 ) ) { fclose ( $fp ); $this->_error = "failed to send client protocol version"; return false; } // check version list(,$v) = unpack ( "N*", fread ( $fp, 4 ) ); $v = (int)$v; if ( $v<1 ) { fclose ( $fp ); $this->_error = "expected searchd protocol version 1+, got version '$v'"; return false; } return $fp; } /// get and check response packet from searchd server function _GetResponse ( $fp, $client_ver ) { $response = ""; $len = 0; $header = fread ( $fp, 8 ); if ( strlen($header)==8 ) { list ( $status, $ver, $len ) = array_values ( unpack ( "n2a/Nb", $header ) ); $left = $len; while ( $left>0 && !feof($fp) ) { $chunk = fread ( $fp, min ( 8192, $left ) ); if ( $chunk ) { $response .= $chunk; $left -= strlen($chunk); } } } if ( $this->_socket === false ) fclose ( $fp ); // check response $read = strlen ( $response ); if ( !$response || $read!=$len ) { $this->_error = $len ? "failed to read searchd response (status=$status, ver=$ver, len=$len, read=$read)" : "received zero-sized searchd response"; return false; } // check status if ( $status==SEARCHD_WARNING ) { list(,$wlen) = unpack ( "N*", substr ( $response, 0, 4 ) ); $this->_warning = substr ( $response, 4, $wlen ); return substr ( $response, 4+$wlen ); } if ( $status==SEARCHD_ERROR ) { $this->_error = "searchd error: " . substr ( $response, 4 ); return false; } if ( $status==SEARCHD_RETRY ) { $this->_error = "temporary searchd error: " . 
substr ( $response, 4 ); return false; } if ( $status!=SEARCHD_OK ) { $this->_error = "unknown status code '$status'"; return false; } // check version if ( $ver<$client_ver ) { $this->_warning = sprintf ( "searchd command v.%d.%d older than client's v.%d.%d, some options might not work", $ver>>8, $ver&0xff, $client_ver>>8, $client_ver&0xff ); } return $response; } ///////////////////////////////////////////////////////////////////////////// // searching ///////////////////////////////////////////////////////////////////////////// /// set offset and count into result set, /// and optionally set max-matches and cutoff limits function SetLimits ( $offset, $limit, $max=0, $cutoff=0 ) { assert ( is_int($offset) ); assert ( is_int($limit) ); assert ( $offset>=0 ); assert ( $limit>0 ); assert ( $max>=0 ); $this->_offset = $offset; $this->_limit = $limit; if ( $max>0 ) $this->_maxmatches = $max; if ( $cutoff>0 ) $this->_cutoff = $cutoff; } /// set maximum query time, in milliseconds, per-index /// integer, 0 means "do not limit" function SetMaxQueryTime ( $max ) { assert ( is_int($max) ); assert ( $max>=0 ); $this->_maxquerytime = $max; } /// set matching mode function SetMatchMode ( $mode ) { assert ( $mode==SPH_MATCH_ALL || $mode==SPH_MATCH_ANY || $mode==SPH_MATCH_PHRASE || $mode==SPH_MATCH_BOOLEAN || $mode==SPH_MATCH_EXTENDED || $mode==SPH_MATCH_FULLSCAN || $mode==SPH_MATCH_EXTENDED2 ); $this->_mode = $mode; } /// set ranking mode function SetRankingMode ( $ranker, $rankexpr="" ) { assert ( $ranker>=0 && $ranker_ranker = $ranker; $this->_rankexpr = $rankexpr; } /// set matches sorting mode function SetSortMode ( $mode, $sortby="" ) { assert ( $mode==SPH_SORT_RELEVANCE || $mode==SPH_SORT_ATTR_DESC || $mode==SPH_SORT_ATTR_ASC || $mode==SPH_SORT_TIME_SEGMENTS || $mode==SPH_SORT_EXTENDED || $mode==SPH_SORT_EXPR ); assert ( is_string($sortby) ); assert ( $mode==SPH_SORT_RELEVANCE || strlen($sortby)>0 ); $this->_sort = $mode; $this->_sortby = $sortby; } /// bind per-field weights by order /// DEPRECATED; use SetFieldWeights() instead function SetWeights ( $weights ) { assert ( is_array($weights) ); foreach ( $weights as $weight ) assert ( is_int($weight) ); $this->_weights = $weights; } /// bind per-field weights by name function SetFieldWeights ( $weights ) { assert ( is_array($weights) ); foreach ( $weights as $name=>$weight ) { assert ( is_string($name) ); assert ( is_int($weight) ); } $this->_fieldweights = $weights; } /// bind per-index weights by name function SetIndexWeights ( $weights ) { assert ( is_array($weights) ); foreach ( $weights as $index=>$weight ) { assert ( is_string($index) ); assert ( is_int($weight) ); } $this->_indexweights = $weights; } /// set IDs range to match /// only match records if document ID is beetwen $min and $max (inclusive) function SetIDRange ( $min, $max ) { assert ( is_numeric($min) ); assert ( is_numeric($max) ); assert ( $min<=$max ); $this->_min_id = $min; $this->_max_id = $max; } /// set values set filter /// only match records where $attribute value is in given set function SetFilter ( $attribute, $values, $exclude=false ) { assert ( is_string($attribute) ); assert ( is_array($values) ); assert ( count($values) ); if ( is_array($values) && count($values) ) { foreach ( $values as $value ) assert ( is_numeric($value) ); $this->_filters[] = array ( "type"=>SPH_FILTER_VALUES, "attr"=>$attribute, "exclude"=>$exclude, "values"=>$values ); } } /// set range filter /// only match records if $attribute value is beetwen $min and $max (inclusive) function 
SetFilterRange ( $attribute, $min, $max, $exclude=false ) { assert ( is_string($attribute) ); assert ( is_numeric($min) ); assert ( is_numeric($max) ); assert ( $min<=$max ); $this->_filters[] = array ( "type"=>SPH_FILTER_RANGE, "attr"=>$attribute, "exclude"=>$exclude, "min"=>$min, "max"=>$max ); } /// set float range filter /// only match records if $attribute value is beetwen $min and $max (inclusive) function SetFilterFloatRange ( $attribute, $min, $max, $exclude=false ) { assert ( is_string($attribute) ); assert ( is_float($min) ); assert ( is_float($max) ); assert ( $min<=$max ); $this->_filters[] = array ( "type"=>SPH_FILTER_FLOATRANGE, "attr"=>$attribute, "exclude"=>$exclude, "min"=>$min, "max"=>$max ); } /// setup anchor point for geosphere distance calculations /// required to use @geodist in filters and sorting /// latitude and longitude must be in radians function SetGeoAnchor ( $attrlat, $attrlong, $lat, $long ) { assert ( is_string($attrlat) ); assert ( is_string($attrlong) ); assert ( is_float($lat) ); assert ( is_float($long) ); $this->_anchor = array ( "attrlat"=>$attrlat, "attrlong"=>$attrlong, "lat"=>$lat, "long"=>$long ); } /// set grouping attribute and function function SetGroupBy ( $attribute, $func, $groupsort="@group desc" ) { assert ( is_string($attribute) ); assert ( is_string($groupsort) ); assert ( $func==SPH_GROUPBY_DAY || $func==SPH_GROUPBY_WEEK || $func==SPH_GROUPBY_MONTH || $func==SPH_GROUPBY_YEAR || $func==SPH_GROUPBY_ATTR || $func==SPH_GROUPBY_ATTRPAIR ); $this->_groupby = $attribute; $this->_groupfunc = $func; $this->_groupsort = $groupsort; } /// set count-distinct attribute for group-by queries function SetGroupDistinct ( $attribute ) { assert ( is_string($attribute) ); $this->_groupdistinct = $attribute; } /// set distributed retries count and delay function SetRetries ( $count, $delay=0 ) { assert ( is_int($count) && $count>=0 ); assert ( is_int($delay) && $delay>=0 ); $this->_retrycount = $count; $this->_retrydelay = $delay; } /// set result set format (hash or array; hash by default) /// PHP specific; needed for group-by-MVA result sets that may contain duplicate IDs function SetArrayResult ( $arrayresult ) { assert ( is_bool($arrayresult) ); $this->_arrayresult = $arrayresult; } /// set attribute values override /// there can be only one override per attribute /// $values must be a hash that maps document IDs to attribute values function SetOverride ( $attrname, $attrtype, $values ) { assert ( is_string ( $attrname ) ); assert ( in_array ( $attrtype, array ( SPH_ATTR_INTEGER, SPH_ATTR_TIMESTAMP, SPH_ATTR_BOOL, SPH_ATTR_FLOAT, SPH_ATTR_BIGINT ) ) ); assert ( is_array ( $values ) ); $this->_overrides[$attrname] = array ( "attr"=>$attrname, "type"=>$attrtype, "values"=>$values ); } /// set select-list (attributes or expressions), SQL-like syntax function SetSelect ( $select ) { assert ( is_string ( $select ) ); $this->_select = $select; } ////////////////////////////////////////////////////////////////////////////// /// clear all filters (for multi-queries) function ResetFilters () { $this->_filters = array(); $this->_anchor = array(); } /// clear groupby settings (for multi-queries) function ResetGroupBy () { $this->_groupby = ""; $this->_groupfunc = SPH_GROUPBY_DAY; $this->_groupsort = "@group desc"; $this->_groupdistinct= ""; } /// clear all attribute value overrides (for multi-queries) function ResetOverrides () { $this->_overrides = array (); } ////////////////////////////////////////////////////////////////////////////// /// connect to searchd 
server, run given search query through given indexes, /// and return the search results function Query ( $query, $index="*", $comment="" ) { assert ( empty($this->_reqs) ); $this->AddQuery ( $query, $index, $comment ); $results = $this->RunQueries (); $this->_reqs = array (); // just in case it failed too early if ( !is_array($results) ) return false; // probably network error; error message should be already filled $this->_error = $results[0]["error"]; $this->_warning = $results[0]["warning"]; if ( $results[0]["status"]==SEARCHD_ERROR ) return false; else return $results[0]; } /// helper to pack floats in network byte order function _PackFloat ( $f ) { $t1 = pack ( "f", $f ); // machine order list(,$t2) = unpack ( "L*", $t1 ); // int in machine order return pack ( "N", $t2 ); } /// add query to multi-query batch /// returns index into results array from RunQueries() call function AddQuery ( $query, $index="*", $comment="" ) { // mbstring workaround $this->_MBPush (); // build request $req = pack ( "NNNN", $this->_offset, $this->_limit, $this->_mode, $this->_ranker ); if ( $this->_ranker==SPH_RANK_EXPR ) $req .= pack ( "N", strlen($this->_rankexpr) ) . $this->_rankexpr; $req .= pack ( "N", $this->_sort ); // (deprecated) sort mode $req .= pack ( "N", strlen($this->_sortby) ) . $this->_sortby; $req .= pack ( "N", strlen($query) ) . $query; // query itself $req .= pack ( "N", count($this->_weights) ); // weights foreach ( $this->_weights as $weight ) $req .= pack ( "N", (int)$weight ); $req .= pack ( "N", strlen($index) ) . $index; // indexes $req .= pack ( "N", 1 ); // id64 range marker $req .= sphPackU64 ( $this->_min_id ) . sphPackU64 ( $this->_max_id ); // id64 range // filters $req .= pack ( "N", count($this->_filters) ); foreach ( $this->_filters as $filter ) { $req .= pack ( "N", strlen($filter["attr"]) ) . $filter["attr"]; $req .= pack ( "N", $filter["type"] ); switch ( $filter["type"] ) { case SPH_FILTER_VALUES: $req .= pack ( "N", count($filter["values"]) ); foreach ( $filter["values"] as $value ) $req .= sphPackI64 ( $value ); break; case SPH_FILTER_RANGE: $req .= sphPackI64 ( $filter["min"] ) . sphPackI64 ( $filter["max"] ); break; case SPH_FILTER_FLOATRANGE: $req .= $this->_PackFloat ( $filter["min"] ) . $this->_PackFloat ( $filter["max"] ); break; default: assert ( 0 && "internal error: unhandled filter type" ); } $req .= pack ( "N", $filter["exclude"] ); } // group-by clause, max-matches count, group-sort clause, cutoff count $req .= pack ( "NN", $this->_groupfunc, strlen($this->_groupby) ) . $this->_groupby; $req .= pack ( "N", $this->_maxmatches ); $req .= pack ( "N", strlen($this->_groupsort) ) . $this->_groupsort; $req .= pack ( "NNN", $this->_cutoff, $this->_retrycount, $this->_retrydelay ); $req .= pack ( "N", strlen($this->_groupdistinct) ) . $this->_groupdistinct; // anchor point if ( empty($this->_anchor) ) { $req .= pack ( "N", 0 ); } else { $a =& $this->_anchor; $req .= pack ( "N", 1 ); $req .= pack ( "N", strlen($a["attrlat"]) ) . $a["attrlat"]; $req .= pack ( "N", strlen($a["attrlong"]) ) . $a["attrlong"]; $req .= $this->_PackFloat ( $a["lat"] ) . $this->_PackFloat ( $a["long"] ); } // per-index weights $req .= pack ( "N", count($this->_indexweights) ); foreach ( $this->_indexweights as $idx=>$weight ) $req .= pack ( "N", strlen($idx) ) . $idx . 
pack ( "N", $weight ); // max query time $req .= pack ( "N", $this->_maxquerytime ); // per-field weights $req .= pack ( "N", count($this->_fieldweights) ); foreach ( $this->_fieldweights as $field=>$weight ) $req .= pack ( "N", strlen($field) ) . $field . pack ( "N", $weight ); // comment $req .= pack ( "N", strlen($comment) ) . $comment; // attribute overrides $req .= pack ( "N", count($this->_overrides) ); foreach ( $this->_overrides as $key => $entry ) { $req .= pack ( "N", strlen($entry["attr"]) ) . $entry["attr"]; $req .= pack ( "NN", $entry["type"], count($entry["values"]) ); foreach ( $entry["values"] as $id=>$val ) { assert ( is_numeric($id) ); assert ( is_numeric($val) ); $req .= sphPackU64 ( $id ); switch ( $entry["type"] ) { case SPH_ATTR_FLOAT: $req .= $this->_PackFloat ( $val ); break; case SPH_ATTR_BIGINT: $req .= sphPackI64 ( $val ); break; default: $req .= pack ( "N", $val ); break; } } } // select-list $req .= pack ( "N", strlen($this->_select) ) . $this->_select; // mbstring workaround $this->_MBPop (); // store request to requests array $this->_reqs[] = $req; return count($this->_reqs)-1; } /// connect to searchd, run queries batch, and return an array of result sets function RunQueries () { if ( empty($this->_reqs) ) { $this->_error = "no queries defined, issue AddQuery() first"; return false; } // mbstring workaround $this->_MBPush (); if (!( $fp = $this->_Connect() )) { $this->_MBPop (); return false; } // send query, get response $nreqs = count($this->_reqs); $req = join ( "", $this->_reqs ); $len = 8+strlen($req); $req = pack ( "nnNNN", SEARCHD_COMMAND_SEARCH, VER_COMMAND_SEARCH, $len, 0, $nreqs ) . $req; // add header if ( !( $this->_Send ( $fp, $req, $len+8 ) ) || !( $response = $this->_GetResponse ( $fp, VER_COMMAND_SEARCH ) ) ) { $this->_MBPop (); return false; } // query sent ok; we can reset reqs now $this->_reqs = array (); // parse and return response return $this->_ParseSearchResponse ( $response, $nreqs ); } /// parse and return search query (or queries) response function _ParseSearchResponse ( $response, $nreqs ) { $p = 0; // current position $max = strlen($response); // max position for checks, to protect against broken responses $results = array (); for ( $ires=0; $ires<$nreqs && $p<$max; $ires++ ) { $results[] = array(); $result =& $results[$ires]; $result["error"] = ""; $result["warning"] = ""; // extract status list(,$status) = unpack ( "N*", substr ( $response, $p, 4 ) ); $p += 4; $result["status"] = $status; if ( $status!=SEARCHD_OK ) { list(,$len) = unpack ( "N*", substr ( $response, $p, 4 ) ); $p += 4; $message = substr ( $response, $p, $len ); $p += $len; if ( $status==SEARCHD_WARNING ) { $result["warning"] = $message; } else { $result["error"] = $message; continue; } } // read schema $fields = array (); $attrs = array (); list(,$nfields) = unpack ( "N*", substr ( $response, $p, 4 ) ); $p += 4; while ( $nfields-->0 && $p<$max ) { list(,$len) = unpack ( "N*", substr ( $response, $p, 4 ) ); $p += 4; $fields[] = substr ( $response, $p, $len ); $p += $len; } $result["fields"] = $fields; list(,$nattrs) = unpack ( "N*", substr ( $response, $p, 4 ) ); $p += 4; while ( $nattrs-->0 && $p<$max ) { list(,$len) = unpack ( "N*", substr ( $response, $p, 4 ) ); $p += 4; $attr = substr ( $response, $p, $len ); $p += $len; list(,$type) = unpack ( "N*", substr ( $response, $p, 4 ) ); $p += 4; $attrs[$attr] = $type; } $result["attrs"] = $attrs; // read match count list(,$count) = unpack ( "N*", substr ( $response, $p, 4 ) ); $p += 4; list(,$id64) = unpack ( "N*", 
substr ( $response, $p, 4 ) ); $p += 4; // read matches $idx = -1; while ( $count-->0 && $p<$max ) { // index into result array $idx++; // parse document id and weight if ( $id64 ) { $doc = sphUnpackU64 ( substr ( $response, $p, 8 ) ); $p += 8; list(,$weight) = unpack ( "N*", substr ( $response, $p, 4 ) ); $p += 4; } else { list ( $doc, $weight ) = array_values ( unpack ( "N*N*", substr ( $response, $p, 8 ) ) ); $p += 8; $doc = sphFixUint($doc); } $weight = sprintf ( "%u", $weight ); // create match entry if ( $this->_arrayresult ) $result["matches"][$idx] = array ( "id"=>$doc, "weight"=>$weight ); else $result["matches"][$doc]["weight"] = $weight; // parse and create attributes $attrvals = array (); foreach ( $attrs as $attr=>$type ) { // handle 64bit ints if ( $type==SPH_ATTR_BIGINT ) { $attrvals[$attr] = sphUnpackI64 ( substr ( $response, $p, 8 ) ); $p += 8; continue; } // handle floats if ( $type==SPH_ATTR_FLOAT ) { list(,$uval) = unpack ( "N*", substr ( $response, $p, 4 ) ); $p += 4; list(,$fval) = unpack ( "f*", pack ( "L", $uval ) ); $attrvals[$attr] = $fval; continue; } // handle everything else as unsigned ints list(,$val) = unpack ( "N*", substr ( $response, $p, 4 ) ); $p += 4; if ( $type==SPH_ATTR_MULTI ) { $attrvals[$attr] = array (); $nvalues = $val; while ( $nvalues-->0 && $p<$max ) { list(,$val) = unpack ( "N*", substr ( $response, $p, 4 ) ); $p += 4; $attrvals[$attr][] = sphFixUint($val); } } else if ( $type==SPH_ATTR_MULTI64 ) { $attrvals[$attr] = array (); $nvalues = $val; while ( $nvalues>0 && $p<$max ) { $attrvals[$attr][] = sphUnpackU64 ( substr ( $response, $p, 8 ) ); $p += 8; $nvalues -= 2; } } else if ( $type==SPH_ATTR_STRING ) { $attrvals[$attr] = substr ( $response, $p, $val ); $p += $val; } else { $attrvals[$attr] = sphFixUint($val); } } if ( $this->_arrayresult ) $result["matches"][$idx]["attrs"] = $attrvals; else $result["matches"][$doc]["attrs"] = $attrvals; } list ( $total, $total_found, $msecs, $words ) = array_values ( unpack ( "N*N*N*N*", substr ( $response, $p, 16 ) ) ); $result["total"] = sprintf ( "%u", $total ); $result["total_found"] = sprintf ( "%u", $total_found ); $result["time"] = sprintf ( "%.3f", $msecs/1000 ); $p += 16; while ( $words-->0 && $p<$max ) { list(,$len) = unpack ( "N*", substr ( $response, $p, 4 ) ); $p += 4; $word = substr ( $response, $p, $len ); $p += $len; list ( $docs, $hits ) = array_values ( unpack ( "N*N*", substr ( $response, $p, 8 ) ) ); $p += 8; $result["words"][$word] = array ( "docs"=>sprintf ( "%u", $docs ), "hits"=>sprintf ( "%u", $hits ) ); } } $this->_MBPop (); return $results; } ///////////////////////////////////////////////////////////////////////////// // excerpts generation ///////////////////////////////////////////////////////////////////////////// /// connect to searchd server, and generate exceprts (snippets) /// of given documents for given query. returns false on failure, /// an array of snippets on success function BuildExcerpts ( $docs, $index, $words, $opts=array() ) { assert ( is_array($docs) ); assert ( is_string($index) ); assert ( is_string($words) ); assert ( is_array($opts) ); $this->_MBPush (); if (!( $fp = $this->_Connect() )) { $this->_MBPop(); return false; } ///////////////// // fixup options ///////////////// if ( !isset($opts["before_match"]) ) $opts["before_match"] = ""; if ( !isset($opts["after_match"]) ) $opts["after_match"] = ""; if ( !isset($opts["chunk_separator"]) ) $opts["chunk_separator"] = " ... 
"; if ( !isset($opts["limit"]) ) $opts["limit"] = 256; if ( !isset($opts["limit_passages"]) ) $opts["limit_passages"] = 0; if ( !isset($opts["limit_words"]) ) $opts["limit_words"] = 0; if ( !isset($opts["around"]) ) $opts["around"] = 5; if ( !isset($opts["exact_phrase"]) ) $opts["exact_phrase"] = false; if ( !isset($opts["single_passage"]) ) $opts["single_passage"] = false; if ( !isset($opts["use_boundaries"]) ) $opts["use_boundaries"] = false; if ( !isset($opts["weight_order"]) ) $opts["weight_order"] = false; if ( !isset($opts["query_mode"]) ) $opts["query_mode"] = false; if ( !isset($opts["force_all_words"]) ) $opts["force_all_words"] = false; if ( !isset($opts["start_passage_id"]) ) $opts["start_passage_id"] = 1; if ( !isset($opts["load_files"]) ) $opts["load_files"] = false; if ( !isset($opts["html_strip_mode"]) ) $opts["html_strip_mode"] = "index"; if ( !isset($opts["allow_empty"]) ) $opts["allow_empty"] = false; if ( !isset($opts["passage_boundary"]) ) $opts["passage_boundary"] = "none"; if ( !isset($opts["emit_zones"]) ) $opts["emit_zones"] = false; if ( !isset($opts["load_files_scattered"]) ) $opts["load_files_scattered"] = false; ///////////////// // build request ///////////////// // v.1.2 req $flags = 1; // remove spaces if ( $opts["exact_phrase"] ) $flags |= 2; if ( $opts["single_passage"] ) $flags |= 4; if ( $opts["use_boundaries"] ) $flags |= 8; if ( $opts["weight_order"] ) $flags |= 16; if ( $opts["query_mode"] ) $flags |= 32; if ( $opts["force_all_words"] ) $flags |= 64; if ( $opts["load_files"] ) $flags |= 128; if ( $opts["allow_empty"] ) $flags |= 256; if ( $opts["emit_zones"] ) $flags |= 512; if ( $opts["load_files_scattered"] ) $flags |= 1024; $req = pack ( "NN", 0, $flags ); // mode=0, flags=$flags $req .= pack ( "N", strlen($index) ) . $index; // req index $req .= pack ( "N", strlen($words) ) . $words; // req words // options $req .= pack ( "N", strlen($opts["before_match"]) ) . $opts["before_match"]; $req .= pack ( "N", strlen($opts["after_match"]) ) . $opts["after_match"]; $req .= pack ( "N", strlen($opts["chunk_separator"]) ) . $opts["chunk_separator"]; $req .= pack ( "NN", (int)$opts["limit"], (int)$opts["around"] ); $req .= pack ( "NNN", (int)$opts["limit_passages"], (int)$opts["limit_words"], (int)$opts["start_passage_id"] ); // v.1.2 $req .= pack ( "N", strlen($opts["html_strip_mode"]) ) . $opts["html_strip_mode"]; $req .= pack ( "N", strlen($opts["passage_boundary"]) ) . $opts["passage_boundary"]; // documents $req .= pack ( "N", count($docs) ); foreach ( $docs as $doc ) { assert ( is_string($doc) ); $req .= pack ( "N", strlen($doc) ) . $doc; } //////////////////////////// // send query, get response //////////////////////////// $len = strlen($req); $req = pack ( "nnN", SEARCHD_COMMAND_EXCERPT, VER_COMMAND_EXCERPT, $len ) . $req; // add header if ( !( $this->_Send ( $fp, $req, $len+8 ) ) || !( $response = $this->_GetResponse ( $fp, VER_COMMAND_EXCERPT ) ) ) { $this->_MBPop (); return false; } ////////////////// // parse response ////////////////// $pos = 0; $res = array (); $rlen = strlen($response); for ( $i=0; $i $rlen ) { $this->_error = "incomplete reply"; $this->_MBPop (); return false; } $res[] = $len ? 
substr ( $response, $pos, $len ) : ""; $pos += $len; } $this->_MBPop (); return $res; } ///////////////////////////////////////////////////////////////////////////// // keyword generation ///////////////////////////////////////////////////////////////////////////// /// connect to searchd server, and generate keyword list for a given query /// returns false on failure, /// an array of words on success function BuildKeywords ( $query, $index, $hits ) { assert ( is_string($query) ); assert ( is_string($index) ); assert ( is_bool($hits) ); $this->_MBPush (); if (!( $fp = $this->_Connect() )) { $this->_MBPop(); return false; } ///////////////// // build request ///////////////// // v.1.0 req $req = pack ( "N", strlen($query) ) . $query; // req query $req .= pack ( "N", strlen($index) ) . $index; // req index $req .= pack ( "N", (int)$hits ); //////////////////////////// // send query, get response //////////////////////////// $len = strlen($req); $req = pack ( "nnN", SEARCHD_COMMAND_KEYWORDS, VER_COMMAND_KEYWORDS, $len ) . $req; // add header if ( !( $this->_Send ( $fp, $req, $len+8 ) ) || !( $response = $this->_GetResponse ( $fp, VER_COMMAND_KEYWORDS ) ) ) { $this->_MBPop (); return false; } ////////////////// // parse response ////////////////// $pos = 0; $res = array (); $rlen = strlen($response); list(,$nwords) = unpack ( "N*", substr ( $response, $pos, 4 ) ); $pos += 4; for ( $i=0; $i<$nwords; $i++ ) { list(,$len) = unpack ( "N*", substr ( $response, $pos, 4 ) ); $pos += 4; $tokenized = $len ? substr ( $response, $pos, $len ) : ""; $pos += $len; list(,$len) = unpack ( "N*", substr ( $response, $pos, 4 ) ); $pos += 4; $normalized = $len ? substr ( $response, $pos, $len ) : ""; $pos += $len; $res[] = array ( "tokenized"=>$tokenized, "normalized"=>$normalized ); if ( $hits ) { list($ndocs,$nhits) = array_values ( unpack ( "N*N*", substr ( $response, $pos, 8 ) ) ); $pos += 8; $res [$i]["docs"] = $ndocs; $res [$i]["hits"] = $nhits; } if ( $pos > $rlen ) { $this->_error = "incomplete reply"; $this->_MBPop (); return false; } } $this->_MBPop (); return $res; } function EscapeString ( $string ) { $from = array ( '\\', '(',')','|','-','!','@','~','"','&', '/', '^', '$', '=' ); $to = array ( '\\\\', '\(','\)','\|','\-','\!','\@','\~','\"', '\&', '\/', '\^', '\$', '\=' ); return str_replace ( $from, $to, $string ); } ///////////////////////////////////////////////////////////////////////////// // attribute updates ///////////////////////////////////////////////////////////////////////////// /// batch update given attributes in given rows in given indexes /// returns amount of updated documents (0 or more) on success, or -1 on failure function UpdateAttributes ( $index, $attrs, $values, $mva=false ) { // verify everything assert ( is_string($index) ); assert ( is_bool($mva) ); assert ( is_array($attrs) ); foreach ( $attrs as $attr ) assert ( is_string($attr) ); assert ( is_array($values) ); foreach ( $values as $id=>$entry ) { assert ( is_numeric($id) ); assert ( is_array($entry) ); assert ( count($entry)==count($attrs) ); foreach ( $entry as $v ) { if ( $mva ) { assert ( is_array($v) ); foreach ( $v as $vv ) assert ( is_int($vv) ); } else assert ( is_int($v) ); } } // build request $this->_MBPush (); $req = pack ( "N", strlen($index) ) . $index; $req .= pack ( "N", count($attrs) ); foreach ( $attrs as $attr ) { $req .= pack ( "N", strlen($attr) ) . $attr; $req .= pack ( "N", $mva ? 
1 : 0 ); } $req .= pack ( "N", count($values) ); foreach ( $values as $id=>$entry ) { $req .= sphPackU64 ( $id ); foreach ( $entry as $v ) { $req .= pack ( "N", $mva ? count($v) : $v ); if ( $mva ) foreach ( $v as $vv ) $req .= pack ( "N", $vv ); } } // connect, send query, get response if (!( $fp = $this->_Connect() )) { $this->_MBPop (); return -1; } $len = strlen($req); $req = pack ( "nnN", SEARCHD_COMMAND_UPDATE, VER_COMMAND_UPDATE, $len ) . $req; // add header if ( !$this->_Send ( $fp, $req, $len+8 ) ) { $this->_MBPop (); return -1; } if (!( $response = $this->_GetResponse ( $fp, VER_COMMAND_UPDATE ) )) { $this->_MBPop (); return -1; } // parse response list(,$updated) = unpack ( "N*", substr ( $response, 0, 4 ) ); $this->_MBPop (); return $updated; } ///////////////////////////////////////////////////////////////////////////// // persistent connections ///////////////////////////////////////////////////////////////////////////// function Open() { if ( $this->_socket !== false ) { $this->_error = 'already connected'; return false; } if ( !$fp = $this->_Connect() ) return false; // command, command version = 0, body length = 4, body = 1 $req = pack ( "nnNN", SEARCHD_COMMAND_PERSIST, 0, 4, 1 ); if ( !$this->_Send ( $fp, $req, 12 ) ) return false; $this->_socket = $fp; return true; } function Close() { if ( $this->_socket === false ) { $this->_error = 'not connected'; return false; } fclose ( $this->_socket ); $this->_socket = false; return true; } ////////////////////////////////////////////////////////////////////////// // status ////////////////////////////////////////////////////////////////////////// function Status () { $this->_MBPush (); if (!( $fp = $this->_Connect() )) { $this->_MBPop(); return false; } $req = pack ( "nnNN", SEARCHD_COMMAND_STATUS, VER_COMMAND_STATUS, 4, 1 ); // len=4, body=1 if ( !( $this->_Send ( $fp, $req, 12 ) ) || !( $response = $this->_GetResponse ( $fp, VER_COMMAND_STATUS ) ) ) { $this->_MBPop (); return false; } $res = substr ( $response, 4 ); // just ignore length, error handling, etc $p = 0; list ( $rows, $cols ) = array_values ( unpack ( "N*N*", substr ( $response, $p, 8 ) ) ); $p += 8; $res = array(); for ( $i=0; $i<$rows; $i++ ) for ( $j=0; $j<$cols; $j++ ) { list(,$len) = unpack ( "N*", substr ( $response, $p, 4 ) ); $p += 4; $res[$i][] = substr ( $response, $p, $len ); $p += $len; } $this->_MBPop (); return $res; } ////////////////////////////////////////////////////////////////////////// // flush ////////////////////////////////////////////////////////////////////////// function FlushAttributes () { $this->_MBPush (); if (!( $fp = $this->_Connect() )) { $this->_MBPop(); return -1; } $req = pack ( "nnN", SEARCHD_COMMAND_FLUSHATTRS, VER_COMMAND_FLUSHATTRS, 0 ); // len=0 if ( !( $this->_Send ( $fp, $req, 8 ) ) || !( $response = $this->_GetResponse ( $fp, VER_COMMAND_FLUSHATTRS ) ) ) { $this->_MBPop (); return -1; } $tag = -1; if ( strlen($response)==4 ) list(,$tag) = unpack ( "N*", $response ); else $this->_error = "unexpected response length"; $this->_MBPop (); return $tag; } } // // $Id: sphinxapi.php 3087 2012-01-30 23:07:35Z shodan $ // sphinx-2.0.4-release/api/test2.php0000644000176700017710000000203510717301162016264 0ustar deogardeogar "", "after_match" => "", "chunk_separator" => " ... ", "limit" => 60, "around" => 3, ); foreach ( array(0,1) as $exact ) { $opts["exact_phrase"] = $exact; print "exact_phrase=$exact\n"; $cl = new SphinxClient (); $res = $cl->BuildExcerpts ( $docs, $index, $words, $opts ); if ( !$res ) { die ( "ERROR: " . 
$cl->GetLastError() . ".\n" ); } else { $n = 0; foreach ( $res as $entry ) { $n++; print "n=$n, res=$entry\n"; } print "\n"; } } // // $Id: test2.php 910 2007-11-16 11:43:46Z shodan $ // ?>sphinx-2.0.4-release/api/sphinxapi.py0000644000176700017710000007332511711621267017107 0ustar deogardeogar# # $Id: sphinxapi.py 3087 2012-01-30 23:07:35Z shodan $ # # Python version of Sphinx searchd client (Python API) # # Copyright (c) 2006, Mike Osadnik # Copyright (c) 2006-2012, Andrew Aksyonoff # Copyright (c) 2008-2012, Sphinx Technologies Inc # All rights reserved # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License. You should have # received a copy of the GPL license along with this program; if you # did not, you can find it at http://www.gnu.org/ # import sys import select import socket import re from struct import * # known searchd commands SEARCHD_COMMAND_SEARCH = 0 SEARCHD_COMMAND_EXCERPT = 1 SEARCHD_COMMAND_UPDATE = 2 SEARCHD_COMMAND_KEYWORDS = 3 SEARCHD_COMMAND_PERSIST = 4 SEARCHD_COMMAND_STATUS = 5 SEARCHD_COMMAND_FLUSHATTRS = 7 # current client-side command implementation versions VER_COMMAND_SEARCH = 0x119 VER_COMMAND_EXCERPT = 0x104 VER_COMMAND_UPDATE = 0x102 VER_COMMAND_KEYWORDS = 0x100 VER_COMMAND_STATUS = 0x100 VER_COMMAND_FLUSHATTRS = 0x100 # known searchd status codes SEARCHD_OK = 0 SEARCHD_ERROR = 1 SEARCHD_RETRY = 2 SEARCHD_WARNING = 3 # known match modes SPH_MATCH_ALL = 0 SPH_MATCH_ANY = 1 SPH_MATCH_PHRASE = 2 SPH_MATCH_BOOLEAN = 3 SPH_MATCH_EXTENDED = 4 SPH_MATCH_FULLSCAN = 5 SPH_MATCH_EXTENDED2 = 6 # known ranking modes (extended2 mode only) SPH_RANK_PROXIMITY_BM25 = 0 # default mode, phrase proximity major factor and BM25 minor one SPH_RANK_BM25 = 1 # statistical mode, BM25 ranking only (faster but worse quality) SPH_RANK_NONE = 2 # no ranking, all matches get a weight of 1 SPH_RANK_WORDCOUNT = 3 # simple word-count weighting, rank is a weighted sum of per-field keyword occurence counts SPH_RANK_PROXIMITY = 4 SPH_RANK_MATCHANY = 5 SPH_RANK_FIELDMASK = 6 SPH_RANK_SPH04 = 7 SPH_RANK_EXPR = 8 SPH_RANK_TOTAL = 9 # known sort modes SPH_SORT_RELEVANCE = 0 SPH_SORT_ATTR_DESC = 1 SPH_SORT_ATTR_ASC = 2 SPH_SORT_TIME_SEGMENTS = 3 SPH_SORT_EXTENDED = 4 SPH_SORT_EXPR = 5 # known filter types SPH_FILTER_VALUES = 0 SPH_FILTER_RANGE = 1 SPH_FILTER_FLOATRANGE = 2 # known attribute types SPH_ATTR_NONE = 0 SPH_ATTR_INTEGER = 1 SPH_ATTR_TIMESTAMP = 2 SPH_ATTR_ORDINAL = 3 SPH_ATTR_BOOL = 4 SPH_ATTR_FLOAT = 5 SPH_ATTR_BIGINT = 6 SPH_ATTR_STRING = 7 SPH_ATTR_MULTI = 0X40000001L SPH_ATTR_MULTI64 = 0X40000002L SPH_ATTR_TYPES = (SPH_ATTR_NONE, SPH_ATTR_INTEGER, SPH_ATTR_TIMESTAMP, SPH_ATTR_ORDINAL, SPH_ATTR_BOOL, SPH_ATTR_FLOAT, SPH_ATTR_BIGINT, SPH_ATTR_STRING, SPH_ATTR_MULTI, SPH_ATTR_MULTI64) # known grouping functions SPH_GROUPBY_DAY = 0 SPH_GROUPBY_WEEK = 1 SPH_GROUPBY_MONTH = 2 SPH_GROUPBY_YEAR = 3 SPH_GROUPBY_ATTR = 4 SPH_GROUPBY_ATTRPAIR = 5 class SphinxClient: def __init__ (self): """ Create a new client object, and fill defaults. 
""" self._host = 'localhost' # searchd host (default is "localhost") self._port = 9312 # searchd port (default is 9312) self._path = None # searchd unix-domain socket path self._socket = None self._offset = 0 # how much records to seek from result-set start (default is 0) self._limit = 20 # how much records to return from result-set starting at offset (default is 20) self._mode = SPH_MATCH_ALL # query matching mode (default is SPH_MATCH_ALL) self._weights = [] # per-field weights (default is 1 for all fields) self._sort = SPH_SORT_RELEVANCE # match sorting mode (default is SPH_SORT_RELEVANCE) self._sortby = '' # attribute to sort by (defualt is "") self._min_id = 0 # min ID to match (default is 0) self._max_id = 0 # max ID to match (default is UINT_MAX) self._filters = [] # search filters self._groupby = '' # group-by attribute name self._groupfunc = SPH_GROUPBY_DAY # group-by function (to pre-process group-by attribute value with) self._groupsort = '@group desc' # group-by sorting clause (to sort groups in result set with) self._groupdistinct = '' # group-by count-distinct attribute self._maxmatches = 1000 # max matches to retrieve self._cutoff = 0 # cutoff to stop searching at self._retrycount = 0 # distributed retry count self._retrydelay = 0 # distributed retry delay self._anchor = {} # geographical anchor point self._indexweights = {} # per-index weights self._ranker = SPH_RANK_PROXIMITY_BM25 # ranking mode self._rankexpr = '' # ranking expression for SPH_RANK_EXPR self._maxquerytime = 0 # max query time, milliseconds (default is 0, do not limit) self._timeout = 1.0 # connection timeout self._fieldweights = {} # per-field-name weights self._overrides = {} # per-query attribute values overrides self._select = '*' # select-list (attributes or expressions, with optional aliases) self._error = '' # last error message self._warning = '' # last warning message self._reqs = [] # requests array for multi-query def __del__ (self): if self._socket: self._socket.close() def GetLastError (self): """ Get last error message (string). """ return self._error def GetLastWarning (self): """ Get last warning message (string). """ return self._warning def SetServer (self, host, port = None): """ Set searchd server host and port. """ assert(isinstance(host, str)) if host.startswith('/'): self._path = host return elif host.startswith('unix://'): self._path = host[7:] return assert(isinstance(port, int)) self._host = host self._port = port self._path = None def SetConnectTimeout ( self, timeout ): """ Set connection timeout ( float second ) """ assert (isinstance(timeout, float)) # set timeout to 0 make connaection non-blocking that is wrong so timeout got clipped to reasonable minimum self._timeout = max ( 0.001, timeout ) def _Connect (self): """ INTERNAL METHOD, DO NOT CALL. Connects to searchd server. """ if self._socket: # we have a socket, but is it still alive? 
sr, sw, _ = select.select ( [self._socket], [self._socket], [], 0 ) # this is how alive socket should look if len(sr)==0 and len(sw)==1: return self._socket # oops, looks like it was closed, lets reopen self._socket.close() self._socket = None try: if self._path: af = socket.AF_UNIX addr = self._path desc = self._path else: af = socket.AF_INET addr = ( self._host, self._port ) desc = '%s;%s' % addr sock = socket.socket ( af, socket.SOCK_STREAM ) sock.settimeout ( self._timeout ) sock.connect ( addr ) except socket.error, msg: if sock: sock.close() self._error = 'connection to %s failed (%s)' % ( desc, msg ) return v = unpack('>L', sock.recv(4)) if v<1: sock.close() self._error = 'expected searchd protocol version, got %s' % v return # all ok, send my version sock.send(pack('>L', 1)) return sock def _GetResponse (self, sock, client_ver): """ INTERNAL METHOD, DO NOT CALL. Gets and checks response packet from searchd server. """ (status, ver, length) = unpack('>2HL', sock.recv(8)) response = '' left = length while left>0: chunk = sock.recv(left) if chunk: response += chunk left -= len(chunk) else: break if not self._socket: sock.close() # check response read = len(response) if not response or read!=length: if length: self._error = 'failed to read searchd response (status=%s, ver=%s, len=%s, read=%s)' \ % (status, ver, length, read) else: self._error = 'received zero-sized searchd response' return None # check status if status==SEARCHD_WARNING: wend = 4 + unpack ( '>L', response[0:4] )[0] self._warning = response[4:wend] return response[wend:] if status==SEARCHD_ERROR: self._error = 'searchd error: '+response[4:] return None if status==SEARCHD_RETRY: self._error = 'temporary searchd error: '+response[4:] return None if status!=SEARCHD_OK: self._error = 'unknown status code %d' % status return None # check version if ver>8, ver&0xff, client_ver>>8, client_ver&0xff) return response def SetLimits (self, offset, limit, maxmatches=0, cutoff=0): """ Set offset and count into result set, and optionally set max-matches and cutoff limits. """ assert ( type(offset) in [int,long] and 0<=offset<16777216 ) assert ( type(limit) in [int,long] and 0=0) self._offset = offset self._limit = limit if maxmatches>0: self._maxmatches = maxmatches if cutoff>=0: self._cutoff = cutoff def SetMaxQueryTime (self, maxquerytime): """ Set maximum query time, in milliseconds, per-index. 0 means 'do not limit'. """ assert(isinstance(maxquerytime,int) and maxquerytime>0) self._maxquerytime = maxquerytime def SetMatchMode (self, mode): """ Set matching mode. """ assert(mode in [SPH_MATCH_ALL, SPH_MATCH_ANY, SPH_MATCH_PHRASE, SPH_MATCH_BOOLEAN, SPH_MATCH_EXTENDED, SPH_MATCH_FULLSCAN, SPH_MATCH_EXTENDED2]) self._mode = mode def SetRankingMode ( self, ranker, rankexpr='' ): """ Set ranking mode. """ assert(ranker>=0 and ranker=0) assert(isinstance(delay,int) and delay>=0) self._retrycount = count self._retrydelay = delay def SetOverride (self, name, type, values): assert(isinstance(name, str)) assert(type in SPH_ATTR_TYPES) assert(isinstance(values, dict)) self._overrides[name] = {'name': name, 'type': type, 'values': values} def SetSelect (self, select): assert(isinstance(select, str)) self._select = select def ResetOverrides (self): self._overrides = {} def ResetFilters (self): """ Clear all filters (for multi-queries). """ self._filters = [] self._anchor = {} def ResetGroupBy (self): """ Clear groupby settings (for multi-queries). 
""" self._groupby = '' self._groupfunc = SPH_GROUPBY_DAY self._groupsort = '@group desc' self._groupdistinct = '' def Query (self, query, index='*', comment=''): """ Connect to searchd server and run given search query. Returns None on failure; result set hash on success (see documentation for details). """ assert(len(self._reqs)==0) self.AddQuery(query,index,comment) results = self.RunQueries() self._reqs = [] # we won't re-run erroneous batch if not results or len(results)==0: return None self._error = results[0]['error'] self._warning = results[0]['warning'] if results[0]['status'] == SEARCHD_ERROR: return None return results[0] def AddQuery (self, query, index='*', comment=''): """ Add query to batch. """ # build request req = [] req.append(pack('>4L', self._offset, self._limit, self._mode, self._ranker)) if self._ranker==SPH_RANK_EXPR: req.append(pack('>L', len(self._rankexpr))) req.append(self._rankexpr) req.append(pack('>L', self._sort)) req.append(pack('>L', len(self._sortby))) req.append(self._sortby) if isinstance(query,unicode): query = query.encode('utf-8') assert(isinstance(query,str)) req.append(pack('>L', len(query))) req.append(query) req.append(pack('>L', len(self._weights))) for w in self._weights: req.append(pack('>L', w)) req.append(pack('>L', len(index))) req.append(index) req.append(pack('>L',1)) # id64 range marker req.append(pack('>Q', self._min_id)) req.append(pack('>Q', self._max_id)) # filters req.append ( pack ( '>L', len(self._filters) ) ) for f in self._filters: req.append ( pack ( '>L', len(f['attr'])) + f['attr']) filtertype = f['type'] req.append ( pack ( '>L', filtertype)) if filtertype == SPH_FILTER_VALUES: req.append ( pack ('>L', len(f['values']))) for val in f['values']: req.append ( pack ('>q', val)) elif filtertype == SPH_FILTER_RANGE: req.append ( pack ('>2q', f['min'], f['max'])) elif filtertype == SPH_FILTER_FLOATRANGE: req.append ( pack ('>2f', f['min'], f['max'])) req.append ( pack ( '>L', f['exclude'] ) ) # group-by, max-matches, group-sort req.append ( pack ( '>2L', self._groupfunc, len(self._groupby) ) ) req.append ( self._groupby ) req.append ( pack ( '>2L', self._maxmatches, len(self._groupsort) ) ) req.append ( self._groupsort ) req.append ( pack ( '>LLL', self._cutoff, self._retrycount, self._retrydelay)) req.append ( pack ( '>L', len(self._groupdistinct))) req.append ( self._groupdistinct) # anchor point if len(self._anchor) == 0: req.append ( pack ('>L', 0)) else: attrlat, attrlong = self._anchor['attrlat'], self._anchor['attrlong'] latitude, longitude = self._anchor['lat'], self._anchor['long'] req.append ( pack ('>L', 1)) req.append ( pack ('>L', len(attrlat)) + attrlat) req.append ( pack ('>L', len(attrlong)) + attrlong) req.append ( pack ('>f', latitude) + pack ('>f', longitude)) # per-index weights req.append ( pack ('>L',len(self._indexweights))) for indx,weight in self._indexweights.items(): req.append ( pack ('>L',len(indx)) + indx + pack ('>L',weight)) # max query time req.append ( pack ('>L', self._maxquerytime) ) # per-field weights req.append ( pack ('>L',len(self._fieldweights) ) ) for field,weight in self._fieldweights.items(): req.append ( pack ('>L',len(field)) + field + pack ('>L',weight) ) # comment req.append ( pack('>L',len(comment)) + comment ) # attribute overrides req.append ( pack('>L', len(self._overrides)) ) for v in self._overrides.values(): req.extend ( ( pack('>L', len(v['name'])), v['name'] ) ) req.append ( pack('>LL', v['type'], len(v['values'])) ) for id, value in v['values'].iteritems(): req.append ( 
pack('>Q', id) ) if v['type'] == SPH_ATTR_FLOAT: req.append ( pack('>f', value) ) elif v['type'] == SPH_ATTR_BIGINT: req.append ( pack('>q', value) ) else: req.append ( pack('>l', value) ) # select-list req.append ( pack('>L', len(self._select)) ) req.append ( self._select ) # send query, get response req = ''.join(req) self._reqs.append(req) return def RunQueries (self): """ Run queries batch. Returns None on network IO failure; or an array of result set hashes on success. """ if len(self._reqs)==0: self._error = 'no queries defined, issue AddQuery() first' return None sock = self._Connect() if not sock: return None req = ''.join(self._reqs) length = len(req)+8 req = pack('>HHLLL', SEARCHD_COMMAND_SEARCH, VER_COMMAND_SEARCH, length, 0, len(self._reqs))+req sock.send(req) response = self._GetResponse(sock, VER_COMMAND_SEARCH) if not response: return None nreqs = len(self._reqs) # parse response max_ = len(response) p = 0 results = [] for i in range(0,nreqs,1): result = {} results.append(result) result['error'] = '' result['warning'] = '' status = unpack('>L', response[p:p+4])[0] p += 4 result['status'] = status if status != SEARCHD_OK: length = unpack('>L', response[p:p+4])[0] p += 4 message = response[p:p+length] p += length if status == SEARCHD_WARNING: result['warning'] = message else: result['error'] = message continue # read schema fields = [] attrs = [] nfields = unpack('>L', response[p:p+4])[0] p += 4 while nfields>0 and pL', response[p:p+4])[0] p += 4 fields.append(response[p:p+length]) p += length result['fields'] = fields nattrs = unpack('>L', response[p:p+4])[0] p += 4 while nattrs>0 and pL', response[p:p+4])[0] p += 4 attr = response[p:p+length] p += length type_ = unpack('>L', response[p:p+4])[0] p += 4 attrs.append([attr,type_]) result['attrs'] = attrs # read match count count = unpack('>L', response[p:p+4])[0] p += 4 id64 = unpack('>L', response[p:p+4])[0] p += 4 # read matches result['matches'] = [] while count>0 and pQL', response[p:p+12]) p += 12 else: doc, weight = unpack('>2L', response[p:p+8]) p += 8 match = { 'id':doc, 'weight':weight, 'attrs':{} } for i in range(len(attrs)): if attrs[i][1] == SPH_ATTR_FLOAT: match['attrs'][attrs[i][0]] = unpack('>f', response[p:p+4])[0] elif attrs[i][1] == SPH_ATTR_BIGINT: match['attrs'][attrs[i][0]] = unpack('>q', response[p:p+8])[0] p += 4 elif attrs[i][1] == SPH_ATTR_STRING: slen = unpack('>L', response[p:p+4])[0] p += 4 match['attrs'][attrs[i][0]] = '' if slen>0: match['attrs'][attrs[i][0]] = response[p:p+slen] p += slen-4 elif attrs[i][1] == SPH_ATTR_MULTI: match['attrs'][attrs[i][0]] = [] nvals = unpack('>L', response[p:p+4])[0] p += 4 for n in range(0,nvals,1): match['attrs'][attrs[i][0]].append(unpack('>L', response[p:p+4])[0]) p += 4 p -= 4 elif attrs[i][1] == SPH_ATTR_MULTI64: match['attrs'][attrs[i][0]] = [] nvals = unpack('>L', response[p:p+4])[0] nvals = nvals/2 p += 4 for n in range(0,nvals,1): match['attrs'][attrs[i][0]].append(unpack('>q', response[p:p+8])[0]) p += 8 p -= 4 else: match['attrs'][attrs[i][0]] = unpack('>L', response[p:p+4])[0] p += 4 result['matches'].append ( match ) result['total'], result['total_found'], result['time'], words = unpack('>4L', response[p:p+16]) result['time'] = '%.3f' % (result['time']/1000.0) p += 16 result['words'] = [] while words>0: words -= 1 length = unpack('>L', response[p:p+4])[0] p += 4 word = response[p:p+length] p += length docs, hits = unpack('>2L', response[p:p+8]) p += 8 result['words'].append({'word':word, 'docs':docs, 'hits':hits}) self._reqs = [] return results def 
BuildExcerpts (self, docs, index, words, opts=None): """ Connect to searchd server and generate exceprts from given documents. """ if not opts: opts = {} if isinstance(words,unicode): words = words.encode('utf-8') assert(isinstance(docs, list)) assert(isinstance(index, str)) assert(isinstance(words, str)) assert(isinstance(opts, dict)) sock = self._Connect() if not sock: return None # fixup options opts.setdefault('before_match', '') opts.setdefault('after_match', '') opts.setdefault('chunk_separator', ' ... ') opts.setdefault('html_strip_mode', 'index') opts.setdefault('limit', 256) opts.setdefault('limit_passages', 0) opts.setdefault('limit_words', 0) opts.setdefault('around', 5) opts.setdefault('start_passage_id', 1) opts.setdefault('passage_boundary', 'none') # build request # v.1.0 req flags = 1 # (remove spaces) if opts.get('exact_phrase'): flags |= 2 if opts.get('single_passage'): flags |= 4 if opts.get('use_boundaries'): flags |= 8 if opts.get('weight_order'): flags |= 16 if opts.get('query_mode'): flags |= 32 if opts.get('force_all_words'): flags |= 64 if opts.get('load_files'): flags |= 128 if opts.get('allow_empty'): flags |= 256 if opts.get('emit_zones'): flags |= 512 if opts.get('load_files_scattered'): flags |= 1024 # mode=0, flags req = [pack('>2L', 0, flags)] # req index req.append(pack('>L', len(index))) req.append(index) # req words req.append(pack('>L', len(words))) req.append(words) # options req.append(pack('>L', len(opts['before_match']))) req.append(opts['before_match']) req.append(pack('>L', len(opts['after_match']))) req.append(opts['after_match']) req.append(pack('>L', len(opts['chunk_separator']))) req.append(opts['chunk_separator']) req.append(pack('>L', int(opts['limit']))) req.append(pack('>L', int(opts['around']))) req.append(pack('>L', int(opts['limit_passages']))) req.append(pack('>L', int(opts['limit_words']))) req.append(pack('>L', int(opts['start_passage_id']))) req.append(pack('>L', len(opts['html_strip_mode']))) req.append((opts['html_strip_mode'])) req.append(pack('>L', len(opts['passage_boundary']))) req.append((opts['passage_boundary'])) # documents req.append(pack('>L', len(docs))) for doc in docs: if isinstance(doc,unicode): doc = doc.encode('utf-8') assert(isinstance(doc, str)) req.append(pack('>L', len(doc))) req.append(doc) req = ''.join(req) # send query, get response length = len(req) # add header req = pack('>2HL', SEARCHD_COMMAND_EXCERPT, VER_COMMAND_EXCERPT, length)+req wrote = sock.send(req) response = self._GetResponse(sock, VER_COMMAND_EXCERPT ) if not response: return [] # parse response pos = 0 res = [] rlen = len(response) for i in range(len(docs)): length = unpack('>L', response[pos:pos+4])[0] pos += 4 if pos+length > rlen: self._error = 'incomplete reply' return [] res.append(response[pos:pos+length]) pos += length return res def UpdateAttributes ( self, index, attrs, values, mva=False ): """ Update given attribute values on given documents in given indexes. Returns amount of updated documents (0 or more) on success, or -1 on failure. 'attrs' must be a list of strings. 'values' must be a dict with int key (document ID) and list of int values (new attribute values). optional boolean parameter 'mva' points that there is update of MVA attributes. In this case the 'values' must be a dict with int key (document ID) and list of lists of int values (new MVA attribute values). 
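		A hedged MVA sketch (the index and attribute names are illustrative
		assumptions only):
			res = cl.UpdateAttributes ( 'test1', [ 'tag_ids' ], { 2:[[123,456]] }, True )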
Example: res = cl.UpdateAttributes ( 'test1', [ 'group_id', 'date_added' ], { 2:[123,1000000000], 4:[456,1234567890] } ) """ assert ( isinstance ( index, str ) ) assert ( isinstance ( attrs, list ) ) assert ( isinstance ( values, dict ) ) for attr in attrs: assert ( isinstance ( attr, str ) ) for docid, entry in values.items(): AssertUInt32(docid) assert ( isinstance ( entry, list ) ) assert ( len(attrs)==len(entry) ) for val in entry: if mva: assert ( isinstance ( val, list ) ) for vals in val: AssertInt32(vals) else: AssertInt32(val) # build request req = [ pack('>L',len(index)), index ] req.append ( pack('>L',len(attrs)) ) mva_attr = 0 if mva: mva_attr = 1 for attr in attrs: req.append ( pack('>L',len(attr)) + attr ) req.append ( pack('>L', mva_attr ) ) req.append ( pack('>L',len(values)) ) for docid, entry in values.items(): req.append ( pack('>Q',docid) ) for val in entry: val_len = val if mva: val_len = len ( val ) req.append ( pack('>L',val_len ) ) if mva: for vals in val: req.append ( pack ('>L',vals) ) # connect, send query, get response sock = self._Connect() if not sock: return None req = ''.join(req) length = len(req) req = pack ( '>2HL', SEARCHD_COMMAND_UPDATE, VER_COMMAND_UPDATE, length ) + req wrote = sock.send ( req ) response = self._GetResponse ( sock, VER_COMMAND_UPDATE ) if not response: return -1 # parse response updated = unpack ( '>L', response[0:4] )[0] return updated def BuildKeywords ( self, query, index, hits ): """ Connect to searchd server, and generate keywords list for a given query. Returns None on failure, or a list of keywords on success. """ assert ( isinstance ( query, str ) ) assert ( isinstance ( index, str ) ) assert ( isinstance ( hits, int ) ) # build request req = [ pack ( '>L', len(query) ) + query ] req.append ( pack ( '>L', len(index) ) + index ) req.append ( pack ( '>L', hits ) ) # connect, send query, get response sock = self._Connect() if not sock: return None req = ''.join(req) length = len(req) req = pack ( '>2HL', SEARCHD_COMMAND_KEYWORDS, VER_COMMAND_KEYWORDS, length ) + req wrote = sock.send ( req ) response = self._GetResponse ( sock, VER_COMMAND_KEYWORDS ) if not response: return None # parse response res = [] nwords = unpack ( '>L', response[0:4] )[0] p = 4 max_ = len(response) while nwords>0 and pL', response[p:p+4] )[0] p += 4 tokenized = response[p:p+length] p += length length = unpack ( '>L', response[p:p+4] )[0] p += 4 normalized = response[p:p+length] p += length entry = { 'tokenized':tokenized, 'normalized':normalized } if hits: entry['docs'], entry['hits'] = unpack ( '>2L', response[p:p+8] ) p += 8 res.append ( entry ) if nwords>0 or p>max_: self._error = 'incomplete reply' return None return res def Status ( self ): """ Get the status """ # connect, send query, get response sock = self._Connect() if not sock: return None req = pack ( '>2HLL', SEARCHD_COMMAND_STATUS, VER_COMMAND_STATUS, 4, 1 ) wrote = sock.send ( req ) response = self._GetResponse ( sock, VER_COMMAND_STATUS ) if not response: return None # parse response res = [] p = 8 max_ = len(response) while pL', response[p:p+4] )[0] k = response[p+4:p+length+4] p += 4+length length = unpack ( '>L', response[p:p+4] )[0] v = response[p+4:p+length+4] p += 4+length res += [[k, v]] return res ### persistent connections def Open(self): if self._socket: self._error = 'already connected' return None server = self._Connect() if not server: return None # command, command version = 0, body length = 4, body = 1 request = pack ( '>hhII', SEARCHD_COMMAND_PERSIST, 0, 4, 1 ) server.send 
( request ) self._socket = server return True def Close(self): if not self._socket: self._error = 'not connected' return self._socket.close() self._socket = None def EscapeString(self, string): return re.sub(r"([=\(\)|\-!@~\"&/\\\^\$\=])", r"\\\1", string) def FlushAttributes(self): sock = self._Connect() if not sock: return -1 request = pack ( '>hhI', SEARCHD_COMMAND_FLUSHATTRS, VER_COMMAND_FLUSHATTRS, 0 ) # cmd, ver, bodylen sock.send ( request ) response = self._GetResponse ( sock, VER_COMMAND_FLUSHATTRS ) if not response or len(response)!=4: self._error = 'unexpected response length' return -1 tag = unpack ( '>L', response[0:4] )[0] return tag def AssertInt32 ( value ): assert(isinstance(value, (int, long))) assert(value>=-2**32-1 and value<=2**32-1) def AssertUInt32 ( value ): assert(isinstance(value, (int, long))) assert(value>=0 and value<=2**32-1) # # $Id: sphinxapi.py 3087 2012-01-30 23:07:35Z shodan $ # sphinx-2.0.4-release/api/ruby/0000755000176700017710000000000011724063141015474 5ustar deogardeogarsphinx-2.0.4-release/api/ruby/test.rb0000644000176700017710000000122211301034527016772 0ustar deogardeogar# # $Id$ # require 'init.rb' q = ARGV.join(' ') @sphinx = Sphinx::Client.new # @sphinx.SetSortMode(Sphinx::Client::SPH_SORT_ATTR_ASC, 'created_at') results = @sphinx.Query(q) puts "Query '#{q}' retrieved #{results['total']} of #{results['total_found']} matches in #{results['time']} sec."; puts "Query stats:"; results['words'].each do |word, info| puts " '#{word}' found #{info['hits']} times in #{info['docs']} documents\n" end puts n = 1 results['matches'].each do |doc| print "#{n}. doc_id=#{doc['id']}, weight=#{doc['weight']}" doc['attrs'].each do |attr, value| print ", #{attr}=#{value}" end puts n = n+1 end sphinx-2.0.4-release/api/ruby/lib/0000755000176700017710000000000011724063141016242 5ustar deogardeogarsphinx-2.0.4-release/api/ruby/lib/sphinx.rb0000644000176700017710000000025311220006630020067 0ustar deogardeogarrequire File.dirname(__FILE__) + '/sphinx/request' require File.dirname(__FILE__) + '/sphinx/response' require File.dirname(__FILE__) + '/sphinx/client' module Sphinx endsphinx-2.0.4-release/api/ruby/lib/sphinx/0000755000176700017710000000000011724063141017553 5ustar deogardeogarsphinx-2.0.4-release/api/ruby/lib/sphinx/response.rb0000644000176700017710000000325511220006233021731 0ustar deogardeogarmodule Sphinx # Unpack internal Sphinx representation of ints, floats, strings, and arrays. # needed by Sphinx search engine. class Response # Initialize new request. def initialize(response) @response = response @position = 0 @size = response.length end # Gets current stream position. def position @position end # Gets response size. def size @size end # Returns true when response stream is out. def eof? @position >= @size end # Get int from stream. def get_int raise EOFError if @position + 4 > @size value = @response[@position, 4].unpack('N*').first @position += 4 return value end # Get 64-bit int from stream. def get_int64 raise EOFError if @position + 8 > @size hi, lo = @response[@position, 8].unpack('N*N*') @position += 8 return (hi << 32) + lo end # Get array of count ints from stream. def get_ints(count) length = 4 * count raise EOFError if @position + length > @size values = @response[@position, length].unpack('N*' * count) @position += length return values end # Get string from stream. def get_string length = get_int raise EOFError if @position + length > @size value = length > 0 ? 
@response[@position, length] : '' @position += length return value end # Get float from stream. def get_float raise EOFError if @position + 4 > @size uval = @response[@position, 4].unpack('N*').first; @position += 4 return ([uval].pack('L')).unpack('f*').first end end endsphinx-2.0.4-release/api/ruby/lib/sphinx/client.rb0000644000176700017710000011377611723657702021410 0ustar deogardeogar# = client.rb - Sphinx Client API # # Author:: Dmytro Shteflyuk . # Copyright:: Copyright (c) 2006 - 2008 Dmytro Shteflyuk # License:: Distributes under the same terms as Ruby # Version:: 0.9.9-r1299 # Website:: http://kpumuk.info/projects/ror-plugins/sphinx # # This library is distributed under the terms of the Ruby license. # You can freely distribute/modify this library. # ==Sphinx Client API # # The Sphinx Client API is used to communicate with searchd # daemon and get search results from Sphinx. # # ===Usage # # sphinx = Sphinx::Client.new # result = sphinx.Query('test') # ids = result['matches'].map { |match| match['id'] }.join(',') # posts = Post.find :all, :conditions => "id IN (#{ids})" # # docs = posts.map(&:body) # excerpts = sphinx.BuildExcerpts(docs, 'index', 'test') require 'socket' module Sphinx # :stopdoc: class SphinxError < StandardError; end class SphinxArgumentError < SphinxError; end class SphinxConnectError < SphinxError; end class SphinxResponseError < SphinxError; end class SphinxInternalError < SphinxError; end class SphinxTemporaryError < SphinxError; end class SphinxUnknownError < SphinxError; end # :startdoc: class Client # :stopdoc: # Known searchd commands # search command SEARCHD_COMMAND_SEARCH = 0 # excerpt command SEARCHD_COMMAND_EXCERPT = 1 # update command SEARCHD_COMMAND_UPDATE = 2 # keywords command SEARCHD_COMMAND_KEYWORDS = 3 # Current client-side command implementation versions # search command version VER_COMMAND_SEARCH = 0x119 # excerpt command version VER_COMMAND_EXCERPT = 0x102 # update command version VER_COMMAND_UPDATE = 0x102 # keywords command version VER_COMMAND_KEYWORDS = 0x100 # Known searchd status codes # general success, command-specific reply follows SEARCHD_OK = 0 # general failure, command-specific reply may follow SEARCHD_ERROR = 1 # temporaty failure, client should retry later SEARCHD_RETRY = 2 # general success, warning message and command-specific reply follow SEARCHD_WARNING = 3 # :startdoc: # Known match modes # match all query words SPH_MATCH_ALL = 0 # match any query word SPH_MATCH_ANY = 1 # match this exact phrase SPH_MATCH_PHRASE = 2 # match this boolean query SPH_MATCH_BOOLEAN = 3 # match this extended query SPH_MATCH_EXTENDED = 4 # match all document IDs w/o fulltext query, apply filters SPH_MATCH_FULLSCAN = 5 # extended engine V2 (TEMPORARY, WILL BE REMOVED IN 0.9.8-RELEASE) SPH_MATCH_EXTENDED2 = 6 # Known ranking modes (ext2 only) # default mode, phrase proximity major factor and BM25 minor one SPH_RANK_PROXIMITY_BM25 = 0 # statistical mode, BM25 ranking only (faster but worse quality) SPH_RANK_BM25 = 1 # no ranking, all matches get a weight of 1 SPH_RANK_NONE = 2 # simple word-count weighting, rank is a weighted sum of per-field keyword occurence counts SPH_RANK_WORDCOUNT = 3 # phrase proximity SPH_RANK_PROXIMITY = 4 SPH_RANK_MATCHANY = 5 SPH_RANK_FIELDMASK = 6 SPH_RANK_SPH04 = 7 SPH_RANK_EXPR = 8 # Known sort modes # sort by document relevance desc, then by date SPH_SORT_RELEVANCE = 0 # sort by document date desc, then by relevance desc SPH_SORT_ATTR_DESC = 1 # sort by document date asc, then by relevance desc SPH_SORT_ATTR_ASC = 2 # 
sort by time segments (hour/day/week/etc) desc, then by relevance desc SPH_SORT_TIME_SEGMENTS = 3 # sort by SQL-like expression (eg. "@relevance DESC, price ASC, @id DESC") SPH_SORT_EXTENDED = 4 # sort by arithmetic expression in descending order (eg. "@id + max(@weight,1000)*boost + log(price)") SPH_SORT_EXPR = 5 # Known filter types # filter by integer values set SPH_FILTER_VALUES = 0 # filter by integer range SPH_FILTER_RANGE = 1 # filter by float range SPH_FILTER_FLOATRANGE = 2 # Known attribute types # this attr is just an integer SPH_ATTR_INTEGER = 1 # this attr is a timestamp SPH_ATTR_TIMESTAMP = 2 # this attr is an ordinal string number (integer at search time, # specially handled at indexing time) SPH_ATTR_ORDINAL = 3 # this attr is a boolean bit field SPH_ATTR_BOOL = 4 # this attr is a float SPH_ATTR_FLOAT = 5 # signed 64-bit integer SPH_ATTR_BIGINT = 6 # string SPH_ATTR_STRING = 7 # this attr has multiple values (0 or more) SPH_ATTR_MULTI = 0x40000001 SPH_ATTR_MULTI64 = 0x40000002 # Known grouping functions # group by day SPH_GROUPBY_DAY = 0 # group by week SPH_GROUPBY_WEEK = 1 # group by month SPH_GROUPBY_MONTH = 2 # group by year SPH_GROUPBY_YEAR = 3 # group by attribute value SPH_GROUPBY_ATTR = 4 # group by sequential attrs pair SPH_GROUPBY_ATTRPAIR = 5 # Constructs the Sphinx::Client object and sets options to their default values. def initialize # per-client-object settings @host = 'localhost' # searchd host (default is "localhost") @port = 9312 # searchd port (default is 9312) # per-query settings @offset = 0 # how many records to seek from result-set start (default is 0) @limit = 20 # how many records to return from result-set starting at offset (default is 20) @mode = SPH_MATCH_ALL # query matching mode (default is SPH_MATCH_ALL) @weights = [] # per-field weights (default is 1 for all fields) @sort = SPH_SORT_RELEVANCE # match sorting mode (default is SPH_SORT_RELEVANCE) @sortby = '' # attribute to sort by (defualt is "") @min_id = 0 # min ID to match (default is 0, which means no limit) @max_id = 0 # max ID to match (default is 0, which means no limit) @filters = [] # search filters @groupby = '' # group-by attribute name @groupfunc = SPH_GROUPBY_DAY # function to pre-process group-by attribute value with @groupsort = '@group desc' # group-by sorting clause (to sort groups in result set with) @groupdistinct = '' # group-by count-distinct attribute @maxmatches = 1000 # max matches to retrieve @cutoff = 0 # cutoff to stop searching at (default is 0) @retrycount = 0 # distributed retries count @retrydelay = 0 # distributed retries delay @anchor = [] # geographical anchor point @indexweights = [] # per-index weights @ranker = SPH_RANK_PROXIMITY_BM25 # ranking mode (default is SPH_RANK_PROXIMITY_BM25) @rankexpr = '' # ranker expression for SPH_RANK_EXPR @maxquerytime = 0 # max query time, milliseconds (default is 0, do not limit) @fieldweights = {} # per-field-name weights @overrides = [] # per-query attribute values overrides @select = '*' # select-list (attributes or expressions, with optional aliases) # per-reply fields (for single-query case) @error = '' # last error message @warning = '' # last warning message @reqs = [] # requests storage (for multi-query case) @mbenc = '' # stored mbstring encoding end # Get last error message. def GetLastError @error end # Get last warning message. def GetLastWarning @warning end # Set searchd host name (string) and port (integer). def SetServer(host, port) assert { host.instance_of? String } assert { port.instance_of? 
Fixnum } @host = host @port = port end # Set offset and count into result set, # and optionally set max-matches and cutoff limits. def SetLimits(offset, limit, max = 0, cutoff = 0) assert { offset.instance_of? Fixnum } assert { limit.instance_of? Fixnum } assert { max.instance_of? Fixnum } assert { offset >= 0 } assert { limit > 0 } assert { max >= 0 } @offset = offset @limit = limit @maxmatches = max if max > 0 @cutoff = cutoff if cutoff > 0 end # Set maximum query time, in milliseconds, per-index, # integer, 0 means "do not limit" def SetMaxQueryTime(max) assert { max.instance_of? Fixnum } assert { max >= 0 } @maxquerytime = max end # Set matching mode. def SetMatchMode(mode) assert { mode == SPH_MATCH_ALL \ || mode == SPH_MATCH_ANY \ || mode == SPH_MATCH_PHRASE \ || mode == SPH_MATCH_BOOLEAN \ || mode == SPH_MATCH_EXTENDED \ || mode == SPH_MATCH_FULLSCAN \ || mode == SPH_MATCH_EXTENDED2 } @mode = mode end # Set ranking mode. def SetRankingMode(ranker, rankexpr = '') assert { ranker == SPH_RANK_PROXIMITY_BM25 \ || ranker == SPH_RANK_BM25 \ || ranker == SPH_RANK_NONE \ || ranker == SPH_RANK_WORDCOUNT \ || ranker == SPH_RANK_PROXIMITY \ || ranker == SPH_RANK_MATCHANY \ || ranker == SPH_RANK_FIELDMASK \ || ranker == SPH_RANK_SPH04 \ || ranker == SPH_RANK_EXPR } @ranker = ranker @rankexpr = rankexpr end # Set matches sorting mode. def SetSortMode(mode, sortby = '') assert { mode == SPH_SORT_RELEVANCE \ || mode == SPH_SORT_ATTR_DESC \ || mode == SPH_SORT_ATTR_ASC \ || mode == SPH_SORT_TIME_SEGMENTS \ || mode == SPH_SORT_EXTENDED \ || mode == SPH_SORT_EXPR } assert { sortby.instance_of? String } assert { mode == SPH_SORT_RELEVANCE || !sortby.empty? } @sort = mode @sortby = sortby end # Bind per-field weights by order. # # DEPRECATED; use SetFieldWeights() instead. def SetWeights(weights) assert { weights.instance_of? Array } weights.each do |weight| assert { weight.instance_of? Fixnum } end @weights = weights end # Bind per-field weights by name. # # Takes string (field name) to integer name (field weight) hash as an argument. # * Takes precedence over SetWeights(). # * Unknown names will be silently ignored. # * Unbound fields will be silently given a weight of 1. def SetFieldWeights(weights) assert { weights.instance_of? Hash } weights.each do |name, weight| assert { name.instance_of? String } assert { weight.instance_of? Fixnum } end @fieldweights = weights end # Bind per-index weights by name. def SetIndexWeights(weights) assert { weights.instance_of? Hash } weights.each do |index, weight| assert { index.instance_of? String } assert { weight.instance_of? Fixnum } end @indexweights = weights end # Set IDs range to match. # # Only match records if document ID is beetwen min_id and max_id (inclusive). def SetIDRange(min, max) assert { min.instance_of?(Fixnum) or min.instance_of?(Bignum) } assert { max.instance_of?(Fixnum) or max.instance_of?(Bignum) } assert { min <= max } @min_id = min @max_id = max end # Set values filter. # # Only match those records where attribute column values # are in specified set. def SetFilter(attribute, values, exclude = false) assert { attribute.instance_of? String } assert { values.instance_of? Array } assert { !values.empty? } if values.instance_of?(Array) && values.size > 0 values.each do |value| assert { value.instance_of? Fixnum } end @filters << { 'type' => SPH_FILTER_VALUES, 'attr' => attribute, 'exclude' => exclude, 'values' => values } end end # Set range filter. 
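  # For example (the attribute name here is an illustrative assumption):
  #
  #   sphinx.SetFilterRange('group_id', 10, 20)
  #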
# # Only match those records where attribute column value # is beetwen min and max (including min and max). def SetFilterRange(attribute, min, max, exclude = false) assert { attribute.instance_of? String } assert { min.instance_of? Fixnum or min.instance_of? Bignum } assert { max.instance_of? Fixnum or max.instance_of? Bignum } assert { min <= max } @filters << { 'type' => SPH_FILTER_RANGE, 'attr' => attribute, 'exclude' => exclude, 'min' => min, 'max' => max } end # Set float range filter. # # Only match those records where attribute column value # is beetwen min and max (including min and max). def SetFilterFloatRange(attribute, min, max, exclude = false) assert { attribute.instance_of? String } assert { min.instance_of? Float } assert { max.instance_of? Float } assert { min <= max } @filters << { 'type' => SPH_FILTER_FLOATRANGE, 'attr' => attribute, 'exclude' => exclude, 'min' => min, 'max' => max } end # Setup anchor point for geosphere distance calculations. # # Required to use @geodist in filters and sorting # distance will be computed to this point. Latitude and longitude # must be in radians. # # * attrlat -- is the name of latitude attribute # * attrlong -- is the name of longitude attribute # * lat -- is anchor point latitude, in radians # * long -- is anchor point longitude, in radians def SetGeoAnchor(attrlat, attrlong, lat, long) assert { attrlat.instance_of? String } assert { attrlong.instance_of? String } assert { lat.instance_of? Float } assert { long.instance_of? Float } @anchor = { 'attrlat' => attrlat, 'attrlong' => attrlong, 'lat' => lat, 'long' => long } end # Set grouping attribute and function. # # In grouping mode, all matches are assigned to different groups # based on grouping function value. # # Each group keeps track of the total match count, and the best match # (in this group) according to current sorting function. # # The final result set contains one best match per group, with # grouping function value and matches count attached. # # Groups in result set could be sorted by any sorting clause, # including both document attributes and the following special # internal Sphinx attributes: # # * @id - match document ID; # * @weight, @rank, @relevance - match weight; # * @group - groupby function value; # * @count - amount of matches in group. # # the default mode is to sort by groupby value in descending order, # ie. by '@group desc'. # # 'total_found' would contain total amount of matching groups over # the whole index. # # WARNING: grouping is done in fixed memory and thus its results # are only approximate; so there might be more groups reported # in total_found than actually present. @count might also # be underestimated. # # For example, if sorting by relevance and grouping by "published" # attribute with SPH_GROUPBY_DAY function, then the result set will # contain one most relevant match per each day when there were any # matches published, with day number and per-day match count attached, # and sorted by day number in descending order (ie. recent days first). def SetGroupBy(attribute, func, groupsort = '@group desc') assert { attribute.instance_of? String } assert { groupsort.instance_of? String } assert { func == SPH_GROUPBY_DAY \ || func == SPH_GROUPBY_WEEK \ || func == SPH_GROUPBY_MONTH \ || func == SPH_GROUPBY_YEAR \ || func == SPH_GROUPBY_ATTR \ || func == SPH_GROUPBY_ATTRPAIR } @groupby = attribute @groupfunc = func @groupsort = groupsort end # Set count-distinct attribute for group-by queries. 
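  # For example, to keep one best match per day while counting distinct
  # authors in each group (attribute names are illustrative assumptions):
  #
  #   sphinx.SetGroupBy('published', Sphinx::Client::SPH_GROUPBY_DAY)
  #   sphinx.SetGroupDistinct('author_id')
  #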
def SetGroupDistinct(attribute) assert { attribute.instance_of? String } @groupdistinct = attribute end # Set distributed retries count and delay. def SetRetries(count, delay = 0) assert { count.instance_of? Fixnum } assert { delay.instance_of? Fixnum } @retrycount = count @retrydelay = delay end # Set attribute values override # # There can be only one override per attribute. # +values+ must be a hash that maps document IDs to attribute values. def SetOverride(attrname, attrtype, values) assert { attrname.instance_of? String } assert { [SPH_ATTR_INTEGER, SPH_ATTR_TIMESTAMP, SPH_ATTR_BOOL, SPH_ATTR_FLOAT, SPH_ATTR_BIGINT].include?(attrtype) } assert { values.instance_of? Hash } @overrides << { 'attr' => attrname, 'type' => attrtype, 'values' => values } end # Set select-list (attributes or expressions), SQL-like syntax. def SetSelect(select) assert { select.instance_of? String } @select = select end # Clear all filters (for multi-queries). def ResetFilters @filters = [] @anchor = [] end # Clear groupby settings (for multi-queries). def ResetGroupBy @groupby = '' @groupfunc = SPH_GROUPBY_DAY @groupsort = '@group desc' @groupdistinct = '' end # Clear all attribute value overrides (for multi-queries). def ResetOverrides @overrides = [] end # Connect to searchd server and run given search query. # # query is query string # index is index name (or names) to query. default value is "*" which means # to query all indexes. Accepted characters for index names are letters, numbers, # dash, and underscore; everything else is considered a separator. Therefore, # all the following calls are valid and will search two indexes: # # sphinx.Query('test query', 'main delta') # sphinx.Query('test query', 'main;delta') # sphinx.Query('test query', 'main, delta') # # Index order matters. If identical IDs are found in two or more indexes, # weight and attribute values from the very last matching index will be used # for sorting and returning to client. Therefore, in the example above, # matches from "delta" index will always "win" over matches from "main". # # Returns false on failure. # Returns hash which has the following keys on success: # # * 'matches' -- array of hashes {'weight', 'group', 'id'}, where 'id' is document_id. # * 'total' -- total amount of matches retrieved (upto SPH_MAX_MATCHES, see sphinx.h) # * 'total_found' -- total amount of matching documents in index # * 'time' -- search time # * 'words' -- hash which maps query terms (stemmed!) to ('docs', 'hits') hash def Query(query, index = '*', comment = '') assert { @reqs.empty? } @reqs = [] self.AddQuery(query, index, comment) results = self.RunQueries # probably network error; error message should be already filled return false unless results.instance_of?(Array) @error = results[0]['error'] @warning = results[0]['warning'] return false if results[0]['status'] == SEARCHD_ERROR return results[0] end # Add query to batch. # # Batch queries enable searchd to perform internal optimizations, # if possible; and reduce network connection overheads in all cases. # # For instance, running exactly the same query with different # groupby settings will enable searched to perform expensive # full-text search and ranking operation only once, but compute # multiple groupby results from its output. # # Parameters are exactly the same as in Query call. # Returns index to results array returned by RunQueries call. 
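  # A minimal batch sketch (query text, index, and attribute names are
  # illustrative assumptions):
  #
  #   sphinx.AddQuery('test query', 'main')
  #   sphinx.SetGroupBy('group_id', Sphinx::Client::SPH_GROUPBY_ATTR)
  #   sphinx.AddQuery('test query', 'main')
  #   results = sphinx.RunQueries
  #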
def AddQuery(query, index = '*', comment = '') # build request # mode and limits request = Request.new request.put_int @offset, @limit, @mode, @ranker # process the 'expr' ranker if @ranker == SPH_RANK_EXPR request.put_string @rankexpr end request.put_int @sort request.put_string @sortby # query itself request.put_string query # weights request.put_int_array @weights # indexes request.put_string index # id64 range marker request.put_int 1 # id64 range request.put_int64 @min_id.to_i, @max_id.to_i # filters request.put_int @filters.length @filters.each do |filter| request.put_string filter['attr'] request.put_int filter['type'] case filter['type'] when SPH_FILTER_VALUES request.put_int64_array filter['values'] when SPH_FILTER_RANGE request.put_int64 filter['min'], filter['max'] when SPH_FILTER_FLOATRANGE request.put_float filter['min'], filter['max'] else raise SphinxInternalError, 'Internal error: unhandled filter type' end request.put_int filter['exclude'] ? 1 : 0 end # group-by clause, max-matches count, group-sort clause, cutoff count request.put_int @groupfunc request.put_string @groupby request.put_int @maxmatches request.put_string @groupsort request.put_int @cutoff, @retrycount, @retrydelay request.put_string @groupdistinct # anchor point if @anchor.empty? request.put_int 0 else request.put_int 1 request.put_string @anchor['attrlat'], @anchor['attrlong'] request.put_float @anchor['lat'], @anchor['long'] end # per-index weights request.put_int @indexweights.length @indexweights.each do |idx, weight| request.put_string idx request.put_int weight end # max query time request.put_int @maxquerytime # per-field weights request.put_int @fieldweights.length @fieldweights.each do |field, weight| request.put_string field request.put_int weight end # comment request.put_string comment # attribute overrides request.put_int @overrides.length for entry in @overrides do request.put_string entry['attr'] request.put_int entry['type'], entry['values'].size entry['values'].each do |id, val| assert { id.instance_of?(Fixnum) || id.instance_of?(Bignum) } assert { val.instance_of?(Fixnum) || val.instance_of?(Bignum) || val.instance_of?(Float) } request.put_int64 id case entry['type'] when SPH_ATTR_FLOAT request.put_float val when SPH_ATTR_BIGINT request.put_int64 val else request.put_int val end end end # select-list request.put_string @select # store request to requests array @reqs << request.to_s; return @reqs.length - 1 end # Run queries batch. # # Returns an array of result sets on success. # Returns false on network IO failure. # # Each result set in returned array is a hash which containts # the same keys as the hash returned by Query, plus: # # * 'error' -- search error for this query # * 'words' -- hash which maps query terms (stemmed!) to ( "docs", "hits" ) hash def RunQueries if @reqs.empty? 
@error = 'No queries defined, issue AddQuery() first' return false end req = @reqs.join('') nreqs = @reqs.length @reqs = [] response = PerformRequest(:search, req, nreqs) # parse response begin results = [] ires = 0 while ires < nreqs ires += 1 result = {} result['error'] = '' result['warning'] = '' # extract status status = result['status'] = response.get_int if status != SEARCHD_OK message = response.get_string if status == SEARCHD_WARNING result['warning'] = message else result['error'] = message results << result next end end # read schema fields = [] attrs = {} attrs_names_in_order = [] nfields = response.get_int while nfields > 0 nfields -= 1 fields << response.get_string end result['fields'] = fields nattrs = response.get_int while nattrs > 0 nattrs -= 1 attr = response.get_string type = response.get_int attrs[attr] = type attrs_names_in_order << attr end result['attrs'] = attrs # read match count count = response.get_int id64 = response.get_int # read matches result['matches'] = [] while count > 0 count -= 1 if id64 != 0 doc = response.get_int64 weight = response.get_int else doc, weight = response.get_ints(2) end r = {} # This is a single result put in the result['matches'] array r['id'] = doc r['weight'] = weight attrs_names_in_order.each do |a| r['attrs'] ||= {} case attrs[a] when SPH_ATTR_BIGINT # handle 64-bit ints r['attrs'][a] = response.get_int64 when SPH_ATTR_FLOAT # handle floats r['attrs'][a] = response.get_float when SPH_ATTR_STRING # handle string r['attrs'][a] = response.get_string else # handle everything else as unsigned ints val = response.get_int if attrs[a]==SPH_ATTR_MULTI r['attrs'][a] = [] 1.upto(val) do r['attrs'][a] << response.get_int end elsif attrs[a]==SPH_ATTR_MULTI64 r['attrs'][a] = [] val = val/2 1.upto(val) do r['attrs'][a] << response.get_int64 end else r['attrs'][a] = val end end end result['matches'] << r end result['total'], result['total_found'], msecs, words = response.get_ints(4) result['time'] = '%.3f' % (msecs / 1000.0) result['words'] = {} while words > 0 words -= 1 word = response.get_string docs, hits = response.get_ints(2) result['words'][word] = { 'docs' => docs, 'hits' => hits } end results << result end #rescue EOFError # @error = 'incomplete reply' # raise SphinxResponseError, @error end return results end # Connect to searchd server and generate exceprts from given documents. # # * docs -- an array of strings which represent the documents' contents # * index -- a string specifiying the index which settings will be used # for stemming, lexing and case folding # * words -- a string which contains the words to highlight # * opts is a hash which contains additional optional highlighting parameters. # # You can use following parameters: # * 'before_match' -- a string to insert before a set of matching words, default is "" # * 'after_match' -- a string to insert after a set of matching words, default is "" # * 'chunk_separator' -- a string to insert between excerpts chunks, default is " ... " # * 'limit' -- max excerpt size in symbols (codepoints), default is 256 # * 'around' -- how much words to highlight around each match, default is 5 # * 'exact_phrase' -- whether to highlight exact phrase matches only, default is false # * 'single_passage' -- whether to extract single best passage only, default is false # * 'use_boundaries' -- whether to extract passages by phrase boundaries setup in tokenizer # * 'weight_order' -- whether to order best passages in document (default) or weight order # # Returns false on failure. 
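  # A hedged usage sketch (document text, index name, and markup are
  # illustrative assumptions):
  #
  #   docs = ['this is my test text to be highlighted']
  #   excerpts = sphinx.BuildExcerpts(docs, 'index', 'test text',
  #     'before_match' => '<b>', 'after_match' => '</b>')
  #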
# Returns an array of string excerpts on success. def BuildExcerpts(docs, index, words, opts = {}) assert { docs.instance_of? Array } assert { index.instance_of? String } assert { words.instance_of? String } assert { opts.instance_of? Hash } # fixup options opts['before_match'] ||= ''; opts['after_match'] ||= ''; opts['chunk_separator'] ||= ' ... '; opts['html_strip_mode'] ||= 'index'; opts['limit'] ||= 256; opts['limit_passages'] ||= 0; opts['limit_words'] ||= 0; opts['around'] ||= 5; opts['start_passage_id'] ||= 1; opts['exact_phrase'] ||= false opts['single_passage'] ||= false opts['use_boundaries'] ||= false opts['weight_order'] ||= false opts['load_files'] ||= false opts['allow_empty'] ||= false # build request # v.1.0 req flags = 1 flags |= 2 if opts['exact_phrase'] flags |= 4 if opts['single_passage'] flags |= 8 if opts['use_boundaries'] flags |= 16 if opts['weight_order'] flags |= 32 if opts['query_mode'] flags |= 64 if opts['force_all_words'] flags |= 128 if opts['load_files'] flags |= 256 if opts['allow_empty'] request = Request.new request.put_int 0, flags # mode=0, flags=1 (remove spaces) # req index request.put_string index # req words request.put_string words # options request.put_string opts['before_match'] request.put_string opts['after_match'] request.put_string opts['chunk_separator'] request.put_int opts['limit'].to_i, opts['around'].to_i # options v1.2 request.put_int opts['limit_passages'].to_i request.put_int opts['limit_words'].to_i request.put_int opts['start_passage_id'].to_i request.put_string opts['html_strip_mode'] # documents request.put_int docs.size docs.each do |doc| assert { doc.instance_of? String } request.put_string doc end response = PerformRequest(:excerpt, request) # parse response begin res = [] docs.each do |doc| res << response.get_string end rescue EOFError @error = 'incomplete reply' raise SphinxResponseError, @error end return res end # Connect to searchd server, and generate keyword list for a given query. # # Returns an array of words on success. def BuildKeywords(query, index, hits) assert { query.instance_of? String } assert { index.instance_of? String } assert { hits.instance_of?(TrueClass) || hits.instance_of?(FalseClass) } # build request request = Request.new # v.1.0 req request.put_string query # req query request.put_string index # req index request.put_int hits ? 1 : 0 response = PerformRequest(:keywords, request) # parse response begin res = [] nwords = response.get_int 0.upto(nwords - 1) do |i| tokenized = response.get_string normalized = response.get_string entry = { 'tokenized' => tokenized, 'normalized' => normalized } entry['docs'], entry['hits'] = response.get_ints(2) if hits res << entry end rescue EOFError @error = 'incomplete reply' raise SphinxResponseError, @error end return res end # Batch update given attributes in given rows in given indexes. # # * +index+ is a name of the index to be updated # * +attrs+ is an array of attribute name strings. # * +values+ is a hash where key is document id, and value is an array of # * +mva+ identifies whether update MVA # new attribute values # # Returns number of actually updated documents (0 or more) on success. # Returns -1 on failure. # # Usage example: # sphinx.UpdateAttributes('test1', ['group_id'], { 1 => [456] }) def UpdateAttributes(index, attrs, values, mva = false) # verify everything assert { index.instance_of? String } assert { mva.instance_of?(TrueClass) || mva.instance_of?(FalseClass) } assert { attrs.instance_of? Array } attrs.each do |attr| assert { attr.instance_of? 
String } end assert { values.instance_of? Hash } values.each do |id, entry| assert { id.instance_of? Fixnum } assert { entry.instance_of? Array } assert { entry.length == attrs.length } entry.each do |v| if mva assert { v.instance_of? Array } v.each { |vv| assert { vv.instance_of? Fixnum } } else assert { v.instance_of? Fixnum } end end end # build request request = Request.new request.put_string index request.put_int attrs.length for attr in attrs request.put_string attr request.put_int mva ? 1 : 0 end request.put_int values.length values.each do |id, entry| request.put_int64 id if mva entry.each { |v| request.put_int_array v } else request.put_int(*entry) end end response = PerformRequest(:update, request) # parse response begin return response.get_int rescue EOFError @error = 'incomplete reply' raise SphinxResponseError, @error end end protected # Connect to searchd server. def Connect begin if @host[0,1]=='/' sock = UNIXSocket.new(@host) else sock = TCPSocket.new(@host, @port) end rescue => err @error = "connection to #{@host}:#{@port} failed (error=#{err})" raise SphinxConnectError, @error end v = sock.recv(4).unpack('N*').first if v < 1 sock.close @error = "expected searchd protocol version 1+, got version '#{v}'" raise SphinxConnectError, @error end sock.send([1].pack('N'), 0) sock end # Get and check response packet from searchd server. def GetResponse(sock, client_version) response = '' len = 0 header = sock.recv(8) if header.length == 8 status, ver, len = header.unpack('n2N') left = len.to_i while left > 0 do begin chunk = sock.recv(left) if chunk response << chunk left -= chunk.length end rescue EOFError break end end end sock.close # check response read = response.length if response.empty? or read != len.to_i @error = response.empty? \ ? 'received zero-sized searchd response' \ : "failed to read searchd response (status=#{status}, ver=#{ver}, len=#{len}, read=#{read})" raise SphinxResponseError, @error end # check status if (status == SEARCHD_WARNING) wlen = response[0, 4].unpack('N*').first @warning = response[4, wlen] return response[4 + wlen, response.length - 4 - wlen] end if status == SEARCHD_ERROR @error = 'searchd error: ' + response[4, response.length - 4] raise SphinxInternalError, @error end if status == SEARCHD_RETRY @error = 'temporary searchd error: ' + response[4, response.length - 4] raise SphinxTemporaryError, @error end unless status == SEARCHD_OK @error = "unknown status code: '#{status}'" raise SphinxUnknownError, @error end # check version if ver < client_version @warning = "searchd command v.#{ver >> 8}.#{ver & 0xff} older than client's " + "v.#{client_version >> 8}.#{client_version & 0xff}, some options might not work" end return response end # Connect, send query, get response. def PerformRequest(command, request, additional = nil) cmd = command.to_s.upcase command_id = Sphinx::Client.const_get('SEARCHD_COMMAND_' + cmd) command_ver = Sphinx::Client.const_get('VER_COMMAND_' + cmd) sock = self.Connect len = request.to_s.length + (additional != nil ? 8 : 0) header = [command_id, command_ver, len].pack('nnN') header << [0, additional].pack('NN') if additional != nil sock.send(header + request.to_s, 0) response = self.GetResponse(sock, command_ver) return Response.new(response) end # :stopdoc: def assert raise 'Assertion failed!' 
unless yield if $DEBUG end # :startdoc: end end sphinx-2.0.4-release/api/ruby/lib/sphinx/request.rb0000644000176700017710000000243411045547240021576 0ustar deogardeogarmodule Sphinx # Pack ints, floats, strings, and arrays to internal representation # needed by Sphinx search engine. class Request # Initialize new request. def initialize @request = '' end # Put int(s) to request. def put_int(*ints) ints.each { |i| @request << [i].pack('N') } end # Put 64-bit int(s) to request. def put_int64(*ints) ints.each { |i| @request << [i].pack('q').reverse }#[i >> 32, i & ((1 << 32) - 1)].pack('NN') } end # Put string(s) to request (first length, then the string itself). def put_string(*strings) strings.each { |s| @request << [s.length].pack('N') + s } end # Put float(s) to request. def put_float(*floats) floats.each do |f| t1 = [f].pack('f') # machine order t2 = t1.unpack('L*').first # int in machine order @request << [t2].pack('N') end end # Put array of ints to request (first length, then the array itself) def put_int_array(arr) put_int arr.length, *arr end # Put array of 64-bit ints to request (first length, then the array itself) def put_int64_array(arr) put_int arr.length put_int64(*arr) end # Returns the entire message def to_s @request end end end sphinx-2.0.4-release/api/ruby/sphinx.yml.tpl0000644000176700017710000000013011006762325020323 0ustar deogardeogarconfig_file: /opt/sphinx/etc/sphinx.conf root_dir: /opt/sphinx/bin indexes: test1 test2 sphinx-2.0.4-release/api/ruby/README.rdoc0000644000176700017710000000230511062775602017311 0ustar deogardeogar=Sphinx Client API 0.9.9-dev (r1299) This document gives an overview of what is Sphinx itself and how to use in within Ruby on Rails. For more information or documentation, please go to http://www.sphinxsearch.com ==Sphinx Sphinx is a standalone full-text search engine, meant to provide fast, size-efficient and relevant fulltext search functions to other applications. Sphinx was specially designed to integrate well with SQL databases and scripting languages. Currently built-in data sources support fetching data either via direct connection to MySQL, or from an XML pipe. Simplest way to communicate with Sphinx is to use searchd - a daemon to search through fulltext indices from external software. ==Documentation You can create the documentation by running: rake rdoc ==Latest version You can always get latest version from http://kpumuk.info/projects/ror-plugins/sphinx ==Credits Dmytro Shteflyuk http://kpumuk.info Andrew Aksyonoff http://sphinxsearch.com/ Special thanks to Alexey Kovyrin http://blog.kovyrin.net ==License This library is distributed under the terms of the Ruby license. You can freely distribute/modify this library. sphinx-2.0.4-release/api/ruby/init.rb0000644000176700017710000000005711006762325016771 0ustar deogardeogarrequire File.dirname(__FILE__) + '/lib/sphinx' sphinx-2.0.4-release/api/ruby/Rakefile0000644000176700017710000000103211062775604017146 0ustar deogardeogarrequire 'rake' require 'spec/rake/spectask' require 'rake/rdoctask' desc 'Default: run unit tests.' task :default => :spec desc 'Test the sphinx plugin.' Spec::Rake::SpecTask.new(:spec) do |t| t.libs << 'lib' t.pattern = 'spec/*_spec.rb' end desc 'Generate documentation for the sphinx plugin.' 
Rake::RDocTask.new(:rdoc) do |rdoc| rdoc.rdoc_dir = 'rdoc' rdoc.title = 'Sphinx Client API' rdoc.options << '--line-numbers' << '--inline-source' rdoc.rdoc_files.include('README') rdoc.rdoc_files.include('lib/**/*.rb') end sphinx-2.0.4-release/api/ruby/tasks/0000755000176700017710000000000011724063141016621 5ustar deogardeogarsphinx-2.0.4-release/api/ruby/tasks/sphinx.rake0000644000176700017710000000435411006762325021007 0ustar deogardeogarnamespace :sphinx do desc 'Run indexer for configured indexes' task :index do config = load_config if config[:indexes] system "#{config[:root_dir]}/indexer --config \"#{config[:config_file]}\" #{config[:indexes]}" else puts 'You should specify indexes in sphinx.yml' end end desc 'Run indexer for all indexes' task :index_all do config = load_config system "#{config[:root_dir]}/indexer --config \"#{config[:config_file]}\" --all" end desc 'Rotate configured indexes and restart searchd server' task :rotate do config = load_config if config[:indexes] system "#{config[:root_dir]}/indexer --config \"#{config[:config_file]}\" --rotate #{config[:indexes]}" else puts 'You should specify indexes in sphinx.yml' end end desc 'Rotate all indexes and restart searchd server' task :rotate_all do config = load_config system "#{config[:root_dir]}/indexer --config \"#{config[:config_file]}\" --rotate --all" end desc 'Start searchd server' task :start do config = load_config if File.exists?(config[:pid_file]) puts 'Sphinx searchd server is already started.' else system "#{config[:root_dir]}/searchd --config \"#{config[:config_file]}\"" puts 'Sphinx searchd server started.' end end desc 'Stop searchd server' task :stop do config = load_config unless File.exists?(config[:pid_file]) puts 'Sphinx searchd server is not running.' else pid = File.read(config[:pid_file]).chomp kill 'SIGHUP', pid puts 'Sphinx searchd server stopped.' 
end end desc 'Restart searchd server' task :restart => [:stop, :start] def load_config return @sphinx_config if @sphinx_config options = YAML.load_file(File.dirname(__FILE__) + '/../../../../config/sphinx.yml') rescue {} @sphinx_config = { :config_file => options['config_file'] || '/etc/sphinx.conf', :root_dir => options['root_dir'] || '/usr/bin', :indexes => options['indexes'] } sphinx_config = File.read(@sphinx_config[:config_file]) rescue '' sphinx_config =~ /searchd\s*{.*pid_file\s*=\s*(.*?)\n.*}/m @sphinx_config[:pid_file] = $1 || '/var/run/searchd.pid' return @sphinx_config end end sphinx-2.0.4-release/api/ruby/spec/0000755000176700017710000000000011724063141016426 5ustar deogardeogarsphinx-2.0.4-release/api/ruby/spec/fixtures/0000755000176700017710000000000011724063141020277 5ustar deogardeogarsphinx-2.0.4-release/api/ruby/spec/fixtures/limits_max_cutoff.php0000644000176700017710000000016511006762325024531 0ustar deogardeogarSetLimits(10, 20, 30, 40); $cl->Query('query'); ?>sphinx-2.0.4-release/api/ruby/spec/fixtures/excerpt_custom.php0000644000176700017710000000067111006762325024063 0ustar deogardeogarBuildExcerpts(array('10', '20'), 'index', 'word1 word2', array('before_match' => 'before', 'after_match' => 'after', 'chunk_separator' => 'separator', 'limit' => 10)); ?>sphinx-2.0.4-release/api/ruby/spec/fixtures/group_by_week.php0000644000176700017710000000020011006762325023644 0ustar deogardeogarSetGroupBy('attr', SPH_GROUPBY_WEEK); $cl->Query('query'); ?>sphinx-2.0.4-release/api/ruby/spec/fixtures/sphinxapi.php0000644000176700017710000010653411711621267023031 0ustar deogardeogar=8 ) { $i = (int)$v; return pack ( "NN", $i>>32, $i&((1<<32)-1) ); } // x32 route, bcmath $x = "4294967296"; if ( function_exists("bcmul") ) { $h = bcdiv ( $v, $x, 0 ); $l = bcmod ( $v, $x ); if ( $v<0 ) { $h = -1+(float)$h; $l = $l+(float)$x; } return pack ( "NN", (float)$h, (float)$l ); // conversion to float is intentional; int would lose 31st bit } // x32 route, 15 or less decimal digits // we can use float, because its actually double and has 52 precision bits if ( strlen($v)<=15 ) { $f = (float)$v; $h = (int)($f/$x); $l = $f-$x*(float)$h; if ( $v<0 ) { $h = -1+(float)$h; $l = $l+(float)$x; } return pack ( "NN", $h, $l ); } // x32 route, 16 or more decimal digits // well, let me know if you *really* need this die ( "INTERNAL ERROR: packing more than 15-digit numeric on 32-bit PHP is not implemented yet (contact support)" ); } /// portably unpack 64 signed bits, network order to numeric function sphUnpack64 ( $v ) { list($h,$l) = array_values ( unpack ( "N*N*", $v ) ); // x64 route if ( PHP_INT_SIZE>=8 ) { if ( $h<0 ) $h += (1<<32); // because php 5.2.2 to 5.2.5 is totally fucked up again if ( $l<0 ) $l += (1<<32); return ($h<<32) + $l; } // x32 route $x = "4294967296"; $y = 0; $p = ""; if ( $h<0 ) { $h = ~$h; $l = ~$l; $y = 1; $p = "-"; } $h = sprintf ( "%u", $h ); $l = sprintf ( "%u", $l ); // bcmath if ( function_exists("bcmul") ) return $p . bcadd ( bcadd ( $l, bcmul ( $x, $h ) ), $y ); // no bcmath, 15 or less decimal digits // we can use float, because its actually double and has 52 precision bits if ( $h<1048576 ) { $f = ((float)$h)*$x + (float)$l + (float)$y; return $p . sprintf ( "%.0f", $f ); // builtin conversion is only about 39-40 bits precise! 
} // x32 route, 16 or more decimal digits // well, let me know if you *really* need this die ( "INTERNAL ERROR: unpacking more than 15-digit numeric on 32-bit PHP is not implemented yet (contact support)" ); } /// sphinx searchd client class class SphinxClient { var $_host; ///< searchd host (default is "localhost") var $_port; ///< searchd port (default is 9312) var $_offset; ///< how many records to seek from result-set start (default is 0) var $_limit; ///< how many records to return from result-set starting at offset (default is 20) var $_mode; ///< query matching mode (default is SPH_MATCH_ALL) var $_weights; ///< per-field weights (default is 1 for all fields) var $_sort; ///< match sorting mode (default is SPH_SORT_RELEVANCE) var $_sortby; ///< attribute to sort by (defualt is "") var $_min_id; ///< min ID to match (default is 0, which means no limit) var $_max_id; ///< max ID to match (default is 0, which means no limit) var $_filters; ///< search filters var $_groupby; ///< group-by attribute name var $_groupfunc; ///< group-by function (to pre-process group-by attribute value with) var $_groupsort; ///< group-by sorting clause (to sort groups in result set with) var $_groupdistinct;///< group-by count-distinct attribute var $_maxmatches; ///< max matches to retrieve var $_cutoff; ///< cutoff to stop searching at (default is 0) var $_retrycount; ///< distributed retries count var $_retrydelay; ///< distributed retries delay var $_anchor; ///< geographical anchor point var $_indexweights; ///< per-index weights var $_ranker; ///< ranking mode (default is SPH_RANK_PROXIMITY_BM25) var $_maxquerytime; ///< max query time, milliseconds (default is 0, do not limit) var $_fieldweights; ///< per-field-name weights var $_overrides; ///< per-query attribute values overrides var $_select; ///< select-list (attributes or expressions, with optional aliases) var $_error; ///< last error message var $_warning; ///< last warning message var $_reqs; ///< requests array for multi-query var $_mbenc; ///< stored mbstring encoding var $_arrayresult; ///< whether $result["matches"] should be a hash or an array ///////////////////////////////////////////////////////////////////////////// // common stuff ///////////////////////////////////////////////////////////////////////////// /// create a new client object and fill defaults function SphinxClient () { // per-client-object settings $this->_host = "localhost"; $this->_port = 9312; // per-query settings $this->_offset = 0; $this->_limit = 20; $this->_mode = SPH_MATCH_ALL; $this->_weights = array (); $this->_sort = SPH_SORT_RELEVANCE; $this->_sortby = ""; $this->_min_id = 0; $this->_max_id = 0; $this->_filters = array (); $this->_groupby = ""; $this->_groupfunc = SPH_GROUPBY_DAY; $this->_groupsort = "@group desc"; $this->_groupdistinct= ""; $this->_maxmatches = 1000; $this->_cutoff = 0; $this->_retrycount = 0; $this->_retrydelay = 0; $this->_anchor = array (); $this->_indexweights= array (); $this->_ranker = SPH_RANK_PROXIMITY_BM25; $this->_maxquerytime= 0; $this->_fieldweights= array(); $this->_overrides = array(); $this->_select = "*"; $this->_error = ""; // per-reply fields (for single-query case) $this->_warning = ""; $this->_reqs = array (); // requests storage (for multi-query case) $this->_mbenc = ""; $this->_arrayresult = false; } /// get last error message (string) function GetLastError () { return $this->_error; } /// get last warning message (string) function GetLastWarning () { return $this->_warning; } /// set searchd host name (string) and 
port (integer) function SetServer ( $host, $port ) { assert ( is_string($host) ); assert ( is_int($port) ); $this->_host = $host; $this->_port = $port; } ///////////////////////////////////////////////////////////////////////////// /// enter mbstring workaround mode function _MBPush () { $this->_mbenc = ""; if ( ini_get ( "mbstring.func_overload" ) & 2 ) { $this->_mbenc = mb_internal_encoding(); mb_internal_encoding ( "latin1" ); } } /// leave mbstring workaround mode function _MBPop () { if ( $this->_mbenc ) mb_internal_encoding ( $this->_mbenc ); } /// connect to searchd server function _Connect () { return fopen('php://stdout', 'w'); } function _OldConnect() { if (!( $fp = @fsockopen ( $this->_host, $this->_port ) ) ) { $this->_error = "connection to {$this->_host}:{$this->_port} failed"; return false; } // check version list(,$v) = unpack ( "N*", fread ( $fp, 4 ) ); $v = (int)$v; if ( $v<1 ) { fclose ( $fp ); $this->_error = "expected searchd protocol version 1+, got version '$v'"; return false; } // all ok, send my version fwrite ( $fp, pack ( "N", 1 ) ); return $fp; } /// get and check response packet from searchd server function _GetResponse ( $fp, $client_ver ) { return false; } function _OldGetResponse ( $fp, $client_ver ) { $response = ""; $len = 0; $header = fread ( $fp, 8 ); if ( strlen($header)==8 ) { list ( $status, $ver, $len ) = array_values ( unpack ( "n2a/Nb", $header ) ); $left = $len; while ( $left>0 && !feof($fp) ) { $chunk = fread ( $fp, $left ); if ( $chunk ) { $response .= $chunk; $left -= strlen($chunk); } } } fclose ( $fp ); // check response $read = strlen ( $response ); if ( !$response || $read!=$len ) { $this->_error = $len ? "failed to read searchd response (status=$status, ver=$ver, len=$len, read=$read)" : "received zero-sized searchd response"; return false; } // check status if ( $status==SEARCHD_WARNING ) { list(,$wlen) = unpack ( "N*", substr ( $response, 0, 4 ) ); $this->_warning = substr ( $response, 4, $wlen ); return substr ( $response, 4+$wlen ); } if ( $status==SEARCHD_ERROR ) { $this->_error = "searchd error: " . substr ( $response, 4 ); return false; } if ( $status==SEARCHD_RETRY ) { $this->_error = "temporary searchd error: " . 
substr ( $response, 4 ); return false; } if ( $status!=SEARCHD_OK ) { $this->_error = "unknown status code '$status'"; return false; } // check version if ( $ver<$client_ver ) { $this->_warning = sprintf ( "searchd command v.%d.%d older than client's v.%d.%d, some options might not work", $ver>>8, $ver&0xff, $client_ver>>8, $client_ver&0xff ); } return $response; } ///////////////////////////////////////////////////////////////////////////// // searching ///////////////////////////////////////////////////////////////////////////// /// set offset and count into result set, /// and optionally set max-matches and cutoff limits function SetLimits ( $offset, $limit, $max=0, $cutoff=0 ) { assert ( is_int($offset) ); assert ( is_int($limit) ); assert ( $offset>=0 ); assert ( $limit>0 ); assert ( $max>=0 ); $this->_offset = $offset; $this->_limit = $limit; if ( $max>0 ) $this->_maxmatches = $max; if ( $cutoff>0 ) $this->_cutoff = $cutoff; } /// set maximum query time, in milliseconds, per-index /// integer, 0 means "do not limit" function SetMaxQueryTime ( $max ) { assert ( is_int($max) ); assert ( $max>=0 ); $this->_maxquerytime = $max; } /// set matching mode function SetMatchMode ( $mode ) { assert ( $mode==SPH_MATCH_ALL || $mode==SPH_MATCH_ANY || $mode==SPH_MATCH_PHRASE || $mode==SPH_MATCH_BOOLEAN || $mode==SPH_MATCH_EXTENDED || $mode==SPH_MATCH_FULLSCAN || $mode==SPH_MATCH_EXTENDED2 ); $this->_mode = $mode; } /// set ranking mode function SetRankingMode ( $ranker ) { assert ( $ranker==SPH_RANK_PROXIMITY_BM25 || $ranker==SPH_RANK_BM25 || $ranker==SPH_RANK_NONE || $ranker==SPH_RANK_WORDCOUNT || $ranker==SPH_RANK_PROXIMITY ); $this->_ranker = $ranker; } /// set matches sorting mode function SetSortMode ( $mode, $sortby="" ) { assert ( $mode==SPH_SORT_RELEVANCE || $mode==SPH_SORT_ATTR_DESC || $mode==SPH_SORT_ATTR_ASC || $mode==SPH_SORT_TIME_SEGMENTS || $mode==SPH_SORT_EXTENDED || $mode==SPH_SORT_EXPR ); assert ( is_string($sortby) ); assert ( $mode==SPH_SORT_RELEVANCE || strlen($sortby)>0 ); $this->_sort = $mode; $this->_sortby = $sortby; } /// bind per-field weights by order /// DEPRECATED; use SetFieldWeights() instead function SetWeights ( $weights ) { assert ( is_array($weights) ); foreach ( $weights as $weight ) assert ( is_int($weight) ); $this->_weights = $weights; } /// bind per-field weights by name function SetFieldWeights ( $weights ) { assert ( is_array($weights) ); foreach ( $weights as $name=>$weight ) { assert ( is_string($name) ); assert ( is_int($weight) ); } $this->_fieldweights = $weights; } /// bind per-index weights by name function SetIndexWeights ( $weights ) { assert ( is_array($weights) ); foreach ( $weights as $index=>$weight ) { assert ( is_string($index) ); assert ( is_int($weight) ); } $this->_indexweights = $weights; } /// set IDs range to match /// only match records if document ID is beetwen $min and $max (inclusive) function SetIDRange ( $min, $max ) { assert ( is_numeric($min) ); assert ( is_numeric($max) ); assert ( $min<=$max ); $this->_min_id = $min; $this->_max_id = $max; } /// set values set filter /// only match records where $attribute value is in given set function SetFilter ( $attribute, $values, $exclude=false ) { assert ( is_string($attribute) ); assert ( is_array($values) ); assert ( count($values) ); if ( is_array($values) && count($values) ) { foreach ( $values as $value ) assert ( is_numeric($value) ); $this->_filters[] = array ( "type"=>SPH_FILTER_VALUES, "attr"=>$attribute, "exclude"=>$exclude, "values"=>$values ); } } /// set range filter /// 
only match records if $attribute value is beetwen $min and $max (inclusive) function SetFilterRange ( $attribute, $min, $max, $exclude=false ) { assert ( is_string($attribute) ); assert ( is_numeric($min) ); assert ( is_numeric($max) ); assert ( $min<=$max ); $this->_filters[] = array ( "type"=>SPH_FILTER_RANGE, "attr"=>$attribute, "exclude"=>$exclude, "min"=>$min, "max"=>$max ); } /// set float range filter /// only match records if $attribute value is beetwen $min and $max (inclusive) function SetFilterFloatRange ( $attribute, $min, $max, $exclude=false ) { assert ( is_string($attribute) ); assert ( is_float($min) ); assert ( is_float($max) ); assert ( $min<=$max ); $this->_filters[] = array ( "type"=>SPH_FILTER_FLOATRANGE, "attr"=>$attribute, "exclude"=>$exclude, "min"=>$min, "max"=>$max ); } /// setup anchor point for geosphere distance calculations /// required to use @geodist in filters and sorting /// latitude and longitude must be in radians function SetGeoAnchor ( $attrlat, $attrlong, $lat, $long ) { assert ( is_string($attrlat) ); assert ( is_string($attrlong) ); assert ( is_float($lat) ); assert ( is_float($long) ); $this->_anchor = array ( "attrlat"=>$attrlat, "attrlong"=>$attrlong, "lat"=>$lat, "long"=>$long ); } /// set grouping attribute and function function SetGroupBy ( $attribute, $func, $groupsort="@group desc" ) { assert ( is_string($attribute) ); assert ( is_string($groupsort) ); assert ( $func==SPH_GROUPBY_DAY || $func==SPH_GROUPBY_WEEK || $func==SPH_GROUPBY_MONTH || $func==SPH_GROUPBY_YEAR || $func==SPH_GROUPBY_ATTR || $func==SPH_GROUPBY_ATTRPAIR ); $this->_groupby = $attribute; $this->_groupfunc = $func; $this->_groupsort = $groupsort; } /// set count-distinct attribute for group-by queries function SetGroupDistinct ( $attribute ) { assert ( is_string($attribute) ); $this->_groupdistinct = $attribute; } /// set distributed retries count and delay function SetRetries ( $count, $delay=0 ) { assert ( is_int($count) && $count>=0 ); assert ( is_int($delay) && $delay>=0 ); $this->_retrycount = $count; $this->_retrydelay = $delay; } /// set result set format (hash or array; hash by default) /// PHP specific; needed for group-by-MVA result sets that may contain duplicate IDs function SetArrayResult ( $arrayresult ) { assert ( is_bool($arrayresult) ); $this->_arrayresult = $arrayresult; } /// set attribute values override /// there can be only one override per attribute /// $values must be a hash that maps document IDs to attribute values function SetOverride ( $attrname, $attrtype, $values ) { assert ( is_string ( $attrname ) ); assert ( in_array ( $attrtype, array ( SPH_ATTR_INTEGER, SPH_ATTR_TIMESTAMP, SPH_ATTR_BOOL, SPH_ATTR_FLOAT, SPH_ATTR_BIGINT ) ) ); assert ( is_array ( $values ) ); $this->_overrides[$attrname] = array ( "attr"=>$attrname, "type"=>$attrtype, "values"=>$values ); } /// set select-list (attributes or expressions), SQL-like syntax function SetSelect ( $select ) { assert ( is_string ( $select ) ); $this->_select = $select; } ////////////////////////////////////////////////////////////////////////////// /// clear all filters (for multi-queries) function ResetFilters () { $this->_filters = array(); $this->_anchor = array(); } /// clear groupby settings (for multi-queries) function ResetGroupBy () { $this->_groupby = ""; $this->_groupfunc = SPH_GROUPBY_DAY; $this->_groupsort = "@group desc"; $this->_groupdistinct= ""; } /// clear all attribute value overrides (for multi-queries) function ResetOverrides () { $this->_overrides = array (); } 
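	// A minimal usage sketch of this client, using only methods defined in this class
	// (the host, port and index name below are placeholders):
	//   $cl = new SphinxClient ();
	//   $cl->SetServer ( "localhost", 9312 );
	//   $cl->SetMatchMode ( SPH_MATCH_EXTENDED2 );
	//   $cl->SetLimits ( 0, 20 );
	//   $res = $cl->Query ( "hello world", "myindex" );
	//   if ( $res===false ) print $cl->GetLastError ();
	// Bear in mind that in this spec-fixture copy _Connect() is stubbed to return a
	// handle to php://stdout and _GetResponse() always returns false, so Query() here
	// writes the raw request packet to stdout and then reports failure instead of
	// talking to a live searchd.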
////////////////////////////////////////////////////////////////////////////// /// connect to searchd server, run given search query through given indexes, /// and return the search results function Query ( $query, $index="*", $comment="" ) { assert ( empty($this->_reqs) ); $this->AddQuery ( $query, $index, $comment ); $results = $this->RunQueries (); if ( !is_array($results) ) return false; // probably network error; error message should be already filled $this->_error = $results[0]["error"]; $this->_warning = $results[0]["warning"]; if ( $results[0]["status"]==SEARCHD_ERROR ) return false; else return $results[0]; } /// helper to pack floats in network byte order function _PackFloat ( $f ) { $t1 = pack ( "f", $f ); // machine order list(,$t2) = unpack ( "L*", $t1 ); // int in machine order return pack ( "N", $t2 ); } /// add query to multi-query batch /// returns index into results array from RunQueries() call function AddQuery ( $query, $index="*", $comment="" ) { // mbstring workaround $this->_MBPush (); // build request $req = pack ( "NNNNN", $this->_offset, $this->_limit, $this->_mode, $this->_ranker, $this->_sort ); // mode and limits $req .= pack ( "N", strlen($this->_sortby) ) . $this->_sortby; $req .= pack ( "N", strlen($query) ) . $query; // query itself $req .= pack ( "N", count($this->_weights) ); // weights foreach ( $this->_weights as $weight ) $req .= pack ( "N", (int)$weight ); $req .= pack ( "N", strlen($index) ) . $index; // indexes $req .= pack ( "N", 1 ); // id64 range marker $req .= sphPack64 ( $this->_min_id ) . sphPack64 ( $this->_max_id ); // id64 range // filters $req .= pack ( "N", count($this->_filters) ); foreach ( $this->_filters as $filter ) { $req .= pack ( "N", strlen($filter["attr"]) ) . $filter["attr"]; $req .= pack ( "N", $filter["type"] ); switch ( $filter["type"] ) { case SPH_FILTER_VALUES: $req .= pack ( "N", count($filter["values"]) ); foreach ( $filter["values"] as $value ) $req .= sphPack64 ( $value ); break; case SPH_FILTER_RANGE: $req .= sphPack64 ( $filter["min"] ) . sphPack64 ( $filter["max"] ); break; case SPH_FILTER_FLOATRANGE: $req .= $this->_PackFloat ( $filter["min"] ) . $this->_PackFloat ( $filter["max"] ); break; default: assert ( 0 && "internal error: unhandled filter type" ); } $req .= pack ( "N", $filter["exclude"] ); } // group-by clause, max-matches count, group-sort clause, cutoff count $req .= pack ( "NN", $this->_groupfunc, strlen($this->_groupby) ) . $this->_groupby; $req .= pack ( "N", $this->_maxmatches ); $req .= pack ( "N", strlen($this->_groupsort) ) . $this->_groupsort; $req .= pack ( "NNN", $this->_cutoff, $this->_retrycount, $this->_retrydelay ); $req .= pack ( "N", strlen($this->_groupdistinct) ) . $this->_groupdistinct; // anchor point if ( empty($this->_anchor) ) { $req .= pack ( "N", 0 ); } else { $a =& $this->_anchor; $req .= pack ( "N", 1 ); $req .= pack ( "N", strlen($a["attrlat"]) ) . $a["attrlat"]; $req .= pack ( "N", strlen($a["attrlong"]) ) . $a["attrlong"]; $req .= $this->_PackFloat ( $a["lat"] ) . $this->_PackFloat ( $a["long"] ); } // per-index weights $req .= pack ( "N", count($this->_indexweights) ); foreach ( $this->_indexweights as $idx=>$weight ) $req .= pack ( "N", strlen($idx) ) . $idx . pack ( "N", $weight ); // max query time $req .= pack ( "N", $this->_maxquerytime ); // per-field weights $req .= pack ( "N", count($this->_fieldweights) ); foreach ( $this->_fieldweights as $field=>$weight ) $req .= pack ( "N", strlen($field) ) . $field . 
pack ( "N", $weight ); // comment $req .= pack ( "N", strlen($comment) ) . $comment; // attribute overrides $req .= pack ( "N", count($this->_overrides) ); foreach ( $this->_overrides as $key => $entry ) { $req .= pack ( "N", strlen($entry["attr"]) ) . $entry["attr"]; $req .= pack ( "NN", $entry["type"], count($entry["values"]) ); foreach ( $entry["values"] as $id=>$val ) { assert ( is_numeric($id) ); assert ( is_numeric($val) ); $req .= sphPack64 ( $id ); switch ( $entry["type"] ) { case SPH_ATTR_FLOAT: $req .= $this->_PackFloat ( $val ); break; case SPH_ATTR_BIGINT: $req .= sphPack64 ( $val ); break; default: $req .= pack ( "N", $val ); break; } } } // select-list $req .= pack ( "N", strlen($this->_select) ) . $this->_select; // mbstring workaround $this->_MBPop (); // store request to requests array $this->_reqs[] = $req; return count($this->_reqs)-1; } /// connect to searchd, run queries batch, and return an array of result sets function RunQueries () { if ( empty($this->_reqs) ) { $this->_error = "no queries defined, issue AddQuery() first"; return false; } // mbstring workaround $this->_MBPush (); if (!( $fp = $this->_Connect() )) { $this->_MBPop (); return false; } //////////////////////////// // send query, get response //////////////////////////// $nreqs = count($this->_reqs); $req = join ( "", $this->_reqs ); $len = 4+strlen($req); $req = pack ( "nnNN", SEARCHD_COMMAND_SEARCH, VER_COMMAND_SEARCH, $len, $nreqs ) . $req; // add header fwrite ( $fp, $req, $len+8 ); if (!( $response = $this->_GetResponse ( $fp, VER_COMMAND_SEARCH ) )) { $this->_MBPop (); return false; } $this->_reqs = array (); ////////////////// // parse response ////////////////// $p = 0; // current position $max = strlen($response); // max position for checks, to protect against broken responses $results = array (); for ( $ires=0; $ires<$nreqs && $p<$max; $ires++ ) { $results[] = array(); $result =& $results[$ires]; $result["error"] = ""; $result["warning"] = ""; // extract status list(,$status) = unpack ( "N*", substr ( $response, $p, 4 ) ); $p += 4; $result["status"] = $status; if ( $status!=SEARCHD_OK ) { list(,$len) = unpack ( "N*", substr ( $response, $p, 4 ) ); $p += 4; $message = substr ( $response, $p, $len ); $p += $len; if ( $status==SEARCHD_WARNING ) { $result["warning"] = $message; } else { $result["error"] = $message; continue; } } // read schema $fields = array (); $attrs = array (); list(,$nfields) = unpack ( "N*", substr ( $response, $p, 4 ) ); $p += 4; while ( $nfields-->0 && $p<$max ) { list(,$len) = unpack ( "N*", substr ( $response, $p, 4 ) ); $p += 4; $fields[] = substr ( $response, $p, $len ); $p += $len; } $result["fields"] = $fields; list(,$nattrs) = unpack ( "N*", substr ( $response, $p, 4 ) ); $p += 4; while ( $nattrs-->0 && $p<$max ) { list(,$len) = unpack ( "N*", substr ( $response, $p, 4 ) ); $p += 4; $attr = substr ( $response, $p, $len ); $p += $len; list(,$type) = unpack ( "N*", substr ( $response, $p, 4 ) ); $p += 4; $attrs[$attr] = $type; } $result["attrs"] = $attrs; // read match count list(,$count) = unpack ( "N*", substr ( $response, $p, 4 ) ); $p += 4; list(,$id64) = unpack ( "N*", substr ( $response, $p, 4 ) ); $p += 4; // read matches $idx = -1; while ( $count-->0 && $p<$max ) { // index into result array $idx++; // parse document id and weight if ( $id64 ) { $doc = sphUnpack64 ( substr ( $response, $p, 8 ) ); $p += 8; list(,$weight) = unpack ( "N*", substr ( $response, $p, 4 ) ); $p += 4; } else { list ( $doc, $weight ) = array_values ( unpack ( "N*N*", substr ( $response, 
$p, 8 ) ) ); $p += 8; if ( PHP_INT_SIZE>=8 ) { // x64 route, workaround broken unpack() in 5.2.2+ if ( $doc<0 ) $doc += (1<<32); } else { // x32 route, workaround php signed/unsigned braindamage $doc = sprintf ( "%u", $doc ); } } $weight = sprintf ( "%u", $weight ); // create match entry if ( $this->_arrayresult ) $result["matches"][$idx] = array ( "id"=>$doc, "weight"=>$weight ); else $result["matches"][$doc]["weight"] = $weight; // parse and create attributes $attrvals = array (); foreach ( $attrs as $attr=>$type ) { // handle 64bit ints if ( $type==SPH_ATTR_BIGINT ) { $attrvals[$attr] = sphUnpack64 ( substr ( $response, $p, 8 ) ); $p += 8; continue; } // handle floats if ( $type==SPH_ATTR_FLOAT ) { list(,$uval) = unpack ( "N*", substr ( $response, $p, 4 ) ); $p += 4; list(,$fval) = unpack ( "f*", pack ( "L", $uval ) ); $attrvals[$attr] = $fval; continue; } // handle everything else as unsigned ints list(,$val) = unpack ( "N*", substr ( $response, $p, 4 ) ); $p += 4; if ( $type & SPH_ATTR_MULTI ) { $attrvals[$attr] = array (); $nvalues = $val; while ( $nvalues-->0 && $p<$max ) { list(,$val) = unpack ( "N*", substr ( $response, $p, 4 ) ); $p += 4; $attrvals[$attr][] = sprintf ( "%u", $val ); } } else { $attrvals[$attr] = sprintf ( "%u", $val ); } } if ( $this->_arrayresult ) $result["matches"][$idx]["attrs"] = $attrvals; else $result["matches"][$doc]["attrs"] = $attrvals; } list ( $total, $total_found, $msecs, $words ) = array_values ( unpack ( "N*N*N*N*", substr ( $response, $p, 16 ) ) ); $result["total"] = sprintf ( "%u", $total ); $result["total_found"] = sprintf ( "%u", $total_found ); $result["time"] = sprintf ( "%.3f", $msecs/1000 ); $p += 16; while ( $words-->0 && $p<$max ) { list(,$len) = unpack ( "N*", substr ( $response, $p, 4 ) ); $p += 4; $word = substr ( $response, $p, $len ); $p += $len; list ( $docs, $hits ) = array_values ( unpack ( "N*N*", substr ( $response, $p, 8 ) ) ); $p += 8; $result["words"][$word] = array ( "docs"=>sprintf ( "%u", $docs ), "hits"=>sprintf ( "%u", $hits ) ); } } $this->_MBPop (); return $results; } ///////////////////////////////////////////////////////////////////////////// // excerpts generation ///////////////////////////////////////////////////////////////////////////// /// connect to searchd server, and generate exceprts (snippets) /// of given documents for given query. returns false on failure, /// an array of snippets on success function BuildExcerpts ( $docs, $index, $words, $opts=array() ) { assert ( is_array($docs) ); assert ( is_string($index) ); assert ( is_string($words) ); assert ( is_array($opts) ); $this->_MBPush (); if (!( $fp = $this->_Connect() )) { $this->_MBPop(); return false; } ///////////////// // fixup options ///////////////// if ( !isset($opts["before_match"]) ) $opts["before_match"] = ""; if ( !isset($opts["after_match"]) ) $opts["after_match"] = ""; if ( !isset($opts["chunk_separator"]) ) $opts["chunk_separator"] = " ... 
"; if ( !isset($opts["limit"]) ) $opts["limit"] = 256; if ( !isset($opts["around"]) ) $opts["around"] = 5; if ( !isset($opts["exact_phrase"]) ) $opts["exact_phrase"] = false; if ( !isset($opts["single_passage"]) ) $opts["single_passage"] = false; if ( !isset($opts["use_boundaries"]) ) $opts["use_boundaries"] = false; if ( !isset($opts["weight_order"]) ) $opts["weight_order"] = false; ///////////////// // build request ///////////////// // v.1.0 req $flags = 1; // remove spaces if ( $opts["exact_phrase"] ) $flags |= 2; if ( $opts["single_passage"] ) $flags |= 4; if ( $opts["use_boundaries"] ) $flags |= 8; if ( $opts["weight_order"] ) $flags |= 16; $req = pack ( "NN", 0, $flags ); // mode=0, flags=$flags $req .= pack ( "N", strlen($index) ) . $index; // req index $req .= pack ( "N", strlen($words) ) . $words; // req words // options $req .= pack ( "N", strlen($opts["before_match"]) ) . $opts["before_match"]; $req .= pack ( "N", strlen($opts["after_match"]) ) . $opts["after_match"]; $req .= pack ( "N", strlen($opts["chunk_separator"]) ) . $opts["chunk_separator"]; $req .= pack ( "N", (int)$opts["limit"] ); $req .= pack ( "N", (int)$opts["around"] ); // documents $req .= pack ( "N", count($docs) ); foreach ( $docs as $doc ) { assert ( is_string($doc) ); $req .= pack ( "N", strlen($doc) ) . $doc; } //////////////////////////// // send query, get response //////////////////////////// $len = strlen($req); $req = pack ( "nnN", SEARCHD_COMMAND_EXCERPT, VER_COMMAND_EXCERPT, $len ) . $req; // add header $wrote = fwrite ( $fp, $req, $len+8 ); if (!( $response = $this->_GetResponse ( $fp, VER_COMMAND_EXCERPT ) )) { $this->_MBPop (); return false; } ////////////////// // parse response ////////////////// $pos = 0; $res = array (); $rlen = strlen($response); for ( $i=0; $i $rlen ) { $this->_error = "incomplete reply"; $this->_MBPop (); return false; } $res[] = $len ? substr ( $response, $pos, $len ) : ""; $pos += $len; } $this->_MBPop (); return $res; } ///////////////////////////////////////////////////////////////////////////// // keyword generation ///////////////////////////////////////////////////////////////////////////// /// connect to searchd server, and generate keyword list for a given query /// returns false on failure, /// an array of words on success function BuildKeywords ( $query, $index, $hits ) { assert ( is_string($query) ); assert ( is_string($index) ); assert ( is_bool($hits) ); $this->_MBPush (); if (!( $fp = $this->_Connect() )) { $this->_MBPop(); return false; } ///////////////// // build request ///////////////// // v.1.0 req $req = pack ( "N", strlen($query) ) . $query; // req query $req .= pack ( "N", strlen($index) ) . $index; // req index $req .= pack ( "N", (int)$hits ); //////////////////////////// // send query, get response //////////////////////////// $len = strlen($req); $req = pack ( "nnN", SEARCHD_COMMAND_KEYWORDS, VER_COMMAND_KEYWORDS, $len ) . $req; // add header $wrote = fwrite ( $fp, $req, $len+8 ); if (!( $response = $this->_GetResponse ( $fp, VER_COMMAND_KEYWORDS ) )) { $this->_MBPop (); return false; } ////////////////// // parse response ////////////////// $pos = 0; $res = array (); $rlen = strlen($response); list(,$nwords) = unpack ( "N*", substr ( $response, $pos, 4 ) ); $pos += 4; for ( $i=0; $i<$nwords; $i++ ) { list(,$len) = unpack ( "N*", substr ( $response, $pos, 4 ) ); $pos += 4; $tokenized = $len ? substr ( $response, $pos, $len ) : ""; $pos += $len; list(,$len) = unpack ( "N*", substr ( $response, $pos, 4 ) ); $pos += 4; $normalized = $len ? 
substr ( $response, $pos, $len ) : ""; $pos += $len; $res[] = array ( "tokenized"=>$tokenized, "normalized"=>$normalized ); if ( $hits ) { list($ndocs,$nhits) = array_values ( unpack ( "N*N*", substr ( $response, $pos, 8 ) ) ); $pos += 8; $res [$i]["docs"] = $ndocs; $res [$i]["hits"] = $nhits; } if ( $pos > $rlen ) { $this->_error = "incomplete reply"; $this->_MBPop (); return false; } } $this->_MBPop (); return $res; } function EscapeString ( $string ) { $from = array ( '(',')','|','-','!','@','~','"','&', '/' ); $to = array ( '\(','\)','\|','\-','\!','\@','\~','\"', '\&', '\/' ); return str_replace ( $from, $to, $string ); } ///////////////////////////////////////////////////////////////////////////// // attribute updates ///////////////////////////////////////////////////////////////////////////// /// batch update given attributes in given rows in given indexes /// returns amount of updated documents (0 or more) on success, or -1 on failure function UpdateAttributes ( $index, $attrs, $values, $mva=false ) { // verify everything assert ( is_string($index) ); assert ( is_bool($mva) ); assert ( is_array($attrs) ); foreach ( $attrs as $attr ) assert ( is_string($attr) ); assert ( is_array($values) ); foreach ( $values as $id=>$entry ) { assert ( is_numeric($id) ); assert ( is_array($entry) ); assert ( count($entry)==count($attrs) ); foreach ( $entry as $v ) { if ( $mva ) { assert ( is_array($v) ); foreach ( $v as $vv ) assert ( is_int($vv) ); } else assert ( is_int($v) ); } } // build request $req = pack ( "N", strlen($index) ) . $index; $req .= pack ( "N", count($attrs) ); foreach ( $attrs as $attr ) { $req .= pack ( "N", strlen($attr) ) . $attr; $req .= pack ( "N", $mva ? 1 : 0 ); } $req .= pack ( "N", count($values) ); foreach ( $values as $id=>$entry ) { $req .= sphPack64 ( $id ); foreach ( $entry as $v ) { $req .= pack ( "N", $mva ? count($v) : $v ); if ( $mva ) foreach ( $v as $vv ) $req .= pack ( "N", $vv ); } } // connect, send query, get response if (!( $fp = $this->_Connect() )) return -1; $len = strlen($req); $req = pack ( "nnN", SEARCHD_COMMAND_UPDATE, VER_COMMAND_UPDATE, $len ) . 
$req; // add header fwrite ( $fp, $req, $len+8 ); if (!( $response = $this->_GetResponse ( $fp, VER_COMMAND_UPDATE ) )) return -1; // parse response list(,$updated) = unpack ( "N*", substr ( $response, 0, 4 ) ); return $updated; } } // // $Id$ // ?> sphinx-2.0.4-release/api/ruby/spec/fixtures/sort_attr_asc.php0000644000176700017710000000020411006762325023656 0ustar deogardeogarSetSortMode(SPH_SORT_ATTR_ASC, 'sortby'); $cl->Query('query'); ?>sphinx-2.0.4-release/api/ruby/spec/fixtures/default_search.php0000644000176700017710000000012511006762325023762 0ustar deogardeogarQuery('query'); ?>sphinx-2.0.4-release/api/ruby/spec/fixtures/excerpt_flags.php0000644000176700017710000000066611006762325023651 0ustar deogardeogarBuildExcerpts(array('10', '20'), 'index', 'word1 word2', array('exact_phrase' => true, 'single_passage' => true, 'use_boundaries' => true, 'weight_order' => true)); ?>sphinx-2.0.4-release/api/ruby/spec/fixtures/sort_expr.php0000644000176700017710000000020011006762325023030 0ustar deogardeogarSetSortMode(SPH_SORT_EXPR, 'sortby'); $cl->Query('query'); ?>sphinx-2.0.4-release/api/ruby/spec/fixtures/id_range.php0000644000176700017710000000015611006762325022565 0ustar deogardeogarSetIDRange(10, 20); $cl->Query('query'); ?>sphinx-2.0.4-release/api/ruby/spec/fixtures/match_all.php0000644000176700017710000000016711006762325022743 0ustar deogardeogarSetMatchMode(SPH_MATCH_ALL); $cl->Query('query'); ?>sphinx-2.0.4-release/api/ruby/spec/fixtures/filter.php0000644000176700017710000000020011006762325022270 0ustar deogardeogarSetFilter('attr', array(10, 20, 30)); $cl->Query('query'); ?>sphinx-2.0.4-release/api/ruby/spec/fixtures/sort_extended.php0000644000176700017710000000020411006762325023656 0ustar deogardeogarSetSortMode(SPH_SORT_EXTENDED, 'sortby'); $cl->Query('query'); ?>sphinx-2.0.4-release/api/ruby/spec/fixtures/group_by_attrpair.php0000644000176700017710000000020411006762325024543 0ustar deogardeogarSetGroupBy('attr', SPH_GROUPBY_ATTRPAIR); $cl->Query('query'); ?>sphinx-2.0.4-release/api/ruby/spec/fixtures/ranking_proximity.php0000644000176700017710000000017711220006630024561 0ustar deogardeogarSetRankingMode(SPH_RANK_PROXIMITY); $cl->Query('query'); ?> sphinx-2.0.4-release/api/ruby/spec/fixtures/update_attributes_mva.php0000644000176700017710000000027111220006630025372 0ustar deogardeogarUpdateAttributes('index', array('group', 'category'), array(123 => array(array(456, 789), array(1, 2, 3))), true); ?> sphinx-2.0.4-release/api/ruby/spec/fixtures/match_any.php0000644000176700017710000000016711006762325022762 0ustar deogardeogarSetMatchMode(SPH_MATCH_ANY); $cl->Query('query'); ?>sphinx-2.0.4-release/api/ruby/spec/fixtures/group_by_attr.php0000644000176700017710000000020011006762325023663 0ustar deogardeogarSetGroupBy('attr', SPH_GROUPBY_ATTR); $cl->Query('query'); ?>sphinx-2.0.4-release/api/ruby/spec/fixtures/id_range64.php0000644000176700017710000000017711006762325022742 0ustar deogardeogarSetIDRange(8589934591, 17179869183); $cl->Query('query'); ?>sphinx-2.0.4-release/api/ruby/spec/fixtures/set_override.php0000644000176700017710000000043211220006630023470 0ustar deogardeogarSetOverride('attr1', SPH_ATTR_INTEGER, array(10 => 20)); $cl->SetOverride('attr2', SPH_ATTR_FLOAT, array(11 => 30.3)); $cl->SetOverride('attr3', SPH_ATTR_BIGINT, array(12 => 1099511627780)); $cl->Query('query'); ?> sphinx-2.0.4-release/api/ruby/spec/fixtures/match_boolean.php0000644000176700017710000000017311006762325023607 0ustar deogardeogarSetMatchMode(SPH_MATCH_BOOLEAN); $cl->Query('query'); 
?>sphinx-2.0.4-release/api/ruby/spec/fixtures/index_weights.php0000644000176700017710000000022211006762325023650 0ustar deogardeogarSetIndexWeights(array('index1' => 10, 'index2' => 20)); $cl->Query('query'); ?>sphinx-2.0.4-release/api/ruby/spec/fixtures/filters.php0000644000176700017710000000025111006762325022461 0ustar deogardeogarSetFilter('attr2', array(40, 50)); $cl->SetFilter('attr1', array(10, 20, 30)); $cl->Query('query'); ?>sphinx-2.0.4-release/api/ruby/spec/fixtures/retries.php0000644000176700017710000000015211006762325022466 0ustar deogardeogarSetRetries(10); $cl->Query('query'); ?>sphinx-2.0.4-release/api/ruby/spec/fixtures/group_by_day_sort.php0000644000176700017710000000021311006762325024541 0ustar deogardeogarSetGroupBy('attr', SPH_GROUPBY_DAY, 'somesort'); $cl->Query('query'); ?>sphinx-2.0.4-release/api/ruby/spec/fixtures/match_fullscan.php0000644000176700017710000000017411006762325024000 0ustar deogardeogarSetMatchMode(SPH_MATCH_FULLSCAN); $cl->Query('query'); ?>sphinx-2.0.4-release/api/ruby/spec/fixtures/filter_range.php0000644000176700017710000000017211006762325023454 0ustar deogardeogarSetFilterRange('attr', 10, 20); $cl->Query('query'); ?>sphinx-2.0.4-release/api/ruby/spec/fixtures/sort_time_segments.php0000644000176700017710000000021111006762325024717 0ustar deogardeogarSetSortMode(SPH_SORT_TIME_SEGMENTS, 'sortby'); $cl->Query('query'); ?>sphinx-2.0.4-release/api/ruby/spec/fixtures/ranking_bm25.php0000644000176700017710000000017111006762325023270 0ustar deogardeogarSetRankingMode(SPH_RANK_BM25); $cl->Query('query'); ?>sphinx-2.0.4-release/api/ruby/spec/fixtures/limits_max.php0000644000176700017710000000016111006762325023157 0ustar deogardeogarSetLimits(10, 20, 30); $cl->Query('query'); ?>sphinx-2.0.4-release/api/ruby/spec/fixtures/geo_anchor.php0000644000176700017710000000021311006762325023113 0ustar deogardeogarSetGeoAnchor('attrlat', 'attrlong', 20.3, 40.7); $cl->Query('query'); ?>sphinx-2.0.4-release/api/ruby/spec/fixtures/group_by_year.php0000644000176700017710000000020011006762325023651 0ustar deogardeogarSetGroupBy('attr', SPH_GROUPBY_YEAR); $cl->Query('query'); ?>sphinx-2.0.4-release/api/ruby/spec/fixtures/filters_different.php0000644000176700017710000000046511006762325024516 0ustar deogardeogarSetFilterRange('attr1', 10, 20, true); $cl->SetFilter('attr3', array(30, 40, 50)); $cl->SetFilterRange('attr1', 60, 70); $cl->SetFilter('attr2', array(80, 90, 100), true); $cl->SetFilterFloatRange('attr1', 60.8, 70.5); $cl->Query('query'); ?>sphinx-2.0.4-release/api/ruby/spec/fixtures/miltiple_queries.php0000644000176700017710000000030611006762325024366 0ustar deogardeogarSetRetries(10, 20); $cl->AddQuery('test1'); $cl->SetGroupBy('attr', SPH_GROUPBY_DAY); $cl->AddQuery('test2'); $cl->RunQueries(); ?>sphinx-2.0.4-release/api/ruby/spec/fixtures/keywords.php0000644000176700017710000000015311220006630022645 0ustar deogardeogarBuildKeywords('test', 'index', true); ?>sphinx-2.0.4-release/api/ruby/spec/fixtures/match_extended2.php0000644000176700017710000000017511006762325024054 0ustar deogardeogarSetMatchMode(SPH_MATCH_EXTENDED2); $cl->Query('query'); ?>sphinx-2.0.4-release/api/ruby/spec/fixtures/ranking_wordcount.php0000644000176700017710000000017611006762325024554 0ustar deogardeogarSetRankingMode(SPH_RANK_WORDCOUNT); $cl->Query('query'); ?>sphinx-2.0.4-release/api/ruby/spec/fixtures/update_attributes.php0000644000176700017710000000021211006762325024536 0ustar deogardeogarUpdateAttributes('index', array('group'), array(123 => array(456))); 
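// NOTE: like the other scripts in this fixtures directory, this one is built on the
// bundled sphinxapi.php above, whose _Connect() is stubbed to return php://stdout and
// whose _GetResponse() returns false; running `php update_attributes.php` therefore
// just prints the raw UPDATE request packet. The Ruby specs capture that output via
// the sphinx_fixture helper and assert that Sphinx::Client produces identical bytes.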
?>sphinx-2.0.4-release/api/ruby/spec/fixtures/match_extended.php0000644000176700017710000000017411006762325023771 0ustar deogardeogarSetMatchMode(SPH_MATCH_EXTENDED); $cl->Query('query'); ?>sphinx-2.0.4-release/api/ruby/spec/fixtures/filter_float_range_exclude.php0000644000176700017710000000021111006762325026344 0ustar deogardeogarSetFilterFloatRange('attr', 10.5, 20.3, true); $cl->Query('query'); ?>sphinx-2.0.4-release/api/ruby/spec/fixtures/weights.php0000644000176700017710000000017511006762325022470 0ustar deogardeogarSetWeights(array(10, 20, 30, 40)); $cl->Query('query'); ?>sphinx-2.0.4-release/api/ruby/spec/fixtures/group_distinct.php0000644000176700017710000000023611006762325024051 0ustar deogardeogarSetGroupBy('attr', SPH_GROUPBY_DAY); $cl->SetGroupDistinct('attr'); $cl->Query('query'); ?>sphinx-2.0.4-release/api/ruby/spec/fixtures/ranking_proximity_bm25.php0000644000176700017710000000020311006762325025410 0ustar deogardeogarSetRankingMode(SPH_RANK_PROXIMITY_BM25); $cl->Query('query'); ?>sphinx-2.0.4-release/api/ruby/spec/fixtures/filter_range_int64.php0000644000176700017710000000027211220006630024465 0ustar deogardeogarSetFilterRange('attr1', -10, 20); $cl->SetFilterRange('attr2', -1099511627770, 1099511627780); $cl->Query('query'); ?> sphinx-2.0.4-release/api/ruby/spec/fixtures/sort_attr_desc.php0000644000176700017710000000020511006762325024027 0ustar deogardeogarSetSortMode(SPH_SORT_ATTR_DESC, 'sortby'); $cl->Query('query'); ?>sphinx-2.0.4-release/api/ruby/spec/fixtures/filter_exclude.php0000644000176700017710000000020611006762325024007 0ustar deogardeogarSetFilter('attr', array(10, 20, 30), true); $cl->Query('query'); ?>sphinx-2.0.4-release/api/ruby/spec/fixtures/match_phrase.php0000644000176700017710000000017211006762325023451 0ustar deogardeogarSetMatchMode(SPH_MATCH_PHRASE); $cl->Query('query'); ?>sphinx-2.0.4-release/api/ruby/spec/fixtures/retries_delay.php0000644000176700017710000000015611006762325023650 0ustar deogardeogarSetRetries(10, 20); $cl->Query('query'); ?>sphinx-2.0.4-release/api/ruby/spec/fixtures/field_weights.php0000644000176700017710000000022211006762325023624 0ustar deogardeogarSetFieldWeights(array('field1' => 10, 'field2' => 20)); $cl->Query('query'); ?>sphinx-2.0.4-release/api/ruby/spec/fixtures/group_by_day.php0000644000176700017710000000017711006762325023503 0ustar deogardeogarSetGroupBy('attr', SPH_GROUPBY_DAY); $cl->Query('query'); ?>sphinx-2.0.4-release/api/ruby/spec/fixtures/select.php0000644000176700017710000000016611220006630022261 0ustar deogardeogarSetSelect('attr1, attr2'); $cl->Query('query'); ?> sphinx-2.0.4-release/api/ruby/spec/fixtures/limits_cutoff.php0000644000176700017710000000016511006762325023664 0ustar deogardeogarSetLimits(10, 20, 30, 40); $cl->Query('query'); ?>sphinx-2.0.4-release/api/ruby/spec/fixtures/ranking_none.php0000644000176700017710000000017111006762325023462 0ustar deogardeogarSetRankingMode(SPH_RANK_NONE); $cl->Query('query'); ?>sphinx-2.0.4-release/api/ruby/spec/fixtures/limits.php0000644000176700017710000000015511006762325022315 0ustar deogardeogarSetLimits(10, 20); $cl->Query('query'); ?>sphinx-2.0.4-release/api/ruby/spec/fixtures/group_by_month.php0000644000176700017710000000020111006762325024037 0ustar deogardeogarSetGroupBy('attr', SPH_GROUPBY_MONTH); $cl->Query('query'); ?>sphinx-2.0.4-release/api/ruby/spec/fixtures/filter_ranges.php0000644000176700017710000000024111006762325023634 0ustar deogardeogarSetFilterRange('attr2', 30, 40); $cl->SetFilterRange('attr1', 10, 20); $cl->Query('query'); 
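// NOTE: filters are serialized into the request in the order the SetFilter* calls are
// made (AddQuery simply walks $this->_filters), so the matching Ruby spec issues its
// SetFilterRange calls in this same attr2-then-attr1 order to obtain a byte-identical packet.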
?>sphinx-2.0.4-release/api/ruby/spec/fixtures/filter_float_range.php0000644000176700017710000000020311006762325024634 0ustar deogardeogarSetFilterFloatRange('attr', 10.5, 20.3); $cl->Query('query'); ?>sphinx-2.0.4-release/api/ruby/spec/fixtures/default_search_index.php0000644000176700017710000000013611006762325025153 0ustar deogardeogarQuery('query', 'index'); ?>sphinx-2.0.4-release/api/ruby/spec/fixtures/sort_relevance.php0000644000176700017710000000017311006762325024027 0ustar deogardeogarSetSortMode(SPH_SORT_RELEVANCE); $cl->Query('query'); ?>sphinx-2.0.4-release/api/ruby/spec/fixtures/max_query_time.php0000644000176700017710000000016111006762325024041 0ustar deogardeogarSetMaxQueryTime(1000); $cl->Query('query'); ?>sphinx-2.0.4-release/api/ruby/spec/fixtures/filter_range_exclude.php0000644000176700017710000000020011006762325025155 0ustar deogardeogarSetFilterRange('attr', 10, 20, true); $cl->Query('query'); ?>sphinx-2.0.4-release/api/ruby/spec/fixtures/excerpt_default.php0000644000176700017710000000017711006762325024176 0ustar deogardeogarBuildExcerpts(array('10', '20'), 'index', 'word1 word2'); ?>sphinx-2.0.4-release/api/ruby/spec/client_spec.rb0000644000176700017710000004312111301034527021241 0ustar deogardeogarrequire File.dirname(__FILE__) + '/../init' class SphinxSpecError < StandardError; end module SphinxFixtureHelper def sphinx_fixture(name) `php #{File.dirname(__FILE__)}/fixtures/#{name}.php` end end module SphinxApiCall def create_sphinx @sphinx = Sphinx::Client.new @sock = mock('TCPSocket') @sphinx.stub!(:Connect).and_return(@sock) @sphinx.stub!(:GetResponse).and_raise(SphinxSpecError) return @sphinx end def safe_call yield rescue SphinxSpecError end end describe 'The Connect method of Sphinx::Client' do before(:each) do @sphinx = Sphinx::Client.new @sock = mock('TCPSocket') end it 'should establish TCP connection to the server and initialize session' do TCPSocket.should_receive(:new).with('localhost', 9312).and_return(@sock) @sock.should_receive(:recv).with(4).and_return([1].pack('N')) @sock.should_receive(:send).with([1].pack('N'), 0) @sphinx.send(:Connect).should be(@sock) end it 'should raise exception when searchd protocol is not 1+' do TCPSocket.should_receive(:new).with('localhost', 9312).and_return(@sock) @sock.should_receive(:recv).with(4).and_return([0].pack('N')) @sock.should_receive(:close) lambda { @sphinx.send(:Connect) }.should raise_error(Sphinx::SphinxConnectError) @sphinx.GetLastError.should == 'expected searchd protocol version 1+, got version \'0\'' end it 'should raise exception on connection error' do TCPSocket.should_receive(:new).with('localhost', 9312).and_raise(Errno::EBADF) lambda { @sphinx.send(:Connect) }.should raise_error(Sphinx::SphinxConnectError) @sphinx.GetLastError.should == 'connection to localhost:9312 failed' end it 'should use custom host and port' do @sphinx.SetServer('anotherhost', 55555) TCPSocket.should_receive(:new).with('anotherhost', 55555).and_raise(Errno::EBADF) lambda { @sphinx.send(:Connect) }.should raise_error(Sphinx::SphinxConnectError) end end describe 'The GetResponse method of Sphinx::Client' do before(:each) do @sphinx = Sphinx::Client.new @sock = mock('TCPSocket') @sock.should_receive(:close) end it 'should receive response' do @sock.should_receive(:recv).with(8).and_return([Sphinx::Client::SEARCHD_OK, 1, 4].pack('n2N')) @sock.should_receive(:recv).with(4).and_return([0].pack('N')) @sphinx.send(:GetResponse, @sock, 1) end it 'should raise exception on zero-sized response' do 
@sock.should_receive(:recv).with(8).and_return([Sphinx::Client::SEARCHD_OK, 1, 0].pack('n2N')) lambda { @sphinx.send(:GetResponse, @sock, 1) }.should raise_error(Sphinx::SphinxResponseError) end it 'should raise exception when response is incomplete' do @sock.should_receive(:recv).with(8).and_return([Sphinx::Client::SEARCHD_OK, 1, 4].pack('n2N')) @sock.should_receive(:recv).with(4).and_raise(EOFError) lambda { @sphinx.send(:GetResponse, @sock, 1) }.should raise_error(Sphinx::SphinxResponseError) end it 'should set warning message when SEARCHD_WARNING received' do @sock.should_receive(:recv).with(8).and_return([Sphinx::Client::SEARCHD_WARNING, 1, 14].pack('n2N')) @sock.should_receive(:recv).with(14).and_return([5].pack('N') + 'helloworld') @sphinx.send(:GetResponse, @sock, 1).should == 'world' @sphinx.GetLastWarning.should == 'hello' end it 'should raise exception when SEARCHD_ERROR received' do @sock.should_receive(:recv).with(8).and_return([Sphinx::Client::SEARCHD_ERROR, 1, 9].pack('n2N')) @sock.should_receive(:recv).with(9).and_return([1].pack('N') + 'hello') lambda { @sphinx.send(:GetResponse, @sock, 1) }.should raise_error(Sphinx::SphinxInternalError) @sphinx.GetLastError.should == 'searchd error: hello' end it 'should raise exception when SEARCHD_RETRY received' do @sock.should_receive(:recv).with(8).and_return([Sphinx::Client::SEARCHD_RETRY, 1, 9].pack('n2N')) @sock.should_receive(:recv).with(9).and_return([1].pack('N') + 'hello') lambda { @sphinx.send(:GetResponse, @sock, 1) }.should raise_error(Sphinx::SphinxTemporaryError) @sphinx.GetLastError.should == 'temporary searchd error: hello' end it 'should raise exception when unknown status received' do @sock.should_receive(:recv).with(8).and_return([65535, 1, 9].pack('n2N')) @sock.should_receive(:recv).with(9).and_return([1].pack('N') + 'hello') lambda { @sphinx.send(:GetResponse, @sock, 1) }.should raise_error(Sphinx::SphinxUnknownError) @sphinx.GetLastError.should == 'unknown status code: \'65535\'' end it 'should set warning when server is older than client' do @sock.should_receive(:recv).with(8).and_return([Sphinx::Client::SEARCHD_OK, 1, 9].pack('n2N')) @sock.should_receive(:recv).with(9).and_return([1].pack('N') + 'hello') @sphinx.send(:GetResponse, @sock, 5) @sphinx.GetLastWarning.should == 'searchd command v.0.1 older than client\'s v.0.5, some options might not work' end end describe 'The Query method of Sphinx::Client' do include SphinxFixtureHelper include SphinxApiCall before(:each) do @sphinx = create_sphinx end it 'should generate valid request with default parameters' do expected = sphinx_fixture('default_search') @sock.should_receive(:send).with(expected, 0) @sphinx.Query('query') rescue nil? end it 'should generate valid request with default parameters and index' do expected = sphinx_fixture('default_search_index') @sock.should_receive(:send).with(expected, 0) @sphinx.Query('query', 'index') rescue nil? end it 'should generate valid request with limits' do expected = sphinx_fixture('limits') @sock.should_receive(:send).with(expected, 0) @sphinx.SetLimits(10, 20) @sphinx.Query('query') rescue nil? end it 'should generate valid request with limits and max number to retrieve' do expected = sphinx_fixture('limits_max') @sock.should_receive(:send).with(expected, 0) @sphinx.SetLimits(10, 20, 30) @sphinx.Query('query') rescue nil? 
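  # NOTE: in these request-building specs the client comes from create_sphinx, which
  # stubs Connect to return a mock socket and stubs GetResponse to raise, so each
  # `Query(...) rescue nil?` only exercises (and asserts on) the bytes written to the
  # socket; the trailing `nil?` is effectively unused and reads as if a plain
  # `rescue nil` was intended.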
end it 'should generate valid request with limits and cutoff to retrieve' do expected = sphinx_fixture('limits_cutoff') @sock.should_receive(:send).with(expected, 0) @sphinx.SetLimits(10, 20, 30, 40) @sphinx.Query('query') rescue nil? end it 'should generate valid request with max query time specified' do expected = sphinx_fixture('max_query_time') @sock.should_receive(:send).with(expected, 0) @sphinx.SetMaxQueryTime(1000) @sphinx.Query('query') rescue nil? end describe 'with match' do [ :all, :any, :phrase, :boolean, :extended, :fullscan, :extended2 ].each do |match| it "should generate valid request for SPH_MATCH_#{match.to_s.upcase}" do expected = sphinx_fixture("match_#{match}") @sock.should_receive(:send).with(expected, 0) @sphinx.SetMatchMode(Sphinx::Client::const_get("SPH_MATCH_#{match.to_s.upcase}")) @sphinx.Query('query') rescue nil? end end end describe 'with rank' do [ :proximity_bm25, :bm25, :none, :wordcount, :proximity ].each do |rank| it "should generate valid request for SPH_RANK_#{rank.to_s.upcase}" do expected = sphinx_fixture("ranking_#{rank}") @sock.should_receive(:send).with(expected, 0) @sphinx.SetRankingMode(Sphinx::Client.const_get("SPH_RANK_#{rank.to_s.upcase}")) @sphinx.Query('query') rescue nil? end end end describe 'with sorting' do [ :attr_desc, :relevance, :attr_asc, :time_segments, :extended, :expr ].each do |mode| it "should generate valid request for SPH_SORT_#{mode.to_s.upcase}" do expected = sphinx_fixture("sort_#{mode}") @sock.should_receive(:send).with(expected, 0) @sphinx.SetSortMode(Sphinx::Client.const_get("SPH_SORT_#{mode.to_s.upcase}"), mode == :relevance ? '' : 'sortby') @sphinx.Query('query') rescue nil? end end end it 'should generate valid request with weights' do expected = sphinx_fixture('weights') @sock.should_receive(:send).with(expected, 0) @sphinx.SetWeights([10, 20, 30, 40]) @sphinx.Query('query') rescue nil? end it 'should generate valid request with field weights' do expected = sphinx_fixture('field_weights') @sock.should_receive(:send).with(expected, 0) @sphinx.SetFieldWeights({'field1' => 10, 'field2' => 20}) @sphinx.Query('query') rescue nil? end it 'should generate valid request with index weights' do expected = sphinx_fixture('index_weights') @sock.should_receive(:send).with(expected, 0) @sphinx.SetIndexWeights({'index1' => 10, 'index2' => 20}) @sphinx.Query('query') rescue nil? end it 'should generate valid request with ID range' do expected = sphinx_fixture('id_range') @sock.should_receive(:send).with(expected, 0) @sphinx.SetIDRange(10, 20) @sphinx.Query('query') rescue nil? end it 'should generate valid request with ID range and 64-bit ints' do expected = sphinx_fixture('id_range64') @sock.should_receive(:send).with(expected, 0) @sphinx.SetIDRange(8589934591, 17179869183) @sphinx.Query('query') rescue nil? end it 'should generate valid request with values filter' do expected = sphinx_fixture('filter') @sock.should_receive(:send).with(expected, 0) @sphinx.SetFilter('attr', [10, 20, 30]) @sphinx.Query('query') rescue nil? end it 'should generate valid request with two values filters' do expected = sphinx_fixture('filters') @sock.should_receive(:send).with(expected, 0) @sphinx.SetFilter('attr2', [40, 50]) @sphinx.SetFilter('attr1', [10, 20, 30]) @sphinx.Query('query') rescue nil? end it 'should generate valid request with values filter excluded' do expected = sphinx_fixture('filter_exclude') @sock.should_receive(:send).with(expected, 0) @sphinx.SetFilter('attr', [10, 20, 30], true) @sphinx.Query('query') rescue nil? 
end it 'should generate valid request with values filter range' do expected = sphinx_fixture('filter_range') @sock.should_receive(:send).with(expected, 0) @sphinx.SetFilterRange('attr', 10, 20) @sphinx.Query('query') rescue nil? end it 'should generate valid request with two filter ranges' do expected = sphinx_fixture('filter_ranges') @sock.should_receive(:send).with(expected, 0) @sphinx.SetFilterRange('attr2', 30, 40) @sphinx.SetFilterRange('attr1', 10, 20) @sphinx.Query('query') rescue nil? end it 'should generate valid request with filter range excluded' do expected = sphinx_fixture('filter_range_exclude') @sock.should_receive(:send).with(expected, 0) @sphinx.SetFilterRange('attr', 10, 20, true) @sphinx.Query('query') rescue nil? end it 'should generate valid request with signed int64-based filter range' do expected = sphinx_fixture('filter_range_int64') @sock.should_receive(:send).with(expected, 0) @sphinx.SetFilterRange('attr1', -10, 20) @sphinx.SetFilterRange('attr2', -1099511627770, 1099511627780) safe_call { @sphinx.Query('query') } end it 'should generate valid request with float filter range' do expected = sphinx_fixture('filter_float_range') @sock.should_receive(:send).with(expected, 0) @sphinx.SetFilterFloatRange('attr', 10.5, 20.3) @sphinx.Query('query') rescue nil? end it 'should generate valid request with float filter excluded' do expected = sphinx_fixture('filter_float_range_exclude') @sock.should_receive(:send).with(expected, 0) @sphinx.SetFilterFloatRange('attr', 10.5, 20.3, true) @sphinx.Query('query') rescue nil? end it 'should generate valid request with different filters' do expected = sphinx_fixture('filters_different') @sock.should_receive(:send).with(expected, 0) @sphinx.SetFilterRange('attr1', 10, 20, true) @sphinx.SetFilter('attr3', [30, 40, 50]) @sphinx.SetFilterRange('attr1', 60, 70) @sphinx.SetFilter('attr2', [80, 90, 100], true) @sphinx.SetFilterFloatRange('attr1', 60.8, 70.5) @sphinx.Query('query') rescue nil? end it 'should generate valid request with geographical anchor point' do expected = sphinx_fixture('geo_anchor') @sock.should_receive(:send).with(expected, 0) @sphinx.SetGeoAnchor('attrlat', 'attrlong', 20.3, 40.7) @sphinx.Query('query') rescue nil? end describe 'with group by' do [ :day, :week, :month, :year, :attr, :attrpair ].each do |groupby| it "should generate valid request for SPH_GROUPBY_#{groupby.to_s.upcase}" do expected = sphinx_fixture("group_by_#{groupby}") @sock.should_receive(:send).with(expected, 0) @sphinx.SetGroupBy('attr', Sphinx::Client::const_get("SPH_GROUPBY_#{groupby.to_s.upcase}")) @sphinx.Query('query') rescue nil? end end it 'should generate valid request for SPH_GROUPBY_DAY with sort' do expected = sphinx_fixture('group_by_day_sort') @sock.should_receive(:send).with(expected, 0) @sphinx.SetGroupBy('attr', Sphinx::Client::SPH_GROUPBY_DAY, 'somesort') @sphinx.Query('query') rescue nil? end it 'should generate valid request with count-distinct attribute' do expected = sphinx_fixture('group_distinct') @sock.should_receive(:send).with(expected, 0) @sphinx.SetGroupBy('attr', Sphinx::Client::SPH_GROUPBY_DAY) @sphinx.SetGroupDistinct('attr') @sphinx.Query('query') rescue nil? end end it 'should generate valid request with retries count specified' do expected = sphinx_fixture('retries') @sock.should_receive(:send).with(expected, 0) @sphinx.SetRetries(10) @sphinx.Query('query') rescue nil? 
end it 'should generate valid request with retries count and delay specified' do expected = sphinx_fixture('retries_delay') @sock.should_receive(:send).with(expected, 0) @sphinx.SetRetries(10, 20) @sphinx.Query('query') rescue nil? end it 'should generate valid request for SetOverride' do expected = sphinx_fixture('set_override') @sock.should_receive(:send).with(expected, 0) @sphinx.SetOverride('attr1', Sphinx::Client::SPH_ATTR_INTEGER, { 10 => 20 }) @sphinx.SetOverride('attr2', Sphinx::Client::SPH_ATTR_FLOAT, { 11 => 30.3 }) @sphinx.SetOverride('attr3', Sphinx::Client::SPH_ATTR_BIGINT, { 12 => 1099511627780 }) @sphinx.Query('query') rescue nil? end it 'should generate valid request for SetSelect' do expected = sphinx_fixture('select') @sock.should_receive(:send).with(expected, 0) @sphinx.SetSelect('attr1, attr2') @sphinx.Query('query') rescue nil? end end describe 'The RunQueries method of Sphinx::Client' do include SphinxFixtureHelper before(:each) do @sphinx = Sphinx::Client.new @sock = mock('TCPSocket') @sphinx.stub!(:Connect).and_return(@sock) @sphinx.stub!(:GetResponse).and_raise(Sphinx::SphinxError) end it 'should generate valid request for multiple queries' do expected = sphinx_fixture('miltiple_queries') @sock.should_receive(:send).with(expected, 0) @sphinx.SetRetries(10, 20) @sphinx.AddQuery('test1') @sphinx.SetGroupBy('attr', Sphinx::Client::SPH_GROUPBY_DAY) @sphinx.AddQuery('test2') rescue nil? @sphinx.RunQueries rescue nil? end end describe 'The BuildExcerpts method of Sphinx::Client' do include SphinxFixtureHelper before(:each) do @sphinx = Sphinx::Client.new @sock = mock('TCPSocket') @sphinx.stub!(:Connect).and_return(@sock) @sphinx.stub!(:GetResponse).and_raise(Sphinx::SphinxError) end it 'should generate valid request with default parameters' do expected = sphinx_fixture('excerpt_default') @sock.should_receive(:send).with(expected, 0) @sphinx.BuildExcerpts(['10', '20'], 'index', 'word1 word2') rescue nil? end it 'should generate valid request with custom parameters' do expected = sphinx_fixture('excerpt_custom') @sock.should_receive(:send).with(expected, 0) @sphinx.BuildExcerpts(['10', '20'], 'index', 'word1 word2', { 'before_match' => 'before', 'after_match' => 'after', 'chunk_separator' => 'separator', 'limit' => 10 }) rescue nil? end it 'should generate valid request with flags' do expected = sphinx_fixture('excerpt_flags') @sock.should_receive(:send).with(expected, 0) @sphinx.BuildExcerpts(['10', '20'], 'index', 'word1 word2', { 'exact_phrase' => true, 'single_passage' => true, 'use_boundaries' => true, 'weight_order' => true }) rescue nil? 
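  # NOTE: the RunQueries spec above references its fixture as 'miltiple_queries'; the
  # spelling matches the fixture file actually shipped in this archive
  # (spec/fixtures/miltiple_queries.php), so the name should only ever be changed in
  # both places together.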
end end describe 'The BuildKeywords method of Sphinx::Client' do include SphinxFixtureHelper include SphinxApiCall before(:each) do @sphinx = create_sphinx end it 'should generate valid request' do expected = sphinx_fixture('keywords') @sock.should_receive(:send).with(expected, 0) safe_call { @sphinx.BuildKeywords('test', 'index', true) } end end describe 'The UpdateAttributes method of Sphinx::Client' do include SphinxFixtureHelper include SphinxApiCall before(:each) do @sphinx = create_sphinx end it 'should generate valid request' do expected = sphinx_fixture('update_attributes') @sock.should_receive(:send).with(expected, 0) safe_call { @sphinx.UpdateAttributes('index', ['group'], { 123 => [456] }) } end it 'should generate valid request for MVA' do expected = sphinx_fixture('update_attributes_mva') @sock.should_receive(:send).with(expected, 0) safe_call { @sphinx.UpdateAttributes('index', ['group', 'category'], { 123 => [ [456, 789], [1, 2, 3] ] }, true) } end end sphinx-2.0.4-release/api/ruby/spec/client_response_spec.rb0000644000176700017710000001114011062775577023200 0ustar deogardeogarrequire File.dirname(__FILE__) + '/../init' # To execute these tests you need to execute sphinx_test.sql and configure sphinx using sphinx.conf # (both files are placed under sphinx directory) context 'The SphinxApi connected to Sphinx' do setup do @sphinx = Sphinx::Client.new end specify 'should parse response in Query method' do result = @sphinx.Query('wifi', 'test1') validate_results_wifi(result) end specify 'should process 64-bit keys in Query method' do result = @sphinx.Query('wifi', 'test2') result['total_found'].should == 3 result['matches'].length.should == 3 result['matches'][0]['id'].should == 4294967298 result['matches'][1]['id'].should == 4294967299 result['matches'][2]['id'].should == 4294967297 end specify 'should parse batch-query responce in RunQueries method' do @sphinx.AddQuery('wifi', 'test1') @sphinx.AddQuery('gprs', 'test1') results = @sphinx.RunQueries results.should be_an_instance_of(Array) results.length.should == 2 validate_results_wifi(results[0]) end specify 'should parse response in BuildExcerpts method' do result = @sphinx.BuildExcerpts(['what the world', 'London is the capital of Great Britain'], 'test1', 'the') result.should == ['what the world', 'London is the capital of Great Britain'] end specify 'should parse response in BuildKeywords method' do result = @sphinx.BuildKeywords('wifi gprs', 'test1', true) result.should == [ { 'normalized' => 'wifi', 'tokenized' => 'wifi', 'hits' => 6, 'docs' => 3 }, { 'normalized' => 'gprs', 'tokenized' => 'gprs', 'hits' => 1, 'docs' => 1 } ] end specify 'should parse response in UpdateAttributes method' do @sphinx.UpdateAttributes('test1', ['group_id'], { 2 => [1] }).should == 1 result = @sphinx.Query('wifi', 'test1') result['matches'][0]['attrs']['group_id'].should == 1 @sphinx.UpdateAttributes('test1', ['group_id'], { 2 => [2] }).should == 1 result = @sphinx.Query('wifi', 'test1') result['matches'][0]['attrs']['group_id'].should == 2 end specify 'should parse response in UpdateAttributes method with MVA' do @sphinx.UpdateAttributes('test1', ['tags'], { 2 => [[1, 2, 3, 4, 5, 6, 7, 8, 9]] }, true).should == 1 result = @sphinx.Query('wifi', 'test1') result['matches'][0]['attrs']['tags'].should == [1, 2, 3, 4, 5, 6, 7, 8, 9] @sphinx.UpdateAttributes('test1', ['tags'], { 2 => [[5, 6, 7, 8]] }, true).should == 1 result = @sphinx.Query('wifi', 'test1') result['matches'][0]['attrs']['tags'].should == [5, 6, 7, 8] end specify 'should process 
errors in Query method' do @sphinx.Query('wifi', 'fakeindex').should be_false @sphinx.GetLastError.length.should_not == 0 end specify 'should process errors in RunQueries method' do @sphinx.AddQuery('wifi', 'fakeindex') r = @sphinx.RunQueries r[0]['error'].length.should_not == 0 end def validate_results_wifi(result) result['total_found'].should == 3 result['matches'].length.should == 3 result['time'].should_not be_nil result['attrs'].should == { 'group_id' => Sphinx::Client::SPH_ATTR_INTEGER, 'created_at' => Sphinx::Client::SPH_ATTR_TIMESTAMP, 'rating' => Sphinx::Client::SPH_ATTR_FLOAT, 'tags' => Sphinx::Client::SPH_ATTR_MULTI | Sphinx::Client::SPH_ATTR_INTEGER } result['fields'].should == [ 'name', 'description' ] result['total'].should == 3 result['matches'].should be_an_instance_of(Array) result['matches'][0]['id'].should == 2 result['matches'][0]['weight'].should == 2 result['matches'][0]['attrs']['group_id'].should == 2 result['matches'][0]['attrs']['created_at'].should == 1175658555 result['matches'][0]['attrs']['tags'].should == [5, 6, 7, 8] ('%0.2f' % result['matches'][0]['attrs']['rating']).should == '54.85' result['matches'][1]['id'].should == 3 result['matches'][1]['weight'].should == 2 result['matches'][1]['attrs']['group_id'].should == 1 result['matches'][1]['attrs']['created_at'].should == 1175658647 result['matches'][1]['attrs']['tags'].should == [1, 7, 9, 10] ('%0.2f' % result['matches'][1]['attrs']['rating']).should == '16.25' result['matches'][2]['id'].should == 1 result['matches'][2]['weight'].should == 1 result['matches'][2]['attrs']['group_id'].should == 1 result['matches'][2]['attrs']['created_at'].should == 1175658490 result['matches'][2]['attrs']['tags'].should == [1, 2, 3, 4] ('%0.2f' % result['matches'][2]['attrs']['rating']).should == '13.32' result['words'].should == { 'wifi' => { 'hits' => 6, 'docs' => 3 } } end end sphinx-2.0.4-release/api/ruby/spec/sphinx/0000755000176700017710000000000011724063141017737 5ustar deogardeogarsphinx-2.0.4-release/api/ruby/spec/sphinx/sphinx.conf0000644000176700017710000000352211301034527022116 0ustar deogardeogarsource src1 { type = mysql sql_host = localhost sql_user = root sql_pass = sql_db = sphinx_test sql_port = 3306 sql_query = SELECT id, name, description, UNIX_TIMESTAMP(created_at) AS created_at, group_id, rating FROM links sql_attr_uint = group_id sql_attr_timestamp = created_at sql_attr_float = rating sql_attr_multi = uint tags from query; SELECT link_id, tag_id FROM links_tags sql_query_info = SELECT * FROM links WHERE id=$id } source src2 { type = mysql sql_host = localhost sql_user = root sql_pass = sql_db = sphinx_test sql_port = 3306 sql_query = SELECT id, name, description, UNIX_TIMESTAMP(created_at) AS created_at, group_id FROM links64 sql_attr_uint = group_id sql_attr_timestamp = created_at sql_query_info = SELECT * FROM links WHERE id=$id } index test1 { source = src1 path = /opt/sphinx-0.9.9/var/data/test1 docinfo = extern morphology = none stopwords = charset_type = utf-8 } index test2 { source = src2 path = /opt/sphinx-0.9.9/var/data/test2 docinfo = extern morphology = none stopwords = charset_type = utf-8 } indexer { mem_limit = 32M } searchd { port = 9312 log = /opt/sphinx-0.9.9/var/log/searchd.log query_log = /opt/sphinx-0.9.9/var/log/query.log read_timeout = 5 max_children = 30 pid_file = /opt/sphinx-0.9.9/var/log/searchd.pid max_matches = 1000 } sphinx-2.0.4-release/api/ruby/spec/sphinx/sphinx_test.sql0000644000176700017710000000706211011104230023015 0ustar deogardeogar/* SQLyog Enterprise - MySQL GUI 
v5.20 Host - 5.0.27-community-nt : Database - sphinx_test ********************************************************************* Server version : 5.0.27-community-nt */ SET NAMES utf8; SET SQL_MODE=''; CREATE database IF NOT EXISTS `sphinx_test`; USE `sphinx_test`; /* Table structure for table `links` */ DROP TABLE IF EXISTS `links`; CREATE TABLE `links` ( `id` INT(11) NOT NULL auto_increment, `name` VARCHAR(255) NOT NULL, `created_at` DATETIME NOT NULL, `description` TEXT, `group_id` INT(11) NOT NULL, `rating` FLOAT NOT NULL, PRIMARY KEY (`id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; /* Table structure for table `tags` */ DROP TABLE IF EXISTS `tags`; CREATE TABLE `tags` ( `id` INT(11) NOT NULL auto_increment, `tag` VARCHAR(255) NOT NULL, PRIMARY KEY (`id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; /* Table structure for table `links_tags` */ DROP TABLE IF EXISTS `links_tags`; CREATE TABLE `links_tags` ( `link_id` INT(11) NOT NULL, `tag_id` INT(11) NOT NULL, PRIMARY KEY (`link_id`,`tag_id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; /* Table structure for table `links64` */ DROP TABLE IF EXISTS `links64`; CREATE TABLE `links64` ( `id` BIGINT(11) NOT NULL auto_increment, `name` VARCHAR(255) NOT NULL, `created_at` DATETIME NOT NULL, `description` TEXT, `group_id` INT(11) NOT NULL, PRIMARY KEY (`id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; /* Data for the table `links` */ INSERT INTO `links`(`id`,`name`,`created_at`,`description`,`group_id`,`rating`) VALUES (1,'Paint Protects WiFi Network from Hackers','2007-04-04 06:48:10','A company known as SEC Technologies has created a special type of paint that blocks Wi-Fi signals so that you can be sure hackers can ',1,13.32), (2,'Airplanes To Become WiFi Hotspots','2007-04-04 06:49:15','Airlines will start turning their airplanes into WiFi hotspots beginning early next year, WSJ reports. Here\'s what you need to know...',2,54.85), (3,'Planet VIP-195 GSM/WiFi Phone With Windows Messanger','2007-04-04 06:50:47','The phone does comply with IEEE 802.11b and IEEE 802.11g to provide phone capability via WiFi. As GSM phone the VIP-195 support 900/1800/1900 band and GPRS too. It comes with simple button to switch between WiFi or GSM mod',1,16.25); /* Data for the table `tags` */ INSERT INTO `tags`(`id`,`tag`) VALUES (1, 'tag1'),(2, 'tag2'),(3, 'tag3'),(4, 'tag4'),(5, 'tag5'), (6, 'tag6'),(7, 'tag7'),(8, 'tag8'),(9, 'tag9'),(10, 'tag5'); /* Data for the table `links_tags` */ INSERT INTO `links_tags`(`link_id`,`tag_id`) VALUES (1, 1),(1, 2),(1, 3),(1, 4), (2, 5),(2, 6),(2, 7),(2, 8), (3, 9),(3, 1),(3, 7),(3, 10); /* Data for the table `links64` */ INSERT INTO `links64`(`id`,`name`,`created_at`,`description`,`group_id`) VALUES (4294967297,'Paint Protects WiFi Network from Hackers','2007-04-04 06:48:10','A company known as SEC Technologies has created a special type of paint that blocks Wi-Fi signals so that you can be sure hackers can ',1), (4294967298,'Airplanes To Become WiFi Hotspots','2007-04-04 06:49:15','Airlines will start turning their airplanes into WiFi hotspots beginning early next year, WSJ reports. Here\'s what you need to know...',2), (4294967299,'Planet VIP-195 GSM/WiFi Phone With Windows Messanger','2007-04-04 06:50:47','The phone does comply with IEEE 802.11b and IEEE 802.11g to provide phone capability via WiFi. As GSM phone the VIP-195 support 900/1800/1900 band and GPRS too. 
It comes with simple button to switch between WiFi or GSM mod',1); sphinx-2.0.4-release/api/ruby/spec/sphinx/sphinx-id64.conf0000644000176700017710000000355311301034527022666 0ustar deogardeogarsource src1 { type = mysql sql_host = localhost sql_user = root sql_pass = sql_db = sphinx_test sql_port = 3306 sql_query = SELECT id, name, description, UNIX_TIMESTAMP(created_at) AS created_at, group_id, rating FROM links sql_attr_uint = group_id sql_attr_timestamp = created_at sql_attr_float = rating sql_attr_multi = uint tags from query; SELECT link_id, tag_id FROM links_tags sql_query_info = SELECT * FROM links WHERE id=$id } source src2 { type = mysql sql_host = localhost sql_user = root sql_pass = sql_db = sphinx_test sql_port = 3306 sql_query = SELECT id, name, description, UNIX_TIMESTAMP(created_at) AS created_at, group_id FROM links64 sql_attr_uint = group_id sql_attr_timestamp = created_at sql_query_info = SELECT * FROM links WHERE id=$id } index test1 { source = src1 path = /opt/sphinx-0.9.9-id64/var/data/test1 docinfo = extern morphology = none stopwords = charset_type = utf-8 } index test2 { source = src2 path = /opt/sphinx-0.9.9-id64/var/data/test2 docinfo = extern morphology = none stopwords = charset_type = utf-8 } indexer { mem_limit = 32M } searchd { port = 9312 log = /opt/sphinx-0.9.9-id64/var/log/searchd.log query_log = /opt/sphinx-0.9.9-id64/var/log/query.log read_timeout = 5 max_children = 30 pid_file = /opt/sphinx-0.9.9-id64/var/log/searchd.pid max_matches = 1000 } sphinx-2.0.4-release/api/ruby/install.rb0000644000176700017710000000037711006762325017501 0ustar deogardeogarrequire 'fileutils' sphinx_config = File.dirname(__FILE__) + '/../../../config/sphinx.yml' FileUtils.cp File.dirname(__FILE__) + '/sphinx.yml.tpl', sphinx_config unless File.exist?(sphinx_config) puts IO.read(File.join(File.dirname(__FILE__), 'README')) sphinx-2.0.4-release/api/java/0000755000176700017710000000000011724063141015434 5ustar deogardeogarsphinx-2.0.4-release/api/java/SphinxResult.java0000644000176700017710000000306710760273070020760 0ustar deogardeogar/* * $Id: SphinxResult.java 1172 2008-02-24 13:50:48Z shodan $ */ package org.sphx.api; /** * Search result set. * * Includes retrieved matches array, status code and error/warning messages, * query stats, and per-word stats. */ public class SphinxResult { /** Full-text field namess. */ public String[] fields; /** Attribute names. */ public String[] attrNames; /** Attribute types (refer to SPH_ATTR_xxx constants in SphinxClient). */ public int[] attrTypes; /** Retrieved matches. */ public SphinxMatch[] matches; /** Total matches in this result set. */ public int total; /** Total matches found in the index(es). */ public int totalFound; /** Elapsed time (as reported by searchd), in seconds. */ public float time; /** Per-word statistics. */ public SphinxWordInfo[] words; /** Warning message, if any. */ public String warning = null; /** Error message, if any. */ public String error = null; /** Query status (refer to SEARCHD_xxx constants in SphinxClient). */ private int status = -1; /** Trivial constructor, initializes an empty result set. */ public SphinxResult() { this.attrNames = new String[0]; this.matches = new SphinxMatch[0];; this.words = new SphinxWordInfo[0]; this.fields = new String[0]; this.attrTypes = new int[0]; } /** Get query status. */ public int getStatus() { return status; } /** Set query status (accessible from API package only). 
*/ void setStatus ( int status ) { this.status = status; } } /* * $Id: SphinxResult.java 1172 2008-02-24 13:50:48Z shodan $ */ sphinx-2.0.4-release/api/java/MANIFEST.MF0000644000176700017710000000006410716376077017105 0ustar deogardeogarManifest-Version: 1.0 Main-Class: org.sphx.api.test sphinx-2.0.4-release/api/java/SphinxMatch.java0000644000176700017710000000115210760273070020527 0ustar deogardeogar/* * $Id: SphinxMatch.java 1172 2008-02-24 13:50:48Z shodan $ */ package org.sphx.api; import java.util.*; /** * Matched document information, as in search result. */ public class SphinxMatch { /** Matched document ID. */ public long docId; /** Matched document weight. */ public int weight; /** Matched document attribute values. */ public ArrayList attrValues; /** Trivial constructor. */ public SphinxMatch ( long docId, int weight ) { this.docId = docId; this.weight = weight; this.attrValues = new ArrayList(); } } /* * $Id: SphinxMatch.java 1172 2008-02-24 13:50:48Z shodan $ */ sphinx-2.0.4-release/api/java/mk.cmd0000644000176700017710000000012410716627560016540 0ustar deogardeogar@echo off javac -cp . -d . *.java jar cfm sphinxapi.jar MANIFEST.MF org/sphx/api sphinx-2.0.4-release/api/java/SphinxWordInfo.java0000644000176700017710000000121410760273070021221 0ustar deogardeogar/* * $Id: SphinxWordInfo.java 1172 2008-02-24 13:50:48Z shodan $ */ package org.sphx.api; /** Per-word statistics class. */ public class SphinxWordInfo { /** Word form as returned from search daemon, stemmed or otherwise postprocessed. */ public String word; /** Total amount of matching documents in collection. */ public long docs; /** Total amount of hits (occurences) in collection. */ public long hits; /** Trivial constructor. */ public SphinxWordInfo ( String word, long docs, long hits ) { this.word = word; this.docs = docs; this.hits = hits; } } /* * $Id: SphinxWordInfo.java 1172 2008-02-24 13:50:48Z shodan $ */ sphinx-2.0.4-release/api/java/SphinxClient.java0000644000176700017710000012676211723657702020740 0ustar deogardeogar/* * $Id: SphinxClient.java 3132 2012-03-01 11:38:42Z klirichek $ * * Java version of Sphinx searchd client (Java API) * * Copyright (c) 2007, Vladimir Fedorkov * Copyright (c) 2007-2012, Andrew Aksyonoff * Copyright (c) 2008-2012, Sphinx Technologies Inc * All rights reserved * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License. 
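/*
 * Illustrative sketch (not part of the distribution): how SphinxResult, SphinxMatch
 * and SphinxWordInfo above fit together when reading a query result. The index name
 * "test1" and the query string are placeholders; the searchd location uses the API
 * default (localhost:9312).
 */
import org.sphx.api.*;

public class ResultWalkthrough
{
	public static void main ( String[] argv ) throws SphinxException
	{
		SphinxClient cl = new SphinxClient ();			/* defaults to localhost:9312 */
		SphinxResult res = cl.Query ( "wifi", "test1" );	/* null on failure */
		if ( res==null )
		{
			System.err.println ( "query failed: " + cl.GetLastError() );
			return;
		}
		System.out.println ( res.total + " of " + res.totalFound + " matches in " + res.time + " sec" );
		for ( int i=0; i<res.matches.length; i++ )
		{
			SphinxMatch m = res.matches[i];
			System.out.println ( "doc=" + m.docId + ", weight=" + m.weight + ", attrs=" + m.attrValues );
		}
		for ( int i=0; i<res.words.length; i++ )
		{
			SphinxWordInfo w = res.words[i];
			System.out.println ( "'" + w.word + "' found in " + w.docs + " docs, " + w.hits + " hits" );
		}
	}
}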
You should have * received a copy of the GPL license along with this program; if you * did not, you can find it at http://www.gnu.org/ */ package org.sphx.api; import java.io.*; import java.net.*; import java.util.*; import java.net.SocketAddress.*; /** Sphinx client class */ public class SphinxClient { /* matching modes */ public final static int SPH_MATCH_ALL = 0; public final static int SPH_MATCH_ANY = 1; public final static int SPH_MATCH_PHRASE = 2; public final static int SPH_MATCH_BOOLEAN = 3; public final static int SPH_MATCH_EXTENDED = 4; public final static int SPH_MATCH_FULLSCAN = 5; public final static int SPH_MATCH_EXTENDED2 = 6; /* ranking modes (extended2 only) */ public final static int SPH_RANK_PROXIMITY_BM25 = 0; public final static int SPH_RANK_BM25 = 1; public final static int SPH_RANK_NONE = 2; public final static int SPH_RANK_WORDCOUNT = 3; public final static int SPH_RANK_PROXIMITY = 4; public final static int SPH_RANK_MATCHANY = 5; public final static int SPH_RANK_FIELDMASK = 6; public final static int SPH_RANK_SPH04 = 7; public final static int SPH_RANK_EXPR = 8; public final static int SPH_RANK_TOTAL = 9; /* sorting modes */ public final static int SPH_SORT_RELEVANCE = 0; public final static int SPH_SORT_ATTR_DESC = 1; public final static int SPH_SORT_ATTR_ASC = 2; public final static int SPH_SORT_TIME_SEGMENTS = 3; public final static int SPH_SORT_EXTENDED = 4; public final static int SPH_SORT_EXPR = 5; /* grouping functions */ public final static int SPH_GROUPBY_DAY = 0; public final static int SPH_GROUPBY_WEEK = 1; public final static int SPH_GROUPBY_MONTH = 2; public final static int SPH_GROUPBY_YEAR = 3; public final static int SPH_GROUPBY_ATTR = 4; public final static int SPH_GROUPBY_ATTRPAIR = 5; /* searchd reply status codes */ public final static int SEARCHD_OK = 0; public final static int SEARCHD_ERROR = 1; public final static int SEARCHD_RETRY = 2; public final static int SEARCHD_WARNING = 3; /* attribute types */ public final static int SPH_ATTR_INTEGER = 1; public final static int SPH_ATTR_TIMESTAMP = 2; public final static int SPH_ATTR_ORDINAL = 3; public final static int SPH_ATTR_BOOL = 4; public final static int SPH_ATTR_FLOAT = 5; public final static int SPH_ATTR_BIGINT = 6; public final static int SPH_ATTR_STRING = 7; public final static int SPH_ATTR_MULTI = 0x40000001; public final static int SPH_ATTR_MULTI64 = 0x40000002; /* searchd commands */ private final static int SEARCHD_COMMAND_SEARCH = 0; private final static int SEARCHD_COMMAND_EXCERPT = 1; private final static int SEARCHD_COMMAND_UPDATE = 2; private final static int SEARCHD_COMMAND_KEYWORDS = 3; private final static int SEARCHD_COMMAND_PERSIST = 4; private final static int SEARCHD_COMMAND_FLUSHATTRS = 7; /* searchd command versions */ private final static int VER_MAJOR_PROTO = 0x1; private final static int VER_COMMAND_SEARCH = 0x119; private final static int VER_COMMAND_EXCERPT = 0x102; private final static int VER_COMMAND_UPDATE = 0x102; private final static int VER_COMMAND_KEYWORDS = 0x100; private final static int VER_COMMAND_FLUSHATTRS = 0x100; /* filter types */ private final static int SPH_FILTER_VALUES = 0; private final static int SPH_FILTER_RANGE = 1; private final static int SPH_FILTER_FLOATRANGE = 2; private String _host; private int _port; private String _path; private Socket _socket; private int _offset; private int _limit; private int _mode; private int[] _weights; private int _sort; private String _sortby; private int _minId; private int _maxId; private 
ByteArrayOutputStream _rawFilters; private DataOutputStream _filters; private int _filterCount; private String _groupBy; private int _groupFunc; private String _groupSort; private String _groupDistinct; private int _maxMatches; private int _cutoff; private int _retrycount; private int _retrydelay; private String _latitudeAttr; private String _longitudeAttr; private float _latitude; private float _longitude; private String _error; private String _warning; private boolean _connerror; private int _timeout; private ArrayList _reqs; private Map _indexWeights; private int _ranker; private int _maxQueryTime; private Map _fieldWeights; private Map _overrideTypes; private Map _overrideValues; private String _select; /** Creates a new SphinxClient instance. */ public SphinxClient() { this("localhost", 9312); } /** Creates a new SphinxClient instance, with host:port specification. */ public SphinxClient(String host, int port) { _host = host; _port = port; _path = null; _socket = null; _offset = 0; _limit = 20; _mode = SPH_MATCH_ALL; _sort = SPH_SORT_RELEVANCE; _sortby = ""; _minId = 0; _maxId = 0; _filterCount = 0; _rawFilters = new ByteArrayOutputStream(); _filters = new DataOutputStream(_rawFilters); _groupBy = ""; _groupFunc = SPH_GROUPBY_DAY; _groupSort = "@group desc"; _groupDistinct = ""; _maxMatches = 1000; _cutoff = 0; _retrycount = 0; _retrydelay = 0; _latitudeAttr = null; _longitudeAttr = null; _latitude = 0; _longitude = 0; _error = ""; _warning = ""; _connerror = false; _timeout = 1000; _reqs = new ArrayList(); _weights = null; _indexWeights = new LinkedHashMap(); _fieldWeights = new LinkedHashMap(); _ranker = SPH_RANK_PROXIMITY_BM25; _rankexpr = ""; _overrideTypes = new LinkedHashMap(); _overrideValues = new LinkedHashMap(); _select = "*"; } /** Get last error message, if any. */ public String GetLastError() { return _error; } /** Get last warning message, if any. */ public String GetLastWarning() { return _warning; } /** Get last error flag (to tell network connection errors from searchd errors or broken responses). */ public boolean IsConnectError() { return _connerror; } /** Set searchd host and port to connect to. */ public void SetServer(String host, int port) throws SphinxException { myAssert ( host!=null && host.length()>0, "host name must not be empty" ); myAssert ( port>0 && port<65536, "port must be in 1..65535 range" ); _host = host; _port = port; } /** Set server connection timeout (0 to remove), in milliseconds. */ public void SetConnectTimeout ( int timeout ) { _timeout = Math.max ( timeout, 0 ); } /** Internal method. Sanity check. */ private void myAssert ( boolean condition, String err ) throws SphinxException { if ( !condition ) { _error = err; throw new SphinxException ( err ); } } /** Internal method. String IO helper. */ private static void writeNetUTF8 ( DataOutputStream ostream, String str ) throws IOException { if ( str==null ) { ostream.writeInt ( 0 ); return; } byte[] sBytes = str.getBytes ( "UTF-8" ); int iLen = sBytes.length; ostream.writeInt ( iLen ); ostream.write ( sBytes ); } /** Internal method. String IO helper. */ private static String readNetUTF8(DataInputStream istream) throws IOException { int iLen = istream.readInt(); byte[] sBytes = new byte [ iLen ]; istream.readFully ( sBytes ); return new String ( sBytes, "UTF-8"); } /** Internal method. Unsigned int IO helper. */ private static long readDword ( DataInputStream istream ) throws IOException { long v = (long) istream.readInt (); if ( v<0 ) v += 4294967296L; return v; } /** Internal method. 
Connect to searchd and exchange versions. */ private Socket _Connect() { if ( _socket!=null ) return _socket; _connerror = false; Socket sock = null; try { sock = new Socket (); sock.setSoTimeout ( _timeout ); InetSocketAddress addr = new InetSocketAddress ( _host, _port ); sock.connect ( addr, _timeout ); DataInputStream sIn = new DataInputStream ( sock.getInputStream() ); int version = sIn.readInt(); if ( version<1 ) { sock.close (); _error = "expected searchd protocol version 1+, got version " + version; return null; } DataOutputStream sOut = new DataOutputStream ( sock.getOutputStream() ); sOut.writeInt ( VER_MAJOR_PROTO ); } catch ( IOException e ) { _error = "connection to " + _host + ":" + _port + " failed: " + e; _connerror = true; try { if ( sock!=null ) sock.close (); } catch ( IOException e1 ) {} return null; } return sock; } /** Internal method. Get and check response packet from searchd. */ private byte[] _GetResponse ( Socket sock ) { /* connect */ DataInputStream sIn = null; InputStream SockInput = null; try { SockInput = sock.getInputStream(); sIn = new DataInputStream ( SockInput ); } catch ( IOException e ) { _error = "getInputStream() failed: " + e; return null; } /* read response */ byte[] response = null; short status = 0, ver = 0; int len = 0; try { /* read status fields */ status = sIn.readShort(); ver = sIn.readShort(); len = sIn.readInt(); /* read response if non-empty */ if ( len<=0 ) { _error = "invalid response packet size (len=" + len + ")"; return null; } response = new byte[len]; sIn.readFully ( response, 0, len ); /* check status */ if ( status==SEARCHD_WARNING ) { DataInputStream in = new DataInputStream ( new ByteArrayInputStream ( response ) ); int iWarnLen = in.readInt (); _warning = new String ( response, 4, iWarnLen ); System.arraycopy ( response, 4+iWarnLen, response, 0, response.length-4-iWarnLen ); } else if ( status==SEARCHD_ERROR ) { _error = "searchd error: " + new String ( response, 4, response.length-4 ); return null; } else if ( status==SEARCHD_RETRY ) { _error = "temporary searchd error: " + new String ( response, 4, response.length-4 ); return null; } else if ( status!=SEARCHD_OK ) { _error = "searched returned unknown status, code=" + status; return null; } } catch ( IOException e ) { if ( len!=0 ) { /* get trace, to provide even more failure details */ PrintWriter ew = new PrintWriter ( new StringWriter() ); e.printStackTrace ( ew ); ew.flush (); ew.close (); String sTrace = ew.toString (); /* build error message */ _error = "failed to read searchd response (status=" + status + ", ver=" + ver + ", len=" + len + ", trace=" + sTrace +")"; } else { _error = "received zero-sized searchd response (searchd crashed?): " + e.getMessage(); } return null; } finally { if ( _socket==null ) { try { if ( sIn!=null ) sIn.close(); if ( sock!=null && !sock.isConnected() ) sock.close(); } catch ( IOException e ) { /* silently ignore close failures; nothing could be done anyway */ } } } return response; } /** Internal method. Connect to searchd, send request, get response as DataInputStream. 
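/*
 * Condensed sketch of the searchd wire framing used by _Connect(), _DoRequest() and
 * _GetResponse(): a 4-byte protocol-version handshake each way, then an 8-byte header
 * per message. Illustrative only; assumes `out`/`in` are streams of an already-connected
 * socket, it is not a replacement for the methods above.
 */
import java.io.*;

class SearchdFraming
{
	/* request header, as written by _DoRequest(): command, command version, body length */
	static void sendRequest ( DataOutputStream out, int command, int version, byte[] body ) throws IOException
	{
		out.writeShort ( command );	/* e.g. SEARCHD_COMMAND_SEARCH */
		out.writeShort ( version );	/* e.g. VER_COMMAND_SEARCH */
		out.writeInt ( body.length );
		out.write ( body );
	}

	/* reply header, as read by _GetResponse(): status, version, payload length */
	static byte[] readReply ( DataInputStream in ) throws IOException
	{
		short status = in.readShort ();	/* SEARCHD_OK, _ERROR, _RETRY or _WARNING */
		short ver = in.readShort ();	/* command version echoed by the server (unused here) */
		int len = in.readInt ();	/* must be positive */
		byte[] payload = new byte [ len ];
		in.readFully ( payload );
		/* SEARCHD_WARNING payloads start with an int-length-prefixed warning string; */
		/* SEARCHD_ERROR and SEARCHD_RETRY payloads carry the error message instead of results */
		return payload;
	}
}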
*/ private DataInputStream _DoRequest ( int command, int version, ByteArrayOutputStream req ) { /* connect */ Socket sock = _Connect(); if ( sock==null ) return null; /* send request */ byte[] reqBytes = req.toByteArray(); try { DataOutputStream sockDS = new DataOutputStream ( sock.getOutputStream() ); sockDS.writeShort ( command ); sockDS.writeShort ( version ); sockDS.writeInt ( reqBytes.length ); sockDS.write ( reqBytes ); } catch ( Exception e ) { _error = "network error: " + e; _connerror = true; return null; } /* get response */ byte[] response = _GetResponse ( sock ); if ( response==null ) return null; /* spawn that tampon */ return new DataInputStream ( new ByteArrayInputStream ( response ) ); } /** Set matches offset and limit to return to client, max matches to retrieve on server, and cutoff. */ public void SetLimits ( int offset, int limit, int max, int cutoff ) throws SphinxException { myAssert ( offset>=0, "offset must not be negative" ); myAssert ( limit>0, "limit must be positive" ); myAssert ( max>0, "max must be positive" ); myAssert ( cutoff>=0, "cutoff must not be negative" ); _offset = offset; _limit = limit; _maxMatches = max; _cutoff = cutoff; } /** Set matches offset and limit to return to client, and max matches to retrieve on server. */ public void SetLimits ( int offset, int limit, int max ) throws SphinxException { SetLimits ( offset, limit, max, _cutoff ); } /** Set matches offset and limit to return to client. */ public void SetLimits ( int offset, int limit) throws SphinxException { SetLimits ( offset, limit, _maxMatches, _cutoff ); } /** Set maximum query time, in milliseconds, per-index, 0 means "do not limit". */ public void SetMaxQueryTime ( int maxTime ) throws SphinxException { myAssert ( maxTime>=0, "max_query_time must not be negative" ); _maxQueryTime = maxTime; } /** Set matching mode. */ public void SetMatchMode(int mode) throws SphinxException { myAssert ( mode==SPH_MATCH_ALL || mode==SPH_MATCH_ANY || mode==SPH_MATCH_PHRASE || mode==SPH_MATCH_BOOLEAN || mode==SPH_MATCH_EXTENDED || mode==SPH_MATCH_FULLSCAN || mode==SPH_MATCH_EXTENDED2, "unknown mode value; use one of the SPH_MATCH_xxx constants" ); _mode = mode; } /** Set ranking mode. */ public void SetRankingMode ( int ranker, String rankexpr ) throws SphinxException { myAssert ( ranker>=0 && ranker0 ), "sortby string must not be empty in selected mode" ); _sort = mode; _sortby = ( sortby==null ) ? "" : sortby; } /** Set per-field weights (all values must be positive). WARNING: DEPRECATED, use SetFieldWeights() instead. */ public void SetWeights(int[] weights) throws SphinxException { myAssert ( weights!=null, "weights must not be null" ); for (int i = 0; i < weights.length; i++) { int weight = weights[i]; myAssert ( weight>0, "all weights must be greater than 0" ); } _weights = weights; } /** * Bind per-field weights by field name. * @param fieldWeights hash which maps String index names to Integer weights */ public void SetFieldWeights ( Map fieldWeights ) throws SphinxException { /* FIXME! implement checks here */ _fieldWeights = ( fieldWeights==null ) ? new LinkedHashMap () : fieldWeights; } /** * Bind per-index weights by index name (and enable summing the weights on duplicate matches, instead of replacing them). * @param indexWeights hash which maps String index names to Integer weights */ public void SetIndexWeights ( Map indexWeights ) throws SphinxException { /* FIXME! implement checks here */ _indexWeights = ( indexWeights==null ) ? 
new LinkedHashMap () : indexWeights; } /** Set document IDs range to match. */ public void SetIDRange ( int min, int max ) throws SphinxException { myAssert ( min<=max, "min must be less or equal to max" ); _minId = min; _maxId = max; } /** Set values filter. Only match records where attribute value is in given set. */ public void SetFilter ( String attribute, int[] values, boolean exclude ) throws SphinxException { myAssert ( values!=null && values.length>0, "values array must not be null or empty" ); myAssert ( attribute!=null && attribute.length()>0, "attribute name must not be null or empty" ); try { writeNetUTF8 ( _filters, attribute ); _filters.writeInt ( SPH_FILTER_VALUES ); _filters.writeInt ( values.length ); for ( int i=0; i0, "values array must not be null or empty" ); myAssert ( attribute!=null && attribute.length()>0, "attribute name must not be null or empty" ); try { writeNetUTF8 ( _filters, attribute ); _filters.writeInt ( SPH_FILTER_VALUES ); _filters.writeInt ( values.length ); for ( int i=0; i0, "longitudeAttr string must not be null or empty" ); myAssert ( longitudeAttr!=null && longitudeAttr.length()>0, "longitudeAttr string must not be null or empty" ); _latitudeAttr = latitudeAttr; _longitudeAttr = longitudeAttr; _latitude = latitude; _longitude = longitude; } /** Set grouping attribute and function. */ public void SetGroupBy ( String attribute, int func, String groupsort ) throws SphinxException { myAssert ( func==SPH_GROUPBY_DAY || func==SPH_GROUPBY_WEEK || func==SPH_GROUPBY_MONTH || func==SPH_GROUPBY_YEAR || func==SPH_GROUPBY_ATTR || func==SPH_GROUPBY_ATTRPAIR, "unknown func value; use one of the available SPH_GROUPBY_xxx constants" ); _groupBy = attribute; _groupFunc = func; _groupSort = groupsort; } /** Set grouping attribute and function with default ("@group desc") groupsort (syntax sugar). */ public void SetGroupBy(String attribute, int func) throws SphinxException { SetGroupBy(attribute, func, "@group desc"); } /** Set count-distinct attribute for group-by queries. */ public void SetGroupDistinct(String attribute) { _groupDistinct = attribute; } /** Set distributed retries count and delay. */ public void SetRetries ( int count, int delay ) throws SphinxException { myAssert ( count>=0, "count must not be negative" ); myAssert ( delay>=0, "delay must not be negative" ); _retrycount = count; _retrydelay = delay; } /** Set distributed retries count with default (zero) delay (syntax sugar). */ public void SetRetries ( int count ) throws SphinxException { SetRetries ( count, 0 ); } /** * Set attribute values override (one override list per attribute). * @param values maps Long document IDs to Int/Long/Float values (as specified in attrtype). */ public void SetOverride ( String attrname, int attrtype, Map values ) throws SphinxException { myAssert ( attrname!=null && attrname.length()>0, "attrname must not be empty" ); myAssert ( attrtype==SPH_ATTR_INTEGER || attrtype==SPH_ATTR_TIMESTAMP || attrtype==SPH_ATTR_BOOL || attrtype==SPH_ATTR_FLOAT || attrtype==SPH_ATTR_BIGINT, "unsupported attrtype (must be one of INTEGER, TIMESTAMP, BOOL, FLOAT, or BIGINT)" ); _overrideTypes.put ( attrname, new Integer ( attrtype ) ); _overrideValues.put ( attrname, values ); } /** Set select-list (attributes or expressions), SQL-like syntax. */ public void SetSelect ( String select ) throws SphinxException { myAssert ( select!=null, "select clause string must not be null" ); _select = select; } /** Reset all currently set filters (for multi-queries). 
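/*
 * Minimal sketch of the query-tuning setters above. The index and attribute names
 * ("test1", "group_id", "created_at") are placeholders borrowed from the bundled
 * test configuration, not required names.
 */
import org.sphx.api.*;

public class FilterSetup
{
	public static void main ( String[] argv ) throws SphinxException
	{
		SphinxClient cl = new SphinxClient ();
		cl.SetIDRange ( 1, 1000 );					/* match document IDs 1..1000 only */
		cl.SetFilter ( "group_id", new int[] { 1, 2 }, false );	/* keep rows whose group_id is 1 or 2 */
		cl.SetGroupBy ( "group_id", SphinxClient.SPH_GROUPBY_ATTR );	/* group with default "@group desc" order */
		cl.SetGroupDistinct ( "created_at" );				/* count distinct created_at values per group */
		cl.SetRetries ( 2, 100 );					/* 2 retries, 100 ms apart (distributed setups) */
		cl.SetSelect ( "*" );						/* SQL-like select-list; "*" is the default */

		SphinxResult res = cl.Query ( "wifi", "test1" );
		System.out.println ( res==null ? cl.GetLastError() : res.totalFound + " found" );

		cl.ResetFilters ();	/* drop filters and the geo anchor before reusing the client */
		cl.ResetGroupBy ();
	}
}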
*/ public void ResetFilters() { /* should we close them first? */ _rawFilters = new ByteArrayOutputStream(); _filters = new DataOutputStream(_rawFilters); _filterCount = 0; /* reset GEO anchor */ _latitudeAttr = null; _longitudeAttr = null; _latitude = 0; _longitude = 0; } /** Clear groupby settings (for multi-queries). */ public void ResetGroupBy () { _groupBy = ""; _groupFunc = SPH_GROUPBY_DAY; _groupSort = "@group desc"; _groupDistinct = ""; } /** Clear all attribute value overrides (for multi-queries). */ public void ResetOverrides () { _overrideTypes.clear (); _overrideValues.clear (); } /** Connect to searchd server and run current search query against all indexes (syntax sugar). */ public SphinxResult Query ( String query ) throws SphinxException { return Query ( query, "*", "" ); } /** Connect to searchd server and run current search query against all indexes (syntax sugar). */ public SphinxResult Query ( String query, String index ) throws SphinxException { return Query ( query, index, "" ); } /** Connect to searchd server and run current search query. */ public SphinxResult Query ( String query, String index, String comment ) throws SphinxException { myAssert ( _reqs==null || _reqs.size()==0, "AddQuery() and Query() can not be combined; use RunQueries() instead" ); AddQuery ( query, index, comment ); SphinxResult[] results = RunQueries(); _reqs = new ArrayList(); /* just in case it failed too early */ if ( results==null || results.length<1 ) return null; /* probably network error; error message should be already filled */ SphinxResult res = results[0]; _warning = res.warning; _error = res.error; if ( res==null || res.getStatus()==SEARCHD_ERROR ) return null; return res; } /** Add new query with current settings to current search request. */ public int AddQuery ( String query, String index, String comment ) throws SphinxException { ByteArrayOutputStream req = new ByteArrayOutputStream(); /* build request */ try { DataOutputStream out = new DataOutputStream(req); out.writeInt(_offset); out.writeInt(_limit); out.writeInt(_mode); out.writeInt(_ranker); if ( _ranker == SPH_RANK_EXPR ) { writeNetUTF8(out, _rankexpr); } out.writeInt(_sort); writeNetUTF8(out, _sortby); writeNetUTF8(out, query); int weightLen = _weights != null ? 
_weights.length : 0; out.writeInt(weightLen); if (_weights != null) { for (int i = 0; i < _weights.length; i++) out.writeInt(_weights[i]); } writeNetUTF8(out, index); out.writeInt(0); out.writeInt(_minId); out.writeInt(_maxId); /* filters */ out.writeInt(_filterCount); out.write(_rawFilters.toByteArray()); /* group-by, max matches, sort-by-group flag */ out.writeInt(_groupFunc); writeNetUTF8(out, _groupBy); out.writeInt(_maxMatches); writeNetUTF8(out, _groupSort); out.writeInt(_cutoff); out.writeInt(_retrycount); out.writeInt(_retrydelay); writeNetUTF8(out, _groupDistinct); /* anchor point */ if (_latitudeAttr == null || _latitudeAttr.length() == 0 || _longitudeAttr == null || _longitudeAttr.length() == 0) { out.writeInt(0); } else { out.writeInt(1); writeNetUTF8(out, _latitudeAttr); writeNetUTF8(out, _longitudeAttr); out.writeFloat(_latitude); out.writeFloat(_longitude); } /* per-index weights */ out.writeInt(_indexWeights.size()); for (Iterator e = _indexWeights.keySet().iterator(); e.hasNext();) { String indexName = (String) e.next(); Integer weight = (Integer) _indexWeights.get(indexName); writeNetUTF8(out, indexName); out.writeInt(weight.intValue()); } /* max query time */ out.writeInt ( _maxQueryTime ); /* per-field weights */ out.writeInt ( _fieldWeights.size() ); for ( Iterator e=_fieldWeights.keySet().iterator(); e.hasNext(); ) { String field = (String) e.next(); Integer weight = (Integer) _fieldWeights.get ( field ); writeNetUTF8 ( out, field ); out.writeInt ( weight.intValue() ); } /* comment */ writeNetUTF8 ( out, comment ); /* overrides */ out.writeInt ( _overrideTypes.size() ); for ( Iterator e=_overrideTypes.keySet().iterator(); e.hasNext(); ) { String attr = (String) e.next(); Integer type = (Integer) _overrideTypes.get ( attr ); Map values = (Map) _overrideValues.get ( attr ); writeNetUTF8 ( out, attr ); out.writeInt ( type.intValue() ); out.writeInt ( values.size() ); for ( Iterator e2=values.keySet().iterator(); e2.hasNext(); ) { Long id = (Long) e2.next (); out.writeLong ( id.longValue() ); switch ( type.intValue() ) { case SPH_ATTR_FLOAT: out.writeFloat ( ( (Float) values.get ( id ) ).floatValue() ); break; case SPH_ATTR_BIGINT: out.writeLong ( ( (Long)values.get ( id ) ).longValue() ); break; default: out.writeInt ( ( (Integer)values.get ( id ) ).intValue() ); break; } } } /* select-list */ writeNetUTF8 ( out, _select ); /* done! */ out.flush (); int qIndex = _reqs.size(); _reqs.add ( qIndex, req.toByteArray() ); return qIndex; } catch ( Exception e ) { myAssert ( false, "error in AddQuery(): " + e + ": " + e.getMessage() ); } finally { try { _filters.close (); _rawFilters.close (); } catch ( IOException e ) { myAssert ( false, "error in AddQuery(): " + e + ": " + e.getMessage() ); } } return -1; } /** Run all previously added search queries. 
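/*
 * Multi-query sketch: each AddQuery() call snapshots the settings in effect at that
 * moment, and RunQueries() sends the whole batch in a single round trip. Query() and
 * AddQuery() must not be mixed, per the assertion above. "test1" is a placeholder
 * index name; per-query problems come back in each result's error/warning fields.
 */
import org.sphx.api.*;

public class BatchQueries
{
	public static void main ( String[] argv ) throws SphinxException
	{
		SphinxClient cl = new SphinxClient ();
		cl.AddQuery ( "wifi", "test1", "" );				/* uses the settings in effect right now */
		cl.SetGroupBy ( "group_id", SphinxClient.SPH_GROUPBY_ATTR );
		cl.AddQuery ( "wifi", "test1", "grouped" );			/* same text, different settings, same round trip */

		SphinxResult[] results = cl.RunQueries ();			/* null on network-level failure */
		if ( results==null )
		{
			System.err.println ( cl.GetLastError() );
			return;
		}
		for ( int i=0; i<results.length; i++ )
		{
			if ( results[i].error!=null && results[i].error.length()>0 )
				System.out.println ( "query " + i + " failed: " + results[i].error );
			else
				System.out.println ( "query " + i + ": " + results[i].totalFound + " found" );
		}
	}
}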
*/ public SphinxResult[] RunQueries() throws SphinxException { if ( _reqs==null || _reqs.size()<1 ) { _error = "no queries defined, issue AddQuery() first"; return null; } /* build the mega-request */ int nreqs = _reqs.size(); ByteArrayOutputStream reqBuf = new ByteArrayOutputStream(); try { DataOutputStream req = new DataOutputStream ( reqBuf ); /* its a client */ req.writeInt(0); req.writeInt ( nreqs ); for ( int i=0; i 0, "BuildExcerpts: Have no documents to process"); myAssert(index != null && index.length() > 0, "BuildExcerpts: Have no index to process documents"); myAssert(words != null && words.length() > 0, "BuildExcerpts: Have no words to highlight"); if (opts == null) opts = new LinkedHashMap(); /* fixup options */ if (!opts.containsKey("before_match")) opts.put("before_match", ""); if (!opts.containsKey("after_match")) opts.put("after_match", ""); if (!opts.containsKey("chunk_separator")) opts.put("chunk_separator", "..."); if (!opts.containsKey("html_strip_mode")) opts.put("html_strip_mode", "index"); if (!opts.containsKey("limit")) opts.put("limit", new Integer(256)); if (!opts.containsKey("limit_passages")) opts.put("limit_passages", new Integer(0)); if (!opts.containsKey("limit_words")) opts.put("limit_words", new Integer(0)); if (!opts.containsKey("around")) opts.put("around", new Integer(5)); if (!opts.containsKey("start_passage_id")) opts.put("start_passage_id", new Integer(1)); if (!opts.containsKey("exact_phrase")) opts.put("exact_phrase", new Integer(0)); if (!opts.containsKey("single_passage")) opts.put("single_passage", new Integer(0)); if (!opts.containsKey("use_boundaries")) opts.put("use_boundaries", new Integer(0)); if (!opts.containsKey("weight_order")) opts.put("weight_order", new Integer(0)); if (!opts.containsKey("load_files")) opts.put("load_files", new Integer(0)); if (!opts.containsKey("allow_empty")) opts.put("allow_empty", new Integer(0)); if (!opts.containsKey("query_mode")) opts.put("query_mode", new Integer(0)); if (!opts.containsKey("force_all_words")) opts.put("force_all_words", new Integer(0)); /* build request */ ByteArrayOutputStream reqBuf = new ByteArrayOutputStream(); DataOutputStream req = new DataOutputStream ( reqBuf ); try { req.writeInt(0); int iFlags = 1; /* remove_spaces */ if ( ((Integer)opts.get("exact_phrase")).intValue()!=0 ) iFlags |= 2; if ( ((Integer)opts.get("single_passage")).intValue()!=0 ) iFlags |= 4; if ( ((Integer)opts.get("use_boundaries")).intValue()!=0 ) iFlags |= 8; if ( ((Integer)opts.get("weight_order")).intValue()!=0 ) iFlags |= 16; if ( ((Integer)opts.get("query_mode")).intValue()!=0 ) iFlags |= 32; if ( ((Integer)opts.get("force_all_words")).intValue()!=0 ) iFlags |= 64; if ( ((Integer)opts.get("load_files")).intValue()!=0 ) iFlags |= 128; if ( ((Integer)opts.get("allow_empty")).intValue()!=0 ) iFlags |= 256; req.writeInt ( iFlags ); writeNetUTF8 ( req, index ); writeNetUTF8 ( req, words ); /* send options */ writeNetUTF8 ( req, (String) opts.get("before_match") ); writeNetUTF8 ( req, (String) opts.get("after_match") ); writeNetUTF8 ( req, (String) opts.get("chunk_separator") ); req.writeInt ( ((Integer) opts.get("limit")).intValue() ); req.writeInt ( ((Integer) opts.get("around")).intValue() ); req.writeInt ( ((Integer) opts.get("limit_passages")).intValue() ); req.writeInt ( ((Integer) opts.get("limit_words")).intValue() ); req.writeInt ( ((Integer) opts.get("start_passage_id")).intValue() ); writeNetUTF8 ( req, (String) opts.get("html_strip_mode") ); /* send documents */ req.writeInt ( docs.length ); for ( int 
i=0; i * String[] attrs = new String[1]; * * attrs[0] = "group_id"; * long[][] values = new long[2][2]; * * values[0] = new long[2]; values[0][0] = 1; values[0][1] = 123; * values[1] = new long[2]; values[1][0] = 3; values[1][1] = 456; * * int res = cl.UpdateAttributes ( "test1", attrs, values ); * * * @param index index name(s) to update; might be distributed * @param attrs array with the names of the attributes to update * @param values array of updates; each long[] entry must contains document ID * in the first element, and all new attribute values in the following ones * @return -1 on failure, amount of actually found and updated documents (might be 0) on success * * @throws SphinxException on invalid parameters */ public int UpdateAttributes ( String index, String[] attrs, long[][] values ) throws SphinxException { /* check args */ myAssert ( index!=null && index.length()>0, "no index name provided" ); myAssert ( attrs!=null && attrs.length>0, "no attribute names provided" ); myAssert ( values!=null && values.length>0, "no update entries provided" ); for ( int i=0; i * String[] attrs = new String[1]; * * attrs[0] = "group_id"; * int[][] values = new int[1][3]; * * values[0] = new int[3]; values[0][0] = 123; values[0][1] = 456; values[0][2] = 789 * * int res = cl.UpdateAttributesMVA ( "test1", 10, attrs, values ); * * * @param index index name(s) to update; might be distributed * @param docid id of document to update * @param attrs array with the names of the attributes to update * @param values array of updates; each int[] entry must contains all new attribute values * @return -1 on failure, amount of actually found and updated documents (might be 0) on success * * @throws SphinxException on invalid parameters */ public int UpdateAttributesMVA ( String index, long docid, String[] attrs, int[][] values ) throws SphinxException { /* check args */ myAssert ( index!=null && index.length()>0, "no index name provided" ); myAssert ( docid>0, "invalid document id" ); myAssert ( attrs!=null && attrs.length>0, "no attribute names provided" ); myAssert ( values!=null && values.length>0, "no update entries provided" ); myAssert ( values.length==attrs.length, "update entry has wrong length" ); for ( int i=0; i\tconnect to searchd at host HOST\n" ); System.out.print ( "-p, --port\t\tconnect to searchd at port PORT\n" ); System.out.print ( "-i, --index \tsearch through index(es) specified by IDX\n" ); System.out.print ( "-s, --sortby \tsort matches by 'CLAUSE' in sort_extended mode\n" ); System.out.print ( "-S, --sortexpr \tsort matches by 'EXPR' DESC in sort_expr mode\n" ); System.out.print ( "-a, --any\t\tuse 'match any word' matching mode\n" ); System.out.print ( "-b, --boolean\t\tuse 'boolean query' matching mode\n" ); System.out.print ( "-e, --extended\t\tuse 'extended query' matching mode\n" ); System.out.print ( "-ph,--phrase\t\tuse 'exact phrase' matching mode\n" ); // System.out.print ( "-f, --filter \tfilter by attribute 'ATTR' (default is 'group_id')\n" ); // System.out.print ( "-v, --value \tadd VAL to allowed 'group_id' values list\n" ); System.out.print ( "-g, --groupby \tgroup matches by 'EXPR'\n" ); System.out.print ( "-gs,--groupsort \tsort groups by 'EXPR'\n" ); // System.out.print ( "-d, --distinct \tcount distinct values of 'ATTR''\n" ); System.out.print ( "-l, --limit \tretrieve COUNT matches (default: 20)\n" ); System.out.print ( "-ga, --geoanchor \n" ); System.out.print ( "\t\t\tset anchor for geodistance\n" ); System.out.print ( "--select \tselect the listed expressions 
only\n" ); System.exit ( 0 ); } StringBuffer q = new StringBuffer(); String host = "localhost"; int port = 9312; int mode = SphinxClient.SPH_MATCH_ALL; String index = "*"; int offset = 0; int limit = 20; int sortMode = SphinxClient.SPH_SORT_RELEVANCE; String sortClause = ""; String groupBy = ""; String groupSort = ""; SphinxClient cl = new SphinxClient(); /* parse arguments */ if ( argv!=null) for ( int i=0; i0 ) cl.SetGroupBy ( groupBy, SphinxClient.SPH_GROUPBY_ATTR, groupSort ); SphinxResult res = cl.Query(q.toString(), index); if ( res==null ) { System.err.println ( "Error: " + cl.GetLastError() ); System.exit ( 1 ); } if ( cl.GetLastWarning()!=null && cl.GetLastWarning().length()>0 ) System.out.println ( "WARNING: " + cl.GetLastWarning() + "\n" ); /* print me out */ System.out.println ( "Query '" + q + "' retrieved " + res.total + " of " + res.totalFound + " matches in " + res.time + " sec." ); System.out.println ( "Query stats:" ); for ( int i=0; i\tconnect to searchd at host HOST\n" ); print ( "-p, --port\t\tconnect to searchd at port PORT\n" ); print ( "-i, --index \tsearch through index(es) specified by IDX\n" ); print ( "-s, --sortby \tsort matches by 'CLAUSE' in sort_extended mode\n" ); print ( "-S, --sortexpr \tsort matches by 'EXPR' DESC in sort_expr mode\n" ); print ( "-a, --any\t\tuse 'match any word' matching mode\n" ); print ( "-b, --boolean\t\tuse 'boolean query' matching mode\n" ); print ( "-e, --extended\t\tuse 'extended query' matching mode\n" ); print ( "-ph,--phrase\t\tuse 'exact phrase' matching mode\n" ); print ( "-f, --filter \tfilter by attribute 'ATTR' (default is 'group_id')\n" ); print ( "-fr,--filterrange \n\t\t\tadd specified range filter\n" ); print ( "-v, --value \tadd VAL to allowed 'group_id' values list\n" ); print ( "-g, --groupby \tgroup matches by 'EXPR'\n" ); print ( "-gs,--groupsort \tsort groups by 'EXPR'\n" ); print ( "-d, --distinct \tcount distinct values of 'ATTR''\n" ); print ( "-l, --limit \tretrieve COUNT matches (default: 20)\n" ); print ( "--select \tuse 'EXPRLIST' as select-list (default: *)\n" ); exit; } $args = array(); foreach ( $_SERVER["argv"] as $arg ) $args[] = $arg; $cl = new SphinxClient (); $q = ""; $sql = ""; $mode = SPH_MATCH_ALL; $host = "localhost"; $port = 9312; $index = "*"; $groupby = ""; $groupsort = "@group desc"; $filter = "group_id"; $filtervals = array(); $distinct = ""; $sortby = ""; $sortexpr = ""; $limit = 20; $ranker = SPH_RANK_PROXIMITY_BM25; $select = ""; for ( $i=0; $iSetFilterRange ( $args[++$i], $args[++$i], $args[++$i] ); else if ( $arg=="-r" ) { $arg = strtolower($args[++$i]); if ( $arg=="bm25" ) $ranker = SPH_RANK_BM25; if ( $arg=="none" ) $ranker = SPH_RANK_NONE; if ( $arg=="wordcount" )$ranker = SPH_RANK_WORDCOUNT; if ( $arg=="fieldmask" )$ranker = SPH_RANK_FIELDMASK; if ( $arg=="sph04" ) $ranker = SPH_RANK_SPH04; } else $q .= $args[$i] . " "; } //////////// // do query //////////// $cl->SetServer ( $host, $port ); $cl->SetConnectTimeout ( 1 ); $cl->SetArrayResult ( true ); $cl->SetWeights ( array ( 100, 1 ) ); $cl->SetMatchMode ( $mode ); if ( count($filtervals) ) $cl->SetFilter ( $filter, $filtervals ); if ( $groupby ) $cl->SetGroupBy ( $groupby, SPH_GROUPBY_ATTR, $groupsort ); if ( $sortby ) $cl->SetSortMode ( SPH_SORT_EXTENDED, $sortby ); if ( $sortexpr ) $cl->SetSortMode ( SPH_SORT_EXPR, $sortexpr ); if ( $distinct ) $cl->SetGroupDistinct ( $distinct ); if ( $select ) $cl->SetSelect ( $select ); if ( $limit ) $cl->SetLimits ( 0, $limit, ( $limit>1000 ) ? 
$limit : 1000 ); $cl->SetRankingMode ( $ranker ); $res = $cl->Query ( $q, $index ); //////////////// // print me out //////////////// if ( $res===false ) { print "Query failed: " . $cl->GetLastError() . ".\n"; } else { if ( $cl->GetLastWarning() ) print "WARNING: " . $cl->GetLastWarning() . "\n\n"; print "Query '$q' retrieved $res[total] of $res[total_found] matches in $res[time] sec.\n"; print "Query stats:\n"; if ( is_array($res["words"]) ) foreach ( $res["words"] as $word => $info ) print " '$word' found $info[hits] times in $info[docs] documents\n"; print "\n"; if ( is_array($res["matches"]) ) { $n = 1; print "Matches:\n"; foreach ( $res["matches"] as $docinfo ) { print "$n. doc_id=$docinfo[id], weight=$docinfo[weight]"; foreach ( $res["attrs"] as $attrname => $attrtype ) { $value = $docinfo["attrs"][$attrname]; if ( $attrtype==SPH_ATTR_MULTI || $attrtype==SPH_ATTR_MULTI64 ) { $value = "(" . join ( ",", $value ) .")"; } else { if ( $attrtype==SPH_ATTR_TIMESTAMP ) $value = date ( "Y-m-d H:i:s", $value ); } print ", $attrname=$value"; } print "\n"; $n++; } } } // // $Id: test.php 2903 2011-08-04 13:30:49Z shodan $ // ?>sphinx-2.0.4-release/api/test2.py0000644000176700017710000000110310531144210016111 0ustar deogardeogar# # $Id: test2.py 489 2006-11-22 22:00:40Z shodan $ # from sphinxapi import * import sys docs = ['this is my test text to be highlighted','this is another test text to be highlighted'] words = 'test text' index = 'test1' opts = {'before_match':'', 'after_match':'', 'chunk_separator':' ... ', 'limit':400, 'around':15} cl = SphinxClient() res = cl.BuildExcerpts(docs, index, words, opts) if not res: print 'ERROR:', cl.GetLastError() else: n = 0 for entry in res: n += 1 print 'n=%d, res=%s' % (n, entry) # # $Id: test2.py 489 2006-11-22 22:00:40Z shodan $ # sphinx-2.0.4-release/api/test.py0000644000176700017710000000640311301034527016044 0ustar deogardeogar# # $Id: test.py 2081 2009-11-18 18:13:43Z shodan $ # from sphinxapi import * import sys, time if not sys.argv[1:]: print "Usage: python test.py [OPTIONS] query words\n" print "Options are:" print "-h, --host \tconnect to searchd at host HOST" print "-p, --port\t\tconnect to searchd at port PORT" print "-i, --index \tsearch through index(es) specified by IDX" print "-s, --sortby \tsort matches by 'EXPR'" print "-a, --any\t\tuse 'match any word' matching mode" print "-b, --boolean\t\tuse 'boolean query' matching mode" print "-e, --extended\t\tuse 'extended query' matching mode" print "-f, --filter \tfilter by attribute 'ATTR' (default is 'group_id')" print "-v, --value \tadd VAL to allowed 'group_id' values list" print "-g, --groupby \tgroup matches by 'EXPR'" print "-gs,--groupsort \tsort groups by 'EXPR'" print "-l, --limit \tretrieve COUNT matches (default is 20)" sys.exit(0) q = '' mode = SPH_MATCH_ALL host = 'localhost' port = 9312 index = '*' filtercol = 'group_id' filtervals = [] sortby = '' groupby = '' groupsort = '@group desc' limit = 0 i = 1 while (i sphinx-2.0.4-release/api/libsphinxclient/test03.sln0000644000176700017710000000160511424366077021567 0ustar deogardeogarMicrosoft Visual Studio Solution File, Format Version 8.00 Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test03", "test03.vcproj", "{3F3C7CA8-E864-4FB5-8EB0-A114FAFBE24E}" ProjectSection(ProjectDependencies) = postProject EndProjectSection EndProject Global GlobalSection(SolutionConfiguration) = preSolution Debug = Debug Release = Release EndGlobalSection GlobalSection(ProjectConfiguration) = postSolution 
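/*
 * Java counterpart of the test2.py excerpt demo above; a sketch only. The index name
 * "test1", the highlight markers and the option values mirror the Python script and
 * are placeholders, and the call assumes this API's
 * BuildExcerpts(String[] docs, String index, String words, Map opts) signature.
 */
import org.sphx.api.*;
import java.util.*;

public class ExcerptsDemo
{
	public static void main ( String[] argv ) throws SphinxException
	{
		String[] docs = {
			"this is my test text to be highlighted",
			"this is another test text to be highlighted" };

		Map opts = new LinkedHashMap ();
		opts.put ( "before_match", "<b>" );
		opts.put ( "after_match", "</b>" );
		opts.put ( "chunk_separator", " ... " );
		opts.put ( "limit", new Integer(400) );
		opts.put ( "around", new Integer(15) );

		SphinxClient cl = new SphinxClient ();
		String[] res = cl.BuildExcerpts ( docs, "test1", "test text", opts );
		if ( res==null )
		{
			System.err.println ( "ERROR: " + cl.GetLastError() );
			return;
		}
		for ( int i=0; i<res.length; i++ )
			System.out.println ( "n=" + (i+1) + ", res=" + res[i] );
	}
}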
{3F3C7CA8-E864-4FB5-8EB0-A114FAFBE24E}.Debug.ActiveCfg = Debug|Win32 {3F3C7CA8-E864-4FB5-8EB0-A114FAFBE24E}.Debug.Build.0 = Debug|Win32 {3F3C7CA8-E864-4FB5-8EB0-A114FAFBE24E}.Release.ActiveCfg = Release|Win32 {3F3C7CA8-E864-4FB5-8EB0-A114FAFBE24E}.Release.Build.0 = Release|Win32 EndGlobalSection GlobalSection(ExtensibilityGlobals) = postSolution EndGlobalSection GlobalSection(ExtensibilityAddIns) = postSolution EndGlobalSection EndGlobal sphinx-2.0.4-release/api/libsphinxclient/ltmain.sh0000644000176700017710000054666211102461317021552 0ustar deogardeogar# ltmain.sh - Provide generalized library-building support services. # NOTE: Changing this file will not affect anything until you rerun configure. # # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004 # Free Software Foundation, Inc. # Originally by Gordon Matzigkeit , 1996 # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. # # As a special exception to the GNU General Public License, if you # distribute this file as part of a program that contains a # configuration script generated by Autoconf, you may include it under # the same distribution terms that you use for the rest of that program. basename="s,^.*/,,g" # Work around backward compatibility issue on IRIX 6.5. On IRIX 6.4+, sh # is ksh but when the shell is invoked as "sh" and the current value of # the _XPG environment variable is not equal to 1 (one), the special # positional parameter $0, within a function call, is the name of the # function. progpath="$0" # RH: define SED for historic ltconfig's generated by Libtool 1.3 [ -z "$SED" ] && SED=sed # The name of this program: progname=`echo "$progpath" | $SED $basename` modename="$progname" # Global variables: EXIT_SUCCESS=0 EXIT_FAILURE=1 PROGRAM=ltmain.sh PACKAGE=libtool VERSION=1.5.6 TIMESTAMP=" (1.1220.2.95 2004/04/11 05:50:42)" # Check that we have a working $echo. if test "X$1" = X--no-reexec; then # Discard the --no-reexec flag, and continue. shift elif test "X$1" = X--fallback-echo; then # Avoid inline document here, it may be left over : elif test "X`($echo '\t') 2>/dev/null`" = 'X\t'; then # Yippee, $echo works! : else # Restart under the correct shell, and then maybe $echo will work. exec $SHELL "$progpath" --no-reexec ${1+"$@"} fi if test "X$1" = X--fallback-echo; then # used as fallback echo shift cat <&2 $echo "Fatal configuration error. See the $PACKAGE docs for more information." 1>&2 exit $EXIT_FAILURE fi # Global variables. 
mode=$default_mode nonopt= prev= prevopt= run= show="$echo" show_help= execute_dlfiles= lo2o="s/\\.lo\$/.${objext}/" o2lo="s/\\.${objext}\$/.lo/" ##################################### # Shell function definitions: # This seems to be the best place for them # func_win32_libid arg # return the library type of file 'arg' # # Need a lot of goo to handle *both* DLLs and import libs # Has to be a shell function in order to 'eat' the argument # that is supplied when $file_magic_command is called. func_win32_libid () { win32_libid_type="unknown" win32_fileres=`file -L $1 2>/dev/null` case $win32_fileres in *ar\ archive\ import\ library*) # definitely import win32_libid_type="x86 archive import" ;; *ar\ archive*) # could be an import, or static if eval $OBJDUMP -f $1 | $SED -e '10q' 2>/dev/null | \ $EGREP -e 'file format pe-i386(.*architecture: i386)?' >/dev/null ; then win32_nmres=`eval $NM -f posix -A $1 | \ sed -n -e '1,100{/ I /{x;/import/!{s/^/import/;h;p;};x;};}'` if test "X$win32_nmres" = "Ximport" ; then win32_libid_type="x86 archive import" else win32_libid_type="x86 archive static" fi fi ;; *DLL*) win32_libid_type="x86 DLL" ;; *executable*) # but shell scripts are "executable" too... case $win32_fileres in *MS\ Windows\ PE\ Intel*) win32_libid_type="x86 DLL" ;; esac ;; esac $echo $win32_libid_type } # func_infer_tag arg # Infer tagged configuration to use if any are available and # if one wasn't chosen via the "--tag" command line option. # Only attempt this if the compiler in the base compile # command doesn't match the default compiler. # arg is usually of the form 'gcc ...' func_infer_tag () { if test -n "$available_tags" && test -z "$tagname"; then CC_quoted= for arg in $CC; do case $arg in *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"") arg="\"$arg\"" ;; esac CC_quoted="$CC_quoted $arg" done case $@ in # Blanks in the command may have been stripped by the calling shell, # but not from the CC environment variable when configure was run. " $CC "* | "$CC "* | " `$echo $CC` "* | "`$echo $CC` "* | " $CC_quoted"* | "$CC_quoted "* | " `$echo $CC_quoted` "* | "`$echo $CC_quoted` "*) ;; # Blanks at the start of $base_compile will cause this to fail # if we don't check for them as well. *) for z in $available_tags; do if grep "^# ### BEGIN LIBTOOL TAG CONFIG: $z$" < "$progpath" > /dev/null; then # Evaluate the configuration. eval "`${SED} -n -e '/^# ### BEGIN LIBTOOL TAG CONFIG: '$z'$/,/^# ### END LIBTOOL TAG CONFIG: '$z'$/p' < $progpath`" CC_quoted= for arg in $CC; do # Double-quote args containing other shell metacharacters. case $arg in *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"") arg="\"$arg\"" ;; esac CC_quoted="$CC_quoted $arg" done case "$@ " in " $CC "* | "$CC "* | " `$echo $CC` "* | "`$echo $CC` "* | " $CC_quoted"* | "$CC_quoted "* | " `$echo $CC_quoted` "* | "`$echo $CC_quoted` "*) # The compiler in the base compile command matches # the one in the tagged configuration. # Assume this is the tagged configuration we want. tagname=$z break ;; esac fi done # If $tagname still isn't set, then no tagged configuration # was found and let the user know that the "--tag" command # line option must be used. 
if test -z "$tagname"; then $echo "$modename: unable to infer tagged configuration" $echo "$modename: specify a tag with \`--tag'" 1>&2 exit $EXIT_FAILURE # else # $echo "$modename: using $tagname tagged configuration" fi ;; esac fi } # End of Shell function definitions ##################################### # Darwin sucks eval std_shrext=\"$shrext_cmds\" # Parse our command line options once, thoroughly. while test "$#" -gt 0 do arg="$1" shift case $arg in -*=*) optarg=`$echo "X$arg" | $Xsed -e 's/[-_a-zA-Z0-9]*=//'` ;; *) optarg= ;; esac # If the previous option needs an argument, assign it. if test -n "$prev"; then case $prev in execute_dlfiles) execute_dlfiles="$execute_dlfiles $arg" ;; tag) tagname="$arg" preserve_args="${preserve_args}=$arg" # Check whether tagname contains only valid characters case $tagname in *[!-_A-Za-z0-9,/]*) $echo "$progname: invalid tag name: $tagname" 1>&2 exit $EXIT_FAILURE ;; esac case $tagname in CC) # Don't test for the "default" C tag, as we know, it's there, but # not specially marked. ;; *) if grep "^# ### BEGIN LIBTOOL TAG CONFIG: $tagname$" < "$progpath" > /dev/null; then taglist="$taglist $tagname" # Evaluate the configuration. eval "`${SED} -n -e '/^# ### BEGIN LIBTOOL TAG CONFIG: '$tagname'$/,/^# ### END LIBTOOL TAG CONFIG: '$tagname'$/p' < $progpath`" else $echo "$progname: ignoring unknown tag $tagname" 1>&2 fi ;; esac ;; *) eval "$prev=\$arg" ;; esac prev= prevopt= continue fi # Have we seen a non-optional argument yet? case $arg in --help) show_help=yes ;; --version) $echo "$PROGRAM (GNU $PACKAGE) $VERSION$TIMESTAMP" $echo $echo "Copyright (C) 2003 Free Software Foundation, Inc." $echo "This is free software; see the source for copying conditions. There is NO" $echo "warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE." exit $EXIT_SUCCESS ;; --config) ${SED} -e '1,/^# ### BEGIN LIBTOOL CONFIG/d' -e '/^# ### END LIBTOOL CONFIG/,$d' $progpath # Now print the configurations for the tags. for tagname in $taglist; do ${SED} -n -e "/^# ### BEGIN LIBTOOL TAG CONFIG: $tagname$/,/^# ### END LIBTOOL TAG CONFIG: $tagname$/p" < "$progpath" done exit $EXIT_SUCCESS ;; --debug) $echo "$progname: enabling shell trace mode" set -x preserve_args="$preserve_args $arg" ;; --dry-run | -n) run=: ;; --features) $echo "host: $host" if test "$build_libtool_libs" = yes; then $echo "enable shared libraries" else $echo "disable shared libraries" fi if test "$build_old_libs" = yes; then $echo "enable static libraries" else $echo "disable static libraries" fi exit $EXIT_SUCCESS ;; --finish) mode="finish" ;; --mode) prevopt="--mode" prev=mode ;; --mode=*) mode="$optarg" ;; --preserve-dup-deps) duplicate_deps="yes" ;; --quiet | --silent) show=: preserve_args="$preserve_args $arg" ;; --tag) prevopt="--tag" prev=tag ;; --tag=*) set tag "$optarg" ${1+"$@"} shift prev=tag preserve_args="$preserve_args --tag" ;; -dlopen) prevopt="-dlopen" prev=execute_dlfiles ;; -*) $echo "$modename: unrecognized option \`$arg'" 1>&2 $echo "$help" 1>&2 exit $EXIT_FAILURE ;; *) nonopt="$arg" break ;; esac done if test -n "$prevopt"; then $echo "$modename: option \`$prevopt' requires an argument" 1>&2 $echo "$help" 1>&2 exit $EXIT_FAILURE fi # If this variable is set in any of the actions, the command in it # will be execed at the end. This prevents here-documents from being # left over by shells. exec_cmd= if test -z "$show_help"; then # Infer the operation mode. if test -z "$mode"; then $echo "*** Warning: inferring the mode of operation is deprecated." 
1>&2 $echo "*** Future versions of Libtool will require -mode=MODE be specified." 1>&2 case $nonopt in *cc | cc* | *++ | gcc* | *-gcc* | g++* | xlc*) mode=link for arg do case $arg in -c) mode=compile break ;; esac done ;; *db | *dbx | *strace | *truss) mode=execute ;; *install*|cp|mv) mode=install ;; *rm) mode=uninstall ;; *) # If we have no mode, but dlfiles were specified, then do execute mode. test -n "$execute_dlfiles" && mode=execute # Just use the default operation mode. if test -z "$mode"; then if test -n "$nonopt"; then $echo "$modename: warning: cannot infer operation mode from \`$nonopt'" 1>&2 else $echo "$modename: warning: cannot infer operation mode without MODE-ARGS" 1>&2 fi fi ;; esac fi # Only execute mode is allowed to have -dlopen flags. if test -n "$execute_dlfiles" && test "$mode" != execute; then $echo "$modename: unrecognized option \`-dlopen'" 1>&2 $echo "$help" 1>&2 exit $EXIT_FAILURE fi # Change the help message to a mode-specific one. generic_help="$help" help="Try \`$modename --help --mode=$mode' for more information." # These modes are in order of execution frequency so that they run quickly. case $mode in # libtool compile mode compile) modename="$modename: compile" # Get the compilation command and the source file. base_compile= srcfile="$nonopt" # always keep a non-empty value in "srcfile" suppress_opt=yes suppress_output= arg_mode=normal libobj= later= for arg do case "$arg_mode" in arg ) # do not "continue". Instead, add this to base_compile lastarg="$arg" arg_mode=normal ;; target ) libobj="$arg" arg_mode=normal continue ;; normal ) # Accept any command-line options. case $arg in -o) if test -n "$libobj" ; then $echo "$modename: you cannot specify \`-o' more than once" 1>&2 exit $EXIT_FAILURE fi arg_mode=target continue ;; -static | -prefer-pic | -prefer-non-pic) later="$later $arg" continue ;; -no-suppress) suppress_opt=no continue ;; -Xcompiler) arg_mode=arg # the next one goes into the "base_compile" arg list continue # The current "srcfile" will either be retained or ;; # replaced later. I would guess that would be a bug. -Wc,*) args=`$echo "X$arg" | $Xsed -e "s/^-Wc,//"` lastarg= save_ifs="$IFS"; IFS=',' for arg in $args; do IFS="$save_ifs" # Double-quote args containing other shell metacharacters. # Many Bourne shells cannot handle close brackets correctly # in scan sets, so we specify it separately. case $arg in *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"") arg="\"$arg\"" ;; esac lastarg="$lastarg $arg" done IFS="$save_ifs" lastarg=`$echo "X$lastarg" | $Xsed -e "s/^ //"` # Add the arguments to base_compile. base_compile="$base_compile $lastarg" continue ;; * ) # Accept the current argument as the source file. # The previous "srcfile" becomes the current argument. # lastarg="$srcfile" srcfile="$arg" ;; esac # case $arg ;; esac # case $arg_mode # Aesthetically quote the previous argument. lastarg=`$echo "X$lastarg" | $Xsed -e "$sed_quote_subst"` case $lastarg in # Double-quote args containing other shell metacharacters. # Many Bourne shells cannot handle close brackets correctly # in scan sets, so we specify it separately. *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"") lastarg="\"$lastarg\"" ;; esac base_compile="$base_compile $lastarg" done # for arg case $arg_mode in arg) $echo "$modename: you must specify an argument for -Xcompile" exit $EXIT_FAILURE ;; target) $echo "$modename: you must specify a target with \`-o'" 1>&2 exit $EXIT_FAILURE ;; *) # Get the name of the library object. 
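      # Illustrative aside, not part of the upstream script: compile mode is
      # normally reached through an invocation like the commented example below,
      # which records a PIC object (under $objdir) and a non-PIC object in the
      # generated foo.lo libtool object file.  "foo.c" is a placeholder name.
      #
      #   libtool --mode=compile gcc -g -O2 -c foo.c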
[ -z "$libobj" ] && libobj=`$echo "X$srcfile" | $Xsed -e 's%^.*/%%'` ;; esac # Recognize several different file suffixes. # If the user specifies -o file.o, it is replaced with file.lo xform='[cCFSifmso]' case $libobj in *.ada) xform=ada ;; *.adb) xform=adb ;; *.ads) xform=ads ;; *.asm) xform=asm ;; *.c++) xform=c++ ;; *.cc) xform=cc ;; *.ii) xform=ii ;; *.class) xform=class ;; *.cpp) xform=cpp ;; *.cxx) xform=cxx ;; *.f90) xform=f90 ;; *.for) xform=for ;; *.java) xform=java ;; esac libobj=`$echo "X$libobj" | $Xsed -e "s/\.$xform$/.lo/"` case $libobj in *.lo) obj=`$echo "X$libobj" | $Xsed -e "$lo2o"` ;; *) $echo "$modename: cannot determine name of library object from \`$libobj'" 1>&2 exit $EXIT_FAILURE ;; esac func_infer_tag $base_compile for arg in $later; do case $arg in -static) build_old_libs=yes continue ;; -prefer-pic) pic_mode=yes continue ;; -prefer-non-pic) pic_mode=no continue ;; esac done objname=`$echo "X$obj" | $Xsed -e 's%^.*/%%'` xdir=`$echo "X$obj" | $Xsed -e 's%/[^/]*$%%'` if test "X$xdir" = "X$obj"; then xdir= else xdir=$xdir/ fi lobj=${xdir}$objdir/$objname if test -z "$base_compile"; then $echo "$modename: you must specify a compilation command" 1>&2 $echo "$help" 1>&2 exit $EXIT_FAILURE fi # Delete any leftover library objects. if test "$build_old_libs" = yes; then removelist="$obj $lobj $libobj ${libobj}T" else removelist="$lobj $libobj ${libobj}T" fi $run $rm $removelist trap "$run $rm $removelist; exit $EXIT_FAILURE" 1 2 15 # On Cygwin there's no "real" PIC flag so we must build both object types case $host_os in cygwin* | mingw* | pw32* | os2*) pic_mode=default ;; esac if test "$pic_mode" = no && test "$deplibs_check_method" != pass_all; then # non-PIC code in shared libraries is not supported pic_mode=default fi # Calculate the filename of the output object if compiler does # not support -o with -c if test "$compiler_c_o" = no; then output_obj=`$echo "X$srcfile" | $Xsed -e 's%^.*/%%' -e 's%\.[^.]*$%%'`.${objext} lockfile="$output_obj.lock" removelist="$removelist $output_obj $lockfile" trap "$run $rm $removelist; exit $EXIT_FAILURE" 1 2 15 else output_obj= need_locks=no lockfile= fi # Lock this critical section if it is needed # We use this script file to make the link, it avoids creating a new file if test "$need_locks" = yes; then until $run ln "$progpath" "$lockfile" 2>/dev/null; do $show "Waiting for $lockfile to be removed" sleep 2 done elif test "$need_locks" = warn; then if test -f "$lockfile"; then $echo "\ *** ERROR, $lockfile exists and contains: `cat $lockfile 2>/dev/null` This indicates that another process is trying to use the same temporary object file, and libtool could not work around it because your compiler does not support \`-c' and \`-o' together. If you repeat this compilation, it may succeed, by chance, but you had better avoid parallel builds (make -j) in this platform, or get a better compiler." $run $rm $removelist exit $EXIT_FAILURE fi $echo $srcfile > "$lockfile" fi if test -n "$fix_srcfile_path"; then eval srcfile=\"$fix_srcfile_path\" fi $run $rm "$libobj" "${libobj}T" # Create a libtool object file (analogous to a ".la" file), # but don't create it if we're doing a dry run. test -z "$run" && cat > ${libobj}T </dev/null`" != "X$srcfile"; then $echo "\ *** ERROR, $lockfile contains: `cat $lockfile 2>/dev/null` but it should contain: $srcfile This indicates that another process is trying to use the same temporary object file, and libtool could not work around it because your compiler does not support \`-c' and \`-o' together. 
If you repeat this compilation, it may succeed, by chance, but you had better avoid parallel builds (make -j) in this platform, or get a better compiler." $run $rm $removelist exit $EXIT_FAILURE fi # Just move the object if needed, then go on to compile the next one if test -n "$output_obj" && test "X$output_obj" != "X$lobj"; then $show "$mv $output_obj $lobj" if $run $mv $output_obj $lobj; then : else error=$? $run $rm $removelist exit $error fi fi # Append the name of the PIC object to the libtool object file. test -z "$run" && cat >> ${libobj}T <> ${libobj}T </dev/null`" != "X$srcfile"; then $echo "\ *** ERROR, $lockfile contains: `cat $lockfile 2>/dev/null` but it should contain: $srcfile This indicates that another process is trying to use the same temporary object file, and libtool could not work around it because your compiler does not support \`-c' and \`-o' together. If you repeat this compilation, it may succeed, by chance, but you had better avoid parallel builds (make -j) in this platform, or get a better compiler." $run $rm $removelist exit $EXIT_FAILURE fi # Just move the object if needed if test -n "$output_obj" && test "X$output_obj" != "X$obj"; then $show "$mv $output_obj $obj" if $run $mv $output_obj $obj; then : else error=$? $run $rm $removelist exit $error fi fi # Append the name of the non-PIC object the libtool object file. # Only append if the libtool object file exists. test -z "$run" && cat >> ${libobj}T <> ${libobj}T <&2 fi if test -n "$link_static_flag"; then dlopen_self=$dlopen_self_static fi else if test -z "$pic_flag" && test -n "$link_static_flag"; then dlopen_self=$dlopen_self_static fi fi build_libtool_libs=no build_old_libs=yes prefer_static_libs=yes break ;; esac done # See if our shared archives depend on static archives. test -n "$old_archive_from_new_cmds" && build_old_libs=yes # Go through the arguments, transforming them on the way. while test "$#" -gt 0; do arg="$1" shift case $arg in *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"") qarg=\"`$echo "X$arg" | $Xsed -e "$sed_quote_subst"`\" ### testsuite: skip nested quoting test ;; *) qarg=$arg ;; esac libtool_args="$libtool_args $qarg" # If the previous option needs an argument, assign it. if test -n "$prev"; then case $prev in output) compile_command="$compile_command @OUTPUT@" finalize_command="$finalize_command @OUTPUT@" ;; esac case $prev in dlfiles|dlprefiles) if test "$preload" = no; then # Add the symbol object into the linking commands. compile_command="$compile_command @SYMFILE@" finalize_command="$finalize_command @SYMFILE@" preload=yes fi case $arg in *.la | *.lo) ;; # We handle these cases below. force) if test "$dlself" = no; then dlself=needless export_dynamic=yes fi prev= continue ;; self) if test "$prev" = dlprefiles; then dlself=yes elif test "$prev" = dlfiles && test "$dlopen_self" != yes; then dlself=yes else dlself=needless export_dynamic=yes fi prev= continue ;; *) if test "$prev" = dlfiles; then dlfiles="$dlfiles $arg" else dlprefiles="$dlprefiles $arg" fi prev= continue ;; esac ;; expsyms) export_symbols="$arg" if test ! 
-f "$arg"; then $echo "$modename: symbol file \`$arg' does not exist" exit $EXIT_FAILURE fi prev= continue ;; expsyms_regex) export_symbols_regex="$arg" prev= continue ;; inst_prefix) inst_prefix_dir="$arg" prev= continue ;; precious_regex) precious_files_regex="$arg" prev= continue ;; release) release="-$arg" prev= continue ;; objectlist) if test -f "$arg"; then save_arg=$arg moreargs= for fil in `cat $save_arg` do # moreargs="$moreargs $fil" arg=$fil # A libtool-controlled object. # Check to see that this really is a libtool object. if (${SED} -e '2q' $arg | grep "^# Generated by .*$PACKAGE") >/dev/null 2>&1; then pic_object= non_pic_object= # Read the .lo file # If there is no directory component, then add one. case $arg in */* | *\\*) . $arg ;; *) . ./$arg ;; esac if test -z "$pic_object" || \ test -z "$non_pic_object" || test "$pic_object" = none && \ test "$non_pic_object" = none; then $echo "$modename: cannot find name of object for \`$arg'" 1>&2 exit $EXIT_FAILURE fi # Extract subdirectory from the argument. xdir=`$echo "X$arg" | $Xsed -e 's%/[^/]*$%%'` if test "X$xdir" = "X$arg"; then xdir= else xdir="$xdir/" fi if test "$pic_object" != none; then # Prepend the subdirectory the object is found in. pic_object="$xdir$pic_object" if test "$prev" = dlfiles; then if test "$build_libtool_libs" = yes && test "$dlopen_support" = yes; then dlfiles="$dlfiles $pic_object" prev= continue else # If libtool objects are unsupported, then we need to preload. prev=dlprefiles fi fi # CHECK ME: I think I busted this. -Ossama if test "$prev" = dlprefiles; then # Preload the old-style object. dlprefiles="$dlprefiles $pic_object" prev= fi # A PIC object. libobjs="$libobjs $pic_object" arg="$pic_object" fi # Non-PIC object. if test "$non_pic_object" != none; then # Prepend the subdirectory the object is found in. non_pic_object="$xdir$non_pic_object" # A standard non-PIC object non_pic_objects="$non_pic_objects $non_pic_object" if test -z "$pic_object" || test "$pic_object" = none ; then arg="$non_pic_object" fi fi else # Only an error if not doing a dry-run. if test -z "$run"; then $echo "$modename: \`$arg' is not a valid libtool object" 1>&2 exit $EXIT_FAILURE else # Dry-run case. # Extract subdirectory from the argument. xdir=`$echo "X$arg" | $Xsed -e 's%/[^/]*$%%'` if test "X$xdir" = "X$arg"; then xdir= else xdir="$xdir/" fi pic_object=`$echo "X${xdir}${objdir}/${arg}" | $Xsed -e "$lo2o"` non_pic_object=`$echo "X${xdir}${arg}" | $Xsed -e "$lo2o"` libobjs="$libobjs $pic_object" non_pic_objects="$non_pic_objects $non_pic_object" fi fi done else $echo "$modename: link input file \`$save_arg' does not exist" exit $EXIT_FAILURE fi arg=$save_arg prev= continue ;; rpath | xrpath) # We need an absolute path. 
case $arg in [\\/]* | [A-Za-z]:[\\/]*) ;; *) $echo "$modename: only absolute run-paths are allowed" 1>&2 exit $EXIT_FAILURE ;; esac if test "$prev" = rpath; then case "$rpath " in *" $arg "*) ;; *) rpath="$rpath $arg" ;; esac else case "$xrpath " in *" $arg "*) ;; *) xrpath="$xrpath $arg" ;; esac fi prev= continue ;; xcompiler) compiler_flags="$compiler_flags $qarg" prev= compile_command="$compile_command $qarg" finalize_command="$finalize_command $qarg" continue ;; xlinker) linker_flags="$linker_flags $qarg" compiler_flags="$compiler_flags $wl$qarg" prev= compile_command="$compile_command $wl$qarg" finalize_command="$finalize_command $wl$qarg" continue ;; xcclinker) linker_flags="$linker_flags $qarg" compiler_flags="$compiler_flags $qarg" prev= compile_command="$compile_command $qarg" finalize_command="$finalize_command $qarg" continue ;; shrext) shrext_cmds="$arg" prev= continue ;; *) eval "$prev=\"\$arg\"" prev= continue ;; esac fi # test -n "$prev" prevarg="$arg" case $arg in -all-static) if test -n "$link_static_flag"; then compile_command="$compile_command $link_static_flag" finalize_command="$finalize_command $link_static_flag" fi continue ;; -allow-undefined) # FIXME: remove this flag sometime in the future. $echo "$modename: \`-allow-undefined' is deprecated because it is the default" 1>&2 continue ;; -avoid-version) avoid_version=yes continue ;; -dlopen) prev=dlfiles continue ;; -dlpreopen) prev=dlprefiles continue ;; -export-dynamic) export_dynamic=yes continue ;; -export-symbols | -export-symbols-regex) if test -n "$export_symbols" || test -n "$export_symbols_regex"; then $echo "$modename: more than one -exported-symbols argument is not allowed" exit $EXIT_FAILURE fi if test "X$arg" = "X-export-symbols"; then prev=expsyms else prev=expsyms_regex fi continue ;; -inst-prefix-dir) prev=inst_prefix continue ;; # The native IRIX linker understands -LANG:*, -LIST:* and -LNO:* # so, if we see these flags be careful not to treat them like -L -L[A-Z][A-Z]*:*) case $with_gcc/$host in no/*-*-irix* | /*-*-irix*) compile_command="$compile_command $arg" finalize_command="$finalize_command $arg" ;; esac continue ;; -L*) dir=`$echo "X$arg" | $Xsed -e 's/^-L//'` # We need an absolute path. case $dir in [\\/]* | [A-Za-z]:[\\/]*) ;; *) absdir=`cd "$dir" && pwd` if test -z "$absdir"; then $echo "$modename: cannot determine absolute directory name of \`$dir'" 1>&2 exit $EXIT_FAILURE fi dir="$absdir" ;; esac case "$deplibs " in *" -L$dir "*) ;; *) deplibs="$deplibs -L$dir" lib_search_path="$lib_search_path $dir" ;; esac case $host in *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2*) case :$dllsearchpath: in *":$dir:"*) ;; *) dllsearchpath="$dllsearchpath:$dir";; esac ;; esac continue ;; -l*) if test "X$arg" = "X-lc" || test "X$arg" = "X-lm"; then case $host in *-*-cygwin* | *-*-pw32* | *-*-beos*) # These systems don't actually have a C or math library (as such) continue ;; *-*-mingw* | *-*-os2*) # These systems don't actually have a C library (as such) test "X$arg" = "X-lc" && continue ;; *-*-openbsd* | *-*-freebsd*) # Do not include libc due to us having libc/libc_r. test "X$arg" = "X-lc" && continue ;; *-*-rhapsody* | *-*-darwin1.[012]) # Rhapsody C and math libraries are in the System framework deplibs="$deplibs -framework System" continue esac elif test "X$arg" = "X-lc_r"; then case $host in *-*-openbsd* | *-*-freebsd*) # Do not include libc_r directly, use -pthread flag. 
continue ;; esac fi deplibs="$deplibs $arg" continue ;; -mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe) deplibs="$deplibs $arg" continue ;; -module) module=yes continue ;; # gcc -m* arguments should be passed to the linker via $compiler_flags # in order to pass architecture information to the linker # (e.g. 32 vs 64-bit). This may also be accomplished via -Wl,-mfoo # but this is not reliable with gcc because gcc may use -mfoo to # select a different linker, different libraries, etc, while # -Wl,-mfoo simply passes -mfoo to the linker. -m*) # Unknown arguments in both finalize_command and compile_command need # to be aesthetically quoted because they are evaled later. arg=`$echo "X$arg" | $Xsed -e "$sed_quote_subst"` case $arg in *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"") arg="\"$arg\"" ;; esac compile_command="$compile_command $arg" finalize_command="$finalize_command $arg" if test "$with_gcc" = "yes" ; then compiler_flags="$compiler_flags $arg" fi continue ;; -shrext) prev=shrext continue ;; -no-fast-install) fast_install=no continue ;; -no-install) case $host in *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2*) # The PATH hackery in wrapper scripts is required on Windows # in order for the loader to find any dlls it needs. $echo "$modename: warning: \`-no-install' is ignored for $host" 1>&2 $echo "$modename: warning: assuming \`-no-fast-install' instead" 1>&2 fast_install=no ;; *) no_install=yes ;; esac continue ;; -no-undefined) allow_undefined=no continue ;; -objectlist) prev=objectlist continue ;; -o) prev=output ;; -precious-files-regex) prev=precious_regex continue ;; -release) prev=release continue ;; -rpath) prev=rpath continue ;; -R) prev=xrpath continue ;; -R*) dir=`$echo "X$arg" | $Xsed -e 's/^-R//'` # We need an absolute path. case $dir in [\\/]* | [A-Za-z]:[\\/]*) ;; *) $echo "$modename: only absolute run-paths are allowed" 1>&2 exit $EXIT_FAILURE ;; esac case "$xrpath " in *" $dir "*) ;; *) xrpath="$xrpath $dir" ;; esac continue ;; -static) # The effects of -static are defined in a previous loop. # We used to do the same as -all-static on platforms that # didn't have a PIC flag, but the assumption that the effects # would be equivalent was wrong. It would break on at least # Digital Unix and AIX. continue ;; -thread-safe) thread_safe=yes continue ;; -version-info) prev=vinfo continue ;; -version-number) prev=vinfo vinfo_number=yes continue ;; -Wc,*) args=`$echo "X$arg" | $Xsed -e "$sed_quote_subst" -e 's/^-Wc,//'` arg= save_ifs="$IFS"; IFS=',' for flag in $args; do IFS="$save_ifs" case $flag in *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"") flag="\"$flag\"" ;; esac arg="$arg $wl$flag" compiler_flags="$compiler_flags $flag" done IFS="$save_ifs" arg=`$echo "X$arg" | $Xsed -e "s/^ //"` ;; -Wl,*) args=`$echo "X$arg" | $Xsed -e "$sed_quote_subst" -e 's/^-Wl,//'` arg= save_ifs="$IFS"; IFS=',' for flag in $args; do IFS="$save_ifs" case $flag in *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"") flag="\"$flag\"" ;; esac arg="$arg $wl$flag" compiler_flags="$compiler_flags $wl$flag" linker_flags="$linker_flags $flag" done IFS="$save_ifs" arg=`$echo "X$arg" | $Xsed -e "s/^ //"` ;; -Xcompiler) prev=xcompiler continue ;; -Xlinker) prev=xlinker continue ;; -XCClinker) prev=xcclinker continue ;; # Some other compiler flag. -* | +*) # Unknown arguments in both finalize_command and compile_command need # to be aesthetically quoted because they are evaled later. 
arg=`$echo "X$arg" | $Xsed -e "$sed_quote_subst"` case $arg in *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"") arg="\"$arg\"" ;; esac ;; *.$objext) # A standard object. objs="$objs $arg" ;; *.lo) # A libtool-controlled object. # Check to see that this really is a libtool object. if (${SED} -e '2q' $arg | grep "^# Generated by .*$PACKAGE") >/dev/null 2>&1; then pic_object= non_pic_object= # Read the .lo file # If there is no directory component, then add one. case $arg in */* | *\\*) . $arg ;; *) . ./$arg ;; esac if test -z "$pic_object" || \ test -z "$non_pic_object" || test "$pic_object" = none && \ test "$non_pic_object" = none; then $echo "$modename: cannot find name of object for \`$arg'" 1>&2 exit $EXIT_FAILURE fi # Extract subdirectory from the argument. xdir=`$echo "X$arg" | $Xsed -e 's%/[^/]*$%%'` if test "X$xdir" = "X$arg"; then xdir= else xdir="$xdir/" fi if test "$pic_object" != none; then # Prepend the subdirectory the object is found in. pic_object="$xdir$pic_object" if test "$prev" = dlfiles; then if test "$build_libtool_libs" = yes && test "$dlopen_support" = yes; then dlfiles="$dlfiles $pic_object" prev= continue else # If libtool objects are unsupported, then we need to preload. prev=dlprefiles fi fi # CHECK ME: I think I busted this. -Ossama if test "$prev" = dlprefiles; then # Preload the old-style object. dlprefiles="$dlprefiles $pic_object" prev= fi # A PIC object. libobjs="$libobjs $pic_object" arg="$pic_object" fi # Non-PIC object. if test "$non_pic_object" != none; then # Prepend the subdirectory the object is found in. non_pic_object="$xdir$non_pic_object" # A standard non-PIC object non_pic_objects="$non_pic_objects $non_pic_object" if test -z "$pic_object" || test "$pic_object" = none ; then arg="$non_pic_object" fi fi else # Only an error if not doing a dry-run. if test -z "$run"; then $echo "$modename: \`$arg' is not a valid libtool object" 1>&2 exit $EXIT_FAILURE else # Dry-run case. # Extract subdirectory from the argument. xdir=`$echo "X$arg" | $Xsed -e 's%/[^/]*$%%'` if test "X$xdir" = "X$arg"; then xdir= else xdir="$xdir/" fi pic_object=`$echo "X${xdir}${objdir}/${arg}" | $Xsed -e "$lo2o"` non_pic_object=`$echo "X${xdir}${arg}" | $Xsed -e "$lo2o"` libobjs="$libobjs $pic_object" non_pic_objects="$non_pic_objects $non_pic_object" fi fi ;; *.$libext) # An archive. deplibs="$deplibs $arg" old_deplibs="$old_deplibs $arg" continue ;; *.la) # A libtool-controlled library. if test "$prev" = dlfiles; then # This library was specified with -dlopen. dlfiles="$dlfiles $arg" prev= elif test "$prev" = dlprefiles; then # The library was specified with -dlpreopen. dlprefiles="$dlprefiles $arg" prev= else deplibs="$deplibs $arg" fi continue ;; # Some other compiler argument. *) # Unknown arguments in both finalize_command and compile_command need # to be aesthetically quoted because they are evaled later. arg=`$echo "X$arg" | $Xsed -e "$sed_quote_subst"` case $arg in *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"") arg="\"$arg\"" ;; esac ;; esac # arg # Now actually substitute the argument into the commands. 
if test -n "$arg"; then compile_command="$compile_command $arg" finalize_command="$finalize_command $arg" fi done # argument parsing loop if test -n "$prev"; then $echo "$modename: the \`$prevarg' option requires an argument" 1>&2 $echo "$help" 1>&2 exit $EXIT_FAILURE fi if test "$export_dynamic" = yes && test -n "$export_dynamic_flag_spec"; then eval arg=\"$export_dynamic_flag_spec\" compile_command="$compile_command $arg" finalize_command="$finalize_command $arg" fi oldlibs= # calculate the name of the file, without its directory outputname=`$echo "X$output" | $Xsed -e 's%^.*/%%'` libobjs_save="$libobjs" if test -n "$shlibpath_var"; then # get the directories listed in $shlibpath_var eval shlib_search_path=\`\$echo \"X\${$shlibpath_var}\" \| \$Xsed -e \'s/:/ /g\'\` else shlib_search_path= fi eval sys_lib_search_path=\"$sys_lib_search_path_spec\" eval sys_lib_dlsearch_path=\"$sys_lib_dlsearch_path_spec\" output_objdir=`$echo "X$output" | $Xsed -e 's%/[^/]*$%%'` if test "X$output_objdir" = "X$output"; then output_objdir="$objdir" else output_objdir="$output_objdir/$objdir" fi # Create the object directory. if test ! -d "$output_objdir"; then $show "$mkdir $output_objdir" $run $mkdir $output_objdir status=$? if test "$status" -ne 0 && test ! -d "$output_objdir"; then exit $status fi fi # Determine the type of output case $output in "") $echo "$modename: you must specify an output file" 1>&2 $echo "$help" 1>&2 exit $EXIT_FAILURE ;; *.$libext) linkmode=oldlib ;; *.lo | *.$objext) linkmode=obj ;; *.la) linkmode=lib ;; *) linkmode=prog ;; # Anything else should be a program. esac case $host in *cygwin* | *mingw* | *pw32*) # don't eliminate duplications in $postdeps and $predeps duplicate_compiler_generated_deps=yes ;; *) duplicate_compiler_generated_deps=$duplicate_deps ;; esac specialdeplibs= libs= # Find all interdependent deplibs by searching for libraries # that are linked more than once (e.g. -la -lb -la) for deplib in $deplibs; do if test "X$duplicate_deps" = "Xyes" ; then case "$libs " in *" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;; esac fi libs="$libs $deplib" done if test "$linkmode" = lib; then libs="$predeps $libs $compiler_lib_search_path $postdeps" # Compute libraries that are listed more than once in $predeps # $postdeps and mark them as special (i.e., whose duplicates are # not to be eliminated). 
pre_post_deps= if test "X$duplicate_compiler_generated_deps" = "Xyes" ; then for pre_post_dep in $predeps $postdeps; do case "$pre_post_deps " in *" $pre_post_dep "*) specialdeplibs="$specialdeplibs $pre_post_deps" ;; esac pre_post_deps="$pre_post_deps $pre_post_dep" done fi pre_post_deps= fi deplibs= newdependency_libs= newlib_search_path= need_relink=no # whether we're linking any uninstalled libtool libraries notinst_deplibs= # not-installed libtool libraries notinst_path= # paths that contain not-installed libtool libraries case $linkmode in lib) passes="conv link" for file in $dlfiles $dlprefiles; do case $file in *.la) ;; *) $echo "$modename: libraries can \`-dlopen' only libtool libraries: $file" 1>&2 exit $EXIT_FAILURE ;; esac done ;; prog) compile_deplibs= finalize_deplibs= alldeplibs=no newdlfiles= newdlprefiles= passes="conv scan dlopen dlpreopen link" ;; *) passes="conv" ;; esac for pass in $passes; do if test "$linkmode,$pass" = "lib,link" || test "$linkmode,$pass" = "prog,scan"; then libs="$deplibs" deplibs= fi if test "$linkmode" = prog; then case $pass in dlopen) libs="$dlfiles" ;; dlpreopen) libs="$dlprefiles" ;; link) libs="$deplibs %DEPLIBS% $dependency_libs" ;; esac fi if test "$pass" = dlopen; then # Collect dlpreopened libraries save_deplibs="$deplibs" deplibs= fi for deplib in $libs; do lib= found=no case $deplib in -mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe) if test "$linkmode,$pass" = "prog,link"; then compile_deplibs="$deplib $compile_deplibs" finalize_deplibs="$deplib $finalize_deplibs" else deplibs="$deplib $deplibs" fi continue ;; -l*) if test "$linkmode" != lib && test "$linkmode" != prog; then $echo "$modename: warning: \`-l' is ignored for archives/objects" 1>&2 continue fi if test "$pass" = conv; then deplibs="$deplib $deplibs" continue fi name=`$echo "X$deplib" | $Xsed -e 's/^-l//'` for searchdir in $newlib_search_path $lib_search_path $sys_lib_search_path $shlib_search_path; do for search_ext in .la $std_shrext .so .a; do # Search the libtool library lib="$searchdir/lib${name}${search_ext}" if test -f "$lib"; then if test "$search_ext" = ".la"; then found=yes else found=no fi break 2 fi done done if test "$found" != yes; then # deplib doesn't seem to be a libtool library if test "$linkmode,$pass" = "prog,link"; then compile_deplibs="$deplib $compile_deplibs" finalize_deplibs="$deplib $finalize_deplibs" else deplibs="$deplib $deplibs" test "$linkmode" = lib && newdependency_libs="$deplib $newdependency_libs" fi continue else # deplib is a libtool library # If $allow_libtool_libs_with_static_runtimes && $deplib is a stdlib, # We need to do some special things here, and not later. if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then case " $predeps $postdeps " in *" $deplib "*) if (${SED} -e '2q' $lib | grep "^# Generated by .*$PACKAGE") >/dev/null 2>&1; then library_names= old_library= case $lib in */* | *\\*) . $lib ;; *) . ./$lib ;; esac for l in $old_library $library_names; do ll="$l" done if test "X$ll" = "X$old_library" ; then # only static version available found=no ladir=`$echo "X$lib" | $Xsed -e 's%/[^/]*$%%'` test "X$ladir" = "X$lib" && ladir="." 
lib=$ladir/$old_library if test "$linkmode,$pass" = "prog,link"; then compile_deplibs="$deplib $compile_deplibs" finalize_deplibs="$deplib $finalize_deplibs" else deplibs="$deplib $deplibs" test "$linkmode" = lib && newdependency_libs="$deplib $newdependency_libs" fi continue fi fi ;; *) ;; esac fi fi ;; # -l -L*) case $linkmode in lib) deplibs="$deplib $deplibs" test "$pass" = conv && continue newdependency_libs="$deplib $newdependency_libs" newlib_search_path="$newlib_search_path "`$echo "X$deplib" | $Xsed -e 's/^-L//'` ;; prog) if test "$pass" = conv; then deplibs="$deplib $deplibs" continue fi if test "$pass" = scan; then deplibs="$deplib $deplibs" else compile_deplibs="$deplib $compile_deplibs" finalize_deplibs="$deplib $finalize_deplibs" fi newlib_search_path="$newlib_search_path "`$echo "X$deplib" | $Xsed -e 's/^-L//'` ;; *) $echo "$modename: warning: \`-L' is ignored for archives/objects" 1>&2 ;; esac # linkmode continue ;; # -L -R*) if test "$pass" = link; then dir=`$echo "X$deplib" | $Xsed -e 's/^-R//'` # Make sure the xrpath contains only unique directories. case "$xrpath " in *" $dir "*) ;; *) xrpath="$xrpath $dir" ;; esac fi deplibs="$deplib $deplibs" continue ;; *.la) lib="$deplib" ;; *.$libext) if test "$pass" = conv; then deplibs="$deplib $deplibs" continue fi case $linkmode in lib) if test "$deplibs_check_method" != pass_all; then $echo $echo "*** Warning: Trying to link with static lib archive $deplib." $echo "*** I have the capability to make that library automatically link in when" $echo "*** you link to this library. But I can only do this if you have a" $echo "*** shared version of the library, which you do not appear to have" $echo "*** because the file extensions .$libext of this argument makes me believe" $echo "*** that it is just a static archive that I should not used here." else $echo $echo "*** Warning: Linking the shared library $output against the" $echo "*** static library $deplib is not portable!" deplibs="$deplib $deplibs" fi continue ;; prog) if test "$pass" != link; then deplibs="$deplib $deplibs" else compile_deplibs="$deplib $compile_deplibs" finalize_deplibs="$deplib $finalize_deplibs" fi continue ;; esac # linkmode ;; # *.$libext *.lo | *.$objext) if test "$pass" = conv; then deplibs="$deplib $deplibs" elif test "$linkmode" = prog; then if test "$pass" = dlpreopen || test "$dlopen_support" != yes || test "$build_libtool_libs" = no; then # If there is no dlopen support or we're linking statically, # we need to preload. newdlprefiles="$newdlprefiles $deplib" compile_deplibs="$deplib $compile_deplibs" finalize_deplibs="$deplib $finalize_deplibs" else newdlfiles="$newdlfiles $deplib" fi fi continue ;; %DEPLIBS%) alldeplibs=yes continue ;; esac # case $deplib if test "$found" = yes || test -f "$lib"; then : else $echo "$modename: cannot find the library \`$lib'" 1>&2 exit $EXIT_FAILURE fi # Check to see that this really is a libtool archive. if (${SED} -e '2q' $lib | grep "^# Generated by .*$PACKAGE") >/dev/null 2>&1; then : else $echo "$modename: \`$lib' is not a valid libtool archive" 1>&2 exit $EXIT_FAILURE fi ladir=`$echo "X$lib" | $Xsed -e 's%/[^/]*$%%'` test "X$ladir" = "X$lib" && ladir="." dlname= dlopen= dlpreopen= libdir= library_names= old_library= # If the library was installed with an old release of libtool, # it will not redefine variables installed, or shouldnotlink installed=yes shouldnotlink=no # Read the .la file case $lib in */* | *\\*) . $lib ;; *) . 
./$lib ;; esac if test "$linkmode,$pass" = "lib,link" || test "$linkmode,$pass" = "prog,scan" || { test "$linkmode" != prog && test "$linkmode" != lib; }; then test -n "$dlopen" && dlfiles="$dlfiles $dlopen" test -n "$dlpreopen" && dlprefiles="$dlprefiles $dlpreopen" fi if test "$pass" = conv; then # Only check for convenience libraries deplibs="$lib $deplibs" if test -z "$libdir"; then if test -z "$old_library"; then $echo "$modename: cannot find name of link library for \`$lib'" 1>&2 exit $EXIT_FAILURE fi # It is a libtool convenience library, so add in its objects. convenience="$convenience $ladir/$objdir/$old_library" old_convenience="$old_convenience $ladir/$objdir/$old_library" tmp_libs= for deplib in $dependency_libs; do deplibs="$deplib $deplibs" if test "X$duplicate_deps" = "Xyes" ; then case "$tmp_libs " in *" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;; esac fi tmp_libs="$tmp_libs $deplib" done elif test "$linkmode" != prog && test "$linkmode" != lib; then $echo "$modename: \`$lib' is not a convenience library" 1>&2 exit $EXIT_FAILURE fi continue fi # $pass = conv # Get the name of the library we link against. linklib= for l in $old_library $library_names; do linklib="$l" done if test -z "$linklib"; then $echo "$modename: cannot find name of link library for \`$lib'" 1>&2 exit $EXIT_FAILURE fi # This library was specified with -dlopen. if test "$pass" = dlopen; then if test -z "$libdir"; then $echo "$modename: cannot -dlopen a convenience library: \`$lib'" 1>&2 exit $EXIT_FAILURE fi if test -z "$dlname" || test "$dlopen_support" != yes || test "$build_libtool_libs" = no; then # If there is no dlname, no dlopen support or we're linking # statically, we need to preload. We also need to preload any # dependent libraries so libltdl's deplib preloader doesn't # bomb out in the load deplibs phase. dlprefiles="$dlprefiles $lib $dependency_libs" else newdlfiles="$newdlfiles $lib" fi continue fi # $pass = dlopen # We need an absolute path. case $ladir in [\\/]* | [A-Za-z]:[\\/]*) abs_ladir="$ladir" ;; *) abs_ladir=`cd "$ladir" && pwd` if test -z "$abs_ladir"; then $echo "$modename: warning: cannot determine absolute directory name of \`$ladir'" 1>&2 $echo "$modename: passing it literally to the linker, although it might fail" 1>&2 abs_ladir="$ladir" fi ;; esac laname=`$echo "X$lib" | $Xsed -e 's%^.*/%%'` # Find the relevant object directory and library name. if test "X$installed" = Xyes; then if test ! -f "$libdir/$linklib" && test -f "$abs_ladir/$linklib"; then $echo "$modename: warning: library \`$lib' was moved." 1>&2 dir="$ladir" absdir="$abs_ladir" libdir="$abs_ladir" else dir="$libdir" absdir="$libdir" fi else dir="$ladir/$objdir" absdir="$abs_ladir/$objdir" # Remove this search path later notinst_path="$notinst_path $abs_ladir" fi # $installed = yes name=`$echo "X$laname" | $Xsed -e 's/\.la$//' -e 's/^lib//'` # This library was specified with -dlpreopen. if test "$pass" = dlpreopen; then if test -z "$libdir"; then $echo "$modename: cannot -dlpreopen a convenience library: \`$lib'" 1>&2 exit $EXIT_FAILURE fi # Prefer using a static library (so that no silly _DYNAMIC symbols # are required to link). if test -n "$old_library"; then newdlprefiles="$newdlprefiles $dir/$old_library" # Otherwise, use the dlname, so that lt_dlopen finds it. 
elif test -n "$dlname"; then newdlprefiles="$newdlprefiles $dir/$dlname" else newdlprefiles="$newdlprefiles $dir/$linklib" fi fi # $pass = dlpreopen if test -z "$libdir"; then # Link the convenience library if test "$linkmode" = lib; then deplibs="$dir/$old_library $deplibs" elif test "$linkmode,$pass" = "prog,link"; then compile_deplibs="$dir/$old_library $compile_deplibs" finalize_deplibs="$dir/$old_library $finalize_deplibs" else deplibs="$lib $deplibs" # used for prog,scan pass fi continue fi if test "$linkmode" = prog && test "$pass" != link; then newlib_search_path="$newlib_search_path $ladir" deplibs="$lib $deplibs" linkalldeplibs=no if test "$link_all_deplibs" != no || test -z "$library_names" || test "$build_libtool_libs" = no; then linkalldeplibs=yes fi tmp_libs= for deplib in $dependency_libs; do case $deplib in -L*) newlib_search_path="$newlib_search_path "`$echo "X$deplib" | $Xsed -e 's/^-L//'`;; ### testsuite: skip nested quoting test esac # Need to link against all dependency_libs? if test "$linkalldeplibs" = yes; then deplibs="$deplib $deplibs" else # Need to hardcode shared library paths # or/and link against static libraries newdependency_libs="$deplib $newdependency_libs" fi if test "X$duplicate_deps" = "Xyes" ; then case "$tmp_libs " in *" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;; esac fi tmp_libs="$tmp_libs $deplib" done # for deplib continue fi # $linkmode = prog... if test "$linkmode,$pass" = "prog,link"; then if test -n "$library_names" && { test "$prefer_static_libs" = no || test -z "$old_library"; }; then # We need to hardcode the library path if test -n "$shlibpath_var"; then # Make sure the rpath contains only unique directories. case "$temp_rpath " in *" $dir "*) ;; *" $absdir "*) ;; *) temp_rpath="$temp_rpath $dir" ;; esac fi # Hardcode the library path. # Skip directories that are in the system default run-time # search path. case " $sys_lib_dlsearch_path " in *" $absdir "*) ;; *) case "$compile_rpath " in *" $absdir "*) ;; *) compile_rpath="$compile_rpath $absdir" esac ;; esac case " $sys_lib_dlsearch_path " in *" $libdir "*) ;; *) case "$finalize_rpath " in *" $libdir "*) ;; *) finalize_rpath="$finalize_rpath $libdir" esac ;; esac fi # $linkmode,$pass = prog,link... if test "$alldeplibs" = yes && { test "$deplibs_check_method" = pass_all || { test "$build_libtool_libs" = yes && test -n "$library_names"; }; }; then # We only need to search for static libraries continue fi fi link_static=no # Whether the deplib will be linked statically if test -n "$library_names" && { test "$prefer_static_libs" = no || test -z "$old_library"; }; then if test "$installed" = no; then notinst_deplibs="$notinst_deplibs $lib" need_relink=yes fi # This is a shared library # Warn about portability, can't link against -module's on # some systems (darwin) if test "$shouldnotlink" = yes && test "$pass" = link ; then $echo if test "$linkmode" = prog; then $echo "*** Warning: Linking the executable $output against the loadable module" else $echo "*** Warning: Linking the shared library $output against the loadable module" fi $echo "*** $linklib is not portable!" fi if test "$linkmode" = lib && test "$hardcode_into_libs" = yes; then # Hardcode the library path. # Skip directories that are in the system default run-time # search path. 
case " $sys_lib_dlsearch_path " in *" $absdir "*) ;; *) case "$compile_rpath " in *" $absdir "*) ;; *) compile_rpath="$compile_rpath $absdir" esac ;; esac case " $sys_lib_dlsearch_path " in *" $libdir "*) ;; *) case "$finalize_rpath " in *" $libdir "*) ;; *) finalize_rpath="$finalize_rpath $libdir" esac ;; esac fi if test -n "$old_archive_from_expsyms_cmds"; then # figure out the soname set dummy $library_names realname="$2" shift; shift libname=`eval \\$echo \"$libname_spec\"` # use dlname if we got it. it's perfectly good, no? if test -n "$dlname"; then soname="$dlname" elif test -n "$soname_spec"; then # bleh windows case $host in *cygwin* | mingw*) major=`expr $current - $age` versuffix="-$major" ;; esac eval soname=\"$soname_spec\" else soname="$realname" fi # Make a new name for the extract_expsyms_cmds to use soroot="$soname" soname=`$echo $soroot | ${SED} -e 's/^.*\///'` newlib="libimp-`$echo $soname | ${SED} 's/^lib//;s/\.dll$//'`.a" # If the library has no export list, then create one now if test -f "$output_objdir/$soname-def"; then : else $show "extracting exported symbol list from \`$soname'" save_ifs="$IFS"; IFS='~' cmds=$extract_expsyms_cmds for cmd in $cmds; do IFS="$save_ifs" eval cmd=\"$cmd\" $show "$cmd" $run eval "$cmd" || exit $? done IFS="$save_ifs" fi # Create $newlib if test -f "$output_objdir/$newlib"; then :; else $show "generating import library for \`$soname'" save_ifs="$IFS"; IFS='~' cmds=$old_archive_from_expsyms_cmds for cmd in $cmds; do IFS="$save_ifs" eval cmd=\"$cmd\" $show "$cmd" $run eval "$cmd" || exit $? done IFS="$save_ifs" fi # make sure the library variables are pointing to the new library dir=$output_objdir linklib=$newlib fi # test -n "$old_archive_from_expsyms_cmds" if test "$linkmode" = prog || test "$mode" != relink; then add_shlibpath= add_dir= add= lib_linked=yes case $hardcode_action in immediate | unsupported) if test "$hardcode_direct" = no; then add="$dir/$linklib" case $host in *-*-sco3.2v5* ) add_dir="-L$dir" ;; *-*-darwin* ) # if the lib is a module then we can not link against # it, someone is ignoring the new warnings I added if /usr/bin/file -L $add 2> /dev/null | $EGREP "bundle" >/dev/null ; then $echo "** Warning, lib $linklib is a module, not a shared library" if test -z "$old_library" ; then $echo $echo "** And there doesn't seem to be a static archive available" $echo "** The link will probably fail, sorry" else add="$dir/$old_library" fi fi esac elif test "$hardcode_minus_L" = no; then case $host in *-*-sunos*) add_shlibpath="$dir" ;; esac add_dir="-L$dir" add="-l$name" elif test "$hardcode_shlibpath_var" = no; then add_shlibpath="$dir" add="-l$name" else lib_linked=no fi ;; relink) if test "$hardcode_direct" = yes; then add="$dir/$linklib" elif test "$hardcode_minus_L" = yes; then add_dir="-L$dir" # Try looking first in the location we're being installed to. 
if test -n "$inst_prefix_dir"; then case "$libdir" in [\\/]*) add_dir="$add_dir -L$inst_prefix_dir$libdir" ;; esac fi add="-l$name" elif test "$hardcode_shlibpath_var" = yes; then add_shlibpath="$dir" add="-l$name" else lib_linked=no fi ;; *) lib_linked=no ;; esac if test "$lib_linked" != yes; then $echo "$modename: configuration error: unsupported hardcode properties" exit $EXIT_FAILURE fi if test -n "$add_shlibpath"; then case :$compile_shlibpath: in *":$add_shlibpath:"*) ;; *) compile_shlibpath="$compile_shlibpath$add_shlibpath:" ;; esac fi if test "$linkmode" = prog; then test -n "$add_dir" && compile_deplibs="$add_dir $compile_deplibs" test -n "$add" && compile_deplibs="$add $compile_deplibs" else test -n "$add_dir" && deplibs="$add_dir $deplibs" test -n "$add" && deplibs="$add $deplibs" if test "$hardcode_direct" != yes && \ test "$hardcode_minus_L" != yes && \ test "$hardcode_shlibpath_var" = yes; then case :$finalize_shlibpath: in *":$libdir:"*) ;; *) finalize_shlibpath="$finalize_shlibpath$libdir:" ;; esac fi fi fi if test "$linkmode" = prog || test "$mode" = relink; then add_shlibpath= add_dir= add= # Finalize command for both is simple: just hardcode it. if test "$hardcode_direct" = yes; then add="$libdir/$linklib" elif test "$hardcode_minus_L" = yes; then add_dir="-L$libdir" add="-l$name" elif test "$hardcode_shlibpath_var" = yes; then case :$finalize_shlibpath: in *":$libdir:"*) ;; *) finalize_shlibpath="$finalize_shlibpath$libdir:" ;; esac add="-l$name" elif test "$hardcode_automatic" = yes; then if test -n "$inst_prefix_dir" && test -f "$inst_prefix_dir$libdir/$linklib" ; then add="$inst_prefix_dir$libdir/$linklib" else add="$libdir/$linklib" fi else # We cannot seem to hardcode it, guess we'll fake it. add_dir="-L$libdir" # Try looking first in the location we're being installed to. if test -n "$inst_prefix_dir"; then case "$libdir" in [\\/]*) add_dir="$add_dir -L$inst_prefix_dir$libdir" ;; esac fi add="-l$name" fi if test "$linkmode" = prog; then test -n "$add_dir" && finalize_deplibs="$add_dir $finalize_deplibs" test -n "$add" && finalize_deplibs="$add $finalize_deplibs" else test -n "$add_dir" && deplibs="$add_dir $deplibs" test -n "$add" && deplibs="$add $deplibs" fi fi elif test "$linkmode" = prog; then # Here we assume that one of hardcode_direct or hardcode_minus_L # is not unsupported. This is valid on all known static and # shared platforms. if test "$hardcode_direct" != unsupported; then test -n "$old_library" && linklib="$old_library" compile_deplibs="$dir/$linklib $compile_deplibs" finalize_deplibs="$dir/$linklib $finalize_deplibs" else compile_deplibs="-l$name -L$dir $compile_deplibs" finalize_deplibs="-l$name -L$dir $finalize_deplibs" fi elif test "$build_libtool_libs" = yes; then # Not a shared library if test "$deplibs_check_method" != pass_all; then # We're trying link a shared library against a static one # but the system doesn't support it. # Just print a warning and add the library to dependency_libs so # that the program can be linked against the static library. $echo $echo "*** Warning: This system can not link to static lib archive $lib." $echo "*** I have the capability to make that library automatically link in when" $echo "*** you link to this library. But I can only do this if you have a" $echo "*** shared version of the library, which you do not appear to have." 
if test "$module" = yes; then $echo "*** But as you try to build a module library, libtool will still create " $echo "*** a static module, that should work as long as the dlopening application" $echo "*** is linked with the -dlopen flag to resolve symbols at runtime." if test -z "$global_symbol_pipe"; then $echo $echo "*** However, this would only work if libtool was able to extract symbol" $echo "*** lists from a program, using \`nm' or equivalent, but libtool could" $echo "*** not find such a program. So, this module is probably useless." $echo "*** \`nm' from GNU binutils and a full rebuild may help." fi if test "$build_old_libs" = no; then build_libtool_libs=module build_old_libs=yes else build_libtool_libs=no fi fi else convenience="$convenience $dir/$old_library" old_convenience="$old_convenience $dir/$old_library" deplibs="$dir/$old_library $deplibs" link_static=yes fi fi # link shared/static library? if test "$linkmode" = lib; then if test -n "$dependency_libs" && { test "$hardcode_into_libs" != yes || test "$build_old_libs" = yes || test "$link_static" = yes; }; then # Extract -R from dependency_libs temp_deplibs= for libdir in $dependency_libs; do case $libdir in -R*) temp_xrpath=`$echo "X$libdir" | $Xsed -e 's/^-R//'` case " $xrpath " in *" $temp_xrpath "*) ;; *) xrpath="$xrpath $temp_xrpath";; esac;; *) temp_deplibs="$temp_deplibs $libdir";; esac done dependency_libs="$temp_deplibs" fi newlib_search_path="$newlib_search_path $absdir" # Link against this library test "$link_static" = no && newdependency_libs="$abs_ladir/$laname $newdependency_libs" # ... and its dependency_libs tmp_libs= for deplib in $dependency_libs; do newdependency_libs="$deplib $newdependency_libs" if test "X$duplicate_deps" = "Xyes" ; then case "$tmp_libs " in *" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;; esac fi tmp_libs="$tmp_libs $deplib" done if test "$link_all_deplibs" != no; then # Add the search paths of all dependency libraries for deplib in $dependency_libs; do case $deplib in -L*) path="$deplib" ;; *.la) dir=`$echo "X$deplib" | $Xsed -e 's%/[^/]*$%%'` test "X$dir" = "X$deplib" && dir="." # We need an absolute path. 
case $dir in [\\/]* | [A-Za-z]:[\\/]*) absdir="$dir" ;; *) absdir=`cd "$dir" && pwd` if test -z "$absdir"; then $echo "$modename: warning: cannot determine absolute directory name of \`$dir'" 1>&2 absdir="$dir" fi ;; esac if grep "^installed=no" $deplib > /dev/null; then path="$absdir/$objdir" else eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $deplib` if test -z "$libdir"; then $echo "$modename: \`$deplib' is not a valid libtool archive" 1>&2 exit $EXIT_FAILURE fi if test "$absdir" != "$libdir"; then $echo "$modename: warning: \`$deplib' seems to be moved" 1>&2 fi path="$absdir" fi depdepl= case $host in *-*-darwin*) # we do not want to link against static libs, # but need to link against shared eval deplibrary_names=`${SED} -n -e 's/^library_names=\(.*\)$/\1/p' $deplib` if test -n "$deplibrary_names" ; then for tmp in $deplibrary_names ; do depdepl=$tmp done if test -f "$path/$depdepl" ; then depdepl="$path/$depdepl" fi # do not add paths which are already there case " $newlib_search_path " in *" $path "*) ;; *) newlib_search_path="$newlib_search_path $path";; esac fi path="" ;; *) path="-L$path" ;; esac ;; -l*) case $host in *-*-darwin*) # Again, we only want to link against shared libraries eval tmp_libs=`$echo "X$deplib" | $Xsed -e "s,^\-l,,"` for tmp in $newlib_search_path ; do if test -f "$tmp/lib$tmp_libs.dylib" ; then eval depdepl="$tmp/lib$tmp_libs.dylib" break fi done path="" ;; *) continue ;; esac ;; *) continue ;; esac case " $deplibs " in *" $depdepl "*) ;; *) deplibs="$depdepl $deplibs" ;; esac case " $deplibs " in *" $path "*) ;; *) deplibs="$deplibs $path" ;; esac done fi # link_all_deplibs != no fi # linkmode = lib done # for deplib in $libs dependency_libs="$newdependency_libs" if test "$pass" = dlpreopen; then # Link the dlpreopened libraries before other libraries for deplib in $save_deplibs; do deplibs="$deplib $deplibs" done fi if test "$pass" != dlopen; then if test "$pass" != conv; then # Make sure lib_search_path contains only unique directories. lib_search_path= for dir in $newlib_search_path; do case "$lib_search_path " in *" $dir "*) ;; *) lib_search_path="$lib_search_path $dir" ;; esac done newlib_search_path= fi if test "$linkmode,$pass" != "prog,link"; then vars="deplibs" else vars="compile_deplibs finalize_deplibs" fi for var in $vars dependency_libs; do # Add libraries to $var in reverse order eval tmp_libs=\"\$$var\" new_libs= for deplib in $tmp_libs; do # FIXME: Pedantically, this is the right thing to do, so # that some nasty dependency loop isn't accidentally # broken: #new_libs="$deplib $new_libs" # Pragmatically, this seems to cause very few problems in # practice: case $deplib in -L*) new_libs="$deplib $new_libs" ;; -R*) ;; *) # And here is the reason: when a library appears more # than once as an explicit dependence of a library, or # is implicitly linked in more than once by the # compiler, it is considered special, and multiple # occurrences thereof are not removed. Compare this # with having the same library being listed as a # dependency of multiple other libraries: in this case, # we know (pedantically, we assume) the library does not # need to be listed more than once, so we keep only the # last copy. This is not always right, but it is rare # enough that we require users that really mean to play # such unportable linking tricks to link the library # using -Wl,-lname, so that libtool does not consider it # for duplicate removal. 
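          # Illustrative aside, not part of the upstream script: with explicit
          # deplibs like "-la -lb -la", -la has been marked special and both
          # copies are kept below, while a library that merely appears as a
          # dependency of several .la files keeps only its last occurrence.
          # Linking it as -Wl,-lname, as suggested above, bypasses this
          # de-duplication entirely.  The names are placeholders.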
case " $specialdeplibs " in *" $deplib "*) new_libs="$deplib $new_libs" ;; *) case " $new_libs " in *" $deplib "*) ;; *) new_libs="$deplib $new_libs" ;; esac ;; esac ;; esac done tmp_libs= for deplib in $new_libs; do case $deplib in -L*) case " $tmp_libs " in *" $deplib "*) ;; *) tmp_libs="$tmp_libs $deplib" ;; esac ;; *) tmp_libs="$tmp_libs $deplib" ;; esac done eval $var=\"$tmp_libs\" done # for var fi # Last step: remove runtime libs from dependency_libs # (they stay in deplibs) tmp_libs= for i in $dependency_libs ; do case " $predeps $postdeps $compiler_lib_search_path " in *" $i "*) i="" ;; esac if test -n "$i" ; then tmp_libs="$tmp_libs $i" fi done dependency_libs=$tmp_libs done # for pass if test "$linkmode" = prog; then dlfiles="$newdlfiles" dlprefiles="$newdlprefiles" fi case $linkmode in oldlib) if test -n "$deplibs"; then $echo "$modename: warning: \`-l' and \`-L' are ignored for archives" 1>&2 fi if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then $echo "$modename: warning: \`-dlopen' is ignored for archives" 1>&2 fi if test -n "$rpath"; then $echo "$modename: warning: \`-rpath' is ignored for archives" 1>&2 fi if test -n "$xrpath"; then $echo "$modename: warning: \`-R' is ignored for archives" 1>&2 fi if test -n "$vinfo"; then $echo "$modename: warning: \`-version-info/-version-number' is ignored for archives" 1>&2 fi if test -n "$release"; then $echo "$modename: warning: \`-release' is ignored for archives" 1>&2 fi if test -n "$export_symbols" || test -n "$export_symbols_regex"; then $echo "$modename: warning: \`-export-symbols' is ignored for archives" 1>&2 fi # Now set the variables for building old libraries. build_libtool_libs=no oldlibs="$output" objs="$objs$old_deplibs" ;; lib) # Make sure we only generate libraries of the form `libNAME.la'. case $outputname in lib*) name=`$echo "X$outputname" | $Xsed -e 's/\.la$//' -e 's/^lib//'` eval shared_ext=\"$shrext_cmds\" eval libname=\"$libname_spec\" ;; *) if test "$module" = no; then $echo "$modename: libtool library \`$output' must begin with \`lib'" 1>&2 $echo "$help" 1>&2 exit $EXIT_FAILURE fi if test "$need_lib_prefix" != no; then # Add the "lib" prefix for modules if required name=`$echo "X$outputname" | $Xsed -e 's/\.la$//'` eval shared_ext=\"$shrext_cmds\" eval libname=\"$libname_spec\" else libname=`$echo "X$outputname" | $Xsed -e 's/\.la$//'` fi ;; esac if test -n "$objs"; then if test "$deplibs_check_method" != pass_all; then $echo "$modename: cannot build libtool library \`$output' from non-libtool objects on this host:$objs" 2>&1 exit $EXIT_FAILURE else $echo $echo "*** Warning: Linking the shared library $output against the non-libtool" $echo "*** objects $objs is not portable!" libobjs="$libobjs $objs" fi fi if test "$dlself" != no; then $echo "$modename: warning: \`-dlopen self' is ignored for libtool libraries" 1>&2 fi set dummy $rpath if test "$#" -gt 2; then $echo "$modename: warning: ignoring multiple \`-rpath's for a libtool library" 1>&2 fi install_libdir="$2" oldlibs= if test -z "$rpath"; then if test "$build_libtool_libs" = yes; then # Building a libtool convenience library. # Some compilers have problems with a `.al' extension so # convenience libraries should have the same extension an # archive normally would. 
oldlibs="$output_objdir/$libname.$libext $oldlibs" build_libtool_libs=convenience build_old_libs=yes fi if test -n "$vinfo"; then $echo "$modename: warning: \`-version-info/-version-number' is ignored for convenience libraries" 1>&2 fi if test -n "$release"; then $echo "$modename: warning: \`-release' is ignored for convenience libraries" 1>&2 fi else # Parse the version information argument. save_ifs="$IFS"; IFS=':' set dummy $vinfo 0 0 0 IFS="$save_ifs" if test -n "$8"; then $echo "$modename: too many parameters to \`-version-info'" 1>&2 $echo "$help" 1>&2 exit $EXIT_FAILURE fi # convert absolute version numbers to libtool ages # this retains compatibility with .la files and attempts # to make the code below a bit more comprehensible case $vinfo_number in yes) number_major="$2" number_minor="$3" number_revision="$4" # # There are really only two kinds -- those that # use the current revision as the major version # and those that subtract age and use age as # a minor version. But, then there is irix # which has an extra 1 added just for fun # case $version_type in darwin|linux|osf|windows) current=`expr $number_major + $number_minor` age="$number_minor" revision="$number_revision" ;; freebsd-aout|freebsd-elf|sunos) current="$number_major" revision="$number_minor" age="0" ;; irix|nonstopux) current=`expr $number_major + $number_minor - 1` age="$number_minor" revision="$number_minor" ;; esac ;; no) current="$2" revision="$3" age="$4" ;; esac # Check that each of the things are valid numbers. case $current in [0-9]*) ;; *) $echo "$modename: CURRENT \`$current' is not a nonnegative integer" 1>&2 $echo "$modename: \`$vinfo' is not valid version information" 1>&2 exit $EXIT_FAILURE ;; esac case $revision in [0-9]*) ;; *) $echo "$modename: REVISION \`$revision' is not a nonnegative integer" 1>&2 $echo "$modename: \`$vinfo' is not valid version information" 1>&2 exit $EXIT_FAILURE ;; esac case $age in [0-9]*) ;; *) $echo "$modename: AGE \`$age' is not a nonnegative integer" 1>&2 $echo "$modename: \`$vinfo' is not valid version information" 1>&2 exit $EXIT_FAILURE ;; esac if test "$age" -gt "$current"; then $echo "$modename: AGE \`$age' is greater than the current interface number \`$current'" 1>&2 $echo "$modename: \`$vinfo' is not valid version information" 1>&2 exit $EXIT_FAILURE fi # Calculate the version variables. major= versuffix= verstring= case $version_type in none) ;; darwin) # Like Linux, but with the current version available in # verstring for coding it into the library header major=.`expr $current - $age` versuffix="$major.$age.$revision" # Darwin ld doesn't like 0 for these options... minor_current=`expr $current + 1` verstring="-compatibility_version $minor_current -current_version $minor_current.$revision" ;; freebsd-aout) major=".$current" versuffix=".$current.$revision"; ;; freebsd-elf) major=".$current" versuffix=".$current"; ;; irix | nonstopux) major=`expr $current - $age + 1` case $version_type in nonstopux) verstring_prefix=nonstopux ;; *) verstring_prefix=sgi ;; esac verstring="$verstring_prefix$major.$revision" # Add in all the interfaces that we are compatible with. loop=$revision while test "$loop" -ne 0; do iface=`expr $revision - $loop` loop=`expr $loop - 1` verstring="$verstring_prefix$major.$iface:$verstring" done # Before this point, $major must not contain `.'. 
major=.$major versuffix="$major.$revision" ;; linux) major=.`expr $current - $age` versuffix="$major.$age.$revision" ;; osf) major=.`expr $current - $age` versuffix=".$current.$age.$revision" verstring="$current.$age.$revision" # Add in all the interfaces that we are compatible with. loop=$age while test "$loop" -ne 0; do iface=`expr $current - $loop` loop=`expr $loop - 1` verstring="$verstring:${iface}.0" done # Make executables depend on our current version. verstring="$verstring:${current}.0" ;; sunos) major=".$current" versuffix=".$current.$revision" ;; windows) # Use '-' rather than '.', since we only want one # extension on DOS 8.3 filesystems. major=`expr $current - $age` versuffix="-$major" ;; *) $echo "$modename: unknown library version type \`$version_type'" 1>&2 $echo "Fatal configuration error. See the $PACKAGE docs for more information." 1>&2 exit $EXIT_FAILURE ;; esac # Clear the version info if we defaulted, and they specified a release. if test -z "$vinfo" && test -n "$release"; then major= case $version_type in darwin) # we can't check for "0.0" in archive_cmds due to quoting # problems, so we reset it completely verstring= ;; *) verstring="0.0" ;; esac if test "$need_version" = no; then versuffix= else versuffix=".0.0" fi fi # Remove version info from name if versioning should be avoided if test "$avoid_version" = yes && test "$need_version" = no; then major= versuffix= verstring="" fi # Check to see if the archive will have undefined symbols. if test "$allow_undefined" = yes; then if test "$allow_undefined_flag" = unsupported; then $echo "$modename: warning: undefined symbols not allowed in $host shared libraries" 1>&2 build_libtool_libs=no build_old_libs=yes fi else # Don't allow undefined symbols. allow_undefined_flag="$no_undefined_flag" fi fi if test "$mode" != relink; then # Remove our outputs, but don't remove object files since they # may have been created when compiling PIC objects. removelist= tempremovelist=`$echo "$output_objdir/*"` for p in $tempremovelist; do case $p in *.$objext) ;; $output_objdir/$outputname | $output_objdir/$libname.* | $output_objdir/${libname}${release}.*) if test "X$precious_files_regex" != "X"; then if echo $p | $EGREP -e "$precious_files_regex" >/dev/null 2>&1 then continue fi fi removelist="$removelist $p" ;; *) ;; esac done if test -n "$removelist"; then $show "${rm}r $removelist" $run ${rm}r $removelist fi fi # Now set the variables for building old libraries. if test "$build_old_libs" = yes && test "$build_libtool_libs" != convenience ; then oldlibs="$oldlibs $output_objdir/$libname.$libext" # Transform .lo files to .o files. oldobjs="$objs "`$echo "X$libobjs" | $SP2NL | $Xsed -e '/\.'${libext}'$/d' -e "$lo2o" | $NL2SP` fi # Eliminate all temporary directories. for path in $notinst_path; do lib_search_path=`$echo "$lib_search_path " | ${SED} -e 's% $path % %g'` deplibs=`$echo "$deplibs " | ${SED} -e 's% -L$path % %g'` dependency_libs=`$echo "$dependency_libs " | ${SED} -e 's% -L$path % %g'` done if test -n "$xrpath"; then # If the user specified any rpath flags, then add them. 
temp_xrpath= for libdir in $xrpath; do temp_xrpath="$temp_xrpath -R$libdir" case "$finalize_rpath " in *" $libdir "*) ;; *) finalize_rpath="$finalize_rpath $libdir" ;; esac done if test "$hardcode_into_libs" != yes || test "$build_old_libs" = yes; then dependency_libs="$temp_xrpath $dependency_libs" fi fi # Make sure dlfiles contains only unique files that won't be dlpreopened old_dlfiles="$dlfiles" dlfiles= for lib in $old_dlfiles; do case " $dlprefiles $dlfiles " in *" $lib "*) ;; *) dlfiles="$dlfiles $lib" ;; esac done # Make sure dlprefiles contains only unique files old_dlprefiles="$dlprefiles" dlprefiles= for lib in $old_dlprefiles; do case "$dlprefiles " in *" $lib "*) ;; *) dlprefiles="$dlprefiles $lib" ;; esac done if test "$build_libtool_libs" = yes; then if test -n "$rpath"; then case $host in *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-*-beos*) # these systems don't actually have a c library (as such)! ;; *-*-rhapsody* | *-*-darwin1.[012]) # Rhapsody C library is in the System framework deplibs="$deplibs -framework System" ;; *-*-netbsd*) # Don't link with libc until the a.out ld.so is fixed. ;; *-*-openbsd* | *-*-freebsd*) # Do not include libc due to us having libc/libc_r. test "X$arg" = "X-lc" && continue ;; *) # Add libc to deplibs on all other systems if necessary. if test "$build_libtool_need_lc" = "yes"; then deplibs="$deplibs -lc" fi ;; esac fi # Transform deplibs into only deplibs that can be linked in shared. name_save=$name libname_save=$libname release_save=$release versuffix_save=$versuffix major_save=$major # I'm not sure if I'm treating the release correctly. I think # release should show up in the -l (ie -lgmp5) so we don't want to # add it in twice. Is that correct? release="" versuffix="" major="" newdeplibs= droppeddeps=no case $deplibs_check_method in pass_all) # Don't check for shared/static. Everything works. # This might be a little naive. We might want to check # whether the library exists or not. But this is on # osf3 & osf4 and I'm not really sure... Just # implementing what was already the behavior. newdeplibs=$deplibs ;; test_compile) # This code stresses the "libraries are programs" paradigm to its # limits. Maybe even breaks it. We compile a program, linking it # against the deplibs as a proxy for the library. Then we can check # whether they linked in statically or dynamically with ldd. $rm conftest.c cat > conftest.c </dev/null` for potent_lib in $potential_libs; do # Follow soft links. if ls -lLd "$potent_lib" 2>/dev/null \ | grep " -> " >/dev/null; then continue fi # The statement above tries to avoid entering an # endless loop below, in case of cyclic links. # We might still enter an endless loop, since a link # loop can be closed while we follow links, # but so what? potlib="$potent_lib" while test -h "$potlib" 2>/dev/null; do potliblink=`ls -ld $potlib | ${SED} 's/.* -> //'` case $potliblink in [\\/]* | [A-Za-z]:[\\/]*) potlib="$potliblink";; *) potlib=`$echo "X$potlib" | $Xsed -e 's,[^/]*$,,'`"$potliblink";; esac done if eval $file_magic_cmd \"\$potlib\" 2>/dev/null \ | ${SED} 10q \ | $EGREP "$file_magic_regex" > /dev/null; then newdeplibs="$newdeplibs $a_deplib" a_deplib="" break 2 fi done done fi if test -n "$a_deplib" ; then droppeddeps=yes $echo $echo "*** Warning: linker path does not have real file for library $a_deplib." $echo "*** I have the capability to make that library automatically link in when" $echo "*** you link to this library. 
But I can only do this if you have a" $echo "*** shared version of the library, which you do not appear to have" $echo "*** because I did check the linker path looking for a file starting" if test -z "$potlib" ; then $echo "*** with $libname but no candidates were found. (...for file magic test)" else $echo "*** with $libname and none of the candidates passed a file format test" $echo "*** using a file magic. Last file checked: $potlib" fi fi else # Add a -L argument. newdeplibs="$newdeplibs $a_deplib" fi done # Gone through all deplibs. ;; match_pattern*) set dummy $deplibs_check_method match_pattern_regex=`expr "$deplibs_check_method" : "$2 \(.*\)"` for a_deplib in $deplibs; do name="`expr $a_deplib : '-l\(.*\)'`" # If $name is empty we are operating on a -L argument. if test -n "$name" && test "$name" != "0"; then if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then case " $predeps $postdeps " in *" $a_deplib "*) newdeplibs="$newdeplibs $a_deplib" a_deplib="" ;; esac fi if test -n "$a_deplib" ; then libname=`eval \\$echo \"$libname_spec\"` for i in $lib_search_path $sys_lib_search_path $shlib_search_path; do potential_libs=`ls $i/$libname[.-]* 2>/dev/null` for potent_lib in $potential_libs; do potlib="$potent_lib" # see symlink-check above in file_magic test if eval $echo \"$potent_lib\" 2>/dev/null \ | ${SED} 10q \ | $EGREP "$match_pattern_regex" > /dev/null; then newdeplibs="$newdeplibs $a_deplib" a_deplib="" break 2 fi done done fi if test -n "$a_deplib" ; then droppeddeps=yes $echo $echo "*** Warning: linker path does not have real file for library $a_deplib." $echo "*** I have the capability to make that library automatically link in when" $echo "*** you link to this library. But I can only do this if you have a" $echo "*** shared version of the library, which you do not appear to have" $echo "*** because I did check the linker path looking for a file starting" if test -z "$potlib" ; then $echo "*** with $libname but no candidates were found. (...for regex pattern test)" else $echo "*** with $libname and none of the candidates passed a file format test" $echo "*** using a regex pattern. Last file checked: $potlib" fi fi else # Add a -L argument. newdeplibs="$newdeplibs $a_deplib" fi done # Gone through all deplibs. ;; none | unknown | *) newdeplibs="" tmp_deplibs=`$echo "X $deplibs" | $Xsed -e 's/ -lc$//' \ -e 's/ -[LR][^ ]*//g'` if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then for i in $predeps $postdeps ; do # can't use Xsed below, because $i might contain '/' tmp_deplibs=`$echo "X $tmp_deplibs" | ${SED} -e "1s,^X,," -e "s,$i,,"` done fi if $echo "X $tmp_deplibs" | $Xsed -e 's/[ ]//g' \ | grep . >/dev/null; then $echo if test "X$deplibs_check_method" = "Xnone"; then $echo "*** Warning: inter-library dependencies are not supported in this platform." else $echo "*** Warning: inter-library dependencies are not known to be supported." fi $echo "*** All declared inter-library dependencies are being dropped." droppeddeps=yes fi ;; esac versuffix=$versuffix_save major=$major_save release=$release_save libname=$libname_save name=$name_save case $host in *-*-rhapsody* | *-*-darwin1.[012]) # On Rhapsody replace the C library is the System framework newdeplibs=`$echo "X $newdeplibs" | $Xsed -e 's/ -lc / -framework System /'` ;; esac if test "$droppeddeps" = yes; then if test "$module" = yes; then $echo $echo "*** Warning: libtool could not satisfy all declared inter-library" $echo "*** dependencies of module $libname. 
Therefore, libtool will create" $echo "*** a static module, that should work as long as the dlopening" $echo "*** application is linked with the -dlopen flag." if test -z "$global_symbol_pipe"; then $echo $echo "*** However, this would only work if libtool was able to extract symbol" $echo "*** lists from a program, using \`nm' or equivalent, but libtool could" $echo "*** not find such a program. So, this module is probably useless." $echo "*** \`nm' from GNU binutils and a full rebuild may help." fi if test "$build_old_libs" = no; then oldlibs="$output_objdir/$libname.$libext" build_libtool_libs=module build_old_libs=yes else build_libtool_libs=no fi else $echo "*** The inter-library dependencies that have been dropped here will be" $echo "*** automatically added whenever a program is linked with this library" $echo "*** or is declared to -dlopen it." if test "$allow_undefined" = no; then $echo $echo "*** Since this library must not contain undefined symbols," $echo "*** because either the platform does not support them or" $echo "*** it was explicitly requested with -no-undefined," $echo "*** libtool will only create a static version of it." if test "$build_old_libs" = no; then oldlibs="$output_objdir/$libname.$libext" build_libtool_libs=module build_old_libs=yes else build_libtool_libs=no fi fi fi fi # Done checking deplibs! deplibs=$newdeplibs fi # All the library-specific variables (install_libdir is set above). library_names= old_library= dlname= # Test again, we may have decided not to build it any more if test "$build_libtool_libs" = yes; then if test "$hardcode_into_libs" = yes; then # Hardcode the library paths hardcode_libdirs= dep_rpath= rpath="$finalize_rpath" test "$mode" != relink && rpath="$compile_rpath$rpath" for libdir in $rpath; do if test -n "$hardcode_libdir_flag_spec"; then if test -n "$hardcode_libdir_separator"; then if test -z "$hardcode_libdirs"; then hardcode_libdirs="$libdir" else # Just accumulate the unique libdirs. case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*) ;; *) hardcode_libdirs="$hardcode_libdirs$hardcode_libdir_separator$libdir" ;; esac fi else eval flag=\"$hardcode_libdir_flag_spec\" dep_rpath="$dep_rpath $flag" fi elif test -n "$runpath_var"; then case "$perm_rpath " in *" $libdir "*) ;; *) perm_rpath="$perm_rpath $libdir" ;; esac fi done # Substitute the hardcoded libdirs into the rpath. if test -n "$hardcode_libdir_separator" && test -n "$hardcode_libdirs"; then libdir="$hardcode_libdirs" if test -n "$hardcode_libdir_flag_spec_ld"; then eval dep_rpath=\"$hardcode_libdir_flag_spec_ld\" else eval dep_rpath=\"$hardcode_libdir_flag_spec\" fi fi if test -n "$runpath_var" && test -n "$perm_rpath"; then # We should set the runpath_var. rpath= for dir in $perm_rpath; do rpath="$rpath$dir:" done eval "$runpath_var='$rpath\$$runpath_var'; export $runpath_var" fi test -n "$dep_rpath" && deplibs="$dep_rpath $deplibs" fi shlibpath="$finalize_shlibpath" test "$mode" != relink && shlibpath="$compile_shlibpath$shlibpath" if test -n "$shlibpath"; then eval "$shlibpath_var='$shlibpath\$$shlibpath_var'; export $shlibpath_var" fi # Get the real and link names of the library. 
eval shared_ext=\"$shrext_cmds\" eval library_names=\"$library_names_spec\" set dummy $library_names realname="$2" shift; shift if test -n "$soname_spec"; then eval soname=\"$soname_spec\" else soname="$realname" fi if test -z "$dlname"; then dlname=$soname fi lib="$output_objdir/$realname" for link do linknames="$linknames $link" done # Use standard objects if they are pic test -z "$pic_flag" && libobjs=`$echo "X$libobjs" | $SP2NL | $Xsed -e "$lo2o" | $NL2SP` # Prepare the list of exported symbols if test -z "$export_symbols"; then if test "$always_export_symbols" = yes || test -n "$export_symbols_regex"; then $show "generating symbol list for \`$libname.la'" export_symbols="$output_objdir/$libname.exp" $run $rm $export_symbols cmds=$export_symbols_cmds save_ifs="$IFS"; IFS='~' for cmd in $cmds; do IFS="$save_ifs" eval cmd=\"$cmd\" if len=`expr "X$cmd" : ".*"` && test "$len" -le "$max_cmd_len" || test "$max_cmd_len" -le -1; then $show "$cmd" $run eval "$cmd" || exit $? skipped_export=false else # The command line is too long to execute in one step. $show "using reloadable object file for export list..." skipped_export=: fi done IFS="$save_ifs" if test -n "$export_symbols_regex"; then $show "$EGREP -e \"$export_symbols_regex\" \"$export_symbols\" > \"${export_symbols}T\"" $run eval '$EGREP -e "$export_symbols_regex" "$export_symbols" > "${export_symbols}T"' $show "$mv \"${export_symbols}T\" \"$export_symbols\"" $run eval '$mv "${export_symbols}T" "$export_symbols"' fi fi fi if test -n "$export_symbols" && test -n "$include_expsyms"; then $run eval '$echo "X$include_expsyms" | $SP2NL >> "$export_symbols"' fi tmp_deplibs= for test_deplib in $deplibs; do case " $convenience " in *" $test_deplib "*) ;; *) tmp_deplibs="$tmp_deplibs $test_deplib" ;; esac done deplibs="$tmp_deplibs" if test -n "$convenience"; then if test -n "$whole_archive_flag_spec"; then save_libobjs=$libobjs eval libobjs=\"\$libobjs $whole_archive_flag_spec\" else gentop="$output_objdir/${outputname}x" $show "${rm}r $gentop" $run ${rm}r "$gentop" $show "$mkdir $gentop" $run $mkdir "$gentop" status=$? if test "$status" -ne 0 && test ! -d "$gentop"; then exit $status fi generated="$generated $gentop" for xlib in $convenience; do # Extract the objects. case $xlib in [\\/]* | [A-Za-z]:[\\/]*) xabs="$xlib" ;; *) xabs=`pwd`"/$xlib" ;; esac xlib=`$echo "X$xlib" | $Xsed -e 's%^.*/%%'` xdir="$gentop/$xlib" $show "${rm}r $xdir" $run ${rm}r "$xdir" $show "$mkdir $xdir" $run $mkdir "$xdir" status=$? if test "$status" -ne 0 && test ! -d "$xdir"; then exit $status fi # We will extract separately just the conflicting names and we will no # longer touch any unique names. It is faster to leave these extract # automatically by $AR in one run. $show "(cd $xdir && $AR x $xabs)" $run eval "(cd \$xdir && $AR x \$xabs)" || exit $? if ($AR t "$xabs" | sort | sort -uc >/dev/null 2>&1); then : else $echo "$modename: warning: object name conflicts; renaming object files" 1>&2 $echo "$modename: warning: to ensure that they will not overwrite" 1>&2 $AR t "$xabs" | sort | uniq -cd | while read -r count name do i=1 while test "$i" -le "$count" do # Put our $i before any first dot (extension) # Never overwrite any file name_to="$name" while test "X$name_to" = "X$name" || test -f "$xdir/$name_to" do name_to=`$echo "X$name_to" | $Xsed -e "s/\([^.]*\)/\1-$i/"` done $show "(cd $xdir && $AR xN $i $xabs '$name' && $mv '$name' '$name_to')" $run eval "(cd \$xdir && $AR xN $i \$xabs '$name' && $mv '$name' '$name_to')" || exit $? 
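		# To spell out the renaming scheme used here: `$AR t | sort | uniq -cd'
		# lists only member names that occur more than once in the archive;
		# for each such name the loop pulls out the Nth same-named member
		# with `$AR xN N' and renames it by inserting "-N" before the first
		# dot (foo.o, foo.o become foo-1.o, foo-2.o), so no object is
		# silently overwritten while unpacking into $xdir.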
i=`expr $i + 1` done done fi libobjs="$libobjs "`find $xdir -name \*.$objext -print -o -name \*.lo -print | $NL2SP` done fi fi if test "$thread_safe" = yes && test -n "$thread_safe_flag_spec"; then eval flag=\"$thread_safe_flag_spec\" linker_flags="$linker_flags $flag" fi # Make a backup of the uninstalled library when relinking if test "$mode" = relink; then $run eval '(cd $output_objdir && $rm ${realname}U && $mv $realname ${realname}U)' || exit $? fi # Do each of the archive commands. if test "$module" = yes && test -n "$module_cmds" ; then if test -n "$export_symbols" && test -n "$module_expsym_cmds"; then eval test_cmds=\"$module_expsym_cmds\" cmds=$module_expsym_cmds else eval test_cmds=\"$module_cmds\" cmds=$module_cmds fi else if test -n "$export_symbols" && test -n "$archive_expsym_cmds"; then eval test_cmds=\"$archive_expsym_cmds\" cmds=$archive_expsym_cmds else eval test_cmds=\"$archive_cmds\" cmds=$archive_cmds fi fi if test "X$skipped_export" != "X:" && len=`expr "X$test_cmds" : ".*"` && test "$len" -le "$max_cmd_len" || test "$max_cmd_len" -le -1; then : else # The command line is too long to link in one step, link piecewise. $echo "creating reloadable object files..." # Save the value of $output and $libobjs because we want to # use them later. If we have whole_archive_flag_spec, we # want to use save_libobjs as it was before # whole_archive_flag_spec was expanded, because we can't # assume the linker understands whole_archive_flag_spec. # This may have to be revisited, in case too many # convenience libraries get linked in and end up exceeding # the spec. if test -z "$convenience" || test -z "$whole_archive_flag_spec"; then save_libobjs=$libobjs fi save_output=$output # Clear the reloadable object creation command queue and # initialize k to one. test_cmds= concat_cmds= objlist= delfiles= last_robj= k=1 output=$output_objdir/$save_output-${k}.$objext # Loop over the list of objects to be linked. for obj in $save_libobjs do eval test_cmds=\"$reload_cmds $objlist $last_robj\" if test "X$objlist" = X || { len=`expr "X$test_cmds" : ".*"` && test "$len" -le "$max_cmd_len"; }; then objlist="$objlist $obj" else # The command $test_cmds is almost too long, add a # command to the queue. if test "$k" -eq 1 ; then # The first file doesn't have a previous command to add. eval concat_cmds=\"$reload_cmds $objlist $last_robj\" else # All subsequent reloadable object files will link in # the last one created. eval concat_cmds=\"\$concat_cmds~$reload_cmds $objlist $last_robj\" fi last_robj=$output_objdir/$save_output-${k}.$objext k=`expr $k + 1` output=$output_objdir/$save_output-${k}.$objext objlist=$obj len=1 fi done # Handle the remaining objects by creating one last # reloadable object file. All subsequent reloadable object # files will link in the last one created. test -z "$concat_cmds" || concat_cmds=$concat_cmds~ eval concat_cmds=\"\${concat_cmds}$reload_cmds $objlist $last_robj\" if ${skipped_export-false}; then $show "generating symbol list for \`$libname.la'" export_symbols="$output_objdir/$libname.exp" $run $rm $export_symbols libobjs=$output # Append the command to create the export file. eval concat_cmds=\"\$concat_cmds~$export_symbols_cmds\" fi # Set up a command to remove the reloadale object files # after they are used. i=0 while test "$i" -lt "$k" do i=`expr $i + 1` delfiles="$delfiles $output_objdir/$save_output-${i}.$objext" done $echo "creating a temporary reloadable object file: $output" # Loop through the commands generated above and execute them. 
save_ifs="$IFS"; IFS='~' for cmd in $concat_cmds; do IFS="$save_ifs" $show "$cmd" $run eval "$cmd" || exit $? done IFS="$save_ifs" libobjs=$output # Restore the value of output. output=$save_output if test -n "$convenience" && test -n "$whole_archive_flag_spec"; then eval libobjs=\"\$libobjs $whole_archive_flag_spec\" fi # Expand the library linking commands again to reset the # value of $libobjs for piecewise linking. # Do each of the archive commands. if test "$module" = yes && test -n "$module_cmds" ; then if test -n "$export_symbols" && test -n "$module_expsym_cmds"; then cmds=$module_expsym_cmds else cmds=$module_cmds fi else if test -n "$export_symbols" && test -n "$archive_expsym_cmds"; then cmds=$archive_expsym_cmds else cmds=$archive_cmds fi fi # Append the command to remove the reloadable object files # to the just-reset $cmds. eval cmds=\"\$cmds~\$rm $delfiles\" fi save_ifs="$IFS"; IFS='~' for cmd in $cmds; do IFS="$save_ifs" eval cmd=\"$cmd\" $show "$cmd" $run eval "$cmd" || exit $? done IFS="$save_ifs" # Restore the uninstalled library and exit if test "$mode" = relink; then $run eval '(cd $output_objdir && $rm ${realname}T && $mv $realname ${realname}T && $mv "$realname"U $realname)' || exit $? exit $EXIT_SUCCESS fi # Create links to the real library. for linkname in $linknames; do if test "$realname" != "$linkname"; then $show "(cd $output_objdir && $rm $linkname && $LN_S $realname $linkname)" $run eval '(cd $output_objdir && $rm $linkname && $LN_S $realname $linkname)' || exit $? fi done # If -module or -export-dynamic was specified, set the dlname. if test "$module" = yes || test "$export_dynamic" = yes; then # On all known operating systems, these are identical. dlname="$soname" fi fi ;; obj) if test -n "$deplibs"; then $echo "$modename: warning: \`-l' and \`-L' are ignored for objects" 1>&2 fi if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then $echo "$modename: warning: \`-dlopen' is ignored for objects" 1>&2 fi if test -n "$rpath"; then $echo "$modename: warning: \`-rpath' is ignored for objects" 1>&2 fi if test -n "$xrpath"; then $echo "$modename: warning: \`-R' is ignored for objects" 1>&2 fi if test -n "$vinfo"; then $echo "$modename: warning: \`-version-info' is ignored for objects" 1>&2 fi if test -n "$release"; then $echo "$modename: warning: \`-release' is ignored for objects" 1>&2 fi case $output in *.lo) if test -n "$objs$old_deplibs"; then $echo "$modename: cannot build library object \`$output' from non-libtool objects" 1>&2 exit $EXIT_FAILURE fi libobj="$output" obj=`$echo "X$output" | $Xsed -e "$lo2o"` ;; *) libobj= obj="$output" ;; esac # Delete the old objects. $run $rm $obj $libobj # Objects from convenience libraries. This assumes # single-version convenience libraries. Whenever we create # different ones for PIC/non-PIC, this we'll have to duplicate # the extraction. reload_conv_objs= gentop= # reload_cmds runs $LD directly, so let us get rid of # -Wl from whole_archive_flag_spec wl= if test -n "$convenience"; then if test -n "$whole_archive_flag_spec"; then eval reload_conv_objs=\"\$reload_objs $whole_archive_flag_spec\" else gentop="$output_objdir/${obj}x" $show "${rm}r $gentop" $run ${rm}r "$gentop" $show "$mkdir $gentop" $run $mkdir "$gentop" status=$? if test "$status" -ne 0 && test ! -d "$gentop"; then exit $status fi generated="$generated $gentop" for xlib in $convenience; do # Extract the objects. 
case $xlib in [\\/]* | [A-Za-z]:[\\/]*) xabs="$xlib" ;; *) xabs=`pwd`"/$xlib" ;; esac xlib=`$echo "X$xlib" | $Xsed -e 's%^.*/%%'` xdir="$gentop/$xlib" $show "${rm}r $xdir" $run ${rm}r "$xdir" $show "$mkdir $xdir" $run $mkdir "$xdir" status=$? if test "$status" -ne 0 && test ! -d "$xdir"; then exit $status fi # We will extract separately just the conflicting names and we will no # longer touch any unique names. It is faster to leave these extract # automatically by $AR in one run. $show "(cd $xdir && $AR x $xabs)" $run eval "(cd \$xdir && $AR x \$xabs)" || exit $? if ($AR t "$xabs" | sort | sort -uc >/dev/null 2>&1); then : else $echo "$modename: warning: object name conflicts; renaming object files" 1>&2 $echo "$modename: warning: to ensure that they will not overwrite" 1>&2 $AR t "$xabs" | sort | uniq -cd | while read -r count name do i=1 while test "$i" -le "$count" do # Put our $i before any first dot (extension) # Never overwrite any file name_to="$name" while test "X$name_to" = "X$name" || test -f "$xdir/$name_to" do name_to=`$echo "X$name_to" | $Xsed -e "s/\([^.]*\)/\1-$i/"` done $show "(cd $xdir && $AR xN $i $xabs '$name' && $mv '$name' '$name_to')" $run eval "(cd \$xdir && $AR xN $i \$xabs '$name' && $mv '$name' '$name_to')" || exit $? i=`expr $i + 1` done done fi reload_conv_objs="$reload_objs "`find $xdir -name \*.$objext -print -o -name \*.lo -print | $NL2SP` done fi fi # Create the old-style object. reload_objs="$objs$old_deplibs "`$echo "X$libobjs" | $SP2NL | $Xsed -e '/\.'${libext}$'/d' -e '/\.lib$/d' -e "$lo2o" | $NL2SP`" $reload_conv_objs" ### testsuite: skip nested quoting test output="$obj" cmds=$reload_cmds save_ifs="$IFS"; IFS='~' for cmd in $cmds; do IFS="$save_ifs" eval cmd=\"$cmd\" $show "$cmd" $run eval "$cmd" || exit $? done IFS="$save_ifs" # Exit if we aren't doing a library object file. if test -z "$libobj"; then if test -n "$gentop"; then $show "${rm}r $gentop" $run ${rm}r $gentop fi exit $EXIT_SUCCESS fi if test "$build_libtool_libs" != yes; then if test -n "$gentop"; then $show "${rm}r $gentop" $run ${rm}r $gentop fi # Create an invalid libtool object if no PIC, so that we don't # accidentally link it into a program. # $show "echo timestamp > $libobj" # $run eval "echo timestamp > $libobj" || exit $? exit $EXIT_SUCCESS fi if test -n "$pic_flag" || test "$pic_mode" != default; then # Only do commands if we really have different PIC objects. reload_objs="$libobjs $reload_conv_objs" output="$libobj" cmds=$reload_cmds save_ifs="$IFS"; IFS='~' for cmd in $cmds; do IFS="$save_ifs" eval cmd=\"$cmd\" $show "$cmd" $run eval "$cmd" || exit $? done IFS="$save_ifs" fi if test -n "$gentop"; then $show "${rm}r $gentop" $run ${rm}r $gentop fi exit $EXIT_SUCCESS ;; prog) case $host in *cygwin*) output=`$echo $output | ${SED} -e 's,.exe$,,;s,$,.exe,'` ;; esac if test -n "$vinfo"; then $echo "$modename: warning: \`-version-info' is ignored for programs" 1>&2 fi if test -n "$release"; then $echo "$modename: warning: \`-release' is ignored for programs" 1>&2 fi if test "$preload" = yes; then if test "$dlopen_support" = unknown && test "$dlopen_self" = unknown && test "$dlopen_self_static" = unknown; then $echo "$modename: warning: \`AC_LIBTOOL_DLOPEN' not used. Assuming no dlopen support." 
fi fi case $host in *-*-rhapsody* | *-*-darwin1.[012]) # On Rhapsody replace the C library is the System framework compile_deplibs=`$echo "X $compile_deplibs" | $Xsed -e 's/ -lc / -framework System /'` finalize_deplibs=`$echo "X $finalize_deplibs" | $Xsed -e 's/ -lc / -framework System /'` ;; esac case $host in *darwin*) # Don't allow lazy linking, it breaks C++ global constructors if test "$tagname" = CXX ; then compile_command="$compile_command ${wl}-bind_at_load" finalize_command="$finalize_command ${wl}-bind_at_load" fi ;; esac compile_command="$compile_command $compile_deplibs" finalize_command="$finalize_command $finalize_deplibs" if test -n "$rpath$xrpath"; then # If the user specified any rpath flags, then add them. for libdir in $rpath $xrpath; do # This is the magic to use -rpath. case "$finalize_rpath " in *" $libdir "*) ;; *) finalize_rpath="$finalize_rpath $libdir" ;; esac done fi # Now hardcode the library paths rpath= hardcode_libdirs= for libdir in $compile_rpath $finalize_rpath; do if test -n "$hardcode_libdir_flag_spec"; then if test -n "$hardcode_libdir_separator"; then if test -z "$hardcode_libdirs"; then hardcode_libdirs="$libdir" else # Just accumulate the unique libdirs. case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*) ;; *) hardcode_libdirs="$hardcode_libdirs$hardcode_libdir_separator$libdir" ;; esac fi else eval flag=\"$hardcode_libdir_flag_spec\" rpath="$rpath $flag" fi elif test -n "$runpath_var"; then case "$perm_rpath " in *" $libdir "*) ;; *) perm_rpath="$perm_rpath $libdir" ;; esac fi case $host in *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2*) case :$dllsearchpath: in *":$libdir:"*) ;; *) dllsearchpath="$dllsearchpath:$libdir";; esac ;; esac done # Substitute the hardcoded libdirs into the rpath. if test -n "$hardcode_libdir_separator" && test -n "$hardcode_libdirs"; then libdir="$hardcode_libdirs" eval rpath=\" $hardcode_libdir_flag_spec\" fi compile_rpath="$rpath" rpath= hardcode_libdirs= for libdir in $finalize_rpath; do if test -n "$hardcode_libdir_flag_spec"; then if test -n "$hardcode_libdir_separator"; then if test -z "$hardcode_libdirs"; then hardcode_libdirs="$libdir" else # Just accumulate the unique libdirs. case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*) ;; *) hardcode_libdirs="$hardcode_libdirs$hardcode_libdir_separator$libdir" ;; esac fi else eval flag=\"$hardcode_libdir_flag_spec\" rpath="$rpath $flag" fi elif test -n "$runpath_var"; then case "$finalize_perm_rpath " in *" $libdir "*) ;; *) finalize_perm_rpath="$finalize_perm_rpath $libdir" ;; esac fi done # Substitute the hardcoded libdirs into the rpath. if test -n "$hardcode_libdir_separator" && test -n "$hardcode_libdirs"; then libdir="$hardcode_libdirs" eval rpath=\" $hardcode_libdir_flag_spec\" fi finalize_rpath="$rpath" if test -n "$libobjs" && test "$build_old_libs" = yes; then # Transform all the library objects into standard objects. 
compile_command=`$echo "X$compile_command" | $SP2NL | $Xsed -e "$lo2o" | $NL2SP` finalize_command=`$echo "X$finalize_command" | $SP2NL | $Xsed -e "$lo2o" | $NL2SP` fi dlsyms= if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then if test -n "$NM" && test -n "$global_symbol_pipe"; then dlsyms="${outputname}S.c" else $echo "$modename: not configured to extract global symbols from dlpreopened files" 1>&2 fi fi if test -n "$dlsyms"; then case $dlsyms in "") ;; *.c) # Discover the nlist of each of the dlfiles. nlist="$output_objdir/${outputname}.nm" $show "$rm $nlist ${nlist}S ${nlist}T" $run $rm "$nlist" "${nlist}S" "${nlist}T" # Parse the name list into a source file. $show "creating $output_objdir/$dlsyms" test -z "$run" && $echo > "$output_objdir/$dlsyms" "\ /* $dlsyms - symbol resolution table for \`$outputname' dlsym emulation. */ /* Generated by $PROGRAM - GNU $PACKAGE $VERSION$TIMESTAMP */ #ifdef __cplusplus extern \"C\" { #endif /* Prevent the only kind of declaration conflicts we can make. */ #define lt_preloaded_symbols some_other_symbol /* External symbol declarations for the compiler. */\ " if test "$dlself" = yes; then $show "generating symbol list for \`$output'" test -z "$run" && $echo ': @PROGRAM@ ' > "$nlist" # Add our own program objects to the symbol list. progfiles=`$echo "X$objs$old_deplibs" | $SP2NL | $Xsed -e "$lo2o" | $NL2SP` for arg in $progfiles; do $show "extracting global C symbols from \`$arg'" $run eval "$NM $arg | $global_symbol_pipe >> '$nlist'" done if test -n "$exclude_expsyms"; then $run eval '$EGREP -v " ($exclude_expsyms)$" "$nlist" > "$nlist"T' $run eval '$mv "$nlist"T "$nlist"' fi if test -n "$export_symbols_regex"; then $run eval '$EGREP -e "$export_symbols_regex" "$nlist" > "$nlist"T' $run eval '$mv "$nlist"T "$nlist"' fi # Prepare the list of exported symbols if test -z "$export_symbols"; then export_symbols="$output_objdir/$output.exp" $run $rm $export_symbols $run eval "${SED} -n -e '/^: @PROGRAM@$/d' -e 's/^.* \(.*\)$/\1/p' "'< "$nlist" > "$export_symbols"' else $run eval "${SED} -e 's/\([][.*^$]\)/\\\1/g' -e 's/^/ /' -e 's/$/$/'"' < "$export_symbols" > "$output_objdir/$output.exp"' $run eval 'grep -f "$output_objdir/$output.exp" < "$nlist" > "$nlist"T' $run eval 'mv "$nlist"T "$nlist"' fi fi for arg in $dlprefiles; do $show "extracting global C symbols from \`$arg'" name=`$echo "$arg" | ${SED} -e 's%^.*/%%'` $run eval '$echo ": $name " >> "$nlist"' $run eval "$NM $arg | $global_symbol_pipe >> '$nlist'" done if test -z "$run"; then # Make sure we have at least an empty file. test -f "$nlist" || : > "$nlist" if test -n "$exclude_expsyms"; then $EGREP -v " ($exclude_expsyms)$" "$nlist" > "$nlist"T $mv "$nlist"T "$nlist" fi # Try sorting and uniquifying the output. if grep -v "^: " < "$nlist" | if sort -k 3 /dev/null 2>&1; then sort -k 3 else sort +2 fi | uniq > "$nlist"S; then : else grep -v "^: " < "$nlist" > "$nlist"S fi if test -f "$nlist"S; then eval "$global_symbol_to_cdecl"' < "$nlist"S >> "$output_objdir/$dlsyms"' else $echo '/* NONE */' >> "$output_objdir/$dlsyms" fi $echo >> "$output_objdir/$dlsyms" "\ #undef lt_preloaded_symbols #if defined (__STDC__) && __STDC__ # define lt_ptr void * #else # define lt_ptr char * # define const #endif /* The mapping between symbol names and symbols. 
*/ const struct { const char *name; lt_ptr address; } lt_preloaded_symbols[] = {\ " eval "$global_symbol_to_c_name_address" < "$nlist" >> "$output_objdir/$dlsyms" $echo >> "$output_objdir/$dlsyms" "\ {0, (lt_ptr) 0} }; /* This works around a problem in FreeBSD linker */ #ifdef FREEBSD_WORKAROUND static const void *lt_preloaded_setup() { return lt_preloaded_symbols; } #endif #ifdef __cplusplus } #endif\ " fi pic_flag_for_symtable= case $host in # compiling the symbol table file with pic_flag works around # a FreeBSD bug that causes programs to crash when -lm is # linked before any other PIC object. But we must not use # pic_flag when linking with -static. The problem exists in # FreeBSD 2.2.6 and is fixed in FreeBSD 3.1. *-*-freebsd2*|*-*-freebsd3.0*|*-*-freebsdelf3.0*) case "$compile_command " in *" -static "*) ;; *) pic_flag_for_symtable=" $pic_flag -DFREEBSD_WORKAROUND";; esac;; *-*-hpux*) case "$compile_command " in *" -static "*) ;; *) pic_flag_for_symtable=" $pic_flag";; esac esac # Now compile the dynamic symbol file. $show "(cd $output_objdir && $LTCC -c$no_builtin_flag$pic_flag_for_symtable \"$dlsyms\")" $run eval '(cd $output_objdir && $LTCC -c$no_builtin_flag$pic_flag_for_symtable "$dlsyms")' || exit $? # Clean up the generated files. $show "$rm $output_objdir/$dlsyms $nlist ${nlist}S ${nlist}T" $run $rm "$output_objdir/$dlsyms" "$nlist" "${nlist}S" "${nlist}T" # Transform the symbol file into the correct name. compile_command=`$echo "X$compile_command" | $Xsed -e "s%@SYMFILE@%$output_objdir/${outputname}S.${objext}%"` finalize_command=`$echo "X$finalize_command" | $Xsed -e "s%@SYMFILE@%$output_objdir/${outputname}S.${objext}%"` ;; *) $echo "$modename: unknown suffix for \`$dlsyms'" 1>&2 exit $EXIT_FAILURE ;; esac else # We keep going just in case the user didn't refer to # lt_preloaded_symbols. The linker will fail if global_symbol_pipe # really was required. # Nullify the symbol file. compile_command=`$echo "X$compile_command" | $Xsed -e "s% @SYMFILE@%%"` finalize_command=`$echo "X$finalize_command" | $Xsed -e "s% @SYMFILE@%%"` fi if test "$need_relink" = no || test "$build_libtool_libs" != yes; then # Replace the output file specification. compile_command=`$echo "X$compile_command" | $Xsed -e 's%@OUTPUT@%'"$output"'%g'` link_command="$compile_command$compile_rpath" # We have no uninstalled library dependencies, so finalize right now. $show "$link_command" $run eval "$link_command" status=$? # Delete the generated files. if test -n "$dlsyms"; then $show "$rm $output_objdir/${outputname}S.${objext}" $run $rm "$output_objdir/${outputname}S.${objext}" fi exit $status fi if test -n "$shlibpath_var"; then # We should set the shlibpath_var rpath= for dir in $temp_rpath; do case $dir in [\\/]* | [A-Za-z]:[\\/]*) # Absolute path. rpath="$rpath$dir:" ;; *) # Relative path: add a thisdir entry. rpath="$rpath\$thisdir/$dir:" ;; esac done temp_rpath="$rpath" fi if test -n "$compile_shlibpath$finalize_shlibpath"; then compile_command="$shlibpath_var=\"$compile_shlibpath$finalize_shlibpath\$$shlibpath_var\" $compile_command" fi if test -n "$finalize_shlibpath"; then finalize_command="$shlibpath_var=\"$finalize_shlibpath\$$shlibpath_var\" $finalize_command" fi compile_var= finalize_var= if test -n "$runpath_var"; then if test -n "$perm_rpath"; then # We should set the runpath_var. rpath= for dir in $perm_rpath; do rpath="$rpath$dir:" done compile_var="$runpath_var=\"$rpath\$$runpath_var\" " fi if test -n "$finalize_perm_rpath"; then # We should set the runpath_var. 
rpath= for dir in $finalize_perm_rpath; do rpath="$rpath$dir:" done finalize_var="$runpath_var=\"$rpath\$$runpath_var\" " fi fi if test "$no_install" = yes; then # We don't need to create a wrapper script. link_command="$compile_var$compile_command$compile_rpath" # Replace the output file specification. link_command=`$echo "X$link_command" | $Xsed -e 's%@OUTPUT@%'"$output"'%g'` # Delete the old output file. $run $rm $output # Link the executable and exit $show "$link_command" $run eval "$link_command" || exit $? exit $EXIT_SUCCESS fi if test "$hardcode_action" = relink; then # Fast installation is not supported link_command="$compile_var$compile_command$compile_rpath" relink_command="$finalize_var$finalize_command$finalize_rpath" $echo "$modename: warning: this platform does not like uninstalled shared libraries" 1>&2 $echo "$modename: \`$output' will be relinked during installation" 1>&2 else if test "$fast_install" != no; then link_command="$finalize_var$compile_command$finalize_rpath" if test "$fast_install" = yes; then relink_command=`$echo "X$compile_var$compile_command$compile_rpath" | $Xsed -e 's%@OUTPUT@%\$progdir/\$file%g'` else # fast_install is set to needless relink_command= fi else link_command="$compile_var$compile_command$compile_rpath" relink_command="$finalize_var$finalize_command$finalize_rpath" fi fi # Replace the output file specification. link_command=`$echo "X$link_command" | $Xsed -e 's%@OUTPUT@%'"$output_objdir/$outputname"'%g'` # Delete the old output files. $run $rm $output $output_objdir/$outputname $output_objdir/lt-$outputname $show "$link_command" $run eval "$link_command" || exit $? # Now create the wrapper script. $show "creating $output" # Quote the relink command for shipping. if test -n "$relink_command"; then # Preserve any variables that may affect compiler behavior for var in $variables_saved_for_relink; do if eval test -z \"\${$var+set}\"; then relink_command="{ test -z \"\${$var+set}\" || unset $var || { $var=; export $var; }; }; $relink_command" elif eval var_value=\$$var; test -z "$var_value"; then relink_command="$var=; export $var; $relink_command" else var_value=`$echo "X$var_value" | $Xsed -e "$sed_quote_subst"` relink_command="$var=\"$var_value\"; export $var; $relink_command" fi done relink_command="(cd `pwd`; $relink_command)" relink_command=`$echo "X$relink_command" | $Xsed -e "$sed_quote_subst"` fi # Quote $echo for shipping. if test "X$echo" = "X$SHELL $progpath --fallback-echo"; then case $progpath in [\\/]* | [A-Za-z]:[\\/]*) qecho="$SHELL $progpath --fallback-echo";; *) qecho="$SHELL `pwd`/$progpath --fallback-echo";; esac qecho=`$echo "X$qecho" | $Xsed -e "$sed_quote_subst"` else qecho=`$echo "X$echo" | $Xsed -e "$sed_quote_subst"` fi # Only actually do things if our run command is non-null. if test -z "$run"; then # win32 will think the script is a binary if it has # a .exe suffix, so we strip it off here. 
      case $output in
      *.exe) output=`$echo $output|${SED} 's,.exe$,,'` ;;
      esac
      # test for cygwin because mv fails w/o .exe extensions
      case $host in
	*cygwin*)
	  exeext=.exe
	  outputname=`$echo $outputname|${SED} 's,.exe$,,'` ;;
	*) exeext= ;;
      esac
      case $host in
	*cygwin* | *mingw* )
	  cwrappersource=`$echo ${objdir}/lt-${output}.c`
	  cwrapper=`$echo ${output}.exe`
	  $rm $cwrappersource $cwrapper
	  trap "$rm $cwrappersource $cwrapper; exit $EXIT_FAILURE" 1 2 15

	  cat > $cwrappersource <<EOF
/* $cwrappersource - temporary wrapper executable for $objdir/$outputname
   Generated by $PROGRAM - GNU $PACKAGE $VERSION$TIMESTAMP */
EOF

	  cat >> $cwrappersource<<"EOF"
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <ctype.h>
#include <unistd.h>
#include <stdarg.h>
#include <assert.h>

#if defined(PATH_MAX)
# define LT_PATHMAX PATH_MAX
#elif defined(MAXPATHLEN)
# define LT_PATHMAX MAXPATHLEN
#else
# define LT_PATHMAX 1024
#endif

#ifndef DIR_SEPARATOR
#define DIR_SEPARATOR '/'
#endif

#if defined (_WIN32) || defined (__MSDOS__) || defined (__DJGPP__) || \
  defined (__OS2__)
#define HAVE_DOS_BASED_FILE_SYSTEM
#ifndef DIR_SEPARATOR_2
#define DIR_SEPARATOR_2 '\\'
#endif
#endif

#ifndef DIR_SEPARATOR_2
# define IS_DIR_SEPARATOR(ch) ((ch) == DIR_SEPARATOR)
#else /* DIR_SEPARATOR_2 */
# define IS_DIR_SEPARATOR(ch) \
	(((ch) == DIR_SEPARATOR) || ((ch) == DIR_SEPARATOR_2))
#endif /* DIR_SEPARATOR_2 */

#define XMALLOC(type, num)      ((type *) xmalloc ((num) * sizeof(type)))
#define XFREE(stale) do { \
  if (stale) { free ((void *) stale); stale = 0; } \
} while (0)

const char *program_name = NULL;

void * xmalloc (size_t num);
char * xstrdup (const char *string);
char * basename (const char *name);
char * fnqualify(const char *path);
char * strendzap(char *str, const char *pat);
void lt_fatal (const char *message, ...);

int main (int argc, char *argv[])
{
  char **newargz;
  int i;

  program_name = (char *) xstrdup ((char *) basename (argv[0]));
  newargz = XMALLOC(char *, argc+2);
EOF

	  cat >> $cwrappersource <<EOF
  newargz[0] = "$SHELL";
EOF

	  cat >> $cwrappersource <<"EOF"
  newargz[1] = fnqualify(argv[0]);
  /* we know the script has the same name, without the .exe */
  /* so make sure newargz[1] doesn't end in .exe */
  strendzap(newargz[1],".exe");
  for (i = 1; i < argc; i++)
    newargz[i+1] = xstrdup(argv[i]);
  newargz[argc+1] = NULL;
EOF

	  cat >> $cwrappersource <<EOF
  /* hand control to the wrapper shell script of the same name */
  execv("$SHELL",newargz);
EOF

	  cat >> $cwrappersource <<"EOF"
}

void * xmalloc (size_t num)
{
  void * p = (void *) malloc (num);
  if (!p)
    lt_fatal ("Memory exhausted");

  return p;
}

char * xstrdup (const char *string)
{
  return string ? strcpy ((char *) xmalloc (strlen (string) + 1), string) : NULL;
}

char * basename (const char *name)
{
  const char *base;

#if defined (HAVE_DOS_BASED_FILE_SYSTEM)
  /* Skip over the disk name in MSDOS pathnames. */
  if (isalpha (name[0]) && name[1] == ':')
    name += 2;
#endif

  for (base = name; *name; name++)
    if (IS_DIR_SEPARATOR (*name))
      base = name + 1;

  return (char *) base;
}

char * fnqualify(const char *path)
{
  size_t size;
  char *p;
  char tmp[LT_PATHMAX + 1];

  assert(path != NULL);

  /* Is it qualified already?
*/ #if defined (HAVE_DOS_BASED_FILE_SYSTEM) if (isalpha (path[0]) && path[1] == ':') return xstrdup (path); #endif if (IS_DIR_SEPARATOR (path[0])) return xstrdup (path); /* prepend the current directory */ /* doesn't handle '~' */ if (getcwd (tmp, LT_PATHMAX) == NULL) lt_fatal ("getcwd failed"); size = strlen(tmp) + 1 + strlen(path) + 1; /* +2 for '/' and '\0' */ p = XMALLOC(char, size); sprintf(p, "%s%c%s", tmp, DIR_SEPARATOR, path); return p; } char * strendzap(char *str, const char *pat) { size_t len, patlen; assert(str != NULL); assert(pat != NULL); len = strlen(str); patlen = strlen(pat); if (patlen <= len) { str += len - patlen; if (strcmp(str, pat) == 0) *str = '\0'; } return str; } static void lt_error_core (int exit_status, const char * mode, const char * message, va_list ap) { fprintf (stderr, "%s: %s: ", program_name, mode); vfprintf (stderr, message, ap); fprintf (stderr, ".\n"); if (exit_status >= 0) exit (exit_status); } void lt_fatal (const char *message, ...) { va_list ap; va_start (ap, message); lt_error_core (EXIT_FAILURE, "FATAL", message, ap); va_end (ap); } EOF # we should really use a build-platform specific compiler # here, but OTOH, the wrappers (shell script and this C one) # are only useful if you want to execute the "real" binary. # Since the "real" binary is built for $host, then this # wrapper might as well be built for $host, too. $run $LTCC -s -o $cwrapper $cwrappersource ;; esac $rm $output trap "$rm $output; exit $EXIT_FAILURE" 1 2 15 $echo > $output "\ #! $SHELL # $output - temporary wrapper script for $objdir/$outputname # Generated by $PROGRAM - GNU $PACKAGE $VERSION$TIMESTAMP # # The $output program cannot be directly executed until all the libtool # libraries that it depends on are installed. # # This wrapper script should never be moved out of the build directory. # If it is, it will not operate correctly. # Sed substitution that helps us do robust quoting. It backslashifies # metacharacters that are still active within double-quoted strings. Xsed='${SED} -e 1s/^X//' sed_quote_subst='$sed_quote_subst' # The HP-UX ksh and POSIX shell print the target directory to stdout # if CDPATH is set. if test \"\${CDPATH+set}\" = set; then CDPATH=:; export CDPATH; fi relink_command=\"$relink_command\" # This environment variable determines our operation mode. if test \"\$libtool_install_magic\" = \"$magic\"; then # install mode needs the following variable: notinst_deplibs='$notinst_deplibs' else # When we are sourced in execute mode, \$file and \$echo are already set. if test \"\$libtool_execute_magic\" != \"$magic\"; then echo=\"$qecho\" file=\"\$0\" # Make sure echo works. if test \"X\$1\" = X--no-reexec; then # Discard the --no-reexec flag, and continue. shift elif test \"X\`(\$echo '\t') 2>/dev/null\`\" = 'X\t'; then # Yippee, \$echo works! : else # Restart under the correct shell, and then maybe \$echo will work. exec $SHELL \"\$0\" --no-reexec \${1+\"\$@\"} fi fi\ " $echo >> $output "\ # Find the directory that this script lives in. thisdir=\`\$echo \"X\$file\" | \$Xsed -e 's%/[^/]*$%%'\` test \"x\$thisdir\" = \"x\$file\" && thisdir=. # Follow symbolic links until we get to the real thisdir. file=\`ls -ld \"\$file\" | ${SED} -n 's/.*-> //p'\` while test -n \"\$file\"; do destdir=\`\$echo \"X\$file\" | \$Xsed -e 's%/[^/]*\$%%'\` # If there was a directory component, then change thisdir. 
if test \"x\$destdir\" != \"x\$file\"; then case \"\$destdir\" in [\\\\/]* | [A-Za-z]:[\\\\/]*) thisdir=\"\$destdir\" ;; *) thisdir=\"\$thisdir/\$destdir\" ;; esac fi file=\`\$echo \"X\$file\" | \$Xsed -e 's%^.*/%%'\` file=\`ls -ld \"\$thisdir/\$file\" | ${SED} -n 's/.*-> //p'\` done # Try to get the absolute directory name. absdir=\`cd \"\$thisdir\" && pwd\` test -n \"\$absdir\" && thisdir=\"\$absdir\" " if test "$fast_install" = yes; then $echo >> $output "\ program=lt-'$outputname'$exeext progdir=\"\$thisdir/$objdir\" if test ! -f \"\$progdir/\$program\" || \\ { file=\`ls -1dt \"\$progdir/\$program\" \"\$progdir/../\$program\" 2>/dev/null | ${SED} 1q\`; \\ test \"X\$file\" != \"X\$progdir/\$program\"; }; then file=\"\$\$-\$program\" if test ! -d \"\$progdir\"; then $mkdir \"\$progdir\" else $rm \"\$progdir/\$file\" fi" $echo >> $output "\ # relink executable if necessary if test -n \"\$relink_command\"; then if relink_command_output=\`eval \$relink_command 2>&1\`; then : else $echo \"\$relink_command_output\" >&2 $rm \"\$progdir/\$file\" exit $EXIT_FAILURE fi fi $mv \"\$progdir/\$file\" \"\$progdir/\$program\" 2>/dev/null || { $rm \"\$progdir/\$program\"; $mv \"\$progdir/\$file\" \"\$progdir/\$program\"; } $rm \"\$progdir/\$file\" fi" else $echo >> $output "\ program='$outputname' progdir=\"\$thisdir/$objdir\" " fi $echo >> $output "\ if test -f \"\$progdir/\$program\"; then" # Export our shlibpath_var if we have one. if test "$shlibpath_overrides_runpath" = yes && test -n "$shlibpath_var" && test -n "$temp_rpath"; then $echo >> $output "\ # Add our own library path to $shlibpath_var $shlibpath_var=\"$temp_rpath\$$shlibpath_var\" # Some systems cannot cope with colon-terminated $shlibpath_var # The second colon is a workaround for a bug in BeOS R4 sed $shlibpath_var=\`\$echo \"X\$$shlibpath_var\" | \$Xsed -e 's/::*\$//'\` export $shlibpath_var " fi # fixup the dll searchpath if we need to. if test -n "$dllsearchpath"; then $echo >> $output "\ # Add the dll search path components to the executable PATH PATH=$dllsearchpath:\$PATH " fi $echo >> $output "\ if test \"\$libtool_execute_magic\" != \"$magic\"; then # Run the actual program with our arguments. " case $host in # Backslashes separate directories on plain windows *-*-mingw | *-*-os2*) $echo >> $output "\ exec \$progdir\\\\\$program \${1+\"\$@\"} " ;; *) $echo >> $output "\ exec \$progdir/\$program \${1+\"\$@\"} " ;; esac $echo >> $output "\ \$echo \"\$0: cannot exec \$program \${1+\"\$@\"}\" exit $EXIT_FAILURE fi else # The program doesn't exist. \$echo \"\$0: error: \$progdir/\$program does not exist\" 1>&2 \$echo \"This script is just a wrapper for \$program.\" 1>&2 $echo \"See the $PACKAGE documentation for more information.\" 1>&2 exit $EXIT_FAILURE fi fi\ " chmod +x $output fi exit $EXIT_SUCCESS ;; esac # See if we need to build an old-fashioned archive. for oldlib in $oldlibs; do if test "$build_libtool_libs" = convenience; then oldobjs="$libobjs_save" addlibs="$convenience" build_libtool_libs=no else if test "$build_libtool_libs" = module; then oldobjs="$libobjs_save" build_libtool_libs=no else oldobjs="$old_deplibs $non_pic_objects" fi addlibs="$old_convenience" fi if test -n "$addlibs"; then gentop="$output_objdir/${outputname}x" $show "${rm}r $gentop" $run ${rm}r "$gentop" $show "$mkdir $gentop" $run $mkdir "$gentop" status=$? if test "$status" -ne 0 && test ! -d "$gentop"; then exit $status fi generated="$generated $gentop" # Add in members from convenience archives. for xlib in $addlibs; do # Extract the objects. 
case $xlib in [\\/]* | [A-Za-z]:[\\/]*) xabs="$xlib" ;; *) xabs=`pwd`"/$xlib" ;; esac xlib=`$echo "X$xlib" | $Xsed -e 's%^.*/%%'` xdir="$gentop/$xlib" $show "${rm}r $xdir" $run ${rm}r "$xdir" $show "$mkdir $xdir" $run $mkdir "$xdir" status=$? if test "$status" -ne 0 && test ! -d "$xdir"; then exit $status fi # We will extract separately just the conflicting names and we will no # longer touch any unique names. It is faster to leave these extract # automatically by $AR in one run. $show "(cd $xdir && $AR x $xabs)" $run eval "(cd \$xdir && $AR x \$xabs)" || exit $? if ($AR t "$xabs" | sort | sort -uc >/dev/null 2>&1); then : else $echo "$modename: warning: object name conflicts; renaming object files" 1>&2 $echo "$modename: warning: to ensure that they will not overwrite" 1>&2 $AR t "$xabs" | sort | uniq -cd | while read -r count name do i=1 while test "$i" -le "$count" do # Put our $i before any first dot (extension) # Never overwrite any file name_to="$name" while test "X$name_to" = "X$name" || test -f "$xdir/$name_to" do name_to=`$echo "X$name_to" | $Xsed -e "s/\([^.]*\)/\1-$i/"` done $show "(cd $xdir && $AR xN $i $xabs '$name' && $mv '$name' '$name_to')" $run eval "(cd \$xdir && $AR xN $i \$xabs '$name' && $mv '$name' '$name_to')" || exit $? i=`expr $i + 1` done done fi oldobjs="$oldobjs "`find $xdir -name \*.${objext} -print -o -name \*.lo -print | $NL2SP` done fi # Do each command in the archive commands. if test -n "$old_archive_from_new_cmds" && test "$build_libtool_libs" = yes; then cmds=$old_archive_from_new_cmds else eval cmds=\"$old_archive_cmds\" if len=`expr "X$cmds" : ".*"` && test "$len" -le "$max_cmd_len" || test "$max_cmd_len" -le -1; then cmds=$old_archive_cmds else # the command line is too long to link in one step, link in parts $echo "using piecewise archive linking..." save_RANLIB=$RANLIB RANLIB=: objlist= concat_cmds= save_oldobjs=$oldobjs # GNU ar 2.10+ was changed to match POSIX; thus no paths are # encoded into archives. This makes 'ar r' malfunction in # this piecewise linking case whenever conflicting object # names appear in distinct ar calls; check, warn and compensate. if (for obj in $save_oldobjs do $echo "X$obj" | $Xsed -e 's%^.*/%%' done | sort | sort -uc >/dev/null 2>&1); then : else $echo "$modename: warning: object name conflicts; overriding AR_FLAGS to 'cq'" 1>&2 $echo "$modename: warning: to ensure that POSIX-compatible ar will work" 1>&2 AR_FLAGS=cq fi # Is there a better way of finding the last object in the list? for obj in $save_oldobjs do last_oldobj=$obj done for obj in $save_oldobjs do oldobjs="$objlist $obj" objlist="$objlist $obj" eval test_cmds=\"$old_archive_cmds\" if len=`expr "X$test_cmds" : ".*"` && test "$len" -le "$max_cmd_len"; then : else # the above command should be used before it gets too long oldobjs=$objlist if test "$obj" = "$last_oldobj" ; then RANLIB=$save_RANLIB fi test -z "$concat_cmds" || concat_cmds=$concat_cmds~ eval concat_cmds=\"\${concat_cmds}$old_archive_cmds\" objlist= fi done RANLIB=$save_RANLIB oldobjs=$objlist if test "X$oldobjs" = "X" ; then eval cmds=\"\$concat_cmds\" else eval cmds=\"\$concat_cmds~\$old_archive_cmds\" fi fi fi save_ifs="$IFS"; IFS='~' for cmd in $cmds; do eval cmd=\"$cmd\" IFS="$save_ifs" $show "$cmd" $run eval "$cmd" || exit $? done IFS="$save_ifs" done if test -n "$generated"; then $show "${rm}r$generated" $run ${rm}r$generated fi # Now create the libtool archive. 
case $output in *.la) old_library= test "$build_old_libs" = yes && old_library="$libname.$libext" $show "creating $output" # Preserve any variables that may affect compiler behavior for var in $variables_saved_for_relink; do if eval test -z \"\${$var+set}\"; then relink_command="{ test -z \"\${$var+set}\" || unset $var || { $var=; export $var; }; }; $relink_command" elif eval var_value=\$$var; test -z "$var_value"; then relink_command="$var=; export $var; $relink_command" else var_value=`$echo "X$var_value" | $Xsed -e "$sed_quote_subst"` relink_command="$var=\"$var_value\"; export $var; $relink_command" fi done # Quote the link command for shipping. relink_command="(cd `pwd`; $SHELL $progpath $preserve_args --mode=relink $libtool_args @inst_prefix_dir@)" relink_command=`$echo "X$relink_command" | $Xsed -e "$sed_quote_subst"` if test "$hardcode_automatic" = yes ; then relink_command= fi # Only create the output if not a dry run. if test -z "$run"; then for installed in no yes; do if test "$installed" = yes; then if test -z "$install_libdir"; then break fi output="$output_objdir/$outputname"i # Replace all uninstalled libtool libraries with the installed ones newdependency_libs= for deplib in $dependency_libs; do case $deplib in *.la) name=`$echo "X$deplib" | $Xsed -e 's%^.*/%%'` eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $deplib` if test -z "$libdir"; then $echo "$modename: \`$deplib' is not a valid libtool archive" 1>&2 exit $EXIT_FAILURE fi newdependency_libs="$newdependency_libs $libdir/$name" ;; *) newdependency_libs="$newdependency_libs $deplib" ;; esac done dependency_libs="$newdependency_libs" newdlfiles= for lib in $dlfiles; do name=`$echo "X$lib" | $Xsed -e 's%^.*/%%'` eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $lib` if test -z "$libdir"; then $echo "$modename: \`$lib' is not a valid libtool archive" 1>&2 exit $EXIT_FAILURE fi newdlfiles="$newdlfiles $libdir/$name" done dlfiles="$newdlfiles" newdlprefiles= for lib in $dlprefiles; do name=`$echo "X$lib" | $Xsed -e 's%^.*/%%'` eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $lib` if test -z "$libdir"; then $echo "$modename: \`$lib' is not a valid libtool archive" 1>&2 exit $EXIT_FAILURE fi newdlprefiles="$newdlprefiles $libdir/$name" done dlprefiles="$newdlprefiles" else newdlfiles= for lib in $dlfiles; do case $lib in [\\/]* | [A-Za-z]:[\\/]*) abs="$lib" ;; *) abs=`pwd`"/$lib" ;; esac newdlfiles="$newdlfiles $abs" done dlfiles="$newdlfiles" newdlprefiles= for lib in $dlprefiles; do case $lib in [\\/]* | [A-Za-z]:[\\/]*) abs="$lib" ;; *) abs=`pwd`"/$lib" ;; esac newdlprefiles="$newdlprefiles $abs" done dlprefiles="$newdlprefiles" fi $rm $output # place dlname in correct position for cygwin tdlname=$dlname case $host,$output,$installed,$module,$dlname in *cygwin*,*lai,yes,no,*.dll | *mingw*,*lai,yes,no,*.dll) tdlname=../bin/$dlname ;; esac $echo > $output "\ # $outputname - a libtool library file # Generated by $PROGRAM - GNU $PACKAGE $VERSION$TIMESTAMP # # Please DO NOT delete this file! # It is necessary for linking the library. # The name that we can dlopen(3). dlname='$tdlname' # Names of this library. library_names='$library_names' # The name of the static archive. old_library='$old_library' # Libraries that this one depends upon. dependency_libs='$dependency_libs' # Version information for $libname. current=$current age=$age revision=$revision # Is this an already installed library? installed=$installed # Should we warn about portability when linking against -modules? 
shouldnotlink=$module # Files to dlopen/dlpreopen dlopen='$dlfiles' dlpreopen='$dlprefiles' # Directory that this library needs to be installed in: libdir='$install_libdir'" if test "$installed" = no && test "$need_relink" = yes; then $echo >> $output "\ relink_command=\"$relink_command\"" fi done fi # Do a symbolic link so that the libtool archive can be found in # LD_LIBRARY_PATH before the program is installed. $show "(cd $output_objdir && $rm $outputname && $LN_S ../$outputname $outputname)" $run eval '(cd $output_objdir && $rm $outputname && $LN_S ../$outputname $outputname)' || exit $? ;; esac exit $EXIT_SUCCESS ;; # libtool install mode install) modename="$modename: install" # There may be an optional sh(1) argument at the beginning of # install_prog (especially on Windows NT). if test "$nonopt" = "$SHELL" || test "$nonopt" = /bin/sh || # Allow the use of GNU shtool's install command. $echo "X$nonopt" | $Xsed | grep shtool > /dev/null; then # Aesthetically quote it. arg=`$echo "X$nonopt" | $Xsed -e "$sed_quote_subst"` case $arg in *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*) arg="\"$arg\"" ;; esac install_prog="$arg " arg="$1" shift else install_prog= arg="$nonopt" fi # The real first argument should be the name of the installation program. # Aesthetically quote it. arg=`$echo "X$arg" | $Xsed -e "$sed_quote_subst"` case $arg in *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*) arg="\"$arg\"" ;; esac install_prog="$install_prog$arg" # We need to accept at least all the BSD install flags. dest= files= opts= prev= install_type= isdir=no stripme= for arg do if test -n "$dest"; then files="$files $dest" dest="$arg" continue fi case $arg in -d) isdir=yes ;; -f) prev="-f" ;; -g) prev="-g" ;; -m) prev="-m" ;; -o) prev="-o" ;; -s) stripme=" -s" continue ;; -*) ;; *) # If the previous option needed an argument, then skip it. if test -n "$prev"; then prev= else dest="$arg" continue fi ;; esac # Aesthetically quote the argument. arg=`$echo "X$arg" | $Xsed -e "$sed_quote_subst"` case $arg in *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*) arg="\"$arg\"" ;; esac install_prog="$install_prog $arg" done if test -z "$install_prog"; then $echo "$modename: you must specify an install program" 1>&2 $echo "$help" 1>&2 exit $EXIT_FAILURE fi if test -n "$prev"; then $echo "$modename: the \`$prev' option requires an argument" 1>&2 $echo "$help" 1>&2 exit $EXIT_FAILURE fi if test -z "$files"; then if test -z "$dest"; then $echo "$modename: no file or destination specified" 1>&2 else $echo "$modename: you must specify a destination" 1>&2 fi $echo "$help" 1>&2 exit $EXIT_FAILURE fi # Strip any trailing slash from the destination. dest=`$echo "X$dest" | $Xsed -e 's%/$%%'` # Check to see that the destination is a directory. test -d "$dest" && isdir=yes if test "$isdir" = yes; then destdir="$dest" destname= else destdir=`$echo "X$dest" | $Xsed -e 's%/[^/]*$%%'` test "X$destdir" = "X$dest" && destdir=. destname=`$echo "X$dest" | $Xsed -e 's%^.*/%%'` # Not a directory, so check to see that there is only one file specified. set dummy $files if test "$#" -gt 2; then $echo "$modename: \`$dest' is not a directory" 1>&2 $echo "$help" 1>&2 exit $EXIT_FAILURE fi fi case $destdir in [\\/]* | [A-Za-z]:[\\/]*) ;; *) for file in $files; do case $file in *.lo) ;; *) $echo "$modename: \`$destdir' must be an absolute directory name" 1>&2 $echo "$help" 1>&2 exit $EXIT_FAILURE ;; esac done ;; esac # This variable tells wrapper scripts just to set variables rather # than running their programs. 
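    # For example, a typical invocation handled by the option parsing above is
    #   libtool --mode=install /usr/bin/install -c libfoo.la /usr/local/lib/libfoo.la
    # (libfoo and the paths are illustrative): `/usr/bin/install -c' ends up in
    # $install_prog, `libfoo.la' in $files, and `/usr/local/lib/libfoo.la' in
    # $dest, which the checks above then classify as a file or directory target.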
libtool_install_magic="$magic" staticlibs= future_libdirs= current_libdirs= for file in $files; do # Do each installation. case $file in *.$libext) # Do the static libraries later. staticlibs="$staticlibs $file" ;; *.la) # Check to see that this really is a libtool archive. if (${SED} -e '2q' $file | grep "^# Generated by .*$PACKAGE") >/dev/null 2>&1; then : else $echo "$modename: \`$file' is not a valid libtool archive" 1>&2 $echo "$help" 1>&2 exit $EXIT_FAILURE fi library_names= old_library= relink_command= # If there is no directory component, then add one. case $file in */* | *\\*) . $file ;; *) . ./$file ;; esac # Add the libdir to current_libdirs if it is the destination. if test "X$destdir" = "X$libdir"; then case "$current_libdirs " in *" $libdir "*) ;; *) current_libdirs="$current_libdirs $libdir" ;; esac else # Note the libdir as a future libdir. case "$future_libdirs " in *" $libdir "*) ;; *) future_libdirs="$future_libdirs $libdir" ;; esac fi dir=`$echo "X$file" | $Xsed -e 's%/[^/]*$%%'`/ test "X$dir" = "X$file/" && dir= dir="$dir$objdir" if test -n "$relink_command"; then # Determine the prefix the user has applied to our future dir. inst_prefix_dir=`$echo "$destdir" | $SED "s%$libdir\$%%"` # Don't allow the user to place us outside of our expected # location b/c this prevents finding dependent libraries that # are installed to the same prefix. # At present, this check doesn't affect windows .dll's that # are installed into $libdir/../bin (currently, that works fine) # but it's something to keep an eye on. if test "$inst_prefix_dir" = "$destdir"; then $echo "$modename: error: cannot install \`$file' to a directory not ending in $libdir" 1>&2 exit $EXIT_FAILURE fi if test -n "$inst_prefix_dir"; then # Stick the inst_prefix_dir data into the link command. relink_command=`$echo "$relink_command" | $SED "s%@inst_prefix_dir@%-inst-prefix-dir $inst_prefix_dir%"` else relink_command=`$echo "$relink_command" | $SED "s%@inst_prefix_dir@%%"` fi $echo "$modename: warning: relinking \`$file'" 1>&2 $show "$relink_command" if $run eval "$relink_command"; then : else $echo "$modename: error: relink \`$file' with the above command before installing it" 1>&2 exit $EXIT_FAILURE fi fi # See the names of the shared library. set dummy $library_names if test -n "$2"; then realname="$2" shift shift srcname="$realname" test -n "$relink_command" && srcname="$realname"T # Install the shared library and build the symlinks. $show "$install_prog $dir/$srcname $destdir/$realname" $run eval "$install_prog $dir/$srcname $destdir/$realname" || exit $? if test -n "$stripme" && test -n "$striplib"; then $show "$striplib $destdir/$realname" $run eval "$striplib $destdir/$realname" || exit $? fi if test "$#" -gt 0; then # Delete the old symlinks, and create new ones. for linkname do if test "$linkname" != "$realname"; then $show "(cd $destdir && $rm $linkname && $LN_S $realname $linkname)" $run eval "(cd $destdir && $rm $linkname && $LN_S $realname $linkname)" fi done fi # Do each command in the postinstall commands. lib="$destdir/$realname" cmds=$postinstall_cmds save_ifs="$IFS"; IFS='~' for cmd in $cmds; do IFS="$save_ifs" eval cmd=\"$cmd\" $show "$cmd" $run eval "$cmd" || exit $? done IFS="$save_ifs" fi # Install the pseudo-library for information purposes. name=`$echo "X$file" | $Xsed -e 's%^.*/%%'` instname="$dir/$name"i $show "$install_prog $instname $destdir/$name" $run eval "$install_prog $instname $destdir/$name" || exit $? # Maybe install the static library, too. 
test -n "$old_library" && staticlibs="$staticlibs $dir/$old_library" ;; *.lo) # Install (i.e. copy) a libtool object. # Figure out destination file name, if it wasn't already specified. if test -n "$destname"; then destfile="$destdir/$destname" else destfile=`$echo "X$file" | $Xsed -e 's%^.*/%%'` destfile="$destdir/$destfile" fi # Deduce the name of the destination old-style object file. case $destfile in *.lo) staticdest=`$echo "X$destfile" | $Xsed -e "$lo2o"` ;; *.$objext) staticdest="$destfile" destfile= ;; *) $echo "$modename: cannot copy a libtool object to \`$destfile'" 1>&2 $echo "$help" 1>&2 exit $EXIT_FAILURE ;; esac # Install the libtool object if requested. if test -n "$destfile"; then $show "$install_prog $file $destfile" $run eval "$install_prog $file $destfile" || exit $? fi # Install the old object if enabled. if test "$build_old_libs" = yes; then # Deduce the name of the old-style object file. staticobj=`$echo "X$file" | $Xsed -e "$lo2o"` $show "$install_prog $staticobj $staticdest" $run eval "$install_prog \$staticobj \$staticdest" || exit $? fi exit $EXIT_SUCCESS ;; *) # Figure out destination file name, if it wasn't already specified. if test -n "$destname"; then destfile="$destdir/$destname" else destfile=`$echo "X$file" | $Xsed -e 's%^.*/%%'` destfile="$destdir/$destfile" fi # If the file is missing, and there is a .exe on the end, strip it # because it is most likely a libtool script we actually want to # install stripped_ext="" case $file in *.exe) if test ! -f "$file"; then file=`$echo $file|${SED} 's,.exe$,,'` stripped_ext=".exe" fi ;; esac # Do a test to see if this is really a libtool program. case $host in *cygwin*|*mingw*) wrapper=`$echo $file | ${SED} -e 's,.exe$,,'` ;; *) wrapper=$file ;; esac if (${SED} -e '4q' $wrapper | grep "^# Generated by .*$PACKAGE")>/dev/null 2>&1; then notinst_deplibs= relink_command= # To insure that "foo" is sourced, and not "foo.exe", # finese the cygwin/MSYS system by explicitly sourcing "foo." # which disallows the automatic-append-.exe behavior. case $build in *cygwin* | *mingw*) wrapperdot=${wrapper}. ;; *) wrapperdot=${wrapper} ;; esac # If there is no directory component, then add one. case $file in */* | *\\*) . ${wrapperdot} ;; *) . ./${wrapperdot} ;; esac # Check the variables that should have been set. if test -z "$notinst_deplibs"; then $echo "$modename: invalid libtool wrapper script \`$wrapper'" 1>&2 exit $EXIT_FAILURE fi finalize=yes for lib in $notinst_deplibs; do # Check to see that each library is installed. libdir= if test -f "$lib"; then # If there is no directory component, then add one. case $lib in */* | *\\*) . $lib ;; *) . ./$lib ;; esac fi libfile="$libdir/"`$echo "X$lib" | $Xsed -e 's%^.*/%%g'` ### testsuite: skip nested quoting test if test -n "$libdir" && test ! -f "$libfile"; then $echo "$modename: warning: \`$lib' has not been installed in \`$libdir'" 1>&2 finalize=no fi done relink_command= # To insure that "foo" is sourced, and not "foo.exe", # finese the cygwin/MSYS system by explicitly sourcing "foo." # which disallows the automatic-append-.exe behavior. case $build in *cygwin* | *mingw*) wrapperdot=${wrapper}. ;; *) wrapperdot=${wrapper} ;; esac # If there is no directory component, then add one. case $file in */* | *\\*) . ${wrapperdot} ;; *) . 
./${wrapperdot} ;; esac outputname= if test "$fast_install" = no && test -n "$relink_command"; then if test "$finalize" = yes && test -z "$run"; then tmpdir="/tmp" test -n "$TMPDIR" && tmpdir="$TMPDIR" tmpdir="$tmpdir/libtool-$$" save_umask=`umask` umask 0077 if $mkdir "$tmpdir"; then umask $save_umask else umask $save_umask $echo "$modename: error: cannot create temporary directory \`$tmpdir'" 1>&2 continue fi file=`$echo "X$file$stripped_ext" | $Xsed -e 's%^.*/%%'` outputname="$tmpdir/$file" # Replace the output file specification. relink_command=`$echo "X$relink_command" | $Xsed -e 's%@OUTPUT@%'"$outputname"'%g'` $show "$relink_command" if $run eval "$relink_command"; then : else $echo "$modename: error: relink \`$file' with the above command before installing it" 1>&2 ${rm}r "$tmpdir" continue fi file="$outputname" else $echo "$modename: warning: cannot relink \`$file'" 1>&2 fi else # Install the binary that we compiled earlier. file=`$echo "X$file$stripped_ext" | $Xsed -e "s%\([^/]*\)$%$objdir/\1%"` fi fi # remove .exe since cygwin /usr/bin/install will append another # one anyways case $install_prog,$host in */usr/bin/install*,*cygwin*) case $file:$destfile in *.exe:*.exe) # this is ok ;; *.exe:*) destfile=$destfile.exe ;; *:*.exe) destfile=`$echo $destfile | ${SED} -e 's,.exe$,,'` ;; esac ;; esac $show "$install_prog$stripme $file $destfile" $run eval "$install_prog\$stripme \$file \$destfile" || exit $? test -n "$outputname" && ${rm}r "$tmpdir" ;; esac done for file in $staticlibs; do name=`$echo "X$file" | $Xsed -e 's%^.*/%%'` # Set up the ranlib parameters. oldlib="$destdir/$name" $show "$install_prog $file $oldlib" $run eval "$install_prog \$file \$oldlib" || exit $? if test -n "$stripme" && test -n "$old_striplib"; then $show "$old_striplib $oldlib" $run eval "$old_striplib $oldlib" || exit $? fi # Do each command in the postinstall commands. cmds=$old_postinstall_cmds save_ifs="$IFS"; IFS='~' for cmd in $cmds; do IFS="$save_ifs" eval cmd=\"$cmd\" $show "$cmd" $run eval "$cmd" || exit $? done IFS="$save_ifs" done if test -n "$future_libdirs"; then $echo "$modename: warning: remember to run \`$progname --finish$future_libdirs'" 1>&2 fi if test -n "$current_libdirs"; then # Maybe just do a dry run. test -n "$run" && current_libdirs=" -n$current_libdirs" exec_cmd='$SHELL $progpath $preserve_args --finish$current_libdirs' else exit $EXIT_SUCCESS fi ;; # libtool finish mode finish) modename="$modename: finish" libdirs="$nonopt" admincmds= if test -n "$finish_cmds$finish_eval" && test -n "$libdirs"; then for dir do libdirs="$libdirs $dir" done for libdir in $libdirs; do if test -n "$finish_cmds"; then # Do each command in the finish commands. cmds=$finish_cmds save_ifs="$IFS"; IFS='~' for cmd in $cmds; do IFS="$save_ifs" eval cmd=\"$cmd\" $show "$cmd" $run eval "$cmd" || admincmds="$admincmds $cmd" done IFS="$save_ifs" fi if test -n "$finish_eval"; then # Do the single finish_eval. eval cmds=\"$finish_eval\" $run eval "$cmds" || admincmds="$admincmds $cmds" fi done fi # Exit here if they wanted silent mode. 
test "$show" = : && exit $EXIT_SUCCESS $echo "----------------------------------------------------------------------" $echo "Libraries have been installed in:" for libdir in $libdirs; do $echo " $libdir" done $echo $echo "If you ever happen to want to link against installed libraries" $echo "in a given directory, LIBDIR, you must either use libtool, and" $echo "specify the full pathname of the library, or use the \`-LLIBDIR'" $echo "flag during linking and do at least one of the following:" if test -n "$shlibpath_var"; then $echo " - add LIBDIR to the \`$shlibpath_var' environment variable" $echo " during execution" fi if test -n "$runpath_var"; then $echo " - add LIBDIR to the \`$runpath_var' environment variable" $echo " during linking" fi if test -n "$hardcode_libdir_flag_spec"; then libdir=LIBDIR eval flag=\"$hardcode_libdir_flag_spec\" $echo " - use the \`$flag' linker flag" fi if test -n "$admincmds"; then $echo " - have your system administrator run these commands:$admincmds" fi if test -f /etc/ld.so.conf; then $echo " - have your system administrator add LIBDIR to \`/etc/ld.so.conf'" fi $echo $echo "See any operating system documentation about shared libraries for" $echo "more information, such as the ld(1) and ld.so(8) manual pages." $echo "----------------------------------------------------------------------" exit $EXIT_SUCCESS ;; # libtool execute mode execute) modename="$modename: execute" # The first argument is the command name. cmd="$nonopt" if test -z "$cmd"; then $echo "$modename: you must specify a COMMAND" 1>&2 $echo "$help" exit $EXIT_FAILURE fi # Handle -dlopen flags immediately. for file in $execute_dlfiles; do if test ! -f "$file"; then $echo "$modename: \`$file' is not a file" 1>&2 $echo "$help" 1>&2 exit $EXIT_FAILURE fi dir= case $file in *.la) # Check to see that this really is a libtool archive. if (${SED} -e '2q' $file | grep "^# Generated by .*$PACKAGE") >/dev/null 2>&1; then : else $echo "$modename: \`$lib' is not a valid libtool archive" 1>&2 $echo "$help" 1>&2 exit $EXIT_FAILURE fi # Read the libtool library. dlname= library_names= # If there is no directory component, then add one. case $file in */* | *\\*) . $file ;; *) . ./$file ;; esac # Skip this library if it cannot be dlopened. if test -z "$dlname"; then # Warn if it was a shared library. test -n "$library_names" && $echo "$modename: warning: \`$file' was not linked with \`-export-dynamic'" continue fi dir=`$echo "X$file" | $Xsed -e 's%/[^/]*$%%'` test "X$dir" = "X$file" && dir=. if test -f "$dir/$objdir/$dlname"; then dir="$dir/$objdir" else $echo "$modename: cannot find \`$dlname' in \`$dir' or \`$dir/$objdir'" 1>&2 exit $EXIT_FAILURE fi ;; *.lo) # Just add the directory containing the .lo file. dir=`$echo "X$file" | $Xsed -e 's%/[^/]*$%%'` test "X$dir" = "X$file" && dir=. ;; *) $echo "$modename: warning \`-dlopen' is ignored for non-libtool libraries and objects" 1>&2 continue ;; esac # Get the absolute pathname. absdir=`cd "$dir" && pwd` test -n "$absdir" && dir="$absdir" # Now add the directory to shlibpath_var. if eval "test -z \"\$$shlibpath_var\""; then eval "$shlibpath_var=\"\$dir\"" else eval "$shlibpath_var=\"\$dir:\$$shlibpath_var\"" fi done # This variable tells wrapper scripts just to set shlibpath_var # rather than running their programs. libtool_execute_magic="$magic" # Check if any of the arguments is a wrapper script. args= for file do case $file in -*) ;; *) # Do a test to see if this is really a libtool program. 
if (${SED} -e '4q' $file | grep "^# Generated by .*$PACKAGE") >/dev/null 2>&1; then # If there is no directory component, then add one. case $file in */* | *\\*) . $file ;; *) . ./$file ;; esac # Transform arg to wrapped name. file="$progdir/$program" fi ;; esac # Quote arguments (to preserve shell metacharacters). file=`$echo "X$file" | $Xsed -e "$sed_quote_subst"` args="$args \"$file\"" done if test -z "$run"; then if test -n "$shlibpath_var"; then # Export the shlibpath_var. eval "export $shlibpath_var" fi # Restore saved environment variables if test "${save_LC_ALL+set}" = set; then LC_ALL="$save_LC_ALL"; export LC_ALL fi if test "${save_LANG+set}" = set; then LANG="$save_LANG"; export LANG fi # Now prepare to actually exec the command. exec_cmd="\$cmd$args" else # Display what would be done. if test -n "$shlibpath_var"; then eval "\$echo \"\$shlibpath_var=\$$shlibpath_var\"" $echo "export $shlibpath_var" fi $echo "$cmd$args" exit $EXIT_SUCCESS fi ;; # libtool clean and uninstall mode clean | uninstall) modename="$modename: $mode" rm="$nonopt" files= rmforce= exit_status=0 # This variable tells wrapper scripts just to set variables rather # than running their programs. libtool_install_magic="$magic" for arg do case $arg in -f) rm="$rm $arg"; rmforce=yes ;; -*) rm="$rm $arg" ;; *) files="$files $arg" ;; esac done if test -z "$rm"; then $echo "$modename: you must specify an RM program" 1>&2 $echo "$help" 1>&2 exit $EXIT_FAILURE fi rmdirs= origobjdir="$objdir" for file in $files; do dir=`$echo "X$file" | $Xsed -e 's%/[^/]*$%%'` if test "X$dir" = "X$file"; then dir=. objdir="$origobjdir" else objdir="$dir/$origobjdir" fi name=`$echo "X$file" | $Xsed -e 's%^.*/%%'` test "$mode" = uninstall && objdir="$dir" # Remember objdir for removal later, being careful to avoid duplicates if test "$mode" = clean; then case " $rmdirs " in *" $objdir "*) ;; *) rmdirs="$rmdirs $objdir" ;; esac fi # Don't error if the file doesn't exist and rm -f was used. if (test -L "$file") >/dev/null 2>&1 \ || (test -h "$file") >/dev/null 2>&1 \ || test -f "$file"; then : elif test -d "$file"; then exit_status=1 continue elif test "$rmforce" = yes; then continue fi rmfiles="$file" case $name in *.la) # Possibly a libtool archive, so verify it. if (${SED} -e '2q' $file | grep "^# Generated by .*$PACKAGE") >/dev/null 2>&1; then . $dir/$name # Delete the libtool libraries and symlinks. for n in $library_names; do rmfiles="$rmfiles $objdir/$n" done test -n "$old_library" && rmfiles="$rmfiles $objdir/$old_library" test "$mode" = clean && rmfiles="$rmfiles $objdir/$name $objdir/${name}i" if test "$mode" = uninstall; then if test -n "$library_names"; then # Do each command in the postuninstall commands. cmds=$postuninstall_cmds save_ifs="$IFS"; IFS='~' for cmd in $cmds; do IFS="$save_ifs" eval cmd=\"$cmd\" $show "$cmd" $run eval "$cmd" if test "$?" -ne 0 && test "$rmforce" != yes; then exit_status=1 fi done IFS="$save_ifs" fi if test -n "$old_library"; then # Do each command in the old_postuninstall commands. cmds=$old_postuninstall_cmds save_ifs="$IFS"; IFS='~' for cmd in $cmds; do IFS="$save_ifs" eval cmd=\"$cmd\" $show "$cmd" $run eval "$cmd" if test "$?" -ne 0 && test "$rmforce" != yes; then exit_status=1 fi done IFS="$save_ifs" fi # FIXME: should reinstall the best remaining shared library. fi fi ;; *.lo) # Possibly a libtool object, so verify it. if (${SED} -e '2q' $file | grep "^# Generated by .*$PACKAGE") >/dev/null 2>&1; then # Read the .lo file . $dir/$name # Add PIC object to the list of files to remove. 
if test -n "$pic_object" \ && test "$pic_object" != none; then rmfiles="$rmfiles $dir/$pic_object" fi # Add non-PIC object to the list of files to remove. if test -n "$non_pic_object" \ && test "$non_pic_object" != none; then rmfiles="$rmfiles $dir/$non_pic_object" fi fi ;; *) if test "$mode" = clean ; then noexename=$name case $file in *.exe) file=`$echo $file|${SED} 's,.exe$,,'` noexename=`$echo $name|${SED} 's,.exe$,,'` # $file with .exe has already been added to rmfiles, # add $file without .exe rmfiles="$rmfiles $file" ;; esac # Do a test to see if this is a libtool program. if (${SED} -e '4q' $file | grep "^# Generated by .*$PACKAGE") >/dev/null 2>&1; then relink_command= . $dir/$noexename # note $name still contains .exe if it was in $file originally # as does the version of $file that was added into $rmfiles rmfiles="$rmfiles $objdir/$name $objdir/${name}S.${objext}" if test "$fast_install" = yes && test -n "$relink_command"; then rmfiles="$rmfiles $objdir/lt-$name" fi if test "X$noexename" != "X$name" ; then rmfiles="$rmfiles $objdir/lt-${noexename}.c" fi fi fi ;; esac $show "$rm $rmfiles" $run $rm $rmfiles || exit_status=1 done objdir="$origobjdir" # Try to remove the ${objdir}s in the directories where we deleted files for dir in $rmdirs; do if test -d "$dir"; then $show "rmdir $dir" $run rmdir $dir >/dev/null 2>&1 fi done exit $exit_status ;; "") $echo "$modename: you must specify a MODE" 1>&2 $echo "$generic_help" 1>&2 exit $EXIT_FAILURE ;; esac if test -z "$exec_cmd"; then $echo "$modename: invalid operation mode \`$mode'" 1>&2 $echo "$generic_help" 1>&2 exit $EXIT_FAILURE fi fi # test -z "$show_help" if test -n "$exec_cmd"; then eval exec $exec_cmd exit $EXIT_FAILURE fi # We need to display help for each of the modes. case $mode in "") $echo \ "Usage: $modename [OPTION]... [MODE-ARG]... Provide generalized library-building support services. --config show all configuration variables --debug enable verbose shell tracing -n, --dry-run display commands without modifying any files --features display basic configuration information and exit --finish same as \`--mode=finish' --help display this help message and exit --mode=MODE use operation mode MODE [default=inferred from MODE-ARGS] --quiet same as \`--silent' --silent don't print informational messages --tag=TAG use configuration variables from tag TAG --version print version information MODE must be one of the following: clean remove files from the build directory compile compile a source file into a libtool object execute automatically set library path, then run a program finish complete the installation of libtool libraries install install libraries or executables link create a library or an executable uninstall remove libraries from an installed directory MODE-ARGS vary depending on the MODE. Try \`$modename --help --mode=MODE' for a more detailed description of MODE. Report bugs to ." exit $EXIT_SUCCESS ;; clean) $echo \ "Usage: $modename [OPTION]... --mode=clean RM [RM-OPTION]... FILE... Remove files from the build directory. RM is the name of the program to use to delete files associated with each FILE (typically \`/bin/rm'). RM-OPTIONS are options (such as \`-f') to be passed to RM. If FILE is a libtool library, object or program, all the files associated with it are deleted. Otherwise, only FILE itself is deleted using RM." ;; compile) $echo \ "Usage: $modename [OPTION]... --mode=compile COMPILE-COMMAND... SOURCEFILE Compile a source file into a libtool library object. 
This mode accepts the following additional options: -o OUTPUT-FILE set the output file name to OUTPUT-FILE -prefer-pic try to building PIC objects only -prefer-non-pic try to building non-PIC objects only -static always build a \`.o' file suitable for static linking COMPILE-COMMAND is a command to be used in creating a \`standard' object file from the given SOURCEFILE. The output file name is determined by removing the directory component from SOURCEFILE, then substituting the C source code suffix \`.c' with the library object suffix, \`.lo'." ;; execute) $echo \ "Usage: $modename [OPTION]... --mode=execute COMMAND [ARGS]... Automatically set library path, then run a program. This mode accepts the following additional options: -dlopen FILE add the directory containing FILE to the library path This mode sets the library path environment variable according to \`-dlopen' flags. If any of the ARGS are libtool executable wrappers, then they are translated into their corresponding uninstalled binary, and any of their required library directories are added to the library path. Then, COMMAND is executed, with ARGS as arguments." ;; finish) $echo \ "Usage: $modename [OPTION]... --mode=finish [LIBDIR]... Complete the installation of libtool libraries. Each LIBDIR is a directory that contains libtool libraries. The commands that this mode executes may require superuser privileges. Use the \`--dry-run' option if you just want to see what would be executed." ;; install) $echo \ "Usage: $modename [OPTION]... --mode=install INSTALL-COMMAND... Install executables or libraries. INSTALL-COMMAND is the installation command. The first component should be either the \`install' or \`cp' program. The rest of the components are interpreted as arguments to that command (only BSD-compatible install options are recognized)." ;; link) $echo \ "Usage: $modename [OPTION]... --mode=link LINK-COMMAND... Link object files or libraries together to form another library, or to create an executable program. LINK-COMMAND is a command using the C compiler that you would use to create a program from several object files. 
The following components of LINK-COMMAND are treated specially: -all-static do not do any dynamic linking at all -avoid-version do not add a version suffix if possible -dlopen FILE \`-dlpreopen' FILE if it cannot be dlopened at runtime -dlpreopen FILE link in FILE and add its symbols to lt_preloaded_symbols -export-dynamic allow symbols from OUTPUT-FILE to be resolved with dlsym(3) -export-symbols SYMFILE try to export only the symbols listed in SYMFILE -export-symbols-regex REGEX try to export only the symbols matching REGEX -LLIBDIR search LIBDIR for required installed libraries -lNAME OUTPUT-FILE requires the installed library libNAME -module build a library that can dlopened -no-fast-install disable the fast-install mode -no-install link a not-installable executable -no-undefined declare that a library does not refer to external symbols -o OUTPUT-FILE create OUTPUT-FILE from the specified objects -objectlist FILE Use a list of object files found in FILE to specify objects -precious-files-regex REGEX don't remove output files matching REGEX -release RELEASE specify package release information -rpath LIBDIR the created library will eventually be installed in LIBDIR -R[ ]LIBDIR add LIBDIR to the runtime path of programs and libraries -static do not do any dynamic linking of libtool libraries -version-info CURRENT[:REVISION[:AGE]] specify library version info [each variable defaults to 0] All other options (arguments beginning with \`-') are ignored. Every other argument is treated as a filename. Files ending in \`.la' are treated as uninstalled libtool libraries, other files are standard or library object files. If the OUTPUT-FILE ends in \`.la', then a libtool library is created, only library objects (\`.lo' files) may be specified, and \`-rpath' is required, except when creating a convenience library. If OUTPUT-FILE ends in \`.a' or \`.lib', then a standard library is created using \`ar' and \`ranlib', or on Windows using \`lib'. If OUTPUT-FILE ends in \`.lo' or \`.${objext}', then a reloadable object file is created, otherwise an executable program is created." ;; uninstall) $echo \ "Usage: $modename [OPTION]... --mode=uninstall RM [RM-OPTION]... FILE... Remove libraries from an installation directory. RM is the name of the program to use to delete files associated with each FILE (typically \`/bin/rm'). RM-OPTIONS are options (such as \`-f') to be passed to RM. If FILE is a libtool library, all the files associated with it are deleted. Otherwise, only FILE itself is deleted using RM." ;; *) $echo "$modename: invalid operation mode \`$mode'" 1>&2 $echo "$help" 1>&2 exit $EXIT_FAILURE ;; esac $echo $echo "Try \`$modename --help' for more information about other modes." exit $EXIT_SUCCESS # The TAGs below are defined such that we never get into a situation # in which we disable both kinds of libraries. Given conflicting # choices, we go for a static library, that is the most portable, # since we can't tell whether shared libraries were disabled because # the user asked for that or because the platform doesn't support # them. This is particularly important on AIX, because we don't # support having both static and shared libraries enabled at the same # time on that platform, so we default to a shared-only configuration. # If a disable-shared tag is given, we'll fallback to a static-only # configuration. But we'll never go from static-only to shared-only. 
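# Illustrative usage sketch, not part of the generated script: the modes
# documented in the help text above are typically driven in this order,
# using the `libtool' wrapper that configure generates.  "foo.c",
# "libfoo.la" and "/usr/local/lib" below are hypothetical placeholder
# names, not files shipped with this package.
#
#   ./libtool --mode=compile gcc -c foo.c                      # -> foo.lo
#   ./libtool --mode=link gcc -o libfoo.la foo.lo -rpath /usr/local/lib
#   ./libtool --mode=install install -c libfoo.la /usr/local/lib/libfoo.la
#   ./libtool --mode=finish /usr/local/lib
#
# --mode=link needs -rpath when building an installable `.la' library, and
# --mode=finish matches the "remember to run `--finish'" reminder printed
# by install mode above.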
# ### BEGIN LIBTOOL TAG CONFIG: disable-shared build_libtool_libs=no build_old_libs=yes # ### END LIBTOOL TAG CONFIG: disable-shared # ### BEGIN LIBTOOL TAG CONFIG: disable-static build_old_libs=`case $build_libtool_libs in yes) $echo no;; *) $echo yes;; esac` # ### END LIBTOOL TAG CONFIG: disable-static # Local Variables: # mode:shell-script # sh-indentation:2 # End: sphinx-2.0.4-release/api/libsphinxclient/test.c0000644000176700017710000003051511711621267021046 0ustar deogardeogar// // $Id: test.c 3087 2012-01-30 23:07:35Z shodan $ // // // Copyright (c) 2001-2012, Andrew Aksyonoff // Copyright (c) 2008-2012, Sphinx Technologies Inc // All rights reserved // // This program is free software; you can redistribute it and/or modify // it under the terms of the GNU Library General Public License. You should // have received a copy of the LGPL license along with this program; if you // did not, you can find it at http://www.gnu.org/ // #include #include #include #if _WIN32 #include #endif #include "sphinxclient.h" static sphinx_bool g_smoke = SPH_FALSE; static int g_failed = 0; void die ( const char * template, ... ) { va_list ap; va_start ( ap, template ); printf ( "FATAL: " ); vprintf ( template, ap ); printf ( "\n" ); va_end ( ap ); exit ( 1 ); } void net_init () { #if _WIN32 // init WSA on Windows WSADATA wsa_data; int wsa_startup_err; wsa_startup_err = WSAStartup ( WINSOCK_VERSION, &wsa_data ); if ( wsa_startup_err ) die ( "failed to initialize WinSock2: error %d", wsa_startup_err ); #endif } void test_query ( sphinx_client * client, const char * query, const char * index ) { sphinx_result * res; int i, j, k, mva_len; unsigned int * mva; const char * field_names[2]; int field_weights[2]; field_names[0] = "title"; field_names[1] = "content"; field_weights[0] = 100; field_weights[1] = 1; sphinx_set_field_weights ( client, 2, field_names, field_weights ); field_weights[0] = 1; field_weights[1] = 1; res = sphinx_query ( client, query, index, NULL ); if ( !res ) { g_failed += ( res==NULL ); if ( !g_smoke ) die ( "query failed: %s", sphinx_error(client) ); } if ( g_smoke ) printf ( "Query '%s' retrieved %d of %d matches.\n", query, res->total, res->total_found ); else printf ( "Query '%s' retrieved %d of %d matches in %d.%03d sec.\n", query, res->total, res->total_found, res->time_msec/1000, res->time_msec%1000 ); printf ( "Query stats:\n" ); for ( i=0; inum_words; i++ ) printf ( "\t'%s' found %d times in %d documents\n", res->words[i].word, res->words[i].hits, res->words[i].docs ); printf ( "\nMatches:\n" ); for ( i=0; inum_matches; i++ ) { printf ( "%d. doc_id=%d, weight=%d", 1+i, (int)sphinx_get_id ( res, i ), sphinx_get_weight ( res, i ) ); for ( j=0; jnum_attrs; j++ ) { printf ( ", %s=", res->attr_names[j] ); switch ( res->attr_types[j] ) { case SPH_ATTR_MULTI64: case SPH_ATTR_MULTI: mva = sphinx_get_mva ( res, i, j ); mva_len = *mva++; printf ( "(" ); for ( k=0; kattr_types[j]==SPH_ATTR_MULTI ? 
mva[k] : (unsigned int)sphinx_get_mva64_value ( mva, k ) ) ); printf ( ")" ); break; case SPH_ATTR_FLOAT: printf ( "%f", sphinx_get_float ( res, i, j ) ); break; case SPH_ATTR_STRING: printf ( "%s", sphinx_get_string ( res, i, j ) ); break; default: printf ( "%u", (unsigned int)sphinx_get_int ( res, i, j ) ); break; } } printf ( "\n" ); } printf ( "\n" ); } void test_excerpt ( sphinx_client * client ) { const char * docs[] = { "this is my test text to be highlighted, and for the sake of the testing we need to pump its length somewhat", "another test text to be highlighted, below limit", "test number three, without phrase match", "final test, not only without phrase match, but also above limit and with swapped phrase text test as well" }; const int ndocs = sizeof(docs)/sizeof(docs[0]); const char * words = "test text"; const char * index = "test1"; sphinx_excerpt_options opts; char ** res; int i, j; sphinx_init_excerpt_options ( &opts ); opts.limit = 60; opts.around = 3; opts.allow_empty = SPH_FALSE; for ( j=0; j<2; j++ ) { opts.exact_phrase = j; printf ( "exact_phrase=%d\n", j ); res = sphinx_build_excerpts ( client, ndocs, docs, index, words, &opts ); if ( !res ) { g_failed += ( res==NULL ); if ( !g_smoke ) die ( "query failed: %s", sphinx_error(client) ); } for ( i=0; i" "The institutional investment manager it. Is Filing this report and." "" "It is signed hereby represent. That it is all information." "are It or is" "" "" "cool It is cooler" "" "It is another place!" "" }; const int ndocs = sizeof(docs)/sizeof(docs[0]); const char * words = "it is"; const char * index = "test1"; sphinx_excerpt_options opts; char ** res; int i, j; sphinx_init_excerpt_options ( &opts ); opts.limit = 150; opts.limit_passages = 8; opts.around = 8; opts.html_strip_mode = "strip"; opts.passage_boundary = "zone"; opts.emit_zones = SPH_TRUE; for ( j=0; j<2; j++ ) { if ( j==1 ) { opts.passage_boundary = "sentence"; opts.emit_zones = SPH_FALSE; } printf ( "passage_boundary=%s\n", opts.passage_boundary ); res = sphinx_build_excerpts ( client, ndocs, docs, index, words, &opts ); if ( !res ) die ( "query failed: %s", sphinx_error(client) ); for ( i=0; i % s <-\n\n", name ); } int main ( int argc, char ** argv ) { int i, port = 0; sphinx_client * client; sphinx_uint64_t override_docid = 2; unsigned int override_value = 2000; for ( i=1; i$(distdir).tar.gz $(am__remove_distdir) dist-bzip2: distdir tardir=$(distdir) && $(am__tar) | bzip2 -9 -c >$(distdir).tar.bz2 $(am__remove_distdir) dist-tarZ: distdir tardir=$(distdir) && $(am__tar) | compress -c >$(distdir).tar.Z $(am__remove_distdir) dist-shar: distdir shar $(distdir) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).shar.gz $(am__remove_distdir) dist-zip: distdir -rm -f $(distdir).zip zip -rq $(distdir).zip $(distdir) $(am__remove_distdir) dist dist-all: distdir tardir=$(distdir) && $(am__tar) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).tar.gz $(am__remove_distdir) # This target untars the dist file and tries a VPATH configuration. Then # it guarantees that the distribution is self-contained by making another # tarfile. 
distcheck: dist case '$(DIST_ARCHIVES)' in \ *.tar.gz*) \ GZIP=$(GZIP_ENV) gunzip -c $(distdir).tar.gz | $(am__untar) ;;\ *.tar.bz2*) \ bunzip2 -c $(distdir).tar.bz2 | $(am__untar) ;;\ *.tar.Z*) \ uncompress -c $(distdir).tar.Z | $(am__untar) ;;\ *.shar.gz*) \ GZIP=$(GZIP_ENV) gunzip -c $(distdir).shar.gz | unshar ;;\ *.zip*) \ unzip $(distdir).zip ;;\ esac chmod -R a-w $(distdir); chmod a+w $(distdir) mkdir $(distdir)/_build mkdir $(distdir)/_inst chmod a-w $(distdir) dc_install_base=`$(am__cd) $(distdir)/_inst && pwd | sed -e 's,^[^:\\/]:[\\/],/,'` \ && dc_destdir="$${TMPDIR-/tmp}/am-dc-$$$$/" \ && cd $(distdir)/_build \ && ../configure --srcdir=.. --prefix="$$dc_install_base" \ $(DISTCHECK_CONFIGURE_FLAGS) \ && $(MAKE) $(AM_MAKEFLAGS) \ && $(MAKE) $(AM_MAKEFLAGS) dvi \ && $(MAKE) $(AM_MAKEFLAGS) check \ && $(MAKE) $(AM_MAKEFLAGS) install \ && $(MAKE) $(AM_MAKEFLAGS) installcheck \ && $(MAKE) $(AM_MAKEFLAGS) uninstall \ && $(MAKE) $(AM_MAKEFLAGS) distuninstallcheck_dir="$$dc_install_base" \ distuninstallcheck \ && chmod -R a-w "$$dc_install_base" \ && ({ \ (cd ../.. && umask 077 && mkdir "$$dc_destdir") \ && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" install \ && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" uninstall \ && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" \ distuninstallcheck_dir="$$dc_destdir" distuninstallcheck; \ } || { rm -rf "$$dc_destdir"; exit 1; }) \ && rm -rf "$$dc_destdir" \ && $(MAKE) $(AM_MAKEFLAGS) dist \ && rm -rf $(DIST_ARCHIVES) \ && $(MAKE) $(AM_MAKEFLAGS) distcleancheck $(am__remove_distdir) @(echo "$(distdir) archives ready for distribution: "; \ list='$(DIST_ARCHIVES)'; for i in $$list; do echo $$i; done) | \ sed -e '1{h;s/./=/g;p;x;}' -e '$${p;x;}' distuninstallcheck: @cd $(distuninstallcheck_dir) \ && test `$(distuninstallcheck_listfiles) | wc -l` -le 1 \ || { echo "ERROR: files left after uninstall:" ; \ if test -n "$(DESTDIR)"; then \ echo " (check DESTDIR support)"; \ fi ; \ $(distuninstallcheck_listfiles) ; \ exit 1; } >&2 distcleancheck: distclean @if test '$(srcdir)' = . ; then \ echo "ERROR: distcleancheck can only run from a VPATH build" ; \ exit 1 ; \ fi @test `$(distcleancheck_listfiles) | wc -l` -eq 0 \ || { echo "ERROR: files left in build directory after distclean:" ; \ $(distcleancheck_listfiles) ; \ exit 1; } >&2 check-am: all-am check: check-am all-am: Makefile $(LTLIBRARIES) $(PROGRAMS) $(HEADERS) \ sphinxclient_config.h installdirs: for dir in "$(DESTDIR)$(libdir)" "$(DESTDIR)$(includedir)"; do \ test -z "$$dir" || $(mkdir_p) "$$dir"; \ done install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ `test -z '$(STRIP)' || \ echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
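# Illustrative note, not an automake-generated rule: a maintainer would
# typically exercise the distcheck machinery above from a configured tree
# with something like
#
#   ./configure && make distcheck
#
# Extra configure arguments, if any are needed, can be passed through the
# DISTCHECK_CONFIGURE_FLAGS variable referenced in the rule above; they are
# forwarded to the VPATH configure run performed inside $(distdir)/_build.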
clean: clean-am clean-am: clean-generic clean-libLTLIBRARIES clean-libtool \ clean-noinstPROGRAMS mostlyclean-am distclean: distclean-am -rm -f $(am__CONFIG_DISTCLEAN_FILES) -rm -f Makefile distclean-am: clean-am distclean-compile distclean-generic \ distclean-hdr distclean-libtool distclean-tags dvi: dvi-am dvi-am: html: html-am info: info-am info-am: install-data-am: install-includeHEADERS install-exec-am: install-libLTLIBRARIES install-info: install-info-am install-man: installcheck-am: maintainer-clean: maintainer-clean-am -rm -f $(am__CONFIG_DISTCLEAN_FILES) -rm -rf $(top_srcdir)/autom4te.cache -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-compile mostlyclean-generic \ mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: uninstall-includeHEADERS uninstall-info-am \ uninstall-libLTLIBRARIES .PHONY: CTAGS GTAGS all all-am am--refresh check check-am clean \ clean-generic clean-libLTLIBRARIES clean-libtool \ clean-noinstPROGRAMS ctags dist dist-all dist-bzip2 dist-gzip \ dist-shar dist-tarZ dist-zip distcheck distclean \ distclean-compile distclean-generic distclean-hdr \ distclean-libtool distclean-tags distcleancheck distdir \ distuninstallcheck dvi dvi-am html html-am info info-am \ install install-am install-data install-data-am install-exec \ install-exec-am install-includeHEADERS install-info \ install-info-am install-libLTLIBRARIES install-man \ install-strip installcheck installcheck-am installdirs \ maintainer-clean maintainer-clean-generic mostlyclean \ mostlyclean-compile mostlyclean-generic mostlyclean-libtool \ pdf pdf-am ps ps-am tags uninstall uninstall-am \ uninstall-includeHEADERS uninstall-info-am \ uninstall-libLTLIBRARIES # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. 
.NOEXPORT: sphinx-2.0.4-release/api/libsphinxclient/smoke_test.conf0000644000176700017710000000212411706371553022746 0ustar deogardeogar source src1 { type = mysql sql_host = localhost sql_user = test sql_pass = sql_db = test sql_port = 3306 # optional, default is 3306 sql_query = SELECT id, id as idd, group_id, title, content FROM documents sql_attr_uint = group_id sql_attr_uint = idd sql_attr_multi = uint tag from query; SELECT docid, tagid FROM tags sql_attr_multi = bigint tag64 from query; SELECT docid, tagid FROM tags sql_attr_multi = uint tag2 from query; SELECT docid, tagid FROM tags } index test1 { source = src1 path = ../../test/data/test1 docinfo = extern charset_type = utf-8 } index dist { type = distributed agent = 127.0.0.1:10312:test1 agent_connect_timeout = 1000 agent_query_timeout = 3000 } indexer { mem_limit = 32M } searchd { listen = 10312 listen = 10306:mysql41 read_timeout = 5 max_children = 30 pid_file = searchd.pid log = ../../test/searchd.log query_log = ../../test/query.log max_matches = 1000 workers = threads # for RT to work binlog_path = max_packet_size = 16M } sphinx-2.0.4-release/api/libsphinxclient/test03.vcproj0000644000176700017710000000664511424366077022307 0ustar deogardeogar sphinx-2.0.4-release/api/libsphinxclient/test.sln0000644000176700017710000000266511356430305021421 0ustar deogardeogarMicrosoft Visual Studio Solution File, Format Version 9.00 # Visual Studio 2005 Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test", "test.vcproj", "{AE8DBF77-DE4F-41E4-96F9-456D8DA6418C}" ProjectSection(ProjectDependencies) = postProject {E0393ED6-FE6B-4803-8BFD-9D79EF21603A} = {E0393ED6-FE6B-4803-8BFD-9D79EF21603A} EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "libsphinxclient", "libsphinxclient.vcproj", "{E0393ED6-FE6B-4803-8BFD-9D79EF21603A}" EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Win32 = Debug|Win32 Release|Win32 = Release|Win32 EndGlobalSection GlobalSection(ProjectConfigurationPlatforms) = postSolution {AE8DBF77-DE4F-41E4-96F9-456D8DA6418C}.Debug|Win32.ActiveCfg = Debug|Win32 {AE8DBF77-DE4F-41E4-96F9-456D8DA6418C}.Debug|Win32.Build.0 = Debug|Win32 {AE8DBF77-DE4F-41E4-96F9-456D8DA6418C}.Release|Win32.ActiveCfg = Release|Win32 {AE8DBF77-DE4F-41E4-96F9-456D8DA6418C}.Release|Win32.Build.0 = Release|Win32 {E0393ED6-FE6B-4803-8BFD-9D79EF21603A}.Debug|Win32.ActiveCfg = Debug|Win32 {E0393ED6-FE6B-4803-8BFD-9D79EF21603A}.Debug|Win32.Build.0 = Debug|Win32 {E0393ED6-FE6B-4803-8BFD-9D79EF21603A}.Release|Win32.ActiveCfg = Release|Win32 {E0393ED6-FE6B-4803-8BFD-9D79EF21603A}.Release|Win32.Build.0 = Release|Win32 EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE EndGlobalSection EndGlobal sphinx-2.0.4-release/api/libsphinxclient/COPYING0000644000176700017710000006127411037123041020750 0ustar deogardeogar GNU LIBRARY GENERAL PUBLIC LICENSE Version 2, June 1991 Copyright (C) 1991 Free Software Foundation, Inc. 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. [This is the first released version of the library GPL. It is numbered 2 because it goes with version 2 of the ordinary GPL.] Preamble The licenses for most software are designed to take away your freedom to share and change it. 
By contrast, the GNU General Public Licenses are intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users. This license, the Library General Public License, applies to some specially designated Free Software Foundation software, and to any other libraries whose authors decide to use it. You can use it for your libraries, too. When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs; and that you know you can do these things. To protect your rights, we need to make restrictions that forbid anyone to deny you these rights or to ask you to surrender the rights. These restrictions translate to certain responsibilities for you if you distribute copies of the library, or if you modify it. For example, if you distribute copies of the library, whether gratis or for a fee, you must give the recipients all the rights that we gave you. You must make sure that they, too, receive or can get the source code. If you link a program with the library, you must provide complete object files to the recipients so that they can relink them with the library, after making changes to the library and recompiling it. And you must show them these terms so they know their rights. Our method of protecting your rights has two steps: (1) copyright the library, and (2) offer you this license which gives you legal permission to copy, distribute and/or modify the library. Also, for each distributor's protection, we want to make certain that everyone understands that there is no warranty for this free library. If the library is modified by someone else and passed on, we want its recipients to know that what they have is not the original version, so that any problems introduced by others will not reflect on the original authors' reputations. Finally, any free program is threatened constantly by software patents. We wish to avoid the danger that companies distributing free software will individually obtain patent licenses, thus in effect transforming the program into proprietary software. To prevent this, we have made it clear that any patent must be licensed for everyone's free use or not licensed at all. Most GNU software, including some libraries, is covered by the ordinary GNU General Public License, which was designed for utility programs. This license, the GNU Library General Public License, applies to certain designated libraries. This license is quite different from the ordinary one; be sure to read it in full, and don't assume that anything in it is the same as in the ordinary license. The reason we have a separate public license for some libraries is that they blur the distinction we usually make between modifying or adding to a program and simply using it. Linking a program with a library, without changing the library, is in some sense simply using the library, and is analogous to running a utility program or application program. However, in a textual and legal sense, the linked executable is a combined work, a derivative of the original library, and the ordinary General Public License treats it as such. 
Because of this blurred distinction, using the ordinary General Public License for libraries did not effectively promote software sharing, because most developers did not use the libraries. We concluded that weaker conditions might promote sharing better. However, unrestricted linking of non-free programs would deprive the users of those programs of all benefit from the free status of the libraries themselves. This Library General Public License is intended to permit developers of non-free programs to use free libraries, while preserving your freedom as a user of such programs to change the free libraries that are incorporated in them. (We have not seen how to achieve this as regards changes in header files, but we have achieved it as regards changes in the actual functions of the Library.) The hope is that this will lead to faster development of free libraries. The precise terms and conditions for copying, distribution and modification follow. Pay close attention to the difference between a "work based on the library" and a "work that uses the library". The former contains code derived from the library, while the latter only works together with the library. Note that it is possible for a library to be covered by the ordinary General Public License rather than by this special one. GNU LIBRARY GENERAL PUBLIC LICENSE TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 0. This License Agreement applies to any software library which contains a notice placed by the copyright holder or other authorized party saying it may be distributed under the terms of this Library General Public License (also called "this License"). Each licensee is addressed as "you". A "library" means a collection of software functions and/or data prepared so as to be conveniently linked with application programs (which use some of those functions and data) to form executables. The "Library", below, refers to any such software library or work which has been distributed under these terms. A "work based on the Library" means either the Library or any derivative work under copyright law: that is to say, a work containing the Library or a portion of it, either verbatim or with modifications and/or translated straightforwardly into another language. (Hereinafter, translation is included without limitation in the term "modification".) "Source code" for a work means the preferred form of the work for making modifications to it. For a library, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the library. Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running a program using the Library is not restricted, and output from such a program is covered only if its contents constitute a work based on the Library (independent of the use of the Library in a tool for writing it). Whether that is true depends on what the Library does and what the program that uses the Library does. 1. You may copy and distribute verbatim copies of the Library's complete source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and distribute a copy of this License along with the Library. 
You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee. 2. You may modify your copy or copies of the Library or any portion of it, thus forming a work based on the Library, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions: a) The modified work must itself be a software library. b) You must cause the files modified to carry prominent notices stating that you changed the files and the date of any change. c) You must cause the whole of the work to be licensed at no charge to all third parties under the terms of this License. d) If a facility in the modified Library refers to a function or a table of data to be supplied by an application program that uses the facility, other than as an argument passed when the facility is invoked, then you must make a good faith effort to ensure that, in the event an application does not supply such function or table, the facility still operates, and performs whatever part of its purpose remains meaningful. (For example, a function in a library to compute square roots has a purpose that is entirely well-defined independent of the application. Therefore, Subsection 2d requires that any application-supplied function or table used by this function must be optional: if the application does not supply it, the square root function must still compute square roots.) These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Library, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Library, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it. Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Library. In addition, mere aggregation of another work not based on the Library with the Library (or with a work based on the Library) on a volume of a storage or distribution medium does not bring the other work under the scope of this License. 3. You may opt to apply the terms of the ordinary GNU General Public License instead of this License to a given copy of the Library. To do this, you must alter all the notices that refer to this License, so that they refer to the ordinary GNU General Public License, version 2, instead of to this License. (If a newer version than version 2 of the ordinary GNU General Public License has appeared, then you can specify that version instead if you wish.) Do not make any other change in these notices. Once this change is made in a given copy, it is irreversible for that copy, so the ordinary GNU General Public License applies to all subsequent copies and derivative works made from that copy. This option is useful when you wish to copy part of the code of the Library into a program that is not a library. 4. 
You may copy and distribute the Library (or a portion or derivative of it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange. If distribution of object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place satisfies the requirement to distribute the source code, even though third parties are not compelled to copy the source along with the object code. 5. A program that contains no derivative of any portion of the Library, but is designed to work with the Library by being compiled or linked with it, is called a "work that uses the Library". Such a work, in isolation, is not a derivative work of the Library, and therefore falls outside the scope of this License. However, linking a "work that uses the Library" with the Library creates an executable that is a derivative of the Library (because it contains portions of the Library), rather than a "work that uses the library". The executable is therefore covered by this License. Section 6 states terms for distribution of such executables. When a "work that uses the Library" uses material from a header file that is part of the Library, the object code for the work may be a derivative work of the Library even though the source code is not. Whether this is true is especially significant if the work can be linked without the Library, or if the work is itself a library. The threshold for this to be true is not precisely defined by law. If such an object file uses only numerical parameters, data structure layouts and accessors, and small macros and small inline functions (ten lines or less in length), then the use of the object file is unrestricted, regardless of whether it is legally a derivative work. (Executables containing this object code plus portions of the Library will still fall under Section 6.) Otherwise, if the work is a derivative of the Library, you may distribute the object code for the work under the terms of Section 6. Any executables containing that work also fall under Section 6, whether or not they are linked directly with the Library itself. 6. As an exception to the Sections above, you may also compile or link a "work that uses the Library" with the Library to produce a work containing portions of the Library, and distribute that work under terms of your choice, provided that the terms permit modification of the work for the customer's own use and reverse engineering for debugging such modifications. You must give prominent notice with each copy of the work that the Library is used in it and that the Library and its use are covered by this License. You must supply a copy of this License. If the work during execution displays copyright notices, you must include the copyright notice for the Library among them, as well as a reference directing the user to the copy of this License. 
Also, you must do one of these things: a) Accompany the work with the complete corresponding machine-readable source code for the Library including whatever changes were used in the work (which must be distributed under Sections 1 and 2 above); and, if the work is an executable linked with the Library, with the complete machine-readable "work that uses the Library", as object code and/or source code, so that the user can modify the Library and then relink to produce a modified executable containing the modified Library. (It is understood that the user who changes the contents of definitions files in the Library will not necessarily be able to recompile the application to use the modified definitions.) b) Accompany the work with a written offer, valid for at least three years, to give the same user the materials specified in Subsection 6a, above, for a charge no more than the cost of performing this distribution. c) If distribution of the work is made by offering access to copy from a designated place, offer equivalent access to copy the above specified materials from the same place. d) Verify that the user has already received a copy of these materials or that you have already sent this user a copy. For an executable, the required form of the "work that uses the Library" must include any data and utility programs needed for reproducing the executable from it. However, as a special exception, the source code distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable. It may happen that this requirement contradicts the license restrictions of other proprietary libraries that do not normally accompany the operating system. Such a contradiction means you cannot use both them and the Library together in an executable that you distribute. 7. You may place library facilities that are a work based on the Library side-by-side in a single library together with other library facilities not covered by this License, and distribute such a combined library, provided that the separate distribution of the work based on the Library and of the other library facilities is otherwise permitted, and provided that you do these two things: a) Accompany the combined library with a copy of the same work based on the Library, uncombined with any other library facilities. This must be distributed under the terms of the Sections above. b) Give prominent notice with the combined library of the fact that part of it is a work based on the Library, and explaining where to find the accompanying uncombined form of the same work. 8. You may not copy, modify, sublicense, link with, or distribute the Library except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense, link with, or distribute the Library is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance. 9. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Library or its derivative works. These actions are prohibited by law if you do not accept this License. 
Therefore, by modifying or distributing the Library (or any work based on the Library), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Library or works based on it. 10. Each time you redistribute the Library (or any work based on the Library), the recipient automatically receives a license from the original licensor to copy, distribute, link with or modify the Library subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties to this License. 11. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Library at all. For example, if a patent license would not permit royalty-free redistribution of the Library by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Library. If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply, and the section as a whole is intended to apply in other circumstances. It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice. This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License. 12. If the distribution and/or use of the Library is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Library under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License. 13. The Free Software Foundation may publish revised and/or new versions of the Library General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Library specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. 
If the Library does not specify a license version number, you may choose any version ever published by the Free Software Foundation. 14. If you wish to incorporate parts of the Library into other free programs whose distribution conditions are incompatible with these, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally. NO WARRANTY 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. END OF TERMS AND CONDITIONS How to Apply These Terms to Your New Libraries If you develop a new library, and you want it to be of the greatest possible use to the public, we recommend making it free software that everyone can redistribute and change. You can do so by permitting redistribution under these terms (or, alternatively, under the terms of the ordinary General Public License). To apply these terms, attach the following notices to the library. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. Copyright (C) <year> <name of author> This library is free software; you can redistribute it and/or modify it under the terms of the GNU Library General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Library General Public License for more details. You should have received a copy of the GNU Library General Public License along with this library; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA Also add information on how to contact you by electronic and paper mail. You should also get your employer (if you work as a programmer) or your school, if any, to sign a "copyright disclaimer" for the library, if necessary. 
Here is a sample; alter the names: Yoyodyne, Inc., hereby disclaims all copyright interest in the library `Frob' (a library for tweaking knobs) written by James Random Hacker. , 1 April 1990 Ty Coon, President of Vice That's all there is to it! sphinx-2.0.4-release/api/libsphinxclient/install-sh0000755000176700017710000002176611102461317021727 0ustar deogardeogar#!/bin/sh # install - install a program, script, or datafile scriptversion=2004-09-10.20 # This originates from X11R5 (mit/util/scripts/install.sh), which was # later released in X11R6 (xc/config/util/install.sh) with the # following copyright and license. # # Copyright (C) 1994 X Consortium # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to # deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or # sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # X CONSORTIUM BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN # AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNEC- # TION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. # # Except as contained in this notice, the name of the X Consortium shall not # be used in advertising or otherwise to promote the sale, use or other deal- # ings in this Software without prior written authorization from the X Consor- # tium. # # # FSF changes to this file are in the public domain. # # Calling this script install-sh is preferred over install.sh, to prevent # `make' implicit rules from creating a file called install from it # when there is no Makefile. # # This script is compatible with the BSD install script, but was written # from scratch. It can only install one file at a time, a restriction # shared with many OS's install programs. # set DOITPROG to echo to test this script # Don't use :- since 4.3BSD and earlier shells don't like it. doit="${DOITPROG-}" # put in absolute paths if you don't have them in your path; or use env. vars. mvprog="${MVPROG-mv}" cpprog="${CPPROG-cp}" chmodprog="${CHMODPROG-chmod}" chownprog="${CHOWNPROG-chown}" chgrpprog="${CHGRPPROG-chgrp}" stripprog="${STRIPPROG-strip}" rmprog="${RMPROG-rm}" mkdirprog="${MKDIRPROG-mkdir}" chmodcmd="$chmodprog 0755" chowncmd= chgrpcmd= stripcmd= rmcmd="$rmprog -f" mvcmd="$mvprog" src= dst= dir_arg= dstarg= no_target_directory= usage="Usage: $0 [OPTION]... [-T] SRCFILE DSTFILE or: $0 [OPTION]... SRCFILES... DIRECTORY or: $0 [OPTION]... -t DIRECTORY SRCFILES... or: $0 [OPTION]... -d DIRECTORIES... In the 1st form, copy SRCFILE to DSTFILE. In the 2nd and 3rd, copy all SRCFILES to DIRECTORY. In the 4th, create DIRECTORIES. Options: -c (ignored) -d create directories instead of installing files. -g GROUP $chgrpprog installed files to GROUP. -m MODE $chmodprog installed files to MODE. -o USER $chownprog installed files to USER. -s $stripprog installed files. -t DIRECTORY install into DIRECTORY. -T report an error if DSTFILE is a directory. 
--help display this help and exit. --version display version info and exit. Environment variables override the default commands: CHGRPPROG CHMODPROG CHOWNPROG CPPROG MKDIRPROG MVPROG RMPROG STRIPPROG " while test -n "$1"; do case $1 in -c) shift continue;; -d) dir_arg=true shift continue;; -g) chgrpcmd="$chgrpprog $2" shift shift continue;; --help) echo "$usage"; exit 0;; -m) chmodcmd="$chmodprog $2" shift shift continue;; -o) chowncmd="$chownprog $2" shift shift continue;; -s) stripcmd=$stripprog shift continue;; -t) dstarg=$2 shift shift continue;; -T) no_target_directory=true shift continue;; --version) echo "$0 $scriptversion"; exit 0;; *) # When -d is used, all remaining arguments are directories to create. # When -t is used, the destination is already specified. test -n "$dir_arg$dstarg" && break # Otherwise, the last argument is the destination. Remove it from $@. for arg do if test -n "$dstarg"; then # $@ is not empty: it contains at least $arg. set fnord "$@" "$dstarg" shift # fnord fi shift # arg dstarg=$arg done break;; esac done if test -z "$1"; then if test -z "$dir_arg"; then echo "$0: no input file specified." >&2 exit 1 fi # It's OK to call `install-sh -d' without argument. # This can happen when creating conditional directories. exit 0 fi for src do # Protect names starting with `-'. case $src in -*) src=./$src ;; esac if test -n "$dir_arg"; then dst=$src src= if test -d "$dst"; then mkdircmd=: chmodcmd= else mkdircmd=$mkdirprog fi else # Waiting for this to be detected by the "$cpprog $src $dsttmp" command # might cause directories to be created, which would be especially bad # if $src (and thus $dsttmp) contains '*'. if test ! -f "$src" && test ! -d "$src"; then echo "$0: $src does not exist." >&2 exit 1 fi if test -z "$dstarg"; then echo "$0: no destination specified." >&2 exit 1 fi dst=$dstarg # Protect names starting with `-'. case $dst in -*) dst=./$dst ;; esac # If destination is a directory, append the input filename; won't work # if double slashes aren't ignored. if test -d "$dst"; then if test -n "$no_target_directory"; then echo "$0: $dstarg: Is a directory" >&2 exit 1 fi dst=$dst/`basename "$src"` fi fi # This sed command emulates the dirname command. dstdir=`echo "$dst" | sed -e 's,[^/]*$,,;s,/$,,;s,^$,.,'` # Make sure that the destination directory exists. # Skip lots of stat calls in the usual case. if test ! -d "$dstdir"; then defaultIFS=' ' IFS="${IFS-$defaultIFS}" oIFS=$IFS # Some sh's can't handle IFS=/ for some reason. IFS='%' set - `echo "$dstdir" | sed -e 's@/@%@g' -e 's@^%@/@'` IFS=$oIFS pathcomp= while test $# -ne 0 ; do pathcomp=$pathcomp$1 shift if test ! -d "$pathcomp"; then $mkdirprog "$pathcomp" # mkdir can fail with a `File exist' error in case several # install-sh are creating the directory concurrently. This # is OK. test -d "$pathcomp" || exit fi pathcomp=$pathcomp/ done fi if test -n "$dir_arg"; then $doit $mkdircmd "$dst" \ && { test -z "$chowncmd" || $doit $chowncmd "$dst"; } \ && { test -z "$chgrpcmd" || $doit $chgrpcmd "$dst"; } \ && { test -z "$stripcmd" || $doit $stripcmd "$dst"; } \ && { test -z "$chmodcmd" || $doit $chmodcmd "$dst"; } else dstfile=`basename "$dst"` # Make a couple of temp file names in the proper directory. dsttmp=$dstdir/_inst.$$_ rmtmp=$dstdir/_rm.$$_ # Trap to clean up those temp files at exit. trap 'ret=$?; rm -f "$dsttmp" "$rmtmp" && exit $ret' 0 trap '(exit $?); exit' 1 2 13 15 # Copy the file name to the temp name. 
$doit $cpprog "$src" "$dsttmp" && # and set any options; do chmod last to preserve setuid bits. # # If any of these fail, we abort the whole thing. If we want to # ignore errors from any of these, just make sure not to ignore # errors from the above "$doit $cpprog $src $dsttmp" command. # { test -z "$chowncmd" || $doit $chowncmd "$dsttmp"; } \ && { test -z "$chgrpcmd" || $doit $chgrpcmd "$dsttmp"; } \ && { test -z "$stripcmd" || $doit $stripcmd "$dsttmp"; } \ && { test -z "$chmodcmd" || $doit $chmodcmd "$dsttmp"; } && # Now rename the file to the real destination. { $doit $mvcmd -f "$dsttmp" "$dstdir/$dstfile" 2>/dev/null \ || { # The rename failed, perhaps because mv can't rename something else # to itself, or perhaps because mv is so ancient that it does not # support -f. # Now remove or move aside any old file at destination location. # We try this two ways since rm can't unlink itself on some # systems and the destination file might be busy for other # reasons. In this case, the final cleanup might fail but the new # file should still install successfully. { if test -f "$dstdir/$dstfile"; then $doit $rmcmd -f "$dstdir/$dstfile" 2>/dev/null \ || $doit $mvcmd -f "$dstdir/$dstfile" "$rmtmp" 2>/dev/null \ || { echo "$0: cannot unlink or rename $dstdir/$dstfile" >&2 (exit 1); exit } else : fi } && # Now rename the file to the real destination. $doit $mvcmd "$dsttmp" "$dstdir/$dstfile" } } fi || { (exit 1); exit; } done # The final little trick to "correctly" pass the exit status to the exit trap. { (exit 0); exit } # Local variables: # eval: (add-hook 'write-file-hooks 'time-stamp) # time-stamp-start: "scriptversion=" # time-stamp-format: "%:y-%02m-%02d.%02H" # time-stamp-end: "$" # End: sphinx-2.0.4-release/api/libsphinxclient/sphinxclient.h0000644000176700017710000002076511723657702022620 0ustar deogardeogar// // $Id: sphinxclient.h 3132 2012-03-01 11:38:42Z klirichek $ // // // Copyright (c) 2001-2012, Andrew Aksyonoff // Copyright (c) 2008-2012, Sphinx Technologies Inc // All rights reserved // // This program is free software; you can redistribute it and/or modify // it under the terms of the GNU Library General Public License. 
You should // have received a copy of the LGPL license along with this program; if you // did not, you can find it at http://www.gnu.org/ // #ifndef _sphinxclient_ #define _sphinxclient_ #ifdef __cplusplus extern "C" { #endif /// known searchd status codes enum { SEARCHD_OK = 0, SEARCHD_ERROR = 1, SEARCHD_RETRY = 2, SEARCHD_WARNING = 3 }; /// known match modes enum { SPH_MATCH_ALL = 0, SPH_MATCH_ANY = 1, SPH_MATCH_PHRASE = 2, SPH_MATCH_BOOLEAN = 3, SPH_MATCH_EXTENDED = 4, SPH_MATCH_FULLSCAN = 5, SPH_MATCH_EXTENDED2 = 6 }; /// known ranking modes (ext2 only) enum { SPH_RANK_PROXIMITY_BM25 = 0, SPH_RANK_BM25 = 1, SPH_RANK_NONE = 2, SPH_RANK_WORDCOUNT = 3, SPH_RANK_PROXIMITY = 4, SPH_RANK_MATCHANY = 5, SPH_RANK_FIELDMASK = 6, SPH_RANK_SPH04 = 7, SPH_RANK_EXPR = 8, SPH_RANK_TOTAL = 9, SPH_RANK_DEFAULT = SPH_RANK_PROXIMITY_BM25 }; /// known sort modes enum { SPH_SORT_RELEVANCE = 0, SPH_SORT_ATTR_DESC = 1, SPH_SORT_ATTR_ASC = 2, SPH_SORT_TIME_SEGMENTS = 3, SPH_SORT_EXTENDED = 4, SPH_SORT_EXPR = 5 }; /// known filter types enum { SPH_FILTER_VALUES = 0, SPH_FILTER_RANGE = 1, SPH_FILTER_FLOATRANGE = 2 }; /// known attribute types enum { SPH_ATTR_INTEGER = 1, SPH_ATTR_TIMESTAMP = 2, SPH_ATTR_ORDINAL = 3, SPH_ATTR_BOOL = 4, SPH_ATTR_FLOAT = 5, SPH_ATTR_BIGINT = 6, SPH_ATTR_STRING = 7, SPH_ATTR_MULTI = 0x40000001UL, SPH_ATTR_MULTI64 = 0x40000002UL }; /// known grouping functions enum { SPH_GROUPBY_DAY = 0, SPH_GROUPBY_WEEK = 1, SPH_GROUPBY_MONTH = 2, SPH_GROUPBY_YEAR = 3, SPH_GROUPBY_ATTR = 4, SPH_GROUPBY_ATTRPAIR = 5 }; ////////////////////////////////////////////////////////////////////////// #if defined(_MSC_VER) typedef __int64 sphinx_int64_t; typedef unsigned __int64 sphinx_uint64_t; #else // !defined(_MSC_VER) typedef long long sphinx_int64_t; typedef unsigned long long sphinx_uint64_t; #endif // !defined(_MSC_VER) typedef int sphinx_bool; #define SPH_TRUE 1 #define SPH_FALSE 0 ////////////////////////////////////////////////////////////////////////// typedef struct st_sphinx_client sphinx_client; typedef struct st_sphinx_wordinfo { const char * word; int docs; int hits; } sphinx_wordinfo; typedef struct st_sphinx_result { const char * error; const char * warning; int status; int num_fields; char ** fields; int num_attrs; char ** attr_names; int * attr_types; int num_matches; void * values_pool; int total; int total_found; int time_msec; int num_words; sphinx_wordinfo * words; } sphinx_result; typedef struct st_sphinx_excerpt_options { const char * before_match; const char * after_match; const char * chunk_separator; const char * html_strip_mode; const char * passage_boundary; int limit; int limit_passages; int limit_words; int around; int start_passage_id; sphinx_bool exact_phrase; sphinx_bool single_passage; sphinx_bool use_boundaries; sphinx_bool weight_order; sphinx_bool query_mode; sphinx_bool force_all_words; sphinx_bool load_files; sphinx_bool allow_empty; sphinx_bool emit_zones; } sphinx_excerpt_options; typedef struct st_sphinx_keyword_info { char * tokenized; char * normalized; int num_docs; int num_hits; } sphinx_keyword_info; ////////////////////////////////////////////////////////////////////////// sphinx_client * sphinx_create ( sphinx_bool copy_args ); void sphinx_cleanup ( sphinx_client * client ); void sphinx_destroy ( sphinx_client * client ); const char * sphinx_error ( sphinx_client * client ); const char * sphinx_warning ( sphinx_client * client ); sphinx_bool sphinx_set_server ( sphinx_client * client, const char * host, int port ); sphinx_bool sphinx_set_connect_timeout ( 
sphinx_client * client, float seconds ); sphinx_bool sphinx_open ( sphinx_client * client ); sphinx_bool sphinx_close ( sphinx_client * client ); sphinx_bool sphinx_set_limits ( sphinx_client * client, int offset, int limit, int max_matches, int cutoff ); sphinx_bool sphinx_set_max_query_time ( sphinx_client * client, int max_query_time ); sphinx_bool sphinx_set_match_mode ( sphinx_client * client, int mode ); sphinx_bool sphinx_set_ranking_mode ( sphinx_client * client, int ranker, const char * rankexpr ); sphinx_bool sphinx_set_sort_mode ( sphinx_client * client, int mode, const char * sortby ); sphinx_bool sphinx_set_field_weights ( sphinx_client * client, int num_weights, const char ** field_names, const int * field_weights ); sphinx_bool sphinx_set_index_weights ( sphinx_client * client, int num_weights, const char ** index_names, const int * index_weights ); sphinx_bool sphinx_set_id_range ( sphinx_client * client, sphinx_uint64_t minid, sphinx_uint64_t maxid ); sphinx_bool sphinx_add_filter ( sphinx_client * client, const char * attr, int num_values, const sphinx_int64_t * values, sphinx_bool exclude ); sphinx_bool sphinx_add_filter_range ( sphinx_client * client, const char * attr, sphinx_int64_t umin, sphinx_int64_t umax, sphinx_bool exclude ); sphinx_bool sphinx_add_filter_float_range ( sphinx_client * client, const char * attr, float fmin, float fmax, sphinx_bool exclude ); sphinx_bool sphinx_set_geoanchor ( sphinx_client * client, const char * attr_latitude, const char * attr_longitude, float latitude, float longitude ); sphinx_bool sphinx_set_groupby ( sphinx_client * client, const char * attr, int groupby_func, const char * group_sort ); sphinx_bool sphinx_set_groupby_distinct ( sphinx_client * client, const char * attr ); sphinx_bool sphinx_set_retries ( sphinx_client * client, int count, int delay ); sphinx_bool sphinx_add_override ( sphinx_client * client, const char * attr, const sphinx_uint64_t * docids, int num_values, const unsigned int * values ); sphinx_bool sphinx_set_select ( sphinx_client * client, const char * select_list ); void sphinx_reset_filters ( sphinx_client * client ); void sphinx_reset_groupby ( sphinx_client * client ); sphinx_result * sphinx_query ( sphinx_client * client, const char * query, const char * index_list, const char * comment ); int sphinx_add_query ( sphinx_client * client, const char * query, const char * index_list, const char * comment ); sphinx_result * sphinx_run_queries ( sphinx_client * client ); int sphinx_get_num_results ( sphinx_client * client ); sphinx_uint64_t sphinx_get_id ( sphinx_result * result, int match ); int sphinx_get_weight ( sphinx_result * result, int match ); sphinx_int64_t sphinx_get_int ( sphinx_result * result, int match, int attr ); float sphinx_get_float ( sphinx_result * result, int match, int attr ); unsigned int * sphinx_get_mva ( sphinx_result * result, int match, int attr ); sphinx_uint64_t sphinx_get_mva64_value ( unsigned int * mva, int i ); const char * sphinx_get_string ( sphinx_result * result, int match, int attr ); void sphinx_init_excerpt_options ( sphinx_excerpt_options * opts ); char ** sphinx_build_excerpts ( sphinx_client * client, int num_docs, const char ** docs, const char * index, const char * words, sphinx_excerpt_options * opts ); int sphinx_update_attributes ( sphinx_client * client, const char * index, int num_attrs, const char ** attrs, int num_docs, const sphinx_uint64_t * docids, const sphinx_int64_t * values ); int sphinx_update_attributes_mva ( sphinx_client * client, const char 
* index, const char * attr, sphinx_uint64_t docid, int num_values, const unsigned int * values ); sphinx_keyword_info * sphinx_build_keywords ( sphinx_client * client, const char * query, const char * index, sphinx_bool hits, int * out_num_keywords ); char ** sphinx_status ( sphinx_client * client, int * num_rows, int * num_cols ); void sphinx_status_destroy ( char ** status, int num_rows, int num_cols ); ///////////////////////////////////////////////////////////////////////////// #ifdef __cplusplus } #endif #endif // _sphinxclient_ // // $Id: sphinxclient.h 3132 2012-03-01 11:38:42Z klirichek $ // sphinx-2.0.4-release/api/libsphinxclient/smoke_ref.txt0000644000176700017710000002360111707456735022447 0ustar deogardeogarexact_phrase=0 n=1, res=this is my test text to be highlighted ... n=2, res=another test text to be highlighted, below limit n=3, res=test number three, without phrase match n=4, res=final test, not only without ... with swapped phrase text test as well exact_phrase=1 n=1, res=this is my test text to be highlighted ... n=2, res=another test text to be highlighted, below limit n=3, res=test number three, without phrase match n=4, res=final test, not only without phrase match, but also above ... passage_boundary=zone n=1, res= ... manager it. Is Filing this report and. It is signed hereby represent. That it is all information.are It or is cool It is cooler It is another ... passage_boundary=sentence n=1, res= ... The institutional investment manager it. ... Is Filing this report and. ... It is signed hereby represent. ... That it is all information.are It or is cool It is cooler It is another place! ... build_keywords result: 1. tokenized=hello, normalized=hello, docs=0, hits=0 2. tokenized=test, normalized=test, docs=3, hits=5 3. tokenized=one, normalized=one, docs=1, hits=2 Query 'is' retrieved 4 of 4 matches. Query stats: 'is' found 4 times in 4 documents Matches: 1. doc_id=1, weight=1304, idd=1, group_id=1, tag=(1,3,5,7), tag64=(1,3,5,7), tag2=(1,3,5,7) 2. doc_id=2, weight=1304, idd=2, group_id=1, tag=(2,4,6), tag64=(2,4,6), tag2=(2,4,6) 3. doc_id=3, weight=1304, idd=3, group_id=2, tag=(15), tag64=(15), tag2=(15) 4. doc_id=4, weight=1304, idd=4, group_id=2, tag=(7,40), tag64=(7,40), tag2=(7,40) Query 'is test' retrieved 3 of 3 matches. Query stats: 'is' found 4 times in 4 documents 'test' found 5 times in 3 documents Matches: 1. doc_id=1, weight=101362, idd=1, group_id=1, tag=(1,3,5,7), tag64=(1,3,5,7), tag2=(1,3,5,7) 2. doc_id=2, weight=101362, idd=2, group_id=1, tag=(2,4,6), tag64=(2,4,6), tag2=(2,4,6) 3. doc_id=4, weight=1373, idd=4, group_id=2, tag=(7,40), tag64=(7,40), tag2=(7,40) Query 'test number' retrieved 3 of 3 matches. Query stats: 'test' found 5 times in 3 documents 'number' found 3 times in 3 documents Matches: 1. doc_id=4, weight=101442, idd=4, group_id=2, tag=(7,40), tag64=(7,40), tag2=(7,40) 2. doc_id=1, weight=101432, idd=1, group_id=1, tag=(1,3,5,7), tag64=(1,3,5,7), tag2=(1,3,5,7) 3. doc_id=2, weight=101432, idd=2, group_id=1, tag=(2,4,6), tag64=(2,4,6), tag2=(2,4,6) Query 'is' retrieved 4 of 4 matches. Query stats: 'is' found 4 times in 4 documents Matches: 1. doc_id=1, weight=1304, idd=1, group_id=1, tag=(1,3,5,7), tag64=(1,3,5,7), tag2=(1,3,5,7) 2. doc_id=2, weight=1304, idd=2, group_id=1, tag=(2,4,6), tag64=(2,4,6), tag2=(2,4,6) 3. doc_id=3, weight=1304, idd=3, group_id=2, tag=(15), tag64=(15), tag2=(15) 4. doc_id=4, weight=1304, idd=4, group_id=2, tag=(7,40), tag64=(7,40), tag2=(7,40) Query 'is' retrieved 2 of 2 matches. 
Query stats: 'is' found 4 times in 4 documents Matches: 1. doc_id=1, weight=1304, idd=1, group_id=1, tag=(1,3,5,7), tag64=(1,3,5,7), tag2=(1,3,5,7), @groupby=1, @count=2 2. doc_id=3, weight=1304, idd=3, group_id=2, tag=(15), tag64=(15), tag2=(15), @groupby=2, @count=2 Query 'is' retrieved 9 of 9 matches. Query stats: 'is' found 4 times in 4 documents Matches: 1. doc_id=1, weight=1304, idd=1, group_id=1, tag=(1,3,5,7), tag64=(1,3,5,7), tag2=(1,3,5,7), @groupby=1, @count=1 2. doc_id=2, weight=1304, idd=2, group_id=1, tag=(2,4,6), tag64=(2,4,6), tag2=(2,4,6), @groupby=2, @count=1 3. doc_id=1, weight=1304, idd=1, group_id=1, tag=(1,3,5,7), tag64=(1,3,5,7), tag2=(1,3,5,7), @groupby=3, @count=1 4. doc_id=2, weight=1304, idd=2, group_id=1, tag=(2,4,6), tag64=(2,4,6), tag2=(2,4,6), @groupby=4, @count=1 5. doc_id=1, weight=1304, idd=1, group_id=1, tag=(1,3,5,7), tag64=(1,3,5,7), tag2=(1,3,5,7), @groupby=5, @count=1 6. doc_id=2, weight=1304, idd=2, group_id=1, tag=(2,4,6), tag64=(2,4,6), tag2=(2,4,6), @groupby=6, @count=1 7. doc_id=1, weight=1304, idd=1, group_id=1, tag=(1,3,5,7), tag64=(1,3,5,7), tag2=(1,3,5,7), @groupby=7, @count=2 8. doc_id=3, weight=1304, idd=3, group_id=2, tag=(15), tag64=(15), tag2=(15), @groupby=15, @count=1 9. doc_id=4, weight=1304, idd=4, group_id=2, tag=(7,40), tag64=(7,40), tag2=(7,40), @groupby=40, @count=1 Query 'is' retrieved 2 of 2 matches. Query stats: 'is' found 4 times in 4 documents Matches: 1. doc_id=1, weight=1304, idd=1, group_id=1, tag=(1,3,5,7), tag64=(1,3,5,7), tag2=(1,3,5,7) 2. doc_id=2, weight=1304, idd=2, group_id=1, tag=(2,4,6), tag64=(2,4,6), tag2=(2,4,6) Query 'is' retrieved 2 of 2 matches. Query stats: 'is' found 4 times in 4 documents Matches: 1. doc_id=1, weight=1304, idd=1, group_id=1, tag=(1,3,5,7), tag64=(1,3,5,7), tag2=(1,3,5,7) 2. doc_id=4, weight=1304, idd=4, group_id=2, tag=(7,40), tag64=(7,40), tag2=(7,40) update success, 1 rows updated update mva success, 1 rows updated Query 'is' retrieved 4 of 4 matches. Query stats: 'is' found 4 times in 4 documents Matches: 1. doc_id=4, weight=1304, idd=4, group_id=2, tag=(7,40), tag64=(7,40), tag2=(7,40) 2. doc_id=3, weight=1304, idd=3, group_id=2, tag=(7,77,177), tag64=(15), tag2=(15) 3. doc_id=2, weight=1304, idd=2, group_id=123, tag=(2,4,6), tag64=(2,4,6), tag2=(2,4,6) 4. doc_id=1, weight=1304, idd=1, group_id=1, tag=(1,3,5,7), tag64=(1,3,5,7), tag2=(1,3,5,7) update success, 1 rows updated update success, 1 rows updated Query 'is' retrieved 4 of 4 matches. Query stats: 'is' found 4 times in 4 documents Matches: 1. doc_id=1, weight=1304, idd=1, group_id=1, tag=(1,3,5,7), tag64=(1,3,5,7), tag2=(1,3,5,7) 2. doc_id=2, weight=1304, idd=2, group_id=123, tag=(2,4,6), tag64=(2,4,6), tag2=(2,4,6) 3. doc_id=3, weight=1304, idd=3, group_id=123, tag=(7,77,177), tag64=(15), tag2=(15) 4. doc_id=4, weight=1304, idd=4, group_id=123, tag=(7,40), tag64=(7,40), tag2=(7,40) Query 'is' retrieved 2 of 2 matches. Query stats: 'is' found 4 times in 4 documents Matches: 1. doc_id=1, weight=1304, idd=1, group_id=1, tag=(1,3,5,7), tag64=(1,3,5,7), tag2=(1,3,5,7), @groupby=1, @count=1 2. doc_id=2, weight=1304, idd=2, group_id=123, tag=(2,4,6), tag64=(2,4,6), tag2=(2,4,6), @groupby=123, @count=3 Query 'is' retrieved 10 of 10 matches. Query stats: 'is' found 4 times in 4 documents Matches: 1. doc_id=1, weight=1304, idd=1, group_id=1, tag=(1,3,5,7), tag64=(1,3,5,7), tag2=(1,3,5,7), @groupby=1, @count=1 2. doc_id=2, weight=1304, idd=2, group_id=123, tag=(2,4,6), tag64=(2,4,6), tag2=(2,4,6), @groupby=2, @count=1 3. 
doc_id=1, weight=1304, idd=1, group_id=1, tag=(1,3,5,7), tag64=(1,3,5,7), tag2=(1,3,5,7), @groupby=3, @count=1 4. doc_id=2, weight=1304, idd=2, group_id=123, tag=(2,4,6), tag64=(2,4,6), tag2=(2,4,6), @groupby=4, @count=1 5. doc_id=1, weight=1304, idd=1, group_id=1, tag=(1,3,5,7), tag64=(1,3,5,7), tag2=(1,3,5,7), @groupby=5, @count=1 6. doc_id=2, weight=1304, idd=2, group_id=123, tag=(2,4,6), tag64=(2,4,6), tag2=(2,4,6), @groupby=6, @count=1 7. doc_id=1, weight=1304, idd=1, group_id=1, tag=(1,3,5,7), tag64=(1,3,5,7), tag2=(1,3,5,7), @groupby=7, @count=3 8. doc_id=4, weight=1304, idd=4, group_id=123, tag=(7,40), tag64=(7,40), tag2=(7,40), @groupby=40, @count=1 9. doc_id=3, weight=1304, idd=3, group_id=123, tag=(7,77,177), tag64=(15), tag2=(15), @groupby=77, @count=1 10. doc_id=3, weight=1304, idd=3, group_id=123, tag=(7,77,177), tag64=(15), tag2=(15), @groupby=177, @count=1 Query 'is' retrieved 1 of 1 matches. Query stats: 'is' found 4 times in 4 documents Matches: 1. doc_id=1, weight=1304, idd=1, group_id=1, tag=(1,3,5,7), tag64=(1,3,5,7), tag2=(1,3,5,7) Query 'is' retrieved 3 of 3 matches. Query stats: 'is' found 4 times in 4 documents Matches: 1. doc_id=1, weight=1304, idd=1, group_id=1, tag=(1,3,5,7), tag64=(1,3,5,7), tag2=(1,3,5,7) 2. doc_id=3, weight=1304, idd=3, group_id=123, tag=(7,77,177), tag64=(15), tag2=(15) 3. doc_id=4, weight=1304, idd=4, group_id=123, tag=(7,40), tag64=(7,40), tag2=(7,40) Query 'is' retrieved 4 of 4 matches. Query stats: 'is' found 4 times in 4 documents Matches: 1. doc_id=1, weight=1304, idd=1, group_id=1, tag=(1,3,5,7), tag64=(1,3,5,7), tag2=(1,3,5,7), q=1010 2. doc_id=2, weight=1304, idd=2, group_id=123, tag=(2,4,6), tag64=(2,4,6), tag2=(2,4,6), q=123020 3. doc_id=3, weight=1304, idd=3, group_id=123, tag=(7,77,177), tag64=(15), tag2=(15), q=123030 4. doc_id=4, weight=1304, idd=4, group_id=123, tag=(7,40), tag64=(7,40), tag2=(7,40), q=123040 Query 'is' retrieved 4 of 4 matches. Query stats: 'is' found 4 times in 4 documents Matches: 1. doc_id=1, weight=1304, idd=1, tag=(1,3,5,7), tag64=(1,3,5,7), tag2=(1,3,5,7), group_id=1, q=1010 2. doc_id=2, weight=1304, idd=2, tag=(2,4,6), tag64=(2,4,6), tag2=(2,4,6), group_id=2000, q=2000020 3. doc_id=3, weight=1304, idd=3, tag=(7,77,177), tag64=(15), tag2=(15), group_id=123, q=123030 4. doc_id=4, weight=1304, idd=4, tag=(7,40), tag64=(7,40), tag2=(7,40), group_id=123, q=123040 Query 'is' retrieved 3 of 3 matches. Query stats: 'is' found 4 times in 4 documents Matches: 1. doc_id=1, weight=1304, idd=1, tag=(1,3,5,7), tag64=(1,3,5,7), tag2=(1,3,5,7), group_id=1, q=1010, @groupby=1, @count=1 2. doc_id=3, weight=1304, idd=3, tag=(7,77,177), tag64=(15), tag2=(15), group_id=123, q=123030, @groupby=123, @count=2 3. doc_id=2, weight=1304, idd=2, tag=(2,4,6), tag64=(2,4,6), tag2=(2,4,6), group_id=2000, q=2000020, @groupby=2000, @count=1 connections: 19 maxed_out: 0 command_search: 18 command_excerpt: 4 command_update: 4 command_keywords: 1 command_persist: 1 command_status: 1 command_flushattrs: 0 agent_connect: 1 agent_retry: 0 queries: 18 dist_queries: 1 ag_dist_0_connect_failures: 0 ag_dist_0_network_errors: 0 ag_dist_0_wrong_replies: 0 ag_dist_0_unexpected_closings: 0 query_cpu: OFF dist_local: 0.000 query_reads: OFF query_readkb: OFF avg_query_cpu: OFF avg_dist_local: 0.000 avg_query_reads: OFF avg_query_readkb: OFF n=0, query failed: response length out of bounds (len=8388666) n=1, res=that is no need to worry about ... 
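The declarations in sphinxclient.h above are enough to sketch a minimal client. The program below is an illustrative example only and is not shipped in this release; the server address "localhost", port 9312, and the index name "test1" are assumptions, not values taken from this package. It issues a single query and prints the matches in roughly the same shape as the smoke_ref.txt output above.

/* minimal libsphinxclient usage sketch (illustrative only, not part of the release) */
#include <stdio.h>
#include "sphinxclient.h"

int main ( void )
{
	sphinx_client * client;
	sphinx_result * res;
	int i;

	/* create a client that copies its string arguments */
	client = sphinx_create ( SPH_TRUE );
	if ( !client )
		return 1;

	/* assumed server location and basic query settings */
	sphinx_set_server ( client, "localhost", 9312 );
	sphinx_set_match_mode ( client, SPH_MATCH_EXTENDED2 );
	sphinx_set_limits ( client, 0, 20, 1000, 0 );

	/* run one query against an assumed index name */
	res = sphinx_query ( client, "is", "test1", NULL );
	if ( !res )
	{
		fprintf ( stderr, "query failed: %s\n", sphinx_error ( client ) );
		sphinx_destroy ( client );
		return 1;
	}

	printf ( "retrieved %d of %d matches\n", res->num_matches, res->total_found );
	for ( i=0; i<res->num_matches; i++ )
		printf ( "%d. doc_id=%llu, weight=%d\n", i+1,
			(unsigned long long) sphinx_get_id ( res, i ),
			sphinx_get_weight ( res, i ) );

	/* clean up; the result buffers are managed by the client */
	sphinx_destroy ( client );
	return 0;
}

Compile and link this against libsphinxclient however your build of the library provides it; the exact link line is build-dependent and not specified here.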
sphinx-2.0.4-release/api/libsphinxclient/buildconf.sh0000644000176700017710000000120011034764367022220 0ustar deogardeogar#!/bin/sh eval `grep '^EXTRA_VERSION=' configure.in` case "$EXTRA_VERSION" in *-dev) rebuildok=1 ;; *) rebuildok=0 ;; esac cvsclean=0 while test $# -gt 0; do if test "$1" = "--force"; then rebuildok=1 echo "Forcing buildconf" fi if test "$1" = "--clean"; then cvsclean=1 fi shift done if test "$rebuildok" = "0"; then echo "You should not run buildconf in a release package." echo "use buildconf --force to override this check." exit 1 fi if test "$cvsclean" = "1"; then echo "Cleaning autogenerated files" ${MAKE:-make} -s -f build.mk cvsclean else ${MAKE:-make} -s -f build.mk fi sphinx-2.0.4-release/api/libsphinxclient/config.sub0000755000176700017710000007301511102461317021700 0ustar deogardeogar#! /bin/sh # Configuration validation subroutine script. # Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, # 2000, 2001, 2002, 2003 Free Software Foundation, Inc. timestamp='2003-06-18' # This file is (in principle) common to ALL GNU software. # The presence of a machine in this file suggests that SOME GNU software # can handle that machine. It does not imply ALL GNU software can. # # This file is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place - Suite 330, # Boston, MA 02111-1307, USA. # As a special exception to the GNU General Public License, if you # distribute this file as part of a program that contains a # configuration script generated by Autoconf, you may include it under # the same distribution terms that you use for the rest of that program. # Please send patches to . Submit a context # diff and a properly formatted ChangeLog entry. # # Configuration subroutine to validate and canonicalize a configuration type. # Supply the specified configuration type as an argument. # If it is invalid, we print an error message on stderr and exit with code 1. # Otherwise, we print the canonical config type on stdout and succeed. # This file is supposed to be the same for all GNU packages # and recognize all the CPU types, system types and aliases # that are meaningful with *any* GNU software. # Each package is responsible for reporting which valid configurations # it does not support. The user should be able to distinguish # a failure to support a valid configuration from a meaningless # configuration. # The goal of this file is to map all the various variations of a given # machine specification into a single specification in the form: # CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM # or in some cases, the newer four-part form: # CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM # It is wrong to echo any other type of specification. me=`echo "$0" | sed -e 's,.*/,,'` usage="\ Usage: $0 [OPTION] CPU-MFR-OPSYS $0 [OPTION] ALIAS Canonicalize a configuration name. 
Operation modes: -h, --help print this help, then exit -t, --time-stamp print date of last modification, then exit -v, --version print version number, then exit Report bugs and patches to ." version="\ GNU config.sub ($timestamp) Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001 Free Software Foundation, Inc. This is free software; see the source for copying conditions. There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE." help=" Try \`$me --help' for more information." # Parse command line while test $# -gt 0 ; do case $1 in --time-stamp | --time* | -t ) echo "$timestamp" ; exit 0 ;; --version | -v ) echo "$version" ; exit 0 ;; --help | --h* | -h ) echo "$usage"; exit 0 ;; -- ) # Stop option processing shift; break ;; - ) # Use stdin as input. break ;; -* ) echo "$me: invalid option $1$help" exit 1 ;; *local*) # First pass through any local machine types. echo $1 exit 0;; * ) break ;; esac done case $# in 0) echo "$me: missing argument$help" >&2 exit 1;; 1) ;; *) echo "$me: too many arguments$help" >&2 exit 1;; esac # Separate what the user gave into CPU-COMPANY and OS or KERNEL-OS (if any). # Here we must recognize all the valid KERNEL-OS combinations. maybe_os=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\2/'` case $maybe_os in nto-qnx* | linux-gnu* | freebsd*-gnu* | netbsd*-gnu* | storm-chaos* | os2-emx* | rtmk-nova*) os=-$maybe_os basic_machine=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\1/'` ;; *) basic_machine=`echo $1 | sed 's/-[^-]*$//'` if [ $basic_machine != $1 ] then os=`echo $1 | sed 's/.*-/-/'` else os=; fi ;; esac ### Let's recognize common machines as not being operating systems so ### that things like config.sub decstation-3100 work. We also ### recognize some manufacturers as not being operating systems, so we ### can provide default operating systems below. case $os in -sun*os*) # Prevent following clause from handling this invalid input. ;; -dec* | -mips* | -sequent* | -encore* | -pc532* | -sgi* | -sony* | \ -att* | -7300* | -3300* | -delta* | -motorola* | -sun[234]* | \ -unicom* | -ibm* | -next | -hp | -isi* | -apollo | -altos* | \ -convergent* | -ncr* | -news | -32* | -3600* | -3100* | -hitachi* |\ -c[123]* | -convex* | -sun | -crds | -omron* | -dg | -ultra | -tti* | \ -harris | -dolphin | -highlevel | -gould | -cbm | -ns | -masscomp | \ -apple | -axis) os= basic_machine=$1 ;; -sim | -cisco | -oki | -wec | -winbond) os= basic_machine=$1 ;; -scout) ;; -wrs) os=-vxworks basic_machine=$1 ;; -chorusos*) os=-chorusos basic_machine=$1 ;; -chorusrdb) os=-chorusrdb basic_machine=$1 ;; -hiux*) os=-hiuxwe2 ;; -sco5) os=-sco3.2v5 basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ;; -sco4) os=-sco3.2v4 basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ;; -sco3.2.[4-9]*) os=`echo $os | sed -e 's/sco3.2./sco3.2v/'` basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ;; -sco3.2v[4-9]*) # Don't forget version if it is 3.2v4 or newer. 
basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ;; -sco*) os=-sco3.2v2 basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ;; -udk*) basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ;; -isc) os=-isc2.2 basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ;; -clix*) basic_machine=clipper-intergraph ;; -isc*) basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ;; -lynx*) os=-lynxos ;; -ptx*) basic_machine=`echo $1 | sed -e 's/86-.*/86-sequent/'` ;; -windowsnt*) os=`echo $os | sed -e 's/windowsnt/winnt/'` ;; -psos*) os=-psos ;; -mint | -mint[0-9]*) basic_machine=m68k-atari os=-mint ;; esac # Decode aliases for certain CPU-COMPANY combinations. case $basic_machine in # Recognize the basic CPU types without company name. # Some are omitted here because they have special meanings below. 1750a | 580 \ | a29k \ | alpha | alphaev[4-8] | alphaev56 | alphaev6[78] | alphapca5[67] \ | alpha64 | alpha64ev[4-8] | alpha64ev56 | alpha64ev6[78] | alpha64pca5[67] \ | arc | arm | arm[bl]e | arme[lb] | armv[2345] | armv[345][lb] | avr \ | c4x | clipper \ | d10v | d30v | dlx | dsp16xx \ | fr30 | frv \ | h8300 | h8500 | hppa | hppa1.[01] | hppa2.0 | hppa2.0[nw] | hppa64 \ | i370 | i860 | i960 | ia64 \ | ip2k \ | m32r | m68000 | m68k | m88k | mcore \ | mips | mipsbe | mipseb | mipsel | mipsle \ | mips16 \ | mips64 | mips64el \ | mips64vr | mips64vrel \ | mips64orion | mips64orionel \ | mips64vr4100 | mips64vr4100el \ | mips64vr4300 | mips64vr4300el \ | mips64vr5000 | mips64vr5000el \ | mipsisa32 | mipsisa32el \ | mipsisa32r2 | mipsisa32r2el \ | mipsisa64 | mipsisa64el \ | mipsisa64sb1 | mipsisa64sb1el \ | mipsisa64sr71k | mipsisa64sr71kel \ | mipstx39 | mipstx39el \ | mn10200 | mn10300 \ | msp430 \ | ns16k | ns32k \ | openrisc | or32 \ | pdp10 | pdp11 | pj | pjl \ | powerpc | powerpc64 | powerpc64le | powerpcle | ppcbe \ | pyramid \ | s390 | s390x \ | sh | sh[1234] | sh[23]e | sh[34]eb | shbe | shle | sh[1234]le | sh3ele \ | sh64 | sh64le \ | sparc | sparc64 | sparc86x | sparclet | sparclite | sparcv9 | sparcv9b \ | strongarm \ | tahoe | thumb | tic4x | tic80 | tron \ | v850 | v850e \ | we32k \ | x86 | xscale | xstormy16 | xtensa \ | z8k) basic_machine=$basic_machine-unknown ;; m6811 | m68hc11 | m6812 | m68hc12) # Motorola 68HC11/12. basic_machine=$basic_machine-unknown os=-none ;; m88110 | m680[12346]0 | m683?2 | m68360 | m5200 | v70 | w65 | z8k) ;; # We use `pc' rather than `unknown' # because (1) that's what they normally are, and # (2) the word "unknown" tends to confuse beginning users. i*86 | x86_64) basic_machine=$basic_machine-pc ;; # Object if more than one company name word. *-*-*) echo Invalid configuration \`$1\': machine \`$basic_machine\' not recognized 1>&2 exit 1 ;; # Recognize the basic CPU types with company name. 
580-* \ | a29k-* \ | alpha-* | alphaev[4-8]-* | alphaev56-* | alphaev6[78]-* \ | alpha64-* | alpha64ev[4-8]-* | alpha64ev56-* | alpha64ev6[78]-* \ | alphapca5[67]-* | alpha64pca5[67]-* | arc-* \ | arm-* | armbe-* | armle-* | armeb-* | armv*-* \ | avr-* \ | bs2000-* \ | c[123]* | c30-* | [cjt]90-* | c4x-* | c54x-* | c55x-* | c6x-* \ | clipper-* | cydra-* \ | d10v-* | d30v-* | dlx-* \ | elxsi-* \ | f30[01]-* | f700-* | fr30-* | frv-* | fx80-* \ | h8300-* | h8500-* \ | hppa-* | hppa1.[01]-* | hppa2.0-* | hppa2.0[nw]-* | hppa64-* \ | i*86-* | i860-* | i960-* | ia64-* \ | ip2k-* \ | m32r-* \ | m68000-* | m680[012346]0-* | m68360-* | m683?2-* | m68k-* \ | m88110-* | m88k-* | mcore-* \ | mips-* | mipsbe-* | mipseb-* | mipsel-* | mipsle-* \ | mips16-* \ | mips64-* | mips64el-* \ | mips64vr-* | mips64vrel-* \ | mips64orion-* | mips64orionel-* \ | mips64vr4100-* | mips64vr4100el-* \ | mips64vr4300-* | mips64vr4300el-* \ | mips64vr5000-* | mips64vr5000el-* \ | mipsisa32-* | mipsisa32el-* \ | mipsisa32r2-* | mipsisa32r2el-* \ | mipsisa64-* | mipsisa64el-* \ | mipsisa64sb1-* | mipsisa64sb1el-* \ | mipsisa64sr71k-* | mipsisa64sr71kel-* \ | mipstx39-* | mipstx39el-* \ | msp430-* \ | none-* | np1-* | nv1-* | ns16k-* | ns32k-* \ | orion-* \ | pdp10-* | pdp11-* | pj-* | pjl-* | pn-* | power-* \ | powerpc-* | powerpc64-* | powerpc64le-* | powerpcle-* | ppcbe-* \ | pyramid-* \ | romp-* | rs6000-* \ | s390-* | s390x-* \ | sh-* | sh[1234]-* | sh[23]e-* | sh[34]eb-* | shbe-* \ | shle-* | sh[1234]le-* | sh3ele-* | sh64-* | sh64le-* \ | sparc-* | sparc64-* | sparc86x-* | sparclet-* | sparclite-* \ | sparcv9-* | sparcv9b-* | strongarm-* | sv1-* | sx?-* \ | tahoe-* | thumb-* \ | tic30-* | tic4x-* | tic54x-* | tic55x-* | tic6x-* | tic80-* \ | tron-* \ | v850-* | v850e-* | vax-* \ | we32k-* \ | x86-* | x86_64-* | xps100-* | xscale-* | xstormy16-* \ | xtensa-* \ | ymp-* \ | z8k-*) ;; # Recognize the various machine names and aliases which stand # for a CPU type and a company and sometimes even an OS. 
386bsd) basic_machine=i386-unknown os=-bsd ;; 3b1 | 7300 | 7300-att | att-7300 | pc7300 | safari | unixpc) basic_machine=m68000-att ;; 3b*) basic_machine=we32k-att ;; a29khif) basic_machine=a29k-amd os=-udi ;; adobe68k) basic_machine=m68010-adobe os=-scout ;; alliant | fx80) basic_machine=fx80-alliant ;; altos | altos3068) basic_machine=m68k-altos ;; am29k) basic_machine=a29k-none os=-bsd ;; amd64) basic_machine=x86_64-pc ;; amdahl) basic_machine=580-amdahl os=-sysv ;; amiga | amiga-*) basic_machine=m68k-unknown ;; amigaos | amigados) basic_machine=m68k-unknown os=-amigaos ;; amigaunix | amix) basic_machine=m68k-unknown os=-sysv4 ;; apollo68) basic_machine=m68k-apollo os=-sysv ;; apollo68bsd) basic_machine=m68k-apollo os=-bsd ;; aux) basic_machine=m68k-apple os=-aux ;; balance) basic_machine=ns32k-sequent os=-dynix ;; c90) basic_machine=c90-cray os=-unicos ;; convex-c1) basic_machine=c1-convex os=-bsd ;; convex-c2) basic_machine=c2-convex os=-bsd ;; convex-c32) basic_machine=c32-convex os=-bsd ;; convex-c34) basic_machine=c34-convex os=-bsd ;; convex-c38) basic_machine=c38-convex os=-bsd ;; cray | j90) basic_machine=j90-cray os=-unicos ;; crds | unos) basic_machine=m68k-crds ;; cris | cris-* | etrax*) basic_machine=cris-axis ;; da30 | da30-*) basic_machine=m68k-da30 ;; decstation | decstation-3100 | pmax | pmax-* | pmin | dec3100 | decstatn) basic_machine=mips-dec ;; decsystem10* | dec10*) basic_machine=pdp10-dec os=-tops10 ;; decsystem20* | dec20*) basic_machine=pdp10-dec os=-tops20 ;; delta | 3300 | motorola-3300 | motorola-delta \ | 3300-motorola | delta-motorola) basic_machine=m68k-motorola ;; delta88) basic_machine=m88k-motorola os=-sysv3 ;; dpx20 | dpx20-*) basic_machine=rs6000-bull os=-bosx ;; dpx2* | dpx2*-bull) basic_machine=m68k-bull os=-sysv3 ;; ebmon29k) basic_machine=a29k-amd os=-ebmon ;; elxsi) basic_machine=elxsi-elxsi os=-bsd ;; encore | umax | mmax) basic_machine=ns32k-encore ;; es1800 | OSE68k | ose68k | ose | OSE) basic_machine=m68k-ericsson os=-ose ;; fx2800) basic_machine=i860-alliant ;; genix) basic_machine=ns32k-ns ;; gmicro) basic_machine=tron-gmicro os=-sysv ;; go32) basic_machine=i386-pc os=-go32 ;; h3050r* | hiux*) basic_machine=hppa1.1-hitachi os=-hiuxwe2 ;; h8300hms) basic_machine=h8300-hitachi os=-hms ;; h8300xray) basic_machine=h8300-hitachi os=-xray ;; h8500hms) basic_machine=h8500-hitachi os=-hms ;; harris) basic_machine=m88k-harris os=-sysv3 ;; hp300-*) basic_machine=m68k-hp ;; hp300bsd) basic_machine=m68k-hp os=-bsd ;; hp300hpux) basic_machine=m68k-hp os=-hpux ;; hp3k9[0-9][0-9] | hp9[0-9][0-9]) basic_machine=hppa1.0-hp ;; hp9k2[0-9][0-9] | hp9k31[0-9]) basic_machine=m68000-hp ;; hp9k3[2-9][0-9]) basic_machine=m68k-hp ;; hp9k6[0-9][0-9] | hp6[0-9][0-9]) basic_machine=hppa1.0-hp ;; hp9k7[0-79][0-9] | hp7[0-79][0-9]) basic_machine=hppa1.1-hp ;; hp9k78[0-9] | hp78[0-9]) # FIXME: really hppa2.0-hp basic_machine=hppa1.1-hp ;; hp9k8[67]1 | hp8[67]1 | hp9k80[24] | hp80[24] | hp9k8[78]9 | hp8[78]9 | hp9k893 | hp893) # FIXME: really hppa2.0-hp basic_machine=hppa1.1-hp ;; hp9k8[0-9][13679] | hp8[0-9][13679]) basic_machine=hppa1.1-hp ;; hp9k8[0-9][0-9] | hp8[0-9][0-9]) basic_machine=hppa1.0-hp ;; hppa-next) os=-nextstep3 ;; hppaosf) basic_machine=hppa1.1-hp os=-osf ;; hppro) basic_machine=hppa1.1-hp os=-proelf ;; i370-ibm* | ibm*) basic_machine=i370-ibm ;; # I'm not sure what "Sysv32" means. Should this be sysv3.2? 
i*86v32) basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'` os=-sysv32 ;; i*86v4*) basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'` os=-sysv4 ;; i*86v) basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'` os=-sysv ;; i*86sol2) basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'` os=-solaris2 ;; i386mach) basic_machine=i386-mach os=-mach ;; i386-vsta | vsta) basic_machine=i386-unknown os=-vsta ;; iris | iris4d) basic_machine=mips-sgi case $os in -irix*) ;; *) os=-irix4 ;; esac ;; isi68 | isi) basic_machine=m68k-isi os=-sysv ;; m88k-omron*) basic_machine=m88k-omron ;; magnum | m3230) basic_machine=mips-mips os=-sysv ;; merlin) basic_machine=ns32k-utek os=-sysv ;; mingw32) basic_machine=i386-pc os=-mingw32 ;; miniframe) basic_machine=m68000-convergent ;; *mint | -mint[0-9]* | *MiNT | *MiNT[0-9]*) basic_machine=m68k-atari os=-mint ;; mips3*-*) basic_machine=`echo $basic_machine | sed -e 's/mips3/mips64/'` ;; mips3*) basic_machine=`echo $basic_machine | sed -e 's/mips3/mips64/'`-unknown ;; mmix*) basic_machine=mmix-knuth os=-mmixware ;; monitor) basic_machine=m68k-rom68k os=-coff ;; morphos) basic_machine=powerpc-unknown os=-morphos ;; msdos) basic_machine=i386-pc os=-msdos ;; mvs) basic_machine=i370-ibm os=-mvs ;; ncr3000) basic_machine=i486-ncr os=-sysv4 ;; netbsd386) basic_machine=i386-unknown os=-netbsd ;; netwinder) basic_machine=armv4l-rebel os=-linux ;; news | news700 | news800 | news900) basic_machine=m68k-sony os=-newsos ;; news1000) basic_machine=m68030-sony os=-newsos ;; news-3600 | risc-news) basic_machine=mips-sony os=-newsos ;; necv70) basic_machine=v70-nec os=-sysv ;; next | m*-next ) basic_machine=m68k-next case $os in -nextstep* ) ;; -ns2*) os=-nextstep2 ;; *) os=-nextstep3 ;; esac ;; nh3000) basic_machine=m68k-harris os=-cxux ;; nh[45]000) basic_machine=m88k-harris os=-cxux ;; nindy960) basic_machine=i960-intel os=-nindy ;; mon960) basic_machine=i960-intel os=-mon960 ;; nonstopux) basic_machine=mips-compaq os=-nonstopux ;; np1) basic_machine=np1-gould ;; nv1) basic_machine=nv1-cray os=-unicosmp ;; nsr-tandem) basic_machine=nsr-tandem ;; op50n-* | op60c-*) basic_machine=hppa1.1-oki os=-proelf ;; or32 | or32-*) basic_machine=or32-unknown os=-coff ;; OSE68000 | ose68000) basic_machine=m68000-ericsson os=-ose ;; os68k) basic_machine=m68k-none os=-os68k ;; pa-hitachi) basic_machine=hppa1.1-hitachi os=-hiuxwe2 ;; paragon) basic_machine=i860-intel os=-osf ;; pbd) basic_machine=sparc-tti ;; pbb) basic_machine=m68k-tti ;; pc532 | pc532-*) basic_machine=ns32k-pc532 ;; pentium | p5 | k5 | k6 | nexgen | viac3) basic_machine=i586-pc ;; pentiumpro | p6 | 6x86 | athlon | athlon_*) basic_machine=i686-pc ;; pentiumii | pentium2 | pentiumiii | pentium3) basic_machine=i686-pc ;; pentium4) basic_machine=i786-pc ;; pentium-* | p5-* | k5-* | k6-* | nexgen-* | viac3-*) basic_machine=i586-`echo $basic_machine | sed 's/^[^-]*-//'` ;; pentiumpro-* | p6-* | 6x86-* | athlon-*) basic_machine=i686-`echo $basic_machine | sed 's/^[^-]*-//'` ;; pentiumii-* | pentium2-* | pentiumiii-* | pentium3-*) basic_machine=i686-`echo $basic_machine | sed 's/^[^-]*-//'` ;; pentium4-*) basic_machine=i786-`echo $basic_machine | sed 's/^[^-]*-//'` ;; pn) basic_machine=pn-gould ;; power) basic_machine=power-ibm ;; ppc) basic_machine=powerpc-unknown ;; ppc-*) basic_machine=powerpc-`echo $basic_machine | sed 's/^[^-]*-//'` ;; ppcle | powerpclittle | ppc-le | powerpc-little) basic_machine=powerpcle-unknown ;; ppcle-* | powerpclittle-*) basic_machine=powerpcle-`echo $basic_machine | sed 's/^[^-]*-//'` ;; ppc64) 
basic_machine=powerpc64-unknown ;; ppc64-*) basic_machine=powerpc64-`echo $basic_machine | sed 's/^[^-]*-//'` ;; ppc64le | powerpc64little | ppc64-le | powerpc64-little) basic_machine=powerpc64le-unknown ;; ppc64le-* | powerpc64little-*) basic_machine=powerpc64le-`echo $basic_machine | sed 's/^[^-]*-//'` ;; ps2) basic_machine=i386-ibm ;; pw32) basic_machine=i586-unknown os=-pw32 ;; rom68k) basic_machine=m68k-rom68k os=-coff ;; rm[46]00) basic_machine=mips-siemens ;; rtpc | rtpc-*) basic_machine=romp-ibm ;; sa29200) basic_machine=a29k-amd os=-udi ;; sb1) basic_machine=mipsisa64sb1-unknown ;; sb1el) basic_machine=mipsisa64sb1el-unknown ;; sei) basic_machine=mips-sei os=-seiux ;; sequent) basic_machine=i386-sequent ;; sh) basic_machine=sh-hitachi os=-hms ;; sh64) basic_machine=sh64-unknown ;; sparclite-wrs | simso-wrs) basic_machine=sparclite-wrs os=-vxworks ;; sps7) basic_machine=m68k-bull os=-sysv2 ;; spur) basic_machine=spur-unknown ;; st2000) basic_machine=m68k-tandem ;; stratus) basic_machine=i860-stratus os=-sysv4 ;; sun2) basic_machine=m68000-sun ;; sun2os3) basic_machine=m68000-sun os=-sunos3 ;; sun2os4) basic_machine=m68000-sun os=-sunos4 ;; sun3os3) basic_machine=m68k-sun os=-sunos3 ;; sun3os4) basic_machine=m68k-sun os=-sunos4 ;; sun4os3) basic_machine=sparc-sun os=-sunos3 ;; sun4os4) basic_machine=sparc-sun os=-sunos4 ;; sun4sol2) basic_machine=sparc-sun os=-solaris2 ;; sun3 | sun3-*) basic_machine=m68k-sun ;; sun4) basic_machine=sparc-sun ;; sun386 | sun386i | roadrunner) basic_machine=i386-sun ;; sv1) basic_machine=sv1-cray os=-unicos ;; symmetry) basic_machine=i386-sequent os=-dynix ;; t3e) basic_machine=alphaev5-cray os=-unicos ;; t90) basic_machine=t90-cray os=-unicos ;; tic54x | c54x*) basic_machine=tic54x-unknown os=-coff ;; tic55x | c55x*) basic_machine=tic55x-unknown os=-coff ;; tic6x | c6x*) basic_machine=tic6x-unknown os=-coff ;; tx39) basic_machine=mipstx39-unknown ;; tx39el) basic_machine=mipstx39el-unknown ;; toad1) basic_machine=pdp10-xkl os=-tops20 ;; tower | tower-32) basic_machine=m68k-ncr ;; udi29k) basic_machine=a29k-amd os=-udi ;; ultra3) basic_machine=a29k-nyu os=-sym1 ;; v810 | necv810) basic_machine=v810-nec os=-none ;; vaxv) basic_machine=vax-dec os=-sysv ;; vms) basic_machine=vax-dec os=-vms ;; vpp*|vx|vx-*) basic_machine=f301-fujitsu ;; vxworks960) basic_machine=i960-wrs os=-vxworks ;; vxworks68) basic_machine=m68k-wrs os=-vxworks ;; vxworks29k) basic_machine=a29k-wrs os=-vxworks ;; w65*) basic_machine=w65-wdc os=-none ;; w89k-*) basic_machine=hppa1.1-winbond os=-proelf ;; xps | xps100) basic_machine=xps100-honeywell ;; ymp) basic_machine=ymp-cray os=-unicos ;; z8k-*-coff) basic_machine=z8k-unknown os=-sim ;; none) basic_machine=none-none os=-none ;; # Here we handle the default manufacturer of certain CPU types. It is in # some cases the only manufacturer, in others, it is the most popular. 
w89k) basic_machine=hppa1.1-winbond ;; op50n) basic_machine=hppa1.1-oki ;; op60c) basic_machine=hppa1.1-oki ;; romp) basic_machine=romp-ibm ;; rs6000) basic_machine=rs6000-ibm ;; vax) basic_machine=vax-dec ;; pdp10) # there are many clones, so DEC is not a safe bet basic_machine=pdp10-unknown ;; pdp11) basic_machine=pdp11-dec ;; we32k) basic_machine=we32k-att ;; sh3 | sh4 | sh[34]eb | sh[1234]le | sh[23]ele) basic_machine=sh-unknown ;; sh64) basic_machine=sh64-unknown ;; sparc | sparcv9 | sparcv9b) basic_machine=sparc-sun ;; cydra) basic_machine=cydra-cydrome ;; orion) basic_machine=orion-highlevel ;; orion105) basic_machine=clipper-highlevel ;; mac | mpw | mac-mpw) basic_machine=m68k-apple ;; pmac | pmac-mpw) basic_machine=powerpc-apple ;; *-unknown) # Make sure to match an already-canonicalized machine name. ;; *) echo Invalid configuration \`$1\': machine \`$basic_machine\' not recognized 1>&2 exit 1 ;; esac # Here we canonicalize certain aliases for manufacturers. case $basic_machine in *-digital*) basic_machine=`echo $basic_machine | sed 's/digital.*/dec/'` ;; *-commodore*) basic_machine=`echo $basic_machine | sed 's/commodore.*/cbm/'` ;; *) ;; esac # Decode manufacturer-specific aliases for certain operating systems. if [ x"$os" != x"" ] then case $os in # First match some system type aliases # that might get confused with valid system types. # -solaris* is a basic system type, with this one exception. -solaris1 | -solaris1.*) os=`echo $os | sed -e 's|solaris1|sunos4|'` ;; -solaris) os=-solaris2 ;; -svr4*) os=-sysv4 ;; -unixware*) os=-sysv4.2uw ;; -gnu/linux*) os=`echo $os | sed -e 's|gnu/linux|linux-gnu|'` ;; # First accept the basic system types. # The portable systems comes first. # Each alternative MUST END IN A *, to match a version number. # -sysv* is not here because it comes later, after sysvr4. -gnu* | -bsd* | -mach* | -minix* | -genix* | -ultrix* | -irix* \ | -*vms* | -sco* | -esix* | -isc* | -aix* | -sunos | -sunos[34]*\ | -hpux* | -unos* | -osf* | -luna* | -dgux* | -solaris* | -sym* \ | -amigaos* | -amigados* | -msdos* | -newsos* | -unicos* | -aof* \ | -aos* \ | -nindy* | -vxsim* | -vxworks* | -ebmon* | -hms* | -mvs* \ | -clix* | -riscos* | -uniplus* | -iris* | -rtu* | -xenix* \ | -hiux* | -386bsd* | -netbsd* | -openbsd* | -freebsd* | -riscix* \ | -lynxos* | -bosx* | -nextstep* | -cxux* | -aout* | -elf* | -oabi* \ | -ptx* | -coff* | -ecoff* | -winnt* | -domain* | -vsta* \ | -udi* | -eabi* | -lites* | -ieee* | -go32* | -aux* \ | -chorusos* | -chorusrdb* \ | -cygwin* | -pe* | -psos* | -moss* | -proelf* | -rtems* \ | -mingw32* | -linux-gnu* | -uxpv* | -beos* | -mpeix* | -udk* \ | -interix* | -uwin* | -mks* | -rhapsody* | -darwin* | -opened* \ | -openstep* | -oskit* | -conix* | -pw32* | -nonstopux* \ | -storm-chaos* | -tops10* | -tenex* | -tops20* | -its* \ | -os2* | -vos* | -palmos* | -uclinux* | -nucleus* \ | -morphos* | -superux* | -rtmk* | -rtmk-nova* | -windiss* \ | -powermax* | -dnix* | -nx6 | -nx7 | -sei*) # Remember, each alternative MUST END IN *, to match a version number. 
;; -qnx*) case $basic_machine in x86-* | i*86-*) ;; *) os=-nto$os ;; esac ;; -nto-qnx*) ;; -nto*) os=`echo $os | sed -e 's|nto|nto-qnx|'` ;; -sim | -es1800* | -hms* | -xray | -os68k* | -none* | -v88r* \ | -windows* | -osx | -abug | -netware* | -os9* | -beos* \ | -macos* | -mpw* | -magic* | -mmixware* | -mon960* | -lnews*) ;; -mac*) os=`echo $os | sed -e 's|mac|macos|'` ;; -linux*) os=`echo $os | sed -e 's|linux|linux-gnu|'` ;; -sunos5*) os=`echo $os | sed -e 's|sunos5|solaris2|'` ;; -sunos6*) os=`echo $os | sed -e 's|sunos6|solaris3|'` ;; -opened*) os=-openedition ;; -wince*) os=-wince ;; -osfrose*) os=-osfrose ;; -osf*) os=-osf ;; -utek*) os=-bsd ;; -dynix*) os=-bsd ;; -acis*) os=-aos ;; -atheos*) os=-atheos ;; -386bsd) os=-bsd ;; -ctix* | -uts*) os=-sysv ;; -nova*) os=-rtmk-nova ;; -ns2 ) os=-nextstep2 ;; -nsk*) os=-nsk ;; # Preserve the version number of sinix5. -sinix5.*) os=`echo $os | sed -e 's|sinix|sysv|'` ;; -sinix*) os=-sysv4 ;; -triton*) os=-sysv3 ;; -oss*) os=-sysv3 ;; -svr4) os=-sysv4 ;; -svr3) os=-sysv3 ;; -sysvr4) os=-sysv4 ;; # This must come after -sysvr4. -sysv*) ;; -ose*) os=-ose ;; -es1800*) os=-ose ;; -xenix) os=-xenix ;; -*mint | -mint[0-9]* | -*MiNT | -MiNT[0-9]*) os=-mint ;; -aros*) os=-aros ;; -kaos*) os=-kaos ;; -none) ;; *) # Get rid of the `-' at the beginning of $os. os=`echo $os | sed 's/[^-]*-//'` echo Invalid configuration \`$1\': system \`$os\' not recognized 1>&2 exit 1 ;; esac else # Here we handle the default operating systems that come with various machines. # The value should be what the vendor currently ships out the door with their # machine or put another way, the most popular os provided with the machine. # Note that if you're going to try to match "-MANUFACTURER" here (say, # "-sun"), then you have to tell the case statement up towards the top # that MANUFACTURER isn't an operating system. Otherwise, code above # will signal an error saying that MANUFACTURER isn't an operating # system, and we'll never get to this point. case $basic_machine in *-acorn) os=-riscix1.2 ;; arm*-rebel) os=-linux ;; arm*-semi) os=-aout ;; c4x-* | tic4x-*) os=-coff ;; # This must come before the *-dec entry. pdp10-*) os=-tops20 ;; pdp11-*) os=-none ;; *-dec | vax-*) os=-ultrix4.2 ;; m68*-apollo) os=-domain ;; i386-sun) os=-sunos4.0.2 ;; m68000-sun) os=-sunos3 # This also exists in the configure program, but was not the # default. # os=-sunos4 ;; m68*-cisco) os=-aout ;; mips*-cisco) os=-elf ;; mips*-*) os=-elf ;; or32-*) os=-coff ;; *-tti) # must be before sparc entry or we get the wrong os. os=-sysv3 ;; sparc-* | *-sun) os=-sunos4.1.1 ;; *-be) os=-beos ;; *-ibm) os=-aix ;; *-wec) os=-proelf ;; *-winbond) os=-proelf ;; *-oki) os=-proelf ;; *-hp) os=-hpux ;; *-hitachi) os=-hiux ;; i860-* | *-att | *-ncr | *-altos | *-motorola | *-convergent) os=-sysv ;; *-cbm) os=-amigaos ;; *-dg) os=-dgux ;; *-dolphin) os=-sysv3 ;; m68k-ccur) os=-rtu ;; m88k-omron*) os=-luna ;; *-next ) os=-nextstep ;; *-sequent) os=-ptx ;; *-crds) os=-unos ;; *-ns) os=-genix ;; i370-*) os=-mvs ;; *-next) os=-nextstep3 ;; *-gould) os=-sysv ;; *-highlevel) os=-bsd ;; *-encore) os=-bsd ;; *-sgi) os=-irix ;; *-siemens) os=-sysv4 ;; *-masscomp) os=-rtu ;; f30[01]-fujitsu | f700-fujitsu) os=-uxpv ;; *-rom68k) os=-coff ;; *-*bug) os=-coff ;; *-apple) os=-macos ;; *-atari*) os=-mint ;; *) os=-none ;; esac fi # Here we handle the case where we know the os, and the CPU type, but not the # manufacturer. We pick the logical manufacturer. 
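# As a sketch of what this fixup does (reading the table that follows): a
# machine that is still `<cpu>-unknown' at this point borrows its vendor
# from the OS, so `<cpu>-unknown' paired with -aix4.3 would become
# `<cpu>-ibm-aix4.3', and with -hpux* it would become `<cpu>-hp-hpux*'.
# `<cpu>' here is only a placeholder for whatever CPU was recognized above.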
vendor=unknown case $basic_machine in *-unknown) case $os in -riscix*) vendor=acorn ;; -sunos*) vendor=sun ;; -aix*) vendor=ibm ;; -beos*) vendor=be ;; -hpux*) vendor=hp ;; -mpeix*) vendor=hp ;; -hiux*) vendor=hitachi ;; -unos*) vendor=crds ;; -dgux*) vendor=dg ;; -luna*) vendor=omron ;; -genix*) vendor=ns ;; -mvs* | -opened*) vendor=ibm ;; -ptx*) vendor=sequent ;; -vxsim* | -vxworks* | -windiss*) vendor=wrs ;; -aux*) vendor=apple ;; -hms*) vendor=hitachi ;; -mpw* | -macos*) vendor=apple ;; -*mint | -mint[0-9]* | -*MiNT | -MiNT[0-9]*) vendor=atari ;; -vos*) vendor=stratus ;; esac basic_machine=`echo $basic_machine | sed "s/unknown/$vendor/"` ;; esac echo $basic_machine$os exit 0 # Local variables: # eval: (add-hook 'write-file-hooks 'time-stamp) # time-stamp-start: "timestamp='" # time-stamp-format: "%:y-%02m-%02d" # time-stamp-end: "'" # End: sphinx-2.0.4-release/api/libsphinxclient/config.guess0000755000176700017710000012206511102461317022235 0ustar deogardeogar#! /bin/sh # Attempt to guess a canonical system name. # Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, # 2000, 2001, 2002, 2003 Free Software Foundation, Inc. timestamp='2003-06-17' # This file is free software; you can redistribute it and/or modify it # under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. # # As a special exception to the GNU General Public License, if you # distribute this file as part of a program that contains a # configuration script generated by Autoconf, you may include it under # the same distribution terms that you use for the rest of that program. # Originally written by Per Bothner . # Please send patches to . Submit a context # diff and a properly formatted ChangeLog entry. # # This script attempts to guess a canonical system name similar to # config.sub. If it succeeds, it prints the system name on stdout, and # exits with 0. Otherwise, it exits with 1. # # The plan is that this can be called by configure scripts if you # don't specify an explicit build system type. me=`echo "$0" | sed -e 's,.*/,,'` usage="\ Usage: $0 [OPTION] Output the configuration name of the system \`$me' is run on. Operation modes: -h, --help print this help, then exit -t, --time-stamp print date of last modification, then exit -v, --version print version number, then exit Report bugs and patches to ." version="\ GNU config.guess ($timestamp) Originally written by Per Bothner. Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001 Free Software Foundation, Inc. This is free software; see the source for copying conditions. There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE." help=" Try \`$me --help' for more information." # Parse command line while test $# -gt 0 ; do case $1 in --time-stamp | --time* | -t ) echo "$timestamp" ; exit 0 ;; --version | -v ) echo "$version" ; exit 0 ;; --help | --h* | -h ) echo "$usage"; exit 0 ;; -- ) # Stop option processing shift; break ;; - ) # Use stdin as input. 
break ;; -* ) echo "$me: invalid option $1$help" >&2 exit 1 ;; * ) break ;; esac done if test $# != 0; then echo "$me: too many arguments$help" >&2 exit 1 fi trap 'exit 1' 1 2 15 # CC_FOR_BUILD -- compiler used by this script. Note that the use of a # compiler to aid in system detection is discouraged as it requires # temporary files to be created and, as you can see below, it is a # headache to deal with in a portable fashion. # Historically, `CC_FOR_BUILD' used to be named `HOST_CC'. We still # use `HOST_CC' if defined, but it is deprecated. # Portable tmp directory creation inspired by the Autoconf team. set_cc_for_build=' trap "exitcode=\$?; (rm -f \$tmpfiles 2>/dev/null; rmdir \$tmp 2>/dev/null) && exit \$exitcode" 0 ; trap "rm -f \$tmpfiles 2>/dev/null; rmdir \$tmp 2>/dev/null; exit 1" 1 2 13 15 ; : ${TMPDIR=/tmp} ; { tmp=`(umask 077 && mktemp -d -q "$TMPDIR/cgXXXXXX") 2>/dev/null` && test -n "$tmp" && test -d "$tmp" ; } || { test -n "$RANDOM" && tmp=$TMPDIR/cg$$-$RANDOM && (umask 077 && mkdir $tmp) ; } || { tmp=$TMPDIR/cg-$$ && (umask 077 && mkdir $tmp) && echo "Warning: creating insecure temp directory" >&2 ; } || { echo "$me: cannot create a temporary directory in $TMPDIR" >&2 ; exit 1 ; } ; dummy=$tmp/dummy ; tmpfiles="$dummy.c $dummy.o $dummy.rel $dummy" ; case $CC_FOR_BUILD,$HOST_CC,$CC in ,,) echo "int x;" > $dummy.c ; for c in cc gcc c89 c99 ; do if ($c -c -o $dummy.o $dummy.c) >/dev/null 2>&1 ; then CC_FOR_BUILD="$c"; break ; fi ; done ; if test x"$CC_FOR_BUILD" = x ; then CC_FOR_BUILD=no_compiler_found ; fi ;; ,,*) CC_FOR_BUILD=$CC ;; ,*,*) CC_FOR_BUILD=$HOST_CC ;; esac ;' # This is needed to find uname on a Pyramid OSx when run in the BSD universe. # (ghazi@noc.rutgers.edu 1994-08-24) if (test -f /.attbin/uname) >/dev/null 2>&1 ; then PATH=$PATH:/.attbin ; export PATH fi UNAME_MACHINE=`(uname -m) 2>/dev/null` || UNAME_MACHINE=unknown UNAME_RELEASE=`(uname -r) 2>/dev/null` || UNAME_RELEASE=unknown UNAME_SYSTEM=`(uname -s) 2>/dev/null` || UNAME_SYSTEM=unknown UNAME_VERSION=`(uname -v) 2>/dev/null` || UNAME_VERSION=unknown ## for Red Hat Linux if test -f /etc/redhat-release ; then VENDOR=redhat ; else VENDOR= ; fi # Note: order is significant - the case branches are not exclusive. case "${UNAME_MACHINE}:${UNAME_SYSTEM}:${UNAME_RELEASE}:${UNAME_VERSION}" in *:NetBSD:*:*) # NetBSD (nbsd) targets should (where applicable) match one or # more of the tupples: *-*-netbsdelf*, *-*-netbsdaout*, # *-*-netbsdecoff* and *-*-netbsd*. For targets that recently # switched to ELF, *-*-netbsd* would select the old # object file format. This provides both forward # compatibility and a consistent mechanism for selecting the # object file format. # # Note: NetBSD doesn't particularly care about the vendor # portion of the name. We always set it to "unknown". sysctl="sysctl -n hw.machine_arch" UNAME_MACHINE_ARCH=`(/sbin/$sysctl 2>/dev/null || \ /usr/sbin/$sysctl 2>/dev/null || echo unknown)` case "${UNAME_MACHINE_ARCH}" in armeb) machine=armeb-unknown ;; arm*) machine=arm-unknown ;; sh3el) machine=shl-unknown ;; sh3eb) machine=sh-unknown ;; *) machine=${UNAME_MACHINE_ARCH}-unknown ;; esac # The Operating System including object format, if it has switched # to ELF recently, or will in the future. case "${UNAME_MACHINE_ARCH}" in arm*|i386|m68k|ns32k|sh3*|sparc|vax) eval $set_cc_for_build if echo __ELF__ | $CC_FOR_BUILD -E - 2>/dev/null \ | grep __ELF__ >/dev/null then # Once all utilities can be ECOFF (netbsdecoff) or a.out (netbsdaout). # Return netbsd for either. FIX? 
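# How the test above works: an ELF toolchain predefines __ELF__, so the
# preprocessor rewrites the piped-in token (typically to 1), the grep finds
# nothing, and the else branch (netbsdelf) is taken.  An a.out/ECOFF
# toolchain leaves the literal token alone, the grep matches, and the plain
# "netbsd" form is kept.  Roughly, on any ELF system:
#
#   echo __ELF__ | cc -E - | grep __ELF__    # matches nothing
#
# (illustrative one-liner only; the script uses $CC_FOR_BUILD as selected
# earlier rather than a hard-coded `cc').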
os=netbsd else os=netbsdelf fi ;; *) os=netbsd ;; esac # The OS release # Debian GNU/NetBSD machines have a different userland, and # thus, need a distinct triplet. However, they do not need # kernel version information, so it can be replaced with a # suitable tag, in the style of linux-gnu. case "${UNAME_VERSION}" in Debian*) release='-gnu' ;; *) release=`echo ${UNAME_RELEASE}|sed -e 's/[-_].*/\./'` ;; esac # Since CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM: # contains redundant information, the shorter form: # CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM is used. echo "${machine}-${os}${release}" exit 0 ;; amiga:OpenBSD:*:*) echo m68k-unknown-openbsd${UNAME_RELEASE} exit 0 ;; arc:OpenBSD:*:*) echo mipsel-unknown-openbsd${UNAME_RELEASE} exit 0 ;; hp300:OpenBSD:*:*) echo m68k-unknown-openbsd${UNAME_RELEASE} exit 0 ;; mac68k:OpenBSD:*:*) echo m68k-unknown-openbsd${UNAME_RELEASE} exit 0 ;; macppc:OpenBSD:*:*) echo powerpc-unknown-openbsd${UNAME_RELEASE} exit 0 ;; mvme68k:OpenBSD:*:*) echo m68k-unknown-openbsd${UNAME_RELEASE} exit 0 ;; mvme88k:OpenBSD:*:*) echo m88k-unknown-openbsd${UNAME_RELEASE} exit 0 ;; mvmeppc:OpenBSD:*:*) echo powerpc-unknown-openbsd${UNAME_RELEASE} exit 0 ;; pmax:OpenBSD:*:*) echo mipsel-unknown-openbsd${UNAME_RELEASE} exit 0 ;; sgi:OpenBSD:*:*) echo mipseb-unknown-openbsd${UNAME_RELEASE} exit 0 ;; sun3:OpenBSD:*:*) echo m68k-unknown-openbsd${UNAME_RELEASE} exit 0 ;; wgrisc:OpenBSD:*:*) echo mipsel-unknown-openbsd${UNAME_RELEASE} exit 0 ;; *:OpenBSD:*:*) echo ${UNAME_MACHINE}-unknown-openbsd${UNAME_RELEASE} exit 0 ;; alpha:OSF1:*:*) if test $UNAME_RELEASE = "V4.0"; then UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $3}'` fi # According to Compaq, /usr/sbin/psrinfo has been available on # OSF/1 and Tru64 systems produced since 1995. I hope that # covers most systems running today. This code pipes the CPU # types through head -n 1, so we only detect the type of CPU 0. ALPHA_CPU_TYPE=`/usr/sbin/psrinfo -v | sed -n -e 's/^ The alpha \(.*\) processor.*$/\1/p' | head -n 1` case "$ALPHA_CPU_TYPE" in "EV4 (21064)") UNAME_MACHINE="alpha" ;; "EV4.5 (21064)") UNAME_MACHINE="alpha" ;; "LCA4 (21066/21068)") UNAME_MACHINE="alpha" ;; "EV5 (21164)") UNAME_MACHINE="alphaev5" ;; "EV5.6 (21164A)") UNAME_MACHINE="alphaev56" ;; "EV5.6 (21164PC)") UNAME_MACHINE="alphapca56" ;; "EV5.7 (21164PC)") UNAME_MACHINE="alphapca57" ;; "EV6 (21264)") UNAME_MACHINE="alphaev6" ;; "EV6.7 (21264A)") UNAME_MACHINE="alphaev67" ;; "EV6.8CB (21264C)") UNAME_MACHINE="alphaev68" ;; "EV6.8AL (21264B)") UNAME_MACHINE="alphaev68" ;; "EV6.8CX (21264D)") UNAME_MACHINE="alphaev68" ;; "EV6.9A (21264/EV69A)") UNAME_MACHINE="alphaev69" ;; "EV7 (21364)") UNAME_MACHINE="alphaev7" ;; "EV7.9 (21364A)") UNAME_MACHINE="alphaev79" ;; esac # A Vn.n version is a released version. # A Tn.n version is a released field test version. # A Xn.n version is an unreleased experimental baselevel. # 1.2 uses "1.2" for uname -r. echo ${UNAME_MACHINE}-dec-osf`echo ${UNAME_RELEASE} | sed -e 's/^[VTX]//' | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz'` exit 0 ;; Alpha*:OpenVMS:*:*) echo alpha-hp-vms exit 0 ;; Alpha\ *:Windows_NT*:*) # How do we know it's Interix rather than the generic POSIX subsystem? # Should we change UNAME_MACHINE based on the output of uname instead # of the specific Alpha model? 
echo alpha-pc-interix exit 0 ;; 21064:Windows_NT:50:3) echo alpha-dec-winnt3.5 exit 0 ;; Amiga*:UNIX_System_V:4.0:*) echo m68k-unknown-sysv4 exit 0;; *:[Aa]miga[Oo][Ss]:*:*) echo ${UNAME_MACHINE}-unknown-amigaos exit 0 ;; *:[Mm]orph[Oo][Ss]:*:*) echo ${UNAME_MACHINE}-unknown-morphos exit 0 ;; *:OS/390:*:*) echo i370-ibm-openedition exit 0 ;; arm:RISC*:1.[012]*:*|arm:riscix:1.[012]*:*) echo arm-acorn-riscix${UNAME_RELEASE} exit 0;; SR2?01:HI-UX/MPP:*:* | SR8000:HI-UX/MPP:*:*) echo hppa1.1-hitachi-hiuxmpp exit 0;; Pyramid*:OSx*:*:* | MIS*:OSx*:*:* | MIS*:SMP_DC-OSx*:*:*) # akee@wpdis03.wpafb.af.mil (Earle F. Ake) contributed MIS and NILE. if test "`(/bin/universe) 2>/dev/null`" = att ; then echo pyramid-pyramid-sysv3 else echo pyramid-pyramid-bsd fi exit 0 ;; NILE*:*:*:dcosx) echo pyramid-pyramid-svr4 exit 0 ;; DRS?6000:unix:4.0:6*) echo sparc-icl-nx6 exit 0 ;; DRS?6000:UNIX_SV:4.2*:7*) case `/usr/bin/uname -p` in sparc) echo sparc-icl-nx7 && exit 0 ;; esac ;; sun4H:SunOS:5.*:*) echo sparc-hal-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` exit 0 ;; sun4*:SunOS:5.*:* | tadpole*:SunOS:5.*:*) echo sparc-sun-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` exit 0 ;; i86pc:SunOS:5.*:*) echo i386-pc-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` exit 0 ;; sun4*:SunOS:6*:*) # According to config.sub, this is the proper way to canonicalize # SunOS6. Hard to guess exactly what SunOS6 will be like, but # it's likely to be more like Solaris than SunOS4. echo sparc-sun-solaris3`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` exit 0 ;; sun4*:SunOS:*:*) case "`/usr/bin/arch -k`" in Series*|S4*) UNAME_RELEASE=`uname -v` ;; esac # Japanese Language versions have a version number like `4.1.3-JL'. echo sparc-sun-sunos`echo ${UNAME_RELEASE}|sed -e 's/-/_/'` exit 0 ;; sun3*:SunOS:*:*) echo m68k-sun-sunos${UNAME_RELEASE} exit 0 ;; sun*:*:4.2BSD:*) UNAME_RELEASE=`(sed 1q /etc/motd | awk '{print substr($5,1,3)}') 2>/dev/null` test "x${UNAME_RELEASE}" = "x" && UNAME_RELEASE=3 case "`/bin/arch`" in sun3) echo m68k-sun-sunos${UNAME_RELEASE} ;; sun4) echo sparc-sun-sunos${UNAME_RELEASE} ;; esac exit 0 ;; aushp:SunOS:*:*) echo sparc-auspex-sunos${UNAME_RELEASE} exit 0 ;; # The situation for MiNT is a little confusing. The machine name # can be virtually everything (everything which is not # "atarist" or "atariste" at least should have a processor # > m68000). The system name ranges from "MiNT" over "FreeMiNT" # to the lowercase version "mint" (or "freemint"). Finally # the system name "TOS" denotes a system which is actually not # MiNT. But MiNT is downward compatible to TOS, so this should # be no problem. 
atarist[e]:*MiNT:*:* | atarist[e]:*mint:*:* | atarist[e]:*TOS:*:*) echo m68k-atari-mint${UNAME_RELEASE} exit 0 ;; atari*:*MiNT:*:* | atari*:*mint:*:* | atarist[e]:*TOS:*:*) echo m68k-atari-mint${UNAME_RELEASE} exit 0 ;; *falcon*:*MiNT:*:* | *falcon*:*mint:*:* | *falcon*:*TOS:*:*) echo m68k-atari-mint${UNAME_RELEASE} exit 0 ;; milan*:*MiNT:*:* | milan*:*mint:*:* | *milan*:*TOS:*:*) echo m68k-milan-mint${UNAME_RELEASE} exit 0 ;; hades*:*MiNT:*:* | hades*:*mint:*:* | *hades*:*TOS:*:*) echo m68k-hades-mint${UNAME_RELEASE} exit 0 ;; *:*MiNT:*:* | *:*mint:*:* | *:*TOS:*:*) echo m68k-unknown-mint${UNAME_RELEASE} exit 0 ;; powerpc:machten:*:*) echo powerpc-apple-machten${UNAME_RELEASE} exit 0 ;; RISC*:Mach:*:*) echo mips-dec-mach_bsd4.3 exit 0 ;; RISC*:ULTRIX:*:*) echo mips-dec-ultrix${UNAME_RELEASE} exit 0 ;; VAX*:ULTRIX*:*:*) echo vax-dec-ultrix${UNAME_RELEASE} exit 0 ;; 2020:CLIX:*:* | 2430:CLIX:*:*) echo clipper-intergraph-clix${UNAME_RELEASE} exit 0 ;; mips:*:*:UMIPS | mips:*:*:RISCos) eval $set_cc_for_build sed 's/^ //' << EOF >$dummy.c #ifdef __cplusplus #include /* for printf() prototype */ int main (int argc, char *argv[]) { #else int main (argc, argv) int argc; char *argv[]; { #endif #if defined (host_mips) && defined (MIPSEB) #if defined (SYSTYPE_SYSV) printf ("mips-mips-riscos%ssysv\n", argv[1]); exit (0); #endif #if defined (SYSTYPE_SVR4) printf ("mips-mips-riscos%ssvr4\n", argv[1]); exit (0); #endif #if defined (SYSTYPE_BSD43) || defined(SYSTYPE_BSD) printf ("mips-mips-riscos%sbsd\n", argv[1]); exit (0); #endif #endif exit (-1); } EOF $CC_FOR_BUILD -o $dummy $dummy.c \ && $dummy `echo "${UNAME_RELEASE}" | sed -n 's/\([0-9]*\).*/\1/p'` \ && exit 0 echo mips-mips-riscos${UNAME_RELEASE} exit 0 ;; Motorola:PowerMAX_OS:*:*) echo powerpc-motorola-powermax exit 0 ;; Motorola:*:4.3:PL8-*) echo powerpc-harris-powermax exit 0 ;; Night_Hawk:*:*:PowerMAX_OS | Synergy:PowerMAX_OS:*:*) echo powerpc-harris-powermax exit 0 ;; Night_Hawk:Power_UNIX:*:*) echo powerpc-harris-powerunix exit 0 ;; m88k:CX/UX:7*:*) echo m88k-harris-cxux7 exit 0 ;; m88k:*:4*:R4*) echo m88k-motorola-sysv4 exit 0 ;; m88k:*:3*:R3*) echo m88k-motorola-sysv3 exit 0 ;; AViiON:dgux:*:*) # DG/UX returns AViiON for all architectures UNAME_PROCESSOR=`/usr/bin/uname -p` if [ $UNAME_PROCESSOR = mc88100 ] || [ $UNAME_PROCESSOR = mc88110 ] then if [ ${TARGET_BINARY_INTERFACE}x = m88kdguxelfx ] || \ [ ${TARGET_BINARY_INTERFACE}x = x ] then echo m88k-dg-dgux${UNAME_RELEASE} else echo m88k-dg-dguxbcs${UNAME_RELEASE} fi else echo i586-dg-dgux${UNAME_RELEASE} fi exit 0 ;; M88*:DolphinOS:*:*) # DolphinOS (SVR3) echo m88k-dolphin-sysv3 exit 0 ;; M88*:*:R3*:*) # Delta 88k system running SVR3 echo m88k-motorola-sysv3 exit 0 ;; XD88*:*:*:*) # Tektronix XD88 system running UTekV (SVR3) echo m88k-tektronix-sysv3 exit 0 ;; Tek43[0-9][0-9]:UTek:*:*) # Tektronix 4300 system running UTek (BSD) echo m68k-tektronix-bsd exit 0 ;; *:IRIX*:*:*) echo mips-sgi-irix`echo ${UNAME_RELEASE}|sed -e 's/-/_/g'` exit 0 ;; ????????:AIX?:[12].1:2) # AIX 2.2.1 or AIX 2.1.1 is RT/PC AIX. 
echo romp-ibm-aix # uname -m gives an 8 hex-code CPU id exit 0 ;; # Note that: echo "'`uname -s`'" gives 'AIX ' i*86:AIX:*:*) echo i386-ibm-aix exit 0 ;; ia64:AIX:*:*) if [ -x /usr/bin/oslevel ] ; then IBM_REV=`/usr/bin/oslevel` else IBM_REV=${UNAME_VERSION}.${UNAME_RELEASE} fi echo ${UNAME_MACHINE}-ibm-aix${IBM_REV} exit 0 ;; *:AIX:2:3) if grep bos325 /usr/include/stdio.h >/dev/null 2>&1; then eval $set_cc_for_build sed 's/^ //' << EOF >$dummy.c #include main() { if (!__power_pc()) exit(1); puts("powerpc-ibm-aix3.2.5"); exit(0); } EOF $CC_FOR_BUILD -o $dummy $dummy.c && $dummy && exit 0 echo rs6000-ibm-aix3.2.5 elif grep bos324 /usr/include/stdio.h >/dev/null 2>&1; then echo rs6000-ibm-aix3.2.4 else echo rs6000-ibm-aix3.2 fi exit 0 ;; *:AIX:*:[45]) IBM_CPU_ID=`/usr/sbin/lsdev -C -c processor -S available | sed 1q | awk '{ print $1 }'` if /usr/sbin/lsattr -El ${IBM_CPU_ID} | grep ' POWER' >/dev/null 2>&1; then IBM_ARCH=rs6000 else IBM_ARCH=powerpc fi if [ -x /usr/bin/oslevel ] ; then IBM_REV=`/usr/bin/oslevel` else IBM_REV=${UNAME_VERSION}.${UNAME_RELEASE} fi echo ${IBM_ARCH}-ibm-aix${IBM_REV} exit 0 ;; *:AIX:*:*) echo rs6000-ibm-aix exit 0 ;; ibmrt:4.4BSD:*|romp-ibm:BSD:*) echo romp-ibm-bsd4.4 exit 0 ;; ibmrt:*BSD:*|romp-ibm:BSD:*) # covers RT/PC BSD and echo romp-ibm-bsd${UNAME_RELEASE} # 4.3 with uname added to exit 0 ;; # report: romp-ibm BSD 4.3 *:BOSX:*:*) echo rs6000-bull-bosx exit 0 ;; DPX/2?00:B.O.S.:*:*) echo m68k-bull-sysv3 exit 0 ;; 9000/[34]??:4.3bsd:1.*:*) echo m68k-hp-bsd exit 0 ;; hp300:4.4BSD:*:* | 9000/[34]??:4.3bsd:2.*:*) echo m68k-hp-bsd4.4 exit 0 ;; 9000/[34678]??:HP-UX:*:*) HPUX_REV=`echo ${UNAME_RELEASE}|sed -e 's/[^.]*.[0B]*//'` case "${UNAME_MACHINE}" in 9000/31? ) HP_ARCH=m68000 ;; 9000/[34]?? ) HP_ARCH=m68k ;; 9000/[678][0-9][0-9]) if [ -x /usr/bin/getconf ]; then sc_cpu_version=`/usr/bin/getconf SC_CPU_VERSION 2>/dev/null` sc_kernel_bits=`/usr/bin/getconf SC_KERNEL_BITS 2>/dev/null` case "${sc_cpu_version}" in 523) HP_ARCH="hppa1.0" ;; # CPU_PA_RISC1_0 528) HP_ARCH="hppa1.1" ;; # CPU_PA_RISC1_1 532) # CPU_PA_RISC2_0 case "${sc_kernel_bits}" in 32) HP_ARCH="hppa2.0n" ;; 64) HP_ARCH="hppa2.0w" ;; '') HP_ARCH="hppa2.0" ;; # HP-UX 10.20 esac ;; esac fi if [ "${HP_ARCH}" = "" ]; then eval $set_cc_for_build sed 's/^ //' << EOF >$dummy.c #define _HPUX_SOURCE #include #include int main () { #if defined(_SC_KERNEL_BITS) long bits = sysconf(_SC_KERNEL_BITS); #endif long cpu = sysconf (_SC_CPU_VERSION); switch (cpu) { case CPU_PA_RISC1_0: puts ("hppa1.0"); break; case CPU_PA_RISC1_1: puts ("hppa1.1"); break; case CPU_PA_RISC2_0: #if defined(_SC_KERNEL_BITS) switch (bits) { case 64: puts ("hppa2.0w"); break; case 32: puts ("hppa2.0n"); break; default: puts ("hppa2.0"); break; } break; #else /* !defined(_SC_KERNEL_BITS) */ puts ("hppa2.0"); break; #endif default: puts ("hppa1.0"); break; } exit (0); } EOF (CCOPTS= $CC_FOR_BUILD -o $dummy $dummy.c 2>/dev/null) && HP_ARCH=`$dummy` test -z "$HP_ARCH" && HP_ARCH=hppa fi ;; esac if [ ${HP_ARCH} = "hppa2.0w" ] then # avoid double evaluation of $set_cc_for_build test -n "$CC_FOR_BUILD" || eval $set_cc_for_build if echo __LP64__ | (CCOPTS= $CC_FOR_BUILD -E -) | grep __LP64__ >/dev/null then HP_ARCH="hppa2.0w" else HP_ARCH="hppa64" fi fi echo ${HP_ARCH}-hp-hpux${HPUX_REV} exit 0 ;; ia64:HP-UX:*:*) HPUX_REV=`echo ${UNAME_RELEASE}|sed -e 's/[^.]*.[0B]*//'` echo ia64-hp-hpux${HPUX_REV} exit 0 ;; 3050*:HI-UX:*:*) eval $set_cc_for_build sed 's/^ //' << EOF >$dummy.c #include int main () { long cpu = sysconf (_SC_CPU_VERSION); /* The order 
matters, because CPU_IS_HP_MC68K erroneously returns true for CPU_PA_RISC1_0. CPU_IS_PA_RISC returns correct results, however. */ if (CPU_IS_PA_RISC (cpu)) { switch (cpu) { case CPU_PA_RISC1_0: puts ("hppa1.0-hitachi-hiuxwe2"); break; case CPU_PA_RISC1_1: puts ("hppa1.1-hitachi-hiuxwe2"); break; case CPU_PA_RISC2_0: puts ("hppa2.0-hitachi-hiuxwe2"); break; default: puts ("hppa-hitachi-hiuxwe2"); break; } } else if (CPU_IS_HP_MC68K (cpu)) puts ("m68k-hitachi-hiuxwe2"); else puts ("unknown-hitachi-hiuxwe2"); exit (0); } EOF $CC_FOR_BUILD -o $dummy $dummy.c && $dummy && exit 0 echo unknown-hitachi-hiuxwe2 exit 0 ;; 9000/7??:4.3bsd:*:* | 9000/8?[79]:4.3bsd:*:* ) echo hppa1.1-hp-bsd exit 0 ;; 9000/8??:4.3bsd:*:*) echo hppa1.0-hp-bsd exit 0 ;; *9??*:MPE/iX:*:* | *3000*:MPE/iX:*:*) echo hppa1.0-hp-mpeix exit 0 ;; hp7??:OSF1:*:* | hp8?[79]:OSF1:*:* ) echo hppa1.1-hp-osf exit 0 ;; hp8??:OSF1:*:*) echo hppa1.0-hp-osf exit 0 ;; i*86:OSF1:*:*) if [ -x /usr/sbin/sysversion ] ; then echo ${UNAME_MACHINE}-unknown-osf1mk else echo ${UNAME_MACHINE}-unknown-osf1 fi exit 0 ;; parisc*:Lites*:*:*) echo hppa1.1-hp-lites exit 0 ;; C1*:ConvexOS:*:* | convex:ConvexOS:C1*:*) echo c1-convex-bsd exit 0 ;; C2*:ConvexOS:*:* | convex:ConvexOS:C2*:*) if getsysinfo -f scalar_acc then echo c32-convex-bsd else echo c2-convex-bsd fi exit 0 ;; C34*:ConvexOS:*:* | convex:ConvexOS:C34*:*) echo c34-convex-bsd exit 0 ;; C38*:ConvexOS:*:* | convex:ConvexOS:C38*:*) echo c38-convex-bsd exit 0 ;; C4*:ConvexOS:*:* | convex:ConvexOS:C4*:*) echo c4-convex-bsd exit 0 ;; CRAY*Y-MP:*:*:*) echo ymp-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' exit 0 ;; CRAY*[A-Z]90:*:*:*) echo ${UNAME_MACHINE}-cray-unicos${UNAME_RELEASE} \ | sed -e 's/CRAY.*\([A-Z]90\)/\1/' \ -e y/ABCDEFGHIJKLMNOPQRSTUVWXYZ/abcdefghijklmnopqrstuvwxyz/ \ -e 's/\.[^.]*$/.X/' exit 0 ;; CRAY*TS:*:*:*) echo t90-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' exit 0 ;; CRAY*T3E:*:*:*) echo alphaev5-cray-unicosmk${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' exit 0 ;; CRAY*SV1:*:*:*) echo sv1-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' exit 0 ;; *:UNICOS/mp:*:*) echo nv1-cray-unicosmp${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' exit 0 ;; F30[01]:UNIX_System_V:*:* | F700:UNIX_System_V:*:*) FUJITSU_PROC=`uname -m | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz'` FUJITSU_SYS=`uname -p | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/\///'` FUJITSU_REL=`echo ${UNAME_RELEASE} | sed -e 's/ /_/'` echo "${FUJITSU_PROC}-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}" exit 0 ;; i*86:BSD/386:*:* | i*86:BSD/OS:*:* | *:Ascend\ Embedded/OS:*:*) echo ${UNAME_MACHINE}-pc-bsdi${UNAME_RELEASE} exit 0 ;; sparc*:BSD/OS:*:*) echo sparc-unknown-bsdi${UNAME_RELEASE} exit 0 ;; *:BSD/OS:*:*) echo ${UNAME_MACHINE}-unknown-bsdi${UNAME_RELEASE} exit 0 ;; *:FreeBSD:*:*|*:GNU/FreeBSD:*:*) # Determine whether the default compiler uses glibc. 
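# The detection below follows the same pattern used elsewhere in this
# script: emit a tiny C file whose preprocessed output contains the line
# "LIBC=gnu" only when __GLIBC__ >= 2, run it through $CC_FOR_BUILD -E,
# grep the surviving ^LIBC= line, and eval it into the shell.  A minimal
# standalone sketch of the idea, assuming the generated file includes
# <features.h> as stock config.guess does:
#
#   printf '#include <features.h>\n#if __GLIBC__ >= 2\nLIBC=gnu\n#endif\n' > t.c
#   eval `cc -E t.c 2>/dev/null | grep ^LIBC=`   # $LIBC is "gnu" only on glibc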
eval $set_cc_for_build sed 's/^ //' << EOF >$dummy.c #include #if __GLIBC__ >= 2 LIBC=gnu #else LIBC= #endif EOF eval `$CC_FOR_BUILD -E $dummy.c 2>/dev/null | grep ^LIBC=` echo ${UNAME_MACHINE}-unknown-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'`${LIBC:+-$LIBC} exit 0 ;; i*:CYGWIN*:*) echo ${UNAME_MACHINE}-pc-cygwin exit 0 ;; i*:MINGW*:*) echo ${UNAME_MACHINE}-pc-mingw32 exit 0 ;; i*:PW*:*) echo ${UNAME_MACHINE}-pc-pw32 exit 0 ;; x86:Interix*:[34]*) echo i586-pc-interix${UNAME_RELEASE}|sed -e 's/\..*//' exit 0 ;; [345]86:Windows_95:* | [345]86:Windows_98:* | [345]86:Windows_NT:*) echo i${UNAME_MACHINE}-pc-mks exit 0 ;; i*:Windows_NT*:* | Pentium*:Windows_NT*:*) # How do we know it's Interix rather than the generic POSIX subsystem? # It also conflicts with pre-2.0 versions of AT&T UWIN. Should we # UNAME_MACHINE based on the output of uname instead of i386? echo i586-pc-interix exit 0 ;; i*:UWIN*:*) echo ${UNAME_MACHINE}-pc-uwin exit 0 ;; p*:CYGWIN*:*) echo powerpcle-unknown-cygwin exit 0 ;; prep*:SunOS:5.*:*) echo powerpcle-unknown-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` exit 0 ;; *:GNU:*:*) echo `echo ${UNAME_MACHINE}|sed -e 's,[-/].*$,,'`-unknown-gnu`echo ${UNAME_RELEASE}|sed -e 's,/.*$,,'` exit 0 ;; i*86:Minix:*:*) echo ${UNAME_MACHINE}-pc-minix exit 0 ;; arm*:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-gnu exit 0 ;; cris:Linux:*:*) echo cris-axis-linux-gnu exit 0 ;; ia64:Linux:*:*) echo ${UNAME_MACHINE}-${VENDOR:-unknown}-linux-gnu exit 0 ;; m68*:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-gnu exit 0 ;; mips:Linux:*:*) eval $set_cc_for_build sed 's/^ //' << EOF >$dummy.c #undef CPU #undef mips #undef mipsel #if defined(__MIPSEL__) || defined(__MIPSEL) || defined(_MIPSEL) || defined(MIPSEL) CPU=mipsel #else #if defined(__MIPSEB__) || defined(__MIPSEB) || defined(_MIPSEB) || defined(MIPSEB) CPU=mips #else CPU= #endif #endif EOF eval `$CC_FOR_BUILD -E $dummy.c 2>/dev/null | grep ^CPU=` test x"${CPU}" != x && echo "${CPU}-unknown-linux-gnu" && exit 0 ;; mips64:Linux:*:*) eval $set_cc_for_build sed 's/^ //' << EOF >$dummy.c #undef CPU #undef mips64 #undef mips64el #if defined(__MIPSEL__) || defined(__MIPSEL) || defined(_MIPSEL) || defined(MIPSEL) CPU=mips64el #else #if defined(__MIPSEB__) || defined(__MIPSEB) || defined(_MIPSEB) || defined(MIPSEB) CPU=mips64 #else CPU= #endif #endif EOF eval `$CC_FOR_BUILD -E $dummy.c 2>/dev/null | grep ^CPU=` test x"${CPU}" != x && echo "${CPU}-unknown-linux-gnu" && exit 0 ;; ppc:Linux:*:*) echo powerpc-${VENDOR:-unknown}-linux-gnu exit 0 ;; ppc64:Linux:*:*) echo powerpc64-${VENDOR:-unknown}-linux-gnu exit 0 ;; alpha:Linux:*:*) case `sed -n '/^cpu model/s/^.*: \(.*\)/\1/p' < /proc/cpuinfo` in EV5) UNAME_MACHINE=alphaev5 ;; EV56) UNAME_MACHINE=alphaev56 ;; PCA56) UNAME_MACHINE=alphapca56 ;; PCA57) UNAME_MACHINE=alphapca56 ;; EV6) UNAME_MACHINE=alphaev6 ;; EV67) UNAME_MACHINE=alphaev67 ;; EV68*) UNAME_MACHINE=alphaev68 ;; esac objdump --private-headers /bin/sh | grep ld.so.1 >/dev/null if test "$?" 
= 0 ; then LIBC="libc1" ; else LIBC="" ; fi echo ${UNAME_MACHINE}-unknown-linux-gnu${LIBC} exit 0 ;; parisc:Linux:*:* | hppa:Linux:*:*) # Look for CPU level case `grep '^cpu[^a-z]*:' /proc/cpuinfo 2>/dev/null | cut -d' ' -f2` in PA7*) echo hppa1.1-unknown-linux-gnu ;; PA8*) echo hppa2.0-unknown-linux-gnu ;; *) echo hppa-unknown-linux-gnu ;; esac exit 0 ;; parisc64:Linux:*:* | hppa64:Linux:*:*) echo hppa64-unknown-linux-gnu exit 0 ;; s390:Linux:*:* | s390x:Linux:*:*) echo ${UNAME_MACHINE}-${VENDOR:-ibm}-linux-gnu exit 0 ;; sh64*:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-gnu exit 0 ;; sh*:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-gnu exit 0 ;; sparc:Linux:*:* | sparc64:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-gnu exit 0 ;; x86_64:Linux:*:*) echo x86_64-${VENDOR:-unknown}-linux-gnu exit 0 ;; i*86:Linux:*:*) # The BFD linker knows what the default object file format is, so # first see if it will tell us. cd to the root directory to prevent # problems with other programs or directories called `ld' in the path. # Set LC_ALL=C to ensure ld outputs messages in English. ld_supported_targets=`cd /; LC_ALL=C ld --help 2>&1 \ | sed -ne '/supported targets:/!d s/[ ][ ]*/ /g s/.*supported targets: *// s/ .*// p'` case "$ld_supported_targets" in elf32-i386) TENTATIVE="${UNAME_MACHINE}-pc-linux-gnu" ;; a.out-i386-linux) echo "${UNAME_MACHINE}-pc-linux-gnuaout" exit 0 ;; coff-i386) echo "${UNAME_MACHINE}-pc-linux-gnucoff" exit 0 ;; "") # Either a pre-BFD a.out linker (linux-gnuoldld) or # one that does not give us useful --help. echo "${UNAME_MACHINE}-pc-linux-gnuoldld" exit 0 ;; esac # Determine whether the default compiler is a.out or elf eval $set_cc_for_build sed 's/^ //' << EOF >$dummy.c #include #ifdef __ELF__ # ifdef __GLIBC__ # if __GLIBC__ >= 2 LIBC=gnu # else LIBC=gnulibc1 # endif # else LIBC=gnulibc1 # endif #else #ifdef __INTEL_COMPILER LIBC=gnu #else LIBC=gnuaout #endif #endif EOF eval `$CC_FOR_BUILD -E $dummy.c 2>/dev/null | grep ^LIBC=` test x"${LIBC}" != x && echo "${UNAME_MACHINE}-${VENDOR:-pc}-linux-${LIBC}" && exit 0 test x"${TENTATIVE}" != x && echo "${TENTATIVE}" && exit 0 ;; i*86:DYNIX/ptx:4*:*) # ptx 4.0 does uname -s correctly, with DYNIX/ptx in there. # earlier versions are messed up and put the nodename in both # sysname and nodename. echo i386-sequent-sysv4 exit 0 ;; i*86:UNIX_SV:4.2MP:2.*) # Unixware is an offshoot of SVR4, but it has its own version # number series starting with 2... # I am not positive that other SVR4 systems won't match this, # I just have to hope. -- rms. # Use sysv4.2uw... so that sysv4* matches it. echo ${UNAME_MACHINE}-pc-sysv4.2uw${UNAME_VERSION} exit 0 ;; i*86:OS/2:*:*) # If we were able to find `uname', then EMX Unix compatibility # is probably installed. 
echo ${UNAME_MACHINE}-pc-os2-emx exit 0 ;; i*86:XTS-300:*:STOP) echo ${UNAME_MACHINE}-unknown-stop exit 0 ;; i*86:atheos:*:*) echo ${UNAME_MACHINE}-unknown-atheos exit 0 ;; i*86:LynxOS:2.*:* | i*86:LynxOS:3.[01]*:* | i*86:LynxOS:4.0*:*) echo i386-unknown-lynxos${UNAME_RELEASE} exit 0 ;; i*86:*DOS:*:*) echo ${UNAME_MACHINE}-pc-msdosdjgpp exit 0 ;; i*86:*:4.*:* | i*86:SYSTEM_V:4.*:*) UNAME_REL=`echo ${UNAME_RELEASE} | sed 's/\/MP$//'` if grep Novell /usr/include/link.h >/dev/null 2>/dev/null; then echo ${UNAME_MACHINE}-univel-sysv${UNAME_REL} else echo ${UNAME_MACHINE}-pc-sysv${UNAME_REL} fi exit 0 ;; i*86:*:5:[78]*) case `/bin/uname -X | grep "^Machine"` in *486*) UNAME_MACHINE=i486 ;; *Pentium) UNAME_MACHINE=i586 ;; *Pent*|*Celeron) UNAME_MACHINE=i686 ;; esac echo ${UNAME_MACHINE}-unknown-sysv${UNAME_RELEASE}${UNAME_SYSTEM}${UNAME_VERSION} exit 0 ;; i*86:*:3.2:*) if test -f /usr/options/cb.name; then UNAME_REL=`sed -n 's/.*Version //p' /dev/null >/dev/null ; then UNAME_REL=`(/bin/uname -X|grep Release|sed -e 's/.*= //')` (/bin/uname -X|grep i80486 >/dev/null) && UNAME_MACHINE=i486 (/bin/uname -X|grep '^Machine.*Pentium' >/dev/null) \ && UNAME_MACHINE=i586 (/bin/uname -X|grep '^Machine.*Pent *II' >/dev/null) \ && UNAME_MACHINE=i686 (/bin/uname -X|grep '^Machine.*Pentium Pro' >/dev/null) \ && UNAME_MACHINE=i686 echo ${UNAME_MACHINE}-pc-sco$UNAME_REL else echo ${UNAME_MACHINE}-pc-sysv32 fi exit 0 ;; pc:*:*:*) # Left here for compatibility: # uname -m prints for DJGPP always 'pc', but it prints nothing about # the processor, so we play safe by assuming i386. echo i386-pc-msdosdjgpp exit 0 ;; Intel:Mach:3*:*) echo i386-pc-mach3 exit 0 ;; paragon:*:*:*) echo i860-intel-osf1 exit 0 ;; i860:*:4.*:*) # i860-SVR4 if grep Stardent /usr/include/sys/uadmin.h >/dev/null 2>&1 ; then echo i860-stardent-sysv${UNAME_RELEASE} # Stardent Vistra i860-SVR4 else # Add other i860-SVR4 vendors below as they are discovered. 
echo i860-unknown-sysv${UNAME_RELEASE} # Unknown i860-SVR4 fi exit 0 ;; mini*:CTIX:SYS*5:*) # "miniframe" echo m68010-convergent-sysv exit 0 ;; mc68k:UNIX:SYSTEM5:3.51m) echo m68k-convergent-sysv exit 0 ;; M680?0:D-NIX:5.3:*) echo m68k-diab-dnix exit 0 ;; M68*:*:R3V[567]*:*) test -r /sysV68 && echo 'm68k-motorola-sysv' && exit 0 ;; 3[34]??:*:4.0:3.0 | 3[34]??A:*:4.0:3.0 | 3[34]??,*:*:4.0:3.0 | 3[34]??/*:*:4.0:3.0 | 4400:*:4.0:3.0 | 4850:*:4.0:3.0 | SKA40:*:4.0:3.0 | SDS2:*:4.0:3.0 | SHG2:*:4.0:3.0) OS_REL='' test -r /etc/.relid \ && OS_REL=.`sed -n 's/[^ ]* [^ ]* \([0-9][0-9]\).*/\1/p' < /etc/.relid` /bin/uname -p 2>/dev/null | grep 86 >/dev/null \ && echo i486-ncr-sysv4.3${OS_REL} && exit 0 /bin/uname -p 2>/dev/null | /bin/grep entium >/dev/null \ && echo i586-ncr-sysv4.3${OS_REL} && exit 0 ;; 3[34]??:*:4.0:* | 3[34]??,*:*:4.0:*) /bin/uname -p 2>/dev/null | grep 86 >/dev/null \ && echo i486-ncr-sysv4 && exit 0 ;; m68*:LynxOS:2.*:* | m68*:LynxOS:3.0*:*) echo m68k-unknown-lynxos${UNAME_RELEASE} exit 0 ;; mc68030:UNIX_System_V:4.*:*) echo m68k-atari-sysv4 exit 0 ;; TSUNAMI:LynxOS:2.*:*) echo sparc-unknown-lynxos${UNAME_RELEASE} exit 0 ;; rs6000:LynxOS:2.*:*) echo rs6000-unknown-lynxos${UNAME_RELEASE} exit 0 ;; PowerPC:LynxOS:2.*:* | PowerPC:LynxOS:3.[01]*:* | PowerPC:LynxOS:4.0*:*) echo powerpc-unknown-lynxos${UNAME_RELEASE} exit 0 ;; SM[BE]S:UNIX_SV:*:*) echo mips-dde-sysv${UNAME_RELEASE} exit 0 ;; RM*:ReliantUNIX-*:*:*) echo mips-sni-sysv4 exit 0 ;; RM*:SINIX-*:*:*) echo mips-sni-sysv4 exit 0 ;; *:SINIX-*:*:*) if uname -p 2>/dev/null >/dev/null ; then UNAME_MACHINE=`(uname -p) 2>/dev/null` echo ${UNAME_MACHINE}-sni-sysv4 else echo ns32k-sni-sysv fi exit 0 ;; PENTIUM:*:4.0*:*) # Unisys `ClearPath HMP IX 4000' SVR4/MP effort # says echo i586-unisys-sysv4 exit 0 ;; *:UNIX_System_V:4*:FTX*) # From Gerald Hewes . # How about differentiating between stratus architectures? -djm echo hppa1.1-stratus-sysv4 exit 0 ;; *:*:*:FTX*) # From seanf@swdc.stratus.com. echo i860-stratus-sysv4 exit 0 ;; *:VOS:*:*) # From Paul.Green@stratus.com. echo hppa1.1-stratus-vos exit 0 ;; mc68*:A/UX:*:*) echo m68k-apple-aux${UNAME_RELEASE} exit 0 ;; news*:NEWS-OS:6*:*) echo mips-sony-newsos6 exit 0 ;; R[34]000:*System_V*:*:* | R4000:UNIX_SYSV:*:* | R*000:UNIX_SV:*:*) if [ -d /usr/nec ]; then echo mips-nec-sysv${UNAME_RELEASE} else echo mips-unknown-sysv${UNAME_RELEASE} fi exit 0 ;; BeBox:BeOS:*:*) # BeOS running on hardware made by Be, PPC only. echo powerpc-be-beos exit 0 ;; BeMac:BeOS:*:*) # BeOS running on Mac or Mac clone, PPC only. echo powerpc-apple-beos exit 0 ;; BePC:BeOS:*:*) # BeOS running on Intel PC compatible. 
echo i586-pc-beos exit 0 ;; SX-4:SUPER-UX:*:*) echo sx4-nec-superux${UNAME_RELEASE} exit 0 ;; SX-5:SUPER-UX:*:*) echo sx5-nec-superux${UNAME_RELEASE} exit 0 ;; SX-6:SUPER-UX:*:*) echo sx6-nec-superux${UNAME_RELEASE} exit 0 ;; Power*:Rhapsody:*:*) echo powerpc-apple-rhapsody${UNAME_RELEASE} exit 0 ;; *:Rhapsody:*:*) echo ${UNAME_MACHINE}-apple-rhapsody${UNAME_RELEASE} exit 0 ;; *:Darwin:*:*) case `uname -p` in *86) UNAME_PROCESSOR=i686 ;; powerpc) UNAME_PROCESSOR=powerpc ;; esac echo ${UNAME_PROCESSOR}-apple-darwin${UNAME_RELEASE} exit 0 ;; *:procnto*:*:* | *:QNX:[0123456789]*:*) UNAME_PROCESSOR=`uname -p` if test "$UNAME_PROCESSOR" = "x86"; then UNAME_PROCESSOR=i386 UNAME_MACHINE=pc fi echo ${UNAME_PROCESSOR}-${UNAME_MACHINE}-nto-qnx${UNAME_RELEASE} exit 0 ;; *:QNX:*:4*) echo i386-pc-qnx exit 0 ;; NSR-[DGKLNPTVW]:NONSTOP_KERNEL:*:*) echo nsr-tandem-nsk${UNAME_RELEASE} exit 0 ;; *:NonStop-UX:*:*) echo mips-compaq-nonstopux exit 0 ;; BS2000:POSIX*:*:*) echo bs2000-siemens-sysv exit 0 ;; DS/*:UNIX_System_V:*:*) echo ${UNAME_MACHINE}-${UNAME_SYSTEM}-${UNAME_RELEASE} exit 0 ;; *:Plan9:*:*) # "uname -m" is not consistent, so use $cputype instead. 386 # is converted to i386 for consistency with other x86 # operating systems. if test "$cputype" = "386"; then UNAME_MACHINE=i386 else UNAME_MACHINE="$cputype" fi echo ${UNAME_MACHINE}-unknown-plan9 exit 0 ;; *:TOPS-10:*:*) echo pdp10-unknown-tops10 exit 0 ;; *:TENEX:*:*) echo pdp10-unknown-tenex exit 0 ;; KS10:TOPS-20:*:* | KL10:TOPS-20:*:* | TYPE4:TOPS-20:*:*) echo pdp10-dec-tops20 exit 0 ;; XKL-1:TOPS-20:*:* | TYPE5:TOPS-20:*:*) echo pdp10-xkl-tops20 exit 0 ;; *:TOPS-20:*:*) echo pdp10-unknown-tops20 exit 0 ;; *:ITS:*:*) echo pdp10-unknown-its exit 0 ;; SEI:*:*:SEIUX) echo mips-sei-seiux${UNAME_RELEASE} exit 0 ;; esac #echo '(No uname command or uname output not recognized.)' 1>&2 #echo "${UNAME_MACHINE}:${UNAME_SYSTEM}:${UNAME_RELEASE}:${UNAME_VERSION}" 1>&2 eval $set_cc_for_build cat >$dummy.c < # include #endif main () { #if defined (sony) #if defined (MIPSEB) /* BFD wants "bsd" instead of "newsos". Perhaps BFD should be changed, I don't know.... */ printf ("mips-sony-bsd\n"); exit (0); #else #include printf ("m68k-sony-newsos%s\n", #ifdef NEWSOS4 "4" #else "" #endif ); exit (0); #endif #endif #if defined (__arm) && defined (__acorn) && defined (__unix) printf ("arm-acorn-riscix"); exit (0); #endif #if defined (hp300) && !defined (hpux) printf ("m68k-hp-bsd\n"); exit (0); #endif #if defined (NeXT) #if !defined (__ARCHITECTURE__) #define __ARCHITECTURE__ "m68k" #endif int version; version=`(hostinfo | sed -n 's/.*NeXT Mach \([0-9]*\).*/\1/p') 2>/dev/null`; if (version < 4) printf ("%s-next-nextstep%d\n", __ARCHITECTURE__, version); else printf ("%s-next-openstep%d\n", __ARCHITECTURE__, version); exit (0); #endif #if defined (MULTIMAX) || defined (n16) #if defined (UMAXV) printf ("ns32k-encore-sysv\n"); exit (0); #else #if defined (CMU) printf ("ns32k-encore-mach\n"); exit (0); #else printf ("ns32k-encore-bsd\n"); exit (0); #endif #endif #endif #if defined (__386BSD__) printf ("i386-pc-bsd\n"); exit (0); #endif #if defined (sequent) #if defined (i386) printf ("i386-sequent-dynix\n"); exit (0); #endif #if defined (ns32000) printf ("ns32k-sequent-dynix\n"); exit (0); #endif #endif #if defined (_SEQUENT_) struct utsname un; uname(&un); if (strncmp(un.version, "V2", 2) == 0) { printf ("i386-sequent-ptx2\n"); exit (0); } if (strncmp(un.version, "V1", 2) == 0) { /* XXX is V1 correct? 
*/ printf ("i386-sequent-ptx1\n"); exit (0); } printf ("i386-sequent-ptx\n"); exit (0); #endif #if defined (vax) # if !defined (ultrix) # include # if defined (BSD) # if BSD == 43 printf ("vax-dec-bsd4.3\n"); exit (0); # else # if BSD == 199006 printf ("vax-dec-bsd4.3reno\n"); exit (0); # else printf ("vax-dec-bsd\n"); exit (0); # endif # endif # else printf ("vax-dec-bsd\n"); exit (0); # endif # else printf ("vax-dec-ultrix\n"); exit (0); # endif #endif #if defined (alliant) && defined (i860) printf ("i860-alliant-bsd\n"); exit (0); #endif exit (1); } EOF $CC_FOR_BUILD -o $dummy $dummy.c 2>/dev/null && $dummy && exit 0 # Apollos put the system type in the environment. test -d /usr/apollo && { echo ${ISP}-apollo-${SYSTYPE}; exit 0; } # Convex versions that predate uname can use getsysinfo(1) if [ -x /usr/convex/getsysinfo ] then case `getsysinfo -f cpu_type` in c1*) echo c1-convex-bsd exit 0 ;; c2*) if getsysinfo -f scalar_acc then echo c32-convex-bsd else echo c2-convex-bsd fi exit 0 ;; c34*) echo c34-convex-bsd exit 0 ;; c38*) echo c38-convex-bsd exit 0 ;; c4*) echo c4-convex-bsd exit 0 ;; esac fi cat >&2 < in order to provide the needed information to handle your system. config.guess timestamp = $timestamp uname -m = `(uname -m) 2>/dev/null || echo unknown` uname -r = `(uname -r) 2>/dev/null || echo unknown` uname -s = `(uname -s) 2>/dev/null || echo unknown` uname -v = `(uname -v) 2>/dev/null || echo unknown` /usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null` /bin/uname -X = `(/bin/uname -X) 2>/dev/null` hostinfo = `(hostinfo) 2>/dev/null` /bin/universe = `(/bin/universe) 2>/dev/null` /usr/bin/arch -k = `(/usr/bin/arch -k) 2>/dev/null` /bin/arch = `(/bin/arch) 2>/dev/null` /usr/bin/oslevel = `(/usr/bin/oslevel) 2>/dev/null` /usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null` UNAME_MACHINE = ${UNAME_MACHINE} UNAME_RELEASE = ${UNAME_RELEASE} UNAME_SYSTEM = ${UNAME_SYSTEM} UNAME_VERSION = ${UNAME_VERSION} EOF exit 1 # Local variables: # eval: (add-hook 'write-file-hooks 'time-stamp) # time-stamp-start: "timestamp='" # time-stamp-format: "%:y-%02m-%02d" # time-stamp-end: "'" # End: sphinx-2.0.4-release/api/libsphinxclient/configure0000755000176700017710000237147011102461317021634 0ustar deogardeogar#! /bin/sh # Guess values for system-dependent variables and create Makefiles. # Generated by GNU Autoconf 2.59 for libsphinxclient 0.0.1. # # Copyright (C) 2003 Free Software Foundation, Inc. # This configure script is free software; the Free Software Foundation # gives unlimited permission to copy, distribute and modify it. ## --------------------- ## ## M4sh Initialization. ## ## --------------------- ## # Be Bourne compatible if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then emulate sh NULLCMD=: # Zsh 3.x and 4.x performs word splitting on ${1+"$@"}, which # is contrary to our usage. Disable this feature. alias -g '${1+"$@"}'='"$@"' elif test -n "${BASH_VERSION+set}" && (set -o posix) >/dev/null 2>&1; then set -o posix fi DUALCASE=1; export DUALCASE # for MKS sh # Support unset when possible. if ( (MAIL=60; unset MAIL) || exit) >/dev/null 2>&1; then as_unset=unset else as_unset=false fi # Work around bugs in pre-3.0 UWIN ksh. $as_unset ENV MAIL MAILPATH PS1='$ ' PS2='> ' PS4='+ ' # NLS nuisances. 
for as_var in \ LANG LANGUAGE LC_ADDRESS LC_ALL LC_COLLATE LC_CTYPE LC_IDENTIFICATION \ LC_MEASUREMENT LC_MESSAGES LC_MONETARY LC_NAME LC_NUMERIC LC_PAPER \ LC_TELEPHONE LC_TIME do if (set +x; test -z "`(eval $as_var=C; export $as_var) 2>&1`"); then eval $as_var=C; export $as_var else $as_unset $as_var fi done # Required to use basename. if expr a : '\(a\)' >/dev/null 2>&1; then as_expr=expr else as_expr=false fi if (basename /) >/dev/null 2>&1 && test "X`basename / 2>&1`" = "X/"; then as_basename=basename else as_basename=false fi # Name of the executable. as_me=`$as_basename "$0" || $as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ X"$0" : 'X\(//\)$' \| \ X"$0" : 'X\(/\)$' \| \ . : '\(.\)' 2>/dev/null || echo X/"$0" | sed '/^.*\/\([^/][^/]*\)\/*$/{ s//\1/; q; } /^X\/\(\/\/\)$/{ s//\1/; q; } /^X\/\(\/\).*/{ s//\1/; q; } s/.*/./; q'` # PATH needs CR, and LINENO needs CR and PATH. # Avoid depending upon Character Ranges. as_cr_letters='abcdefghijklmnopqrstuvwxyz' as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' as_cr_Letters=$as_cr_letters$as_cr_LETTERS as_cr_digits='0123456789' as_cr_alnum=$as_cr_Letters$as_cr_digits # The user is always right. if test "${PATH_SEPARATOR+set}" != set; then echo "#! /bin/sh" >conf$$.sh echo "exit 0" >>conf$$.sh chmod +x conf$$.sh if (PATH="/nonexistent;."; conf$$.sh) >/dev/null 2>&1; then PATH_SEPARATOR=';' else PATH_SEPARATOR=: fi rm -f conf$$.sh fi as_lineno_1=$LINENO as_lineno_2=$LINENO as_lineno_3=`(expr $as_lineno_1 + 1) 2>/dev/null` test "x$as_lineno_1" != "x$as_lineno_2" && test "x$as_lineno_3" = "x$as_lineno_2" || { # Find who we are. Look in the path if we contain no path at all # relative or not. case $0 in *[\\/]* ) as_myself=$0 ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break done ;; esac # We did not find ourselves, most probably we were run as `sh COMMAND' # in which case we are not to be found in the path. if test "x$as_myself" = x; then as_myself=$0 fi if test ! -f "$as_myself"; then { echo "$as_me: error: cannot find myself; rerun with an absolute path" >&2 { (exit 1); exit 1; }; } fi case $CONFIG_SHELL in '') as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in /bin$PATH_SEPARATOR/usr/bin$PATH_SEPARATOR$PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for as_base in sh bash ksh sh5; do case $as_dir in /*) if ("$as_dir/$as_base" -c ' as_lineno_1=$LINENO as_lineno_2=$LINENO as_lineno_3=`(expr $as_lineno_1 + 1) 2>/dev/null` test "x$as_lineno_1" != "x$as_lineno_2" && test "x$as_lineno_3" = "x$as_lineno_2" ') 2>/dev/null; then $as_unset BASH_ENV || test "${BASH_ENV+set}" != set || { BASH_ENV=; export BASH_ENV; } $as_unset ENV || test "${ENV+set}" != set || { ENV=; export ENV; } CONFIG_SHELL=$as_dir/$as_base export CONFIG_SHELL exec "$CONFIG_SHELL" "$0" ${1+"$@"} fi;; esac done done ;; esac # Create $as_me.lineno as a copy of $as_myself, but with $LINENO # uniformly replaced by the line number. The first 'sed' inserts a # line-number line before each line; the second 'sed' does the real # work. The second script uses 'N' to pair each line-number line # with the numbered line, and appends trailing '-' during # substitution so that $LINENO is not a special case at line end. # (Raja R Harinath suggested sed '=', and Paul Eggert wrote the # second 'sed' script. Blame Lee E. McMahon for sed's syntax. 
:-) sed '=' <$as_myself | sed ' N s,$,-, : loop s,^\(['$as_cr_digits']*\)\(.*\)[$]LINENO\([^'$as_cr_alnum'_]\),\1\2\1\3, t loop s,-$,, s,^['$as_cr_digits']*\n,, ' >$as_me.lineno && chmod +x $as_me.lineno || { echo "$as_me: error: cannot create $as_me.lineno; rerun with a POSIX shell" >&2 { (exit 1); exit 1; }; } # Don't try to exec as it changes $[0], causing all sort of problems # (the dirname of $[0] is not the place where we might find the # original and so on. Autoconf is especially sensible to this). . ./$as_me.lineno # Exit status is that of the last command. exit } case `echo "testing\c"; echo 1,2,3`,`echo -n testing; echo 1,2,3` in *c*,-n*) ECHO_N= ECHO_C=' ' ECHO_T=' ' ;; *c*,* ) ECHO_N=-n ECHO_C= ECHO_T= ;; *) ECHO_N= ECHO_C='\c' ECHO_T= ;; esac if expr a : '\(a\)' >/dev/null 2>&1; then as_expr=expr else as_expr=false fi rm -f conf$$ conf$$.exe conf$$.file echo >conf$$.file if ln -s conf$$.file conf$$ 2>/dev/null; then # We could just check for DJGPP; but this test a) works b) is more generic # and c) will remain valid once DJGPP supports symlinks (DJGPP 2.04). if test -f conf$$.exe; then # Don't use ln at all; we don't have any links as_ln_s='cp -p' else as_ln_s='ln -s' fi elif ln conf$$.file conf$$ 2>/dev/null; then as_ln_s=ln else as_ln_s='cp -p' fi rm -f conf$$ conf$$.exe conf$$.file if mkdir -p . 2>/dev/null; then as_mkdir_p=: else test -d ./-p && rmdir ./-p as_mkdir_p=false fi as_executable_p="test -f" # Sed expression to map a string onto a valid CPP name. as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" # Sed expression to map a string onto a valid variable name. as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" # IFS # We need space, tab and new line, in precisely that order. as_nl=' ' IFS=" $as_nl" # CDPATH. $as_unset CDPATH # Check that we are running under the correct shell. SHELL=${CONFIG_SHELL-/bin/sh} case X$ECHO in X*--fallback-echo) # Remove one level of quotation (which was required for Make). ECHO=`echo "$ECHO" | sed 's,\\\\\$\\$0,'$0','` ;; esac echo=${ECHO-echo} if test "X$1" = X--no-reexec; then # Discard the --no-reexec flag, and continue. shift elif test "X$1" = X--fallback-echo; then # Avoid inline document here, it may be left over : elif test "X`($echo '\t') 2>/dev/null`" = 'X\t' ; then # Yippee, $echo works! : else # Restart under the correct shell. exec $SHELL "$0" --no-reexec ${1+"$@"} fi if test "X$1" = X--fallback-echo; then # used as fallback echo shift cat </dev/null && echo_test_string="`eval $cmd`" && (test "X$echo_test_string" = "X$echo_test_string") 2>/dev/null then break fi done fi if test "X`($echo '\t') 2>/dev/null`" = 'X\t' && echo_testing_string=`($echo "$echo_test_string") 2>/dev/null` && test "X$echo_testing_string" = "X$echo_test_string"; then : else # The Solaris, AIX, and Digital Unix default echo programs unquote # backslashes. This makes it impossible to quote backslashes using # echo "$something" | sed 's/\\/\\\\/g' # # So, first we look for a working echo in the user's PATH. lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR for dir in $PATH /usr/ucb; do IFS="$lt_save_ifs" if (test -f $dir/echo || test -f $dir/echo$ac_exeext) && test "X`($dir/echo '\t') 2>/dev/null`" = 'X\t' && echo_testing_string=`($dir/echo "$echo_test_string") 2>/dev/null` && test "X$echo_testing_string" = "X$echo_test_string"; then echo="$dir/echo" break fi done IFS="$lt_save_ifs" if test "X$echo" = Xecho; then # We didn't find a better echo, so look for alternatives. 
if test "X`(print -r '\t') 2>/dev/null`" = 'X\t' && echo_testing_string=`(print -r "$echo_test_string") 2>/dev/null` && test "X$echo_testing_string" = "X$echo_test_string"; then # This shell has a builtin print -r that does the trick. echo='print -r' elif (test -f /bin/ksh || test -f /bin/ksh$ac_exeext) && test "X$CONFIG_SHELL" != X/bin/ksh; then # If we have ksh, try running configure again with it. ORIGINAL_CONFIG_SHELL=${CONFIG_SHELL-/bin/sh} export ORIGINAL_CONFIG_SHELL CONFIG_SHELL=/bin/ksh export CONFIG_SHELL exec $CONFIG_SHELL "$0" --no-reexec ${1+"$@"} else # Try using printf. echo='printf %s\n' if test "X`($echo '\t') 2>/dev/null`" = 'X\t' && echo_testing_string=`($echo "$echo_test_string") 2>/dev/null` && test "X$echo_testing_string" = "X$echo_test_string"; then # Cool, printf works : elif echo_testing_string=`($ORIGINAL_CONFIG_SHELL "$0" --fallback-echo '\t') 2>/dev/null` && test "X$echo_testing_string" = 'X\t' && echo_testing_string=`($ORIGINAL_CONFIG_SHELL "$0" --fallback-echo "$echo_test_string") 2>/dev/null` && test "X$echo_testing_string" = "X$echo_test_string"; then CONFIG_SHELL=$ORIGINAL_CONFIG_SHELL export CONFIG_SHELL SHELL="$CONFIG_SHELL" export SHELL echo="$CONFIG_SHELL $0 --fallback-echo" elif echo_testing_string=`($CONFIG_SHELL "$0" --fallback-echo '\t') 2>/dev/null` && test "X$echo_testing_string" = 'X\t' && echo_testing_string=`($CONFIG_SHELL "$0" --fallback-echo "$echo_test_string") 2>/dev/null` && test "X$echo_testing_string" = "X$echo_test_string"; then echo="$CONFIG_SHELL $0 --fallback-echo" else # maybe with a smaller string... prev=: for cmd in 'echo test' 'sed 2q "$0"' 'sed 10q "$0"' 'sed 20q "$0"' 'sed 50q "$0"'; do if (test "X$echo_test_string" = "X`eval $cmd`") 2>/dev/null then break fi prev="$cmd" done if test "$prev" != 'sed 50q "$0"'; then echo_test_string=`eval $prev` export echo_test_string exec ${ORIGINAL_CONFIG_SHELL-${CONFIG_SHELL-/bin/sh}} "$0" ${1+"$@"} else # Oops. We lost completely, so just stick with echo. echo=echo fi fi fi fi fi fi # Copy echo and quote the copy suitably for passing to libtool from # the Makefile, instead of quoting the original, which is used later. ECHO=$echo if test "X$ECHO" = "X$CONFIG_SHELL $0 --fallback-echo"; then ECHO="$CONFIG_SHELL \\\$\$0 --fallback-echo" fi tagnames=${tagnames+${tagnames},}CXX tagnames=${tagnames+${tagnames},}F77 # Name of the host. # hostname on some systems (SVR3.2, Linux) returns a bogus exit status, # so uname gets run too. ac_hostname=`(hostname || uname -n) 2>/dev/null | sed 1q` exec 6>&1 # # Initializations. # ac_default_prefix=/usr/local ac_config_libobj_dir=. cross_compiling=no subdirs= MFLAGS= MAKEFLAGS= SHELL=${CONFIG_SHELL-/bin/sh} # Maximum number of lines to put in a shell here document. # This variable seems obsolete. It should probably be removed, and # only ac_max_sed_lines should be used. : ${ac_max_here_lines=38} # Identity of this package. PACKAGE_NAME='libsphinxclient' PACKAGE_TARNAME='libsphinxclient' PACKAGE_VERSION='0.0.1' PACKAGE_STRING='libsphinxclient 0.0.1' PACKAGE_BUGREPORT='' ac_unique_file="README" ac_unique_file="sphinxclient.h" # Factoring default headers for most tests. 
ac_includes_default="\ #include #if HAVE_SYS_TYPES_H # include #endif #if HAVE_SYS_STAT_H # include #endif #if STDC_HEADERS # include # include #else # if HAVE_STDLIB_H # include # endif #endif #if HAVE_STRING_H # if !STDC_HEADERS && HAVE_MEMORY_H # include # endif # include #endif #if HAVE_STRINGS_H # include #endif #if HAVE_INTTYPES_H # include #else # if HAVE_STDINT_H # include # endif #endif #if HAVE_UNISTD_H # include #endif" ac_subst_vars='SHELL PATH_SEPARATOR PACKAGE_NAME PACKAGE_TARNAME PACKAGE_VERSION PACKAGE_STRING PACKAGE_BUGREPORT exec_prefix prefix program_transform_name bindir sbindir libexecdir datadir sysconfdir sharedstatedir localstatedir libdir includedir oldincludedir infodir mandir build_alias host_alias target_alias DEFS ECHO_C ECHO_N ECHO_T LIBS INSTALL_PROGRAM INSTALL_SCRIPT INSTALL_DATA CYGPATH_W PACKAGE VERSION ACLOCAL AUTOCONF AUTOMAKE AUTOHEADER MAKEINFO install_sh STRIP ac_ct_STRIP INSTALL_STRIP_PROGRAM mkdir_p AWK SET_MAKE am__leading_dot AMTAR am__tar am__untar MAINTAINER_MODE_TRUE MAINTAINER_MODE_FALSE MAINT CC CFLAGS LDFLAGS CPPFLAGS ac_ct_CC EXEEXT OBJEXT DEPDIR am__include am__quote AMDEP_TRUE AMDEP_FALSE AMDEPBACKSLASH CCDEPMODE am__fastdepCC_TRUE am__fastdepCC_FALSE build build_cpu build_vendor build_os host host_cpu host_vendor host_os EGREP LN_S ECHO AR ac_ct_AR RANLIB ac_ct_RANLIB CPP CXXCPP LIBTOOL INSTALL_STRIP_FLAG LIBOBJS LTLIBOBJS' ac_subst_files='' # Initialize some variables set by options. ac_init_help= ac_init_version=false # The variables have the same names as the options, with # dashes changed to underlines. cache_file=/dev/null exec_prefix=NONE no_create= no_recursion= prefix=NONE program_prefix=NONE program_suffix=NONE program_transform_name=s,x,x, silent= site= srcdir= verbose= x_includes=NONE x_libraries=NONE # Installation directory options. # These are left unexpanded so users can "make install exec_prefix=/foo" # and all the variables that are supposed to be based on exec_prefix # by default will actually change. # Use braces instead of parens because sh, perl, etc. also accept them. bindir='${exec_prefix}/bin' sbindir='${exec_prefix}/sbin' libexecdir='${exec_prefix}/libexec' datadir='${prefix}/share' sysconfdir='${prefix}/etc' sharedstatedir='${prefix}/com' localstatedir='${prefix}/var' libdir='${exec_prefix}/lib' includedir='${prefix}/include' oldincludedir='/usr/include' infodir='${prefix}/info' mandir='${prefix}/man' ac_prev= for ac_option do # If the previous option needs an argument, assign it. if test -n "$ac_prev"; then eval "$ac_prev=\$ac_option" ac_prev= continue fi ac_optarg=`expr "x$ac_option" : 'x[^=]*=\(.*\)'` # Accept the important Cygnus configure options, so we can diagnose typos. 
case $ac_option in -bindir | --bindir | --bindi | --bind | --bin | --bi) ac_prev=bindir ;; -bindir=* | --bindir=* | --bindi=* | --bind=* | --bin=* | --bi=*) bindir=$ac_optarg ;; -build | --build | --buil | --bui | --bu) ac_prev=build_alias ;; -build=* | --build=* | --buil=* | --bui=* | --bu=*) build_alias=$ac_optarg ;; -cache-file | --cache-file | --cache-fil | --cache-fi \ | --cache-f | --cache- | --cache | --cach | --cac | --ca | --c) ac_prev=cache_file ;; -cache-file=* | --cache-file=* | --cache-fil=* | --cache-fi=* \ | --cache-f=* | --cache-=* | --cache=* | --cach=* | --cac=* | --ca=* | --c=*) cache_file=$ac_optarg ;; --config-cache | -C) cache_file=config.cache ;; -datadir | --datadir | --datadi | --datad | --data | --dat | --da) ac_prev=datadir ;; -datadir=* | --datadir=* | --datadi=* | --datad=* | --data=* | --dat=* \ | --da=*) datadir=$ac_optarg ;; -disable-* | --disable-*) ac_feature=`expr "x$ac_option" : 'x-*disable-\(.*\)'` # Reject names that are not valid shell variable names. expr "x$ac_feature" : ".*[^-_$as_cr_alnum]" >/dev/null && { echo "$as_me: error: invalid feature name: $ac_feature" >&2 { (exit 1); exit 1; }; } ac_feature=`echo $ac_feature | sed 's/-/_/g'` eval "enable_$ac_feature=no" ;; -enable-* | --enable-*) ac_feature=`expr "x$ac_option" : 'x-*enable-\([^=]*\)'` # Reject names that are not valid shell variable names. expr "x$ac_feature" : ".*[^-_$as_cr_alnum]" >/dev/null && { echo "$as_me: error: invalid feature name: $ac_feature" >&2 { (exit 1); exit 1; }; } ac_feature=`echo $ac_feature | sed 's/-/_/g'` case $ac_option in *=*) ac_optarg=`echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"`;; *) ac_optarg=yes ;; esac eval "enable_$ac_feature='$ac_optarg'" ;; -exec-prefix | --exec_prefix | --exec-prefix | --exec-prefi \ | --exec-pref | --exec-pre | --exec-pr | --exec-p | --exec- \ | --exec | --exe | --ex) ac_prev=exec_prefix ;; -exec-prefix=* | --exec_prefix=* | --exec-prefix=* | --exec-prefi=* \ | --exec-pref=* | --exec-pre=* | --exec-pr=* | --exec-p=* | --exec-=* \ | --exec=* | --exe=* | --ex=*) exec_prefix=$ac_optarg ;; -gas | --gas | --ga | --g) # Obsolete; use --with-gas. 
with_gas=yes ;; -help | --help | --hel | --he | -h) ac_init_help=long ;; -help=r* | --help=r* | --hel=r* | --he=r* | -hr*) ac_init_help=recursive ;; -help=s* | --help=s* | --hel=s* | --he=s* | -hs*) ac_init_help=short ;; -host | --host | --hos | --ho) ac_prev=host_alias ;; -host=* | --host=* | --hos=* | --ho=*) host_alias=$ac_optarg ;; -includedir | --includedir | --includedi | --included | --include \ | --includ | --inclu | --incl | --inc) ac_prev=includedir ;; -includedir=* | --includedir=* | --includedi=* | --included=* | --include=* \ | --includ=* | --inclu=* | --incl=* | --inc=*) includedir=$ac_optarg ;; -infodir | --infodir | --infodi | --infod | --info | --inf) ac_prev=infodir ;; -infodir=* | --infodir=* | --infodi=* | --infod=* | --info=* | --inf=*) infodir=$ac_optarg ;; -libdir | --libdir | --libdi | --libd) ac_prev=libdir ;; -libdir=* | --libdir=* | --libdi=* | --libd=*) libdir=$ac_optarg ;; -libexecdir | --libexecdir | --libexecdi | --libexecd | --libexec \ | --libexe | --libex | --libe) ac_prev=libexecdir ;; -libexecdir=* | --libexecdir=* | --libexecdi=* | --libexecd=* | --libexec=* \ | --libexe=* | --libex=* | --libe=*) libexecdir=$ac_optarg ;; -localstatedir | --localstatedir | --localstatedi | --localstated \ | --localstate | --localstat | --localsta | --localst \ | --locals | --local | --loca | --loc | --lo) ac_prev=localstatedir ;; -localstatedir=* | --localstatedir=* | --localstatedi=* | --localstated=* \ | --localstate=* | --localstat=* | --localsta=* | --localst=* \ | --locals=* | --local=* | --loca=* | --loc=* | --lo=*) localstatedir=$ac_optarg ;; -mandir | --mandir | --mandi | --mand | --man | --ma | --m) ac_prev=mandir ;; -mandir=* | --mandir=* | --mandi=* | --mand=* | --man=* | --ma=* | --m=*) mandir=$ac_optarg ;; -nfp | --nfp | --nf) # Obsolete; use --without-fp. 
with_fp=no ;; -no-create | --no-create | --no-creat | --no-crea | --no-cre \ | --no-cr | --no-c | -n) no_create=yes ;; -no-recursion | --no-recursion | --no-recursio | --no-recursi \ | --no-recurs | --no-recur | --no-recu | --no-rec | --no-re | --no-r) no_recursion=yes ;; -oldincludedir | --oldincludedir | --oldincludedi | --oldincluded \ | --oldinclude | --oldinclud | --oldinclu | --oldincl | --oldinc \ | --oldin | --oldi | --old | --ol | --o) ac_prev=oldincludedir ;; -oldincludedir=* | --oldincludedir=* | --oldincludedi=* | --oldincluded=* \ | --oldinclude=* | --oldinclud=* | --oldinclu=* | --oldincl=* | --oldinc=* \ | --oldin=* | --oldi=* | --old=* | --ol=* | --o=*) oldincludedir=$ac_optarg ;; -prefix | --prefix | --prefi | --pref | --pre | --pr | --p) ac_prev=prefix ;; -prefix=* | --prefix=* | --prefi=* | --pref=* | --pre=* | --pr=* | --p=*) prefix=$ac_optarg ;; -program-prefix | --program-prefix | --program-prefi | --program-pref \ | --program-pre | --program-pr | --program-p) ac_prev=program_prefix ;; -program-prefix=* | --program-prefix=* | --program-prefi=* \ | --program-pref=* | --program-pre=* | --program-pr=* | --program-p=*) program_prefix=$ac_optarg ;; -program-suffix | --program-suffix | --program-suffi | --program-suff \ | --program-suf | --program-su | --program-s) ac_prev=program_suffix ;; -program-suffix=* | --program-suffix=* | --program-suffi=* \ | --program-suff=* | --program-suf=* | --program-su=* | --program-s=*) program_suffix=$ac_optarg ;; -program-transform-name | --program-transform-name \ | --program-transform-nam | --program-transform-na \ | --program-transform-n | --program-transform- \ | --program-transform | --program-transfor \ | --program-transfo | --program-transf \ | --program-trans | --program-tran \ | --progr-tra | --program-tr | --program-t) ac_prev=program_transform_name ;; -program-transform-name=* | --program-transform-name=* \ | --program-transform-nam=* | --program-transform-na=* \ | --program-transform-n=* | --program-transform-=* \ | --program-transform=* | --program-transfor=* \ | --program-transfo=* | --program-transf=* \ | --program-trans=* | --program-tran=* \ | --progr-tra=* | --program-tr=* | --program-t=*) program_transform_name=$ac_optarg ;; -q | -quiet | --quiet | --quie | --qui | --qu | --q \ | -silent | --silent | --silen | --sile | --sil) silent=yes ;; -sbindir | --sbindir | --sbindi | --sbind | --sbin | --sbi | --sb) ac_prev=sbindir ;; -sbindir=* | --sbindir=* | --sbindi=* | --sbind=* | --sbin=* \ | --sbi=* | --sb=*) sbindir=$ac_optarg ;; -sharedstatedir | --sharedstatedir | --sharedstatedi \ | --sharedstated | --sharedstate | --sharedstat | --sharedsta \ | --sharedst | --shareds | --shared | --share | --shar \ | --sha | --sh) ac_prev=sharedstatedir ;; -sharedstatedir=* | --sharedstatedir=* | --sharedstatedi=* \ | --sharedstated=* | --sharedstate=* | --sharedstat=* | --sharedsta=* \ | --sharedst=* | --shareds=* | --shared=* | --share=* | --shar=* \ | --sha=* | --sh=*) sharedstatedir=$ac_optarg ;; -site | --site | --sit) ac_prev=site ;; -site=* | --site=* | --sit=*) site=$ac_optarg ;; -srcdir | --srcdir | --srcdi | --srcd | --src | --sr) ac_prev=srcdir ;; -srcdir=* | --srcdir=* | --srcdi=* | --srcd=* | --src=* | --sr=*) srcdir=$ac_optarg ;; -sysconfdir | --sysconfdir | --sysconfdi | --sysconfd | --sysconf \ | --syscon | --sysco | --sysc | --sys | --sy) ac_prev=sysconfdir ;; -sysconfdir=* | --sysconfdir=* | --sysconfdi=* | --sysconfd=* | --sysconf=* \ | --syscon=* | --sysco=* | --sysc=* | --sys=* | --sy=*) sysconfdir=$ac_optarg ;; 
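# The long options in this case statement are also matched against their
# unambiguous prefixes, so abbreviated spellings are accepted. Illustrative
# examples (a sketch, not part of the generated script; paths are made up):
#
#   ./configure --pref=/opt/sphinx        # same as --prefix=/opt/sphinx
#   ./configure --sysconf=/etc/sphinx     # same as --sysconfdir=/etc/sphinx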
-target | --target | --targe | --targ | --tar | --ta | --t) ac_prev=target_alias ;; -target=* | --target=* | --targe=* | --targ=* | --tar=* | --ta=* | --t=*) target_alias=$ac_optarg ;; -v | -verbose | --verbose | --verbos | --verbo | --verb) verbose=yes ;; -version | --version | --versio | --versi | --vers | -V) ac_init_version=: ;; -with-* | --with-*) ac_package=`expr "x$ac_option" : 'x-*with-\([^=]*\)'` # Reject names that are not valid shell variable names. expr "x$ac_package" : ".*[^-_$as_cr_alnum]" >/dev/null && { echo "$as_me: error: invalid package name: $ac_package" >&2 { (exit 1); exit 1; }; } ac_package=`echo $ac_package| sed 's/-/_/g'` case $ac_option in *=*) ac_optarg=`echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"`;; *) ac_optarg=yes ;; esac eval "with_$ac_package='$ac_optarg'" ;; -without-* | --without-*) ac_package=`expr "x$ac_option" : 'x-*without-\(.*\)'` # Reject names that are not valid shell variable names. expr "x$ac_package" : ".*[^-_$as_cr_alnum]" >/dev/null && { echo "$as_me: error: invalid package name: $ac_package" >&2 { (exit 1); exit 1; }; } ac_package=`echo $ac_package | sed 's/-/_/g'` eval "with_$ac_package=no" ;; --x) # Obsolete; use --with-x. with_x=yes ;; -x-includes | --x-includes | --x-include | --x-includ | --x-inclu \ | --x-incl | --x-inc | --x-in | --x-i) ac_prev=x_includes ;; -x-includes=* | --x-includes=* | --x-include=* | --x-includ=* | --x-inclu=* \ | --x-incl=* | --x-inc=* | --x-in=* | --x-i=*) x_includes=$ac_optarg ;; -x-libraries | --x-libraries | --x-librarie | --x-librari \ | --x-librar | --x-libra | --x-libr | --x-lib | --x-li | --x-l) ac_prev=x_libraries ;; -x-libraries=* | --x-libraries=* | --x-librarie=* | --x-librari=* \ | --x-librar=* | --x-libra=* | --x-libr=* | --x-lib=* | --x-li=* | --x-l=*) x_libraries=$ac_optarg ;; -*) { echo "$as_me: error: unrecognized option: $ac_option Try \`$0 --help' for more information." >&2 { (exit 1); exit 1; }; } ;; *=*) ac_envvar=`expr "x$ac_option" : 'x\([^=]*\)='` # Reject names that are not valid shell variable names. expr "x$ac_envvar" : ".*[^_$as_cr_alnum]" >/dev/null && { echo "$as_me: error: invalid variable name: $ac_envvar" >&2 { (exit 1); exit 1; }; } ac_optarg=`echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` eval "$ac_envvar='$ac_optarg'" export $ac_envvar ;; *) # FIXME: should be removed in autoconf 3.0. echo "$as_me: WARNING: you should use --build, --host, --target" >&2 expr "x$ac_option" : ".*[^-._$as_cr_alnum]" >/dev/null && echo "$as_me: WARNING: invalid host type: $ac_option" >&2 : ${build_alias=$ac_option} ${host_alias=$ac_option} ${target_alias=$ac_option} ;; esac done if test -n "$ac_prev"; then ac_option=--`echo $ac_prev | sed 's/_/-/g'` { echo "$as_me: error: missing argument to $ac_option" >&2 { (exit 1); exit 1; }; } fi # Be sure to have absolute paths. for ac_var in exec_prefix prefix do eval ac_val=$`echo $ac_var` case $ac_val in [\\/$]* | ?:[\\/]* | NONE | '' ) ;; *) { echo "$as_me: error: expected an absolute directory name for --$ac_var: $ac_val" >&2 { (exit 1); exit 1; }; };; esac done # Be sure to have absolute paths. for ac_var in bindir sbindir libexecdir datadir sysconfdir sharedstatedir \ localstatedir libdir includedir oldincludedir infodir mandir do eval ac_val=$`echo $ac_var` case $ac_val in [\\/$]* | ?:[\\/]* ) ;; *) { echo "$as_me: error: expected an absolute directory name for --$ac_var: $ac_val" >&2 { (exit 1); exit 1; }; };; esac done # There might be people who depend on the old broken behavior: `$host' # used to hold the argument of --host etc. 
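# The alias handling below also decides whether this is a cross build:
# giving --host without --build sets cross_compiling=maybe, and a --build
# that differs from --host sets cross_compiling=yes. Illustrative
# invocations (a sketch, not part of the generated script; the target
# triplets are made up):
#
#   ./configure --host=arm-linux-gnueabi                          # cross_compiling=maybe
#   ./configure --build=x86_64-linux --host=arm-linux-gnueabi     # cross_compiling=yes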
# FIXME: To remove some day. build=$build_alias host=$host_alias target=$target_alias # FIXME: To remove some day. if test "x$host_alias" != x; then if test "x$build_alias" = x; then cross_compiling=maybe echo "$as_me: WARNING: If you wanted to set the --build type, don't use --host. If a cross compiler is detected then cross compile mode will be used." >&2 elif test "x$build_alias" != "x$host_alias"; then cross_compiling=yes fi fi ac_tool_prefix= test -n "$host_alias" && ac_tool_prefix=$host_alias- test "$silent" = yes && exec 6>/dev/null # Find the source files, if location was not specified. if test -z "$srcdir"; then ac_srcdir_defaulted=yes # Try the directory containing this script, then its parent. ac_confdir=`(dirname "$0") 2>/dev/null || $as_expr X"$0" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$0" : 'X\(//\)[^/]' \| \ X"$0" : 'X\(//\)$' \| \ X"$0" : 'X\(/\)' \| \ . : '\(.\)' 2>/dev/null || echo X"$0" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/; q; } /^X\(\/\/\)[^/].*/{ s//\1/; q; } /^X\(\/\/\)$/{ s//\1/; q; } /^X\(\/\).*/{ s//\1/; q; } s/.*/./; q'` srcdir=$ac_confdir if test ! -r $srcdir/$ac_unique_file; then srcdir=.. fi else ac_srcdir_defaulted=no fi if test ! -r $srcdir/$ac_unique_file; then if test "$ac_srcdir_defaulted" = yes; then { echo "$as_me: error: cannot find sources ($ac_unique_file) in $ac_confdir or .." >&2 { (exit 1); exit 1; }; } else { echo "$as_me: error: cannot find sources ($ac_unique_file) in $srcdir" >&2 { (exit 1); exit 1; }; } fi fi (cd $srcdir && test -r ./$ac_unique_file) 2>/dev/null || { echo "$as_me: error: sources are in $srcdir, but \`cd $srcdir' does not work" >&2 { (exit 1); exit 1; }; } srcdir=`echo "$srcdir" | sed 's%\([^\\/]\)[\\/]*$%\1%'` ac_env_build_alias_set=${build_alias+set} ac_env_build_alias_value=$build_alias ac_cv_env_build_alias_set=${build_alias+set} ac_cv_env_build_alias_value=$build_alias ac_env_host_alias_set=${host_alias+set} ac_env_host_alias_value=$host_alias ac_cv_env_host_alias_set=${host_alias+set} ac_cv_env_host_alias_value=$host_alias ac_env_target_alias_set=${target_alias+set} ac_env_target_alias_value=$target_alias ac_cv_env_target_alias_set=${target_alias+set} ac_cv_env_target_alias_value=$target_alias ac_env_CC_set=${CC+set} ac_env_CC_value=$CC ac_cv_env_CC_set=${CC+set} ac_cv_env_CC_value=$CC ac_env_CFLAGS_set=${CFLAGS+set} ac_env_CFLAGS_value=$CFLAGS ac_cv_env_CFLAGS_set=${CFLAGS+set} ac_cv_env_CFLAGS_value=$CFLAGS ac_env_LDFLAGS_set=${LDFLAGS+set} ac_env_LDFLAGS_value=$LDFLAGS ac_cv_env_LDFLAGS_set=${LDFLAGS+set} ac_cv_env_LDFLAGS_value=$LDFLAGS ac_env_CPPFLAGS_set=${CPPFLAGS+set} ac_env_CPPFLAGS_value=$CPPFLAGS ac_cv_env_CPPFLAGS_set=${CPPFLAGS+set} ac_cv_env_CPPFLAGS_value=$CPPFLAGS ac_env_CPP_set=${CPP+set} ac_env_CPP_value=$CPP ac_cv_env_CPP_set=${CPP+set} ac_cv_env_CPP_value=$CPP ac_env_CXXCPP_set=${CXXCPP+set} ac_env_CXXCPP_value=$CXXCPP ac_cv_env_CXXCPP_set=${CXXCPP+set} ac_cv_env_CXXCPP_value=$CXXCPP # # Report the --help message. # if test "$ac_init_help" = "long"; then # Omit some internal or obsolete options to make the list less imposing. # This message is too long to be a string in the A/UX 3.1 sh. cat <<_ACEOF \`configure' configures libsphinxclient 0.0.1 to adapt to many kinds of systems. Usage: $0 [OPTION]... [VAR=VALUE]... To assign environment variables (e.g., CC, CFLAGS...), specify them as VAR=VALUE. See below for descriptions of some of the useful variables. Defaults for the options are specified in brackets. 
Configuration: -h, --help display this help and exit --help=short display options specific to this package --help=recursive display the short help of all the included packages -V, --version display version information and exit -q, --quiet, --silent do not print \`checking...' messages --cache-file=FILE cache test results in FILE [disabled] -C, --config-cache alias for \`--cache-file=config.cache' -n, --no-create do not create output files --srcdir=DIR find the sources in DIR [configure dir or \`..'] _ACEOF cat <<_ACEOF Installation directories: --prefix=PREFIX install architecture-independent files in PREFIX [$ac_default_prefix] --exec-prefix=EPREFIX install architecture-dependent files in EPREFIX [PREFIX] By default, \`make install' will install all the files in \`$ac_default_prefix/bin', \`$ac_default_prefix/lib' etc. You can specify an installation prefix other than \`$ac_default_prefix' using \`--prefix', for instance \`--prefix=\$HOME'. For better control, use the options below. Fine tuning of the installation directories: --bindir=DIR user executables [EPREFIX/bin] --sbindir=DIR system admin executables [EPREFIX/sbin] --libexecdir=DIR program executables [EPREFIX/libexec] --datadir=DIR read-only architecture-independent data [PREFIX/share] --sysconfdir=DIR read-only single-machine data [PREFIX/etc] --sharedstatedir=DIR modifiable architecture-independent data [PREFIX/com] --localstatedir=DIR modifiable single-machine data [PREFIX/var] --libdir=DIR object code libraries [EPREFIX/lib] --includedir=DIR C header files [PREFIX/include] --oldincludedir=DIR C header files for non-gcc [/usr/include] --infodir=DIR info documentation [PREFIX/info] --mandir=DIR man documentation [PREFIX/man] _ACEOF cat <<\_ACEOF Program names: --program-prefix=PREFIX prepend PREFIX to installed program names --program-suffix=SUFFIX append SUFFIX to installed program names --program-transform-name=PROGRAM run sed PROGRAM on installed program names System types: --build=BUILD configure for building on BUILD [guessed] --host=HOST cross-compile to build programs to run on HOST [BUILD] _ACEOF fi if test -n "$ac_init_help"; then case $ac_init_help in short | recursive ) echo "Configuration of libsphinxclient 0.0.1:";; esac cat <<\_ACEOF Optional Features: --disable-FEATURE do not include FEATURE (same as --enable-FEATURE=no) --enable-FEATURE[=ARG] include FEATURE [ARG=yes] --enable-maintainer-mode enable make rules and dependencies not useful (and sometimes confusing) to the casual installer --disable-dependency-tracking speeds up one-time build --enable-dependency-tracking do not reject slow dependency extractors --enable-shared[=PKGS] build shared libraries [default=yes] --enable-static[=PKGS] build static libraries [default=yes] --enable-fast-install[=PKGS] optimize for fast installation [default=yes] --disable-libtool-lock avoid locking (might break parallel builds) --enable-debug enable debugging symbols and compile flags Optional Packages: --with-PACKAGE[=ARG] use PACKAGE [ARG=yes] --without-PACKAGE do not use PACKAGE (same as --with-PACKAGE=no) --with-gnu-ld assume the C compiler uses GNU ld [default=no] --with-pic try to use only PIC/non-PIC objects [default=use both] --with-tags[=TAGS] include additional configurations [automatic] Some influential environment variables: CC C compiler command CFLAGS C compiler flags LDFLAGS linker flags, e.g. -L if you have libraries in a nonstandard directory CPPFLAGS C/C++ preprocessor flags, e.g. 
-I if you have headers in a nonstandard directory CPP C preprocessor CXXCPP C++ preprocessor Use these variables to override the choices made by `configure' or to help it to find libraries and programs with nonstandard names/locations. _ACEOF fi if test "$ac_init_help" = "recursive"; then # If there are subdirs, report their specific --help. ac_popdir=`pwd` for ac_dir in : $ac_subdirs_all; do test "x$ac_dir" = x: && continue test -d $ac_dir || continue ac_builddir=. if test "$ac_dir" != .; then ac_dir_suffix=/`echo "$ac_dir" | sed 's,^\.[\\/],,'` # A "../" for each directory in $ac_dir_suffix. ac_top_builddir=`echo "$ac_dir_suffix" | sed 's,/[^\\/]*,../,g'` else ac_dir_suffix= ac_top_builddir= fi case $srcdir in .) # No --srcdir option. We are building in place. ac_srcdir=. if test -z "$ac_top_builddir"; then ac_top_srcdir=. else ac_top_srcdir=`echo $ac_top_builddir | sed 's,/$,,'` fi ;; [\\/]* | ?:[\\/]* ) # Absolute path. ac_srcdir=$srcdir$ac_dir_suffix; ac_top_srcdir=$srcdir ;; *) # Relative path. ac_srcdir=$ac_top_builddir$srcdir$ac_dir_suffix ac_top_srcdir=$ac_top_builddir$srcdir ;; esac # Do not use `cd foo && pwd` to compute absolute paths, because # the directories may not exist. case `pwd` in .) ac_abs_builddir="$ac_dir";; *) case "$ac_dir" in .) ac_abs_builddir=`pwd`;; [\\/]* | ?:[\\/]* ) ac_abs_builddir="$ac_dir";; *) ac_abs_builddir=`pwd`/"$ac_dir";; esac;; esac case $ac_abs_builddir in .) ac_abs_top_builddir=${ac_top_builddir}.;; *) case ${ac_top_builddir}. in .) ac_abs_top_builddir=$ac_abs_builddir;; [\\/]* | ?:[\\/]* ) ac_abs_top_builddir=${ac_top_builddir}.;; *) ac_abs_top_builddir=$ac_abs_builddir/${ac_top_builddir}.;; esac;; esac case $ac_abs_builddir in .) ac_abs_srcdir=$ac_srcdir;; *) case $ac_srcdir in .) ac_abs_srcdir=$ac_abs_builddir;; [\\/]* | ?:[\\/]* ) ac_abs_srcdir=$ac_srcdir;; *) ac_abs_srcdir=$ac_abs_builddir/$ac_srcdir;; esac;; esac case $ac_abs_builddir in .) ac_abs_top_srcdir=$ac_top_srcdir;; *) case $ac_top_srcdir in .) ac_abs_top_srcdir=$ac_abs_builddir;; [\\/]* | ?:[\\/]* ) ac_abs_top_srcdir=$ac_top_srcdir;; *) ac_abs_top_srcdir=$ac_abs_builddir/$ac_top_srcdir;; esac;; esac cd $ac_dir # Check for guested configure; otherwise get Cygnus style configure. if test -f $ac_srcdir/configure.gnu; then echo $SHELL $ac_srcdir/configure.gnu --help=recursive elif test -f $ac_srcdir/configure; then echo $SHELL $ac_srcdir/configure --help=recursive elif test -f $ac_srcdir/configure.ac || test -f $ac_srcdir/configure.in; then echo $ac_configure --help else echo "$as_me: WARNING: no configuration information is in $ac_dir" >&2 fi cd $ac_popdir done fi test -n "$ac_init_help" && exit 0 if $ac_init_version; then cat <<\_ACEOF libsphinxclient configure 0.0.1 generated by GNU Autoconf 2.59 Copyright (C) 2003 Free Software Foundation, Inc. This configure script is free software; the Free Software Foundation gives unlimited permission to copy, distribute and modify it. _ACEOF exit 0 fi exec 5>config.log cat >&5 <<_ACEOF This file contains any messages produced by compilers while running configure, to aid debugging if configure makes a mistake. It was created by libsphinxclient $as_me 0.0.1, which was generated by GNU Autoconf 2.59. Invocation command line was $ $0 $@ _ACEOF { cat <<_ASUNAME ## --------- ## ## Platform. 
## ## --------- ## hostname = `(hostname || uname -n) 2>/dev/null | sed 1q` uname -m = `(uname -m) 2>/dev/null || echo unknown` uname -r = `(uname -r) 2>/dev/null || echo unknown` uname -s = `(uname -s) 2>/dev/null || echo unknown` uname -v = `(uname -v) 2>/dev/null || echo unknown` /usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null || echo unknown` /bin/uname -X = `(/bin/uname -X) 2>/dev/null || echo unknown` /bin/arch = `(/bin/arch) 2>/dev/null || echo unknown` /usr/bin/arch -k = `(/usr/bin/arch -k) 2>/dev/null || echo unknown` /usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null || echo unknown` hostinfo = `(hostinfo) 2>/dev/null || echo unknown` /bin/machine = `(/bin/machine) 2>/dev/null || echo unknown` /usr/bin/oslevel = `(/usr/bin/oslevel) 2>/dev/null || echo unknown` /bin/universe = `(/bin/universe) 2>/dev/null || echo unknown` _ASUNAME as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. echo "PATH: $as_dir" done } >&5 cat >&5 <<_ACEOF ## ----------- ## ## Core tests. ## ## ----------- ## _ACEOF # Keep a trace of the command line. # Strip out --no-create and --no-recursion so they do not pile up. # Strip out --silent because we don't want to record it for future runs. # Also quote any args containing shell meta-characters. # Make two passes to allow for proper duplicate-argument suppression. ac_configure_args= ac_configure_args0= ac_configure_args1= ac_sep= ac_must_keep_next=false for ac_pass in 1 2 do for ac_arg do case $ac_arg in -no-create | --no-c* | -n | -no-recursion | --no-r*) continue ;; -q | -quiet | --quiet | --quie | --qui | --qu | --q \ | -silent | --silent | --silen | --sile | --sil) continue ;; *" "*|*" "*|*[\[\]\~\#\$\^\&\*\(\)\{\}\\\|\;\<\>\?\"\']*) ac_arg=`echo "$ac_arg" | sed "s/'/'\\\\\\\\''/g"` ;; esac case $ac_pass in 1) ac_configure_args0="$ac_configure_args0 '$ac_arg'" ;; 2) ac_configure_args1="$ac_configure_args1 '$ac_arg'" if test $ac_must_keep_next = true; then ac_must_keep_next=false # Got value, back to normal. else case $ac_arg in *=* | --config-cache | -C | -disable-* | --disable-* \ | -enable-* | --enable-* | -gas | --g* | -nfp | --nf* \ | -q | -quiet | --q* | -silent | --sil* | -v | -verb* \ | -with-* | --with-* | -without-* | --without-* | --x) case "$ac_configure_args0 " in "$ac_configure_args1"*" '$ac_arg' "* ) continue ;; esac ;; -* ) ac_must_keep_next=true ;; esac fi ac_configure_args="$ac_configure_args$ac_sep'$ac_arg'" # Get rid of the leading space. ac_sep=" " ;; esac done done $as_unset ac_configure_args0 || test "${ac_configure_args0+set}" != set || { ac_configure_args0=; export ac_configure_args0; } $as_unset ac_configure_args1 || test "${ac_configure_args1+set}" != set || { ac_configure_args1=; export ac_configure_args1; } # When interrupted or exit'd, cleanup temporary files, and complete # config.log. We remove comments because anyway the quotes in there # would cause problems or look ugly. # WARNING: Be sure not to use single quotes in there, as some shells, # such as our DU 5.0 friend, will then `close' the trap. trap 'exit_status=$? # Save into config.log some information that might help in debugging. { echo cat <<\_ASBOX ## ---------------- ## ## Cache variables. 
## ## ---------------- ## _ASBOX echo # The following way of writing the cache mishandles newlines in values, { (set) 2>&1 | case `(ac_space='"'"' '"'"'; set | grep ac_space) 2>&1` in *ac_space=\ *) sed -n \ "s/'"'"'/'"'"'\\\\'"'"''"'"'/g; s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='"'"'\\2'"'"'/p" ;; *) sed -n \ "s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1=\\2/p" ;; esac; } echo cat <<\_ASBOX ## ----------------- ## ## Output variables. ## ## ----------------- ## _ASBOX echo for ac_var in $ac_subst_vars do eval ac_val=$`echo $ac_var` echo "$ac_var='"'"'$ac_val'"'"'" done | sort echo if test -n "$ac_subst_files"; then cat <<\_ASBOX ## ------------- ## ## Output files. ## ## ------------- ## _ASBOX echo for ac_var in $ac_subst_files do eval ac_val=$`echo $ac_var` echo "$ac_var='"'"'$ac_val'"'"'" done | sort echo fi if test -s confdefs.h; then cat <<\_ASBOX ## ----------- ## ## confdefs.h. ## ## ----------- ## _ASBOX echo sed "/^$/d" confdefs.h | sort echo fi test "$ac_signal" != 0 && echo "$as_me: caught signal $ac_signal" echo "$as_me: exit $exit_status" } >&5 rm -f core *.core && rm -rf conftest* confdefs* conf$$* $ac_clean_files && exit $exit_status ' 0 for ac_signal in 1 2 13 15; do trap 'ac_signal='$ac_signal'; { (exit 1); exit 1; }' $ac_signal done ac_signal=0 # confdefs.h avoids OS command line length limits that DEFS can exceed. rm -rf conftest* confdefs.h # AIX cpp loses on an empty file, so make sure it contains at least a newline. echo >confdefs.h # Predefined preprocessor variables. cat >>confdefs.h <<_ACEOF #define PACKAGE_NAME "$PACKAGE_NAME" _ACEOF cat >>confdefs.h <<_ACEOF #define PACKAGE_TARNAME "$PACKAGE_TARNAME" _ACEOF cat >>confdefs.h <<_ACEOF #define PACKAGE_VERSION "$PACKAGE_VERSION" _ACEOF cat >>confdefs.h <<_ACEOF #define PACKAGE_STRING "$PACKAGE_STRING" _ACEOF cat >>confdefs.h <<_ACEOF #define PACKAGE_BUGREPORT "$PACKAGE_BUGREPORT" _ACEOF # Let the site file select an alternate cache file if it wants to. # Prefer explicitly selected file to automatically selected ones. if test -z "$CONFIG_SITE"; then if test "x$prefix" != xNONE; then CONFIG_SITE="$prefix/share/config.site $prefix/etc/config.site" else CONFIG_SITE="$ac_default_prefix/share/config.site $ac_default_prefix/etc/config.site" fi fi for ac_site_file in $CONFIG_SITE; do if test -r "$ac_site_file"; then { echo "$as_me:$LINENO: loading site script $ac_site_file" >&5 echo "$as_me: loading site script $ac_site_file" >&6;} sed 's/^/| /' "$ac_site_file" >&5 . "$ac_site_file" fi done if test -r "$cache_file"; then # Some versions of bash will fail to source /dev/null (special # files actually), so we avoid doing that. if test -f "$cache_file"; then { echo "$as_me:$LINENO: loading cache $cache_file" >&5 echo "$as_me: loading cache $cache_file" >&6;} case $cache_file in [\\/]* | ?:[\\/]* ) . $cache_file;; *) . ./$cache_file;; esac fi else { echo "$as_me:$LINENO: creating cache $cache_file" >&5 echo "$as_me: creating cache $cache_file" >&6;} >$cache_file fi # Check that the precious variables saved in the cache have kept the same # value. 
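# The precious set covers CC, CFLAGS, LDFLAGS, CPPFLAGS, CPP and CXXCPP
# (plus the build/host/target aliases); any mismatch against the cached
# value aborts configure. Illustrative sequence (a sketch, not part of the
# generated script):
#
#   CFLAGS=-O2 ./configure -C   # first run, value recorded in config.cache
#   CFLAGS=-O0 ./configure -C   # aborts: `CFLAGS' has changed since the previous run
#                               # remedy: make distclean and/or rm config.cache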
ac_cache_corrupted=false for ac_var in `(set) 2>&1 | sed -n 's/^ac_env_\([a-zA-Z_0-9]*\)_set=.*/\1/p'`; do eval ac_old_set=\$ac_cv_env_${ac_var}_set eval ac_new_set=\$ac_env_${ac_var}_set eval ac_old_val="\$ac_cv_env_${ac_var}_value" eval ac_new_val="\$ac_env_${ac_var}_value" case $ac_old_set,$ac_new_set in set,) { echo "$as_me:$LINENO: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&5 echo "$as_me: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&2;} ac_cache_corrupted=: ;; ,set) { echo "$as_me:$LINENO: error: \`$ac_var' was not set in the previous run" >&5 echo "$as_me: error: \`$ac_var' was not set in the previous run" >&2;} ac_cache_corrupted=: ;; ,);; *) if test "x$ac_old_val" != "x$ac_new_val"; then { echo "$as_me:$LINENO: error: \`$ac_var' has changed since the previous run:" >&5 echo "$as_me: error: \`$ac_var' has changed since the previous run:" >&2;} { echo "$as_me:$LINENO: former value: $ac_old_val" >&5 echo "$as_me: former value: $ac_old_val" >&2;} { echo "$as_me:$LINENO: current value: $ac_new_val" >&5 echo "$as_me: current value: $ac_new_val" >&2;} ac_cache_corrupted=: fi;; esac # Pass precious variables to config.status. if test "$ac_new_set" = set; then case $ac_new_val in *" "*|*" "*|*[\[\]\~\#\$\^\&\*\(\)\{\}\\\|\;\<\>\?\"\']*) ac_arg=$ac_var=`echo "$ac_new_val" | sed "s/'/'\\\\\\\\''/g"` ;; *) ac_arg=$ac_var=$ac_new_val ;; esac case " $ac_configure_args " in *" '$ac_arg' "*) ;; # Avoid dups. Use of quotes ensures accuracy. *) ac_configure_args="$ac_configure_args '$ac_arg'" ;; esac fi done if $ac_cache_corrupted; then { echo "$as_me:$LINENO: error: changes in the environment can compromise the build" >&5 echo "$as_me: error: changes in the environment can compromise the build" >&2;} { { echo "$as_me:$LINENO: error: run \`make distclean' and/or \`rm $cache_file' and start over" >&5 echo "$as_me: error: run \`make distclean' and/or \`rm $cache_file' and start over" >&2;} { (exit 1); exit 1; }; } fi ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu ac_config_headers="$ac_config_headers sphinxclient_config.h" am__api_version="1.9" ac_aux_dir= for ac_dir in $srcdir $srcdir/.. $srcdir/../..; do if test -f $ac_dir/install-sh; then ac_aux_dir=$ac_dir ac_install_sh="$ac_aux_dir/install-sh -c" break elif test -f $ac_dir/install.sh; then ac_aux_dir=$ac_dir ac_install_sh="$ac_aux_dir/install.sh -c" break elif test -f $ac_dir/shtool; then ac_aux_dir=$ac_dir ac_install_sh="$ac_aux_dir/shtool install -c" break fi done if test -z "$ac_aux_dir"; then { { echo "$as_me:$LINENO: error: cannot find install-sh or install.sh in $srcdir $srcdir/.. $srcdir/../.." >&5 echo "$as_me: error: cannot find install-sh or install.sh in $srcdir $srcdir/.. $srcdir/../.." >&2;} { (exit 1); exit 1; }; } fi ac_config_guess="$SHELL $ac_aux_dir/config.guess" ac_config_sub="$SHELL $ac_aux_dir/config.sub" ac_configure="$SHELL $ac_aux_dir/configure" # This should be Cygnus configure. # Find a good install program. We prefer a C program (faster), # so one script is as good as another. 
But avoid the broken or # incompatible versions: # SysV /etc/install, /usr/sbin/install # SunOS /usr/etc/install # IRIX /sbin/install # AIX /bin/install # AmigaOS /C/install, which installs bootblocks on floppy discs # AIX 4 /usr/bin/installbsd, which doesn't work without a -g flag # AFS /usr/afsws/bin/install, which mishandles nonexistent args # SVR4 /usr/ucb/install, which tries to use the nonexistent group "staff" # OS/2's system install, which has a completely different semantic # ./install, which can be erroneously created by make from ./install.sh. echo "$as_me:$LINENO: checking for a BSD-compatible install" >&5 echo $ECHO_N "checking for a BSD-compatible install... $ECHO_C" >&6 if test -z "$INSTALL"; then if test "${ac_cv_path_install+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. # Account for people who put trailing slashes in PATH elements. case $as_dir/ in ./ | .// | /cC/* | \ /etc/* | /usr/sbin/* | /usr/etc/* | /sbin/* | /usr/afsws/bin/* | \ ?:\\/os2\\/install\\/* | ?:\\/OS2\\/INSTALL\\/* | \ /usr/ucb/* ) ;; *) # OSF1 and SCO ODT 3.0 have their own names for install. # Don't use installbsd from OSF since it installs stuff as root # by default. for ac_prog in ginstall scoinst install; do for ac_exec_ext in '' $ac_executable_extensions; do if $as_executable_p "$as_dir/$ac_prog$ac_exec_ext"; then if test $ac_prog = install && grep dspmsg "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then # AIX install. It has an incompatible calling convention. : elif test $ac_prog = install && grep pwplus "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then # program-specific install script used by HP pwplus--don't use. : else ac_cv_path_install="$as_dir/$ac_prog$ac_exec_ext -c" break 3 fi fi done done ;; esac done fi if test "${ac_cv_path_install+set}" = set; then INSTALL=$ac_cv_path_install else # As a last resort, use the slow shell script. We don't cache a # path for INSTALL within a source directory, because that will # break other packages using the cache if that directory is # removed, or if the path is relative. INSTALL=$ac_install_sh fi fi echo "$as_me:$LINENO: result: $INSTALL" >&5 echo "${ECHO_T}$INSTALL" >&6 # Use test -z because SunOS4 sh mishandles braces in ${var-val}. # It thinks the first close brace ends the variable substitution. test -z "$INSTALL_PROGRAM" && INSTALL_PROGRAM='${INSTALL}' test -z "$INSTALL_SCRIPT" && INSTALL_SCRIPT='${INSTALL}' test -z "$INSTALL_DATA" && INSTALL_DATA='${INSTALL} -m 644' echo "$as_me:$LINENO: checking whether build environment is sane" >&5 echo $ECHO_N "checking whether build environment is sane... $ECHO_C" >&6 # Just in case sleep 1 echo timestamp > conftest.file # Do `set' in a subshell so we don't clobber the current shell's # arguments. Must try -L first in case configure is actually a # symlink; some systems play weird games with the mod time of symlinks # (eg FreeBSD returns the mod time of the symlink's containing # directory). if ( set X `ls -Lt $srcdir/configure conftest.file 2> /dev/null` if test "$*" = "X"; then # -L didn't work. set X `ls -t $srcdir/configure conftest.file` fi rm -f conftest.file if test "$*" != "X $srcdir/configure conftest.file" \ && test "$*" != "X conftest.file $srcdir/configure"; then # If neither matched, then we have a broken ls. This can happen # if, for instance, CONFIG_SHELL is bash and it inherits a # broken ls alias from the environment. This has actually # happened. 
Such a system could not be considered "sane". { { echo "$as_me:$LINENO: error: ls -t appears to fail. Make sure there is not a broken alias in your environment" >&5 echo "$as_me: error: ls -t appears to fail. Make sure there is not a broken alias in your environment" >&2;} { (exit 1); exit 1; }; } fi test "$2" = conftest.file ) then # Ok. : else { { echo "$as_me:$LINENO: error: newly created file is older than distributed files! Check your system clock" >&5 echo "$as_me: error: newly created file is older than distributed files! Check your system clock" >&2;} { (exit 1); exit 1; }; } fi echo "$as_me:$LINENO: result: yes" >&5 echo "${ECHO_T}yes" >&6 test "$program_prefix" != NONE && program_transform_name="s,^,$program_prefix,;$program_transform_name" # Use a double $ so make ignores it. test "$program_suffix" != NONE && program_transform_name="s,\$,$program_suffix,;$program_transform_name" # Double any \ or $. echo might interpret backslashes. # By default was `s,x,x', remove it if useless. cat <<\_ACEOF >conftest.sed s/[\\$]/&&/g;s/;s,x,x,$// _ACEOF program_transform_name=`echo $program_transform_name | sed -f conftest.sed` rm conftest.sed # expand $ac_aux_dir to an absolute path am_aux_dir=`cd $ac_aux_dir && pwd` test x"${MISSING+set}" = xset || MISSING="\${SHELL} $am_aux_dir/missing" # Use eval to expand $SHELL if eval "$MISSING --run true"; then am_missing_run="$MISSING --run " else am_missing_run= { echo "$as_me:$LINENO: WARNING: \`missing' script is too old or missing" >&5 echo "$as_me: WARNING: \`missing' script is too old or missing" >&2;} fi if mkdir -p --version . >/dev/null 2>&1 && test ! -d ./--version; then # We used to keeping the `.' as first argument, in order to # allow $(mkdir_p) to be used without argument. As in # $(mkdir_p) $(somedir) # where $(somedir) is conditionally defined. However this is wrong # for two reasons: # 1. if the package is installed by a user who cannot write `.' # make install will fail, # 2. the above comment should most certainly read # $(mkdir_p) $(DESTDIR)$(somedir) # so it does not work when $(somedir) is undefined and # $(DESTDIR) is not. # To support the latter case, we have to write # test -z "$(somedir)" || $(mkdir_p) $(DESTDIR)$(somedir), # so the `.' trick is pointless. mkdir_p='mkdir -p --' else # On NextStep and OpenStep, the `mkdir' command does not # recognize any option. It will interpret all options as # directories to create, and then abort because `.' already # exists. for d in ./-p ./--version; do test -d $d && rmdir $d done # $(mkinstalldirs) is defined by Automake if mkinstalldirs exists. if test -f "$ac_aux_dir/mkinstalldirs"; then mkdir_p='$(mkinstalldirs)' else mkdir_p='$(install_sh) -d' fi fi for ac_prog in gawk mawk nawk awk do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 echo "$as_me:$LINENO: checking for $ac_word" >&5 echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6 if test "${ac_cv_prog_AWK+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else if test -n "$AWK"; then ac_cv_prog_AWK="$AWK" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if $as_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_AWK="$ac_prog" echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done fi fi AWK=$ac_cv_prog_AWK if test -n "$AWK"; then echo "$as_me:$LINENO: result: $AWK" >&5 echo "${ECHO_T}$AWK" >&6 else echo "$as_me:$LINENO: result: no" >&5 echo "${ECHO_T}no" >&6 fi test -n "$AWK" && break done echo "$as_me:$LINENO: checking whether ${MAKE-make} sets \$(MAKE)" >&5 echo $ECHO_N "checking whether ${MAKE-make} sets \$(MAKE)... $ECHO_C" >&6 set dummy ${MAKE-make}; ac_make=`echo "$2" | sed 'y,:./+-,___p_,'` if eval "test \"\${ac_cv_prog_make_${ac_make}_set+set}\" = set"; then echo $ECHO_N "(cached) $ECHO_C" >&6 else cat >conftest.make <<\_ACEOF all: @echo 'ac_maketemp="$(MAKE)"' _ACEOF # GNU make sometimes prints "make[1]: Entering...", which would confuse us. eval `${MAKE-make} -f conftest.make 2>/dev/null | grep temp=` if test -n "$ac_maketemp"; then eval ac_cv_prog_make_${ac_make}_set=yes else eval ac_cv_prog_make_${ac_make}_set=no fi rm -f conftest.make fi if eval "test \"`echo '$ac_cv_prog_make_'${ac_make}_set`\" = yes"; then echo "$as_me:$LINENO: result: yes" >&5 echo "${ECHO_T}yes" >&6 SET_MAKE= else echo "$as_me:$LINENO: result: no" >&5 echo "${ECHO_T}no" >&6 SET_MAKE="MAKE=${MAKE-make}" fi rm -rf .tst 2>/dev/null mkdir .tst 2>/dev/null if test -d .tst; then am__leading_dot=. else am__leading_dot=_ fi rmdir .tst 2>/dev/null # test to see if srcdir already configured if test "`cd $srcdir && pwd`" != "`pwd`" && test -f $srcdir/config.status; then { { echo "$as_me:$LINENO: error: source directory already configured; run \"make distclean\" there first" >&5 echo "$as_me: error: source directory already configured; run \"make distclean\" there first" >&2;} { (exit 1); exit 1; }; } fi # test whether we have cygpath if test -z "$CYGPATH_W"; then if (cygpath --version) >/dev/null 2>/dev/null; then CYGPATH_W='cygpath -w' else CYGPATH_W=echo fi fi # Define the identity of the package. PACKAGE='libsphinxclient' VERSION='0.0.1' # Some tools Automake needs. ACLOCAL=${ACLOCAL-"${am_missing_run}aclocal-${am__api_version}"} AUTOCONF=${AUTOCONF-"${am_missing_run}autoconf"} AUTOMAKE=${AUTOMAKE-"${am_missing_run}automake-${am__api_version}"} AUTOHEADER=${AUTOHEADER-"${am_missing_run}autoheader"} MAKEINFO=${MAKEINFO-"${am_missing_run}makeinfo"} install_sh=${install_sh-"$am_aux_dir/install-sh"} # Installed binaries are usually stripped using `strip' when the user # run `make install-strip'. However `strip' might not be the right # tool to use in cross-compilation environments, therefore Automake # will honor the `STRIP' environment variable to overrule this program. if test "$cross_compiling" != no; then if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args. set dummy ${ac_tool_prefix}strip; ac_word=$2 echo "$as_me:$LINENO: checking for $ac_word" >&5 echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6 if test "${ac_cv_prog_STRIP+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else if test -n "$STRIP"; then ac_cv_prog_STRIP="$STRIP" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if $as_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_STRIP="${ac_tool_prefix}strip" echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done fi fi STRIP=$ac_cv_prog_STRIP if test -n "$STRIP"; then echo "$as_me:$LINENO: result: $STRIP" >&5 echo "${ECHO_T}$STRIP" >&6 else echo "$as_me:$LINENO: result: no" >&5 echo "${ECHO_T}no" >&6 fi fi if test -z "$ac_cv_prog_STRIP"; then ac_ct_STRIP=$STRIP # Extract the first word of "strip", so it can be a program name with args. set dummy strip; ac_word=$2 echo "$as_me:$LINENO: checking for $ac_word" >&5 echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6 if test "${ac_cv_prog_ac_ct_STRIP+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else if test -n "$ac_ct_STRIP"; then ac_cv_prog_ac_ct_STRIP="$ac_ct_STRIP" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if $as_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_STRIP="strip" echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done test -z "$ac_cv_prog_ac_ct_STRIP" && ac_cv_prog_ac_ct_STRIP=":" fi fi ac_ct_STRIP=$ac_cv_prog_ac_ct_STRIP if test -n "$ac_ct_STRIP"; then echo "$as_me:$LINENO: result: $ac_ct_STRIP" >&5 echo "${ECHO_T}$ac_ct_STRIP" >&6 else echo "$as_me:$LINENO: result: no" >&5 echo "${ECHO_T}no" >&6 fi STRIP=$ac_ct_STRIP else STRIP="$ac_cv_prog_STRIP" fi fi INSTALL_STRIP_PROGRAM="\${SHELL} \$(install_sh) -c -s" # We need awk for the "check" target. The system "awk" is bad on # some platforms. # Always define AMTAR for backward compatibility. AMTAR=${AMTAR-"${am_missing_run}tar"} am__tar='${AMTAR} chof - "$$tardir"'; am__untar='${AMTAR} xf -' echo "$as_me:$LINENO: checking whether to enable maintainer-specific portions of Makefiles" >&5 echo $ECHO_N "checking whether to enable maintainer-specific portions of Makefiles... $ECHO_C" >&6 # Check whether --enable-maintainer-mode or --disable-maintainer-mode was given. if test "${enable_maintainer_mode+set}" = set; then enableval="$enable_maintainer_mode" USE_MAINTAINER_MODE=$enableval else USE_MAINTAINER_MODE=no fi; echo "$as_me:$LINENO: result: $USE_MAINTAINER_MODE" >&5 echo "${ECHO_T}$USE_MAINTAINER_MODE" >&6 if test $USE_MAINTAINER_MODE = yes; then MAINTAINER_MODE_TRUE= MAINTAINER_MODE_FALSE='#' else MAINTAINER_MODE_TRUE='#' MAINTAINER_MODE_FALSE= fi MAINT=$MAINTAINER_MODE_TRUE ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}gcc", so it can be a program name with args. set dummy ${ac_tool_prefix}gcc; ac_word=$2 echo "$as_me:$LINENO: checking for $ac_word" >&5 echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6 if test "${ac_cv_prog_CC+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else if test -n "$CC"; then ac_cv_prog_CC="$CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if $as_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_CC="${ac_tool_prefix}gcc" echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done fi fi CC=$ac_cv_prog_CC if test -n "$CC"; then echo "$as_me:$LINENO: result: $CC" >&5 echo "${ECHO_T}$CC" >&6 else echo "$as_me:$LINENO: result: no" >&5 echo "${ECHO_T}no" >&6 fi fi if test -z "$ac_cv_prog_CC"; then ac_ct_CC=$CC # Extract the first word of "gcc", so it can be a program name with args. set dummy gcc; ac_word=$2 echo "$as_me:$LINENO: checking for $ac_word" >&5 echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6 if test "${ac_cv_prog_ac_ct_CC+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else if test -n "$ac_ct_CC"; then ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if $as_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_CC="gcc" echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done fi fi ac_ct_CC=$ac_cv_prog_ac_ct_CC if test -n "$ac_ct_CC"; then echo "$as_me:$LINENO: result: $ac_ct_CC" >&5 echo "${ECHO_T}$ac_ct_CC" >&6 else echo "$as_me:$LINENO: result: no" >&5 echo "${ECHO_T}no" >&6 fi CC=$ac_ct_CC else CC="$ac_cv_prog_CC" fi if test -z "$CC"; then if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}cc", so it can be a program name with args. set dummy ${ac_tool_prefix}cc; ac_word=$2 echo "$as_me:$LINENO: checking for $ac_word" >&5 echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6 if test "${ac_cv_prog_CC+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else if test -n "$CC"; then ac_cv_prog_CC="$CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if $as_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_CC="${ac_tool_prefix}cc" echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done fi fi CC=$ac_cv_prog_CC if test -n "$CC"; then echo "$as_me:$LINENO: result: $CC" >&5 echo "${ECHO_T}$CC" >&6 else echo "$as_me:$LINENO: result: no" >&5 echo "${ECHO_T}no" >&6 fi fi if test -z "$ac_cv_prog_CC"; then ac_ct_CC=$CC # Extract the first word of "cc", so it can be a program name with args. set dummy cc; ac_word=$2 echo "$as_me:$LINENO: checking for $ac_word" >&5 echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6 if test "${ac_cv_prog_ac_ct_CC+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else if test -n "$ac_ct_CC"; then ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if $as_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_CC="cc" echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done fi fi ac_ct_CC=$ac_cv_prog_ac_ct_CC if test -n "$ac_ct_CC"; then echo "$as_me:$LINENO: result: $ac_ct_CC" >&5 echo "${ECHO_T}$ac_ct_CC" >&6 else echo "$as_me:$LINENO: result: no" >&5 echo "${ECHO_T}no" >&6 fi CC=$ac_ct_CC else CC="$ac_cv_prog_CC" fi fi if test -z "$CC"; then # Extract the first word of "cc", so it can be a program name with args. 
set dummy cc; ac_word=$2 echo "$as_me:$LINENO: checking for $ac_word" >&5 echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6 if test "${ac_cv_prog_CC+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else if test -n "$CC"; then ac_cv_prog_CC="$CC" # Let the user override the test. else ac_prog_rejected=no as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if $as_executable_p "$as_dir/$ac_word$ac_exec_ext"; then if test "$as_dir/$ac_word$ac_exec_ext" = "/usr/ucb/cc"; then ac_prog_rejected=yes continue fi ac_cv_prog_CC="cc" echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done if test $ac_prog_rejected = yes; then # We found a bogon in the path, so make sure we never use it. set dummy $ac_cv_prog_CC shift if test $# != 0; then # We chose a different compiler from the bogus one. # However, it has the same basename, so the bogon will be chosen # first if we set CC to just the basename; use the full file name. shift ac_cv_prog_CC="$as_dir/$ac_word${1+' '}$@" fi fi fi fi CC=$ac_cv_prog_CC if test -n "$CC"; then echo "$as_me:$LINENO: result: $CC" >&5 echo "${ECHO_T}$CC" >&6 else echo "$as_me:$LINENO: result: no" >&5 echo "${ECHO_T}no" >&6 fi fi if test -z "$CC"; then if test -n "$ac_tool_prefix"; then for ac_prog in cl do # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. set dummy $ac_tool_prefix$ac_prog; ac_word=$2 echo "$as_me:$LINENO: checking for $ac_word" >&5 echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6 if test "${ac_cv_prog_CC+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else if test -n "$CC"; then ac_cv_prog_CC="$CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if $as_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_CC="$ac_tool_prefix$ac_prog" echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done fi fi CC=$ac_cv_prog_CC if test -n "$CC"; then echo "$as_me:$LINENO: result: $CC" >&5 echo "${ECHO_T}$CC" >&6 else echo "$as_me:$LINENO: result: no" >&5 echo "${ECHO_T}no" >&6 fi test -n "$CC" && break done fi if test -z "$CC"; then ac_ct_CC=$CC for ac_prog in cl do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 echo "$as_me:$LINENO: checking for $ac_word" >&5 echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6 if test "${ac_cv_prog_ac_ct_CC+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else if test -n "$ac_ct_CC"; then ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if $as_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_CC="$ac_prog" echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done fi fi ac_ct_CC=$ac_cv_prog_ac_ct_CC if test -n "$ac_ct_CC"; then echo "$as_me:$LINENO: result: $ac_ct_CC" >&5 echo "${ECHO_T}$ac_ct_CC" >&6 else echo "$as_me:$LINENO: result: no" >&5 echo "${ECHO_T}no" >&6 fi test -n "$ac_ct_CC" && break done CC=$ac_ct_CC fi fi test -z "$CC" && { { echo "$as_me:$LINENO: error: no acceptable C compiler found in \$PATH See \`config.log' for more details." 
>&5 echo "$as_me: error: no acceptable C compiler found in \$PATH See \`config.log' for more details." >&2;} { (exit 1); exit 1; }; } # Provide some information about the compiler. echo "$as_me:$LINENO:" \ "checking for C compiler version" >&5 ac_compiler=`set X $ac_compile; echo $2` { (eval echo "$as_me:$LINENO: \"$ac_compiler --version &5\"") >&5 (eval $ac_compiler --version &5) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } { (eval echo "$as_me:$LINENO: \"$ac_compiler -v &5\"") >&5 (eval $ac_compiler -v &5) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } { (eval echo "$as_me:$LINENO: \"$ac_compiler -V &5\"") >&5 (eval $ac_compiler -V &5) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ int main () { ; return 0; } _ACEOF ac_clean_files_save=$ac_clean_files ac_clean_files="$ac_clean_files a.out a.exe b.out" # Try to create an executable without -o first, disregard a.out. # It will help us diagnose broken compilers, and finding out an intuition # of exeext. echo "$as_me:$LINENO: checking for C compiler default output file name" >&5 echo $ECHO_N "checking for C compiler default output file name... $ECHO_C" >&6 ac_link_default=`echo "$ac_link" | sed 's/ -o *conftest[^ ]*//'` if { (eval echo "$as_me:$LINENO: \"$ac_link_default\"") >&5 (eval $ac_link_default) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; then # Find the output, starting from the most likely. This scheme is # not robust to junk in `.', hence go to wildcards (a.*) only as a last # resort. # Be careful to initialize this variable, since it used to be cached. # Otherwise an old cache value of `no' led to `EXEEXT = no' in a Makefile. ac_cv_exeext= # b.out is created by i960 compilers. for ac_file in a_out.exe a.exe conftest.exe a.out conftest a.* conftest.* b.out do test -f "$ac_file" || continue case $ac_file in *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.o | *.obj ) ;; conftest.$ac_ext ) # This is the source file. ;; [ab].out ) # We found the default executable, but exeext='' is most # certainly right. break;; *.* ) ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'` # FIXME: I believe we export ac_cv_exeext for Libtool, # but it would be cool to find out if it's true. Does anybody # maintain Libtool? --akim. export ac_cv_exeext break;; * ) break;; esac done else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 { { echo "$as_me:$LINENO: error: C compiler cannot create executables See \`config.log' for more details." >&5 echo "$as_me: error: C compiler cannot create executables See \`config.log' for more details." >&2;} { (exit 77); exit 77; }; } fi ac_exeext=$ac_cv_exeext echo "$as_me:$LINENO: result: $ac_file" >&5 echo "${ECHO_T}$ac_file" >&6 # Check the compiler produces executables we can run. If not, either # the compiler is broken, or we cross compile. echo "$as_me:$LINENO: checking whether the C compiler works" >&5 echo $ECHO_N "checking whether the C compiler works... $ECHO_C" >&6 # FIXME: These cross compiler hacks should be removed for Autoconf 3.0 # If not cross compiling, check that we can run a simple program. if test "$cross_compiling" != yes; then if { ac_try='./$ac_file' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 (exit $ac_status); }; }; then cross_compiling=no else if test "$cross_compiling" = maybe; then cross_compiling=yes else { { echo "$as_me:$LINENO: error: cannot run C compiled programs. If you meant to cross compile, use \`--host'. See \`config.log' for more details." >&5 echo "$as_me: error: cannot run C compiled programs. If you meant to cross compile, use \`--host'. See \`config.log' for more details." >&2;} { (exit 1); exit 1; }; } fi fi fi echo "$as_me:$LINENO: result: yes" >&5 echo "${ECHO_T}yes" >&6 rm -f a.out a.exe conftest$ac_cv_exeext b.out ac_clean_files=$ac_clean_files_save # Check the compiler produces executables we can run. If not, either # the compiler is broken, or we cross compile. echo "$as_me:$LINENO: checking whether we are cross compiling" >&5 echo $ECHO_N "checking whether we are cross compiling... $ECHO_C" >&6 echo "$as_me:$LINENO: result: $cross_compiling" >&5 echo "${ECHO_T}$cross_compiling" >&6 echo "$as_me:$LINENO: checking for suffix of executables" >&5 echo $ECHO_N "checking for suffix of executables... $ECHO_C" >&6 if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 (eval $ac_link) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; then # If both `conftest.exe' and `conftest' are `present' (well, observable) # catch `conftest.exe'. For instance with Cygwin, `ls conftest' will # work properly (i.e., refer to `conftest.exe'), while it won't with # `rm'. for ac_file in conftest.exe conftest conftest.*; do test -f "$ac_file" || continue case $ac_file in *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.o | *.obj ) ;; *.* ) ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'` export ac_cv_exeext break;; * ) break;; esac done else { { echo "$as_me:$LINENO: error: cannot compute suffix of executables: cannot compile and link See \`config.log' for more details." >&5 echo "$as_me: error: cannot compute suffix of executables: cannot compile and link See \`config.log' for more details." >&2;} { (exit 1); exit 1; }; } fi rm -f conftest$ac_cv_exeext echo "$as_me:$LINENO: result: $ac_cv_exeext" >&5 echo "${ECHO_T}$ac_cv_exeext" >&6 rm -f conftest.$ac_ext EXEEXT=$ac_cv_exeext ac_exeext=$EXEEXT echo "$as_me:$LINENO: checking for suffix of object files" >&5 echo $ECHO_N "checking for suffix of object files... $ECHO_C" >&6 if test "${ac_cv_objext+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ int main () { ; return 0; } _ACEOF rm -f conftest.o conftest.obj if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 (eval $ac_compile) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; then for ac_file in `(ls conftest.o conftest.obj; ls conftest.*) 2>/dev/null`; do case $ac_file in *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg ) ;; *) ac_cv_objext=`expr "$ac_file" : '.*\.\(.*\)'` break;; esac done else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 { { echo "$as_me:$LINENO: error: cannot compute suffix of object files: cannot compile See \`config.log' for more details." >&5 echo "$as_me: error: cannot compute suffix of object files: cannot compile See \`config.log' for more details." 
>&2;} { (exit 1); exit 1; }; } fi rm -f conftest.$ac_cv_objext conftest.$ac_ext fi echo "$as_me:$LINENO: result: $ac_cv_objext" >&5 echo "${ECHO_T}$ac_cv_objext" >&6 OBJEXT=$ac_cv_objext ac_objext=$OBJEXT echo "$as_me:$LINENO: checking whether we are using the GNU C compiler" >&5 echo $ECHO_N "checking whether we are using the GNU C compiler... $ECHO_C" >&6 if test "${ac_cv_c_compiler_gnu+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ int main () { #ifndef __GNUC__ choke me #endif ; return 0; } _ACEOF rm -f conftest.$ac_objext if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 (eval $ac_compile) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest.$ac_objext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then ac_compiler_gnu=yes else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_compiler_gnu=no fi rm -f conftest.err conftest.$ac_objext conftest.$ac_ext ac_cv_c_compiler_gnu=$ac_compiler_gnu fi echo "$as_me:$LINENO: result: $ac_cv_c_compiler_gnu" >&5 echo "${ECHO_T}$ac_cv_c_compiler_gnu" >&6 GCC=`test $ac_compiler_gnu = yes && echo yes` ac_test_CFLAGS=${CFLAGS+set} ac_save_CFLAGS=$CFLAGS CFLAGS="-g" echo "$as_me:$LINENO: checking whether $CC accepts -g" >&5 echo $ECHO_N "checking whether $CC accepts -g... $ECHO_C" >&6 if test "${ac_cv_prog_cc_g+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ int main () { ; return 0; } _ACEOF rm -f conftest.$ac_objext if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 (eval $ac_compile) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest.$ac_objext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then ac_cv_prog_cc_g=yes else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_cv_prog_cc_g=no fi rm -f conftest.err conftest.$ac_objext conftest.$ac_ext fi echo "$as_me:$LINENO: result: $ac_cv_prog_cc_g" >&5 echo "${ECHO_T}$ac_cv_prog_cc_g" >&6 if test "$ac_test_CFLAGS" = set; then CFLAGS=$ac_save_CFLAGS elif test $ac_cv_prog_cc_g = yes; then if test "$GCC" = yes; then CFLAGS="-g -O2" else CFLAGS="-g" fi else if test "$GCC" = yes; then CFLAGS="-O2" else CFLAGS= fi fi echo "$as_me:$LINENO: checking for $CC option to accept ANSI C" >&5 echo $ECHO_N "checking for $CC option to accept ANSI C... 
$ECHO_C" >&6 if test "${ac_cv_prog_cc_stdc+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else ac_cv_prog_cc_stdc=no ac_save_CC=$CC cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ #include #include #include #include /* Most of the following tests are stolen from RCS 5.7's src/conf.sh. */ struct buf { int x; }; FILE * (*rcsopen) (struct buf *, struct stat *, int); static char *e (p, i) char **p; int i; { return p[i]; } static char *f (char * (*g) (char **, int), char **p, ...) { char *s; va_list v; va_start (v,p); s = g (p, va_arg (v,int)); va_end (v); return s; } /* OSF 4.0 Compaq cc is some sort of almost-ANSI by default. It has function prototypes and stuff, but not '\xHH' hex character constants. These don't provoke an error unfortunately, instead are silently treated as 'x'. The following induces an error, until -std1 is added to get proper ANSI mode. Curiously '\x00'!='x' always comes out true, for an array size at least. It's necessary to write '\x00'==0 to get something that's true only with -std1. */ int osf4_cc_array ['\x00' == 0 ? 1 : -1]; int test (int i, double x); struct s1 {int (*f) (int a);}; struct s2 {int (*f) (double a);}; int pairnames (int, char **, FILE *(*)(struct buf *, struct stat *, int), int, int); int argc; char **argv; int main () { return f (e, argv, 0) != argv[0] || f (e, argv, 1) != argv[1]; ; return 0; } _ACEOF # Don't try gcc -ansi; that turns off useful extensions and # breaks some systems' header files. # AIX -qlanglvl=ansi # Ultrix and OSF/1 -std1 # HP-UX 10.20 and later -Ae # HP-UX older versions -Aa -D_HPUX_SOURCE # SVR4 -Xc -D__EXTENSIONS__ for ac_arg in "" -qlanglvl=ansi -std1 -Ae "-Aa -D_HPUX_SOURCE" "-Xc -D__EXTENSIONS__" do CC="$ac_save_CC $ac_arg" rm -f conftest.$ac_objext if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 (eval $ac_compile) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest.$ac_objext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then ac_cv_prog_cc_stdc=$ac_arg break else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 fi rm -f conftest.err conftest.$ac_objext done rm -f conftest.$ac_ext conftest.$ac_objext CC=$ac_save_CC fi case "x$ac_cv_prog_cc_stdc" in x|xno) echo "$as_me:$LINENO: result: none needed" >&5 echo "${ECHO_T}none needed" >&6 ;; *) echo "$as_me:$LINENO: result: $ac_cv_prog_cc_stdc" >&5 echo "${ECHO_T}$ac_cv_prog_cc_stdc" >&6 CC="$CC $ac_cv_prog_cc_stdc" ;; esac # Some people use a C++ compiler to compile C. Since we use `exit', # in C++ we need to declare it. In case someone uses the same compiler # for both compiling C and C++ we need to have the C++ compiler decide # the declaration of exit, since it's the most demanding environment. cat >conftest.$ac_ext <<_ACEOF #ifndef __cplusplus choke me #endif _ACEOF rm -f conftest.$ac_objext if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 (eval $ac_compile) 2>conftest.er1 ac_status=$? 
grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest.$ac_objext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then for ac_declaration in \ '' \ 'extern "C" void std::exit (int) throw (); using std::exit;' \ 'extern "C" void std::exit (int); using std::exit;' \ 'extern "C" void exit (int) throw ();' \ 'extern "C" void exit (int);' \ 'void exit (int);' do cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ $ac_declaration #include int main () { exit (42); ; return 0; } _ACEOF rm -f conftest.$ac_objext if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 (eval $ac_compile) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest.$ac_objext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then : else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 continue fi rm -f conftest.err conftest.$ac_objext conftest.$ac_ext cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ $ac_declaration int main () { exit (42); ; return 0; } _ACEOF rm -f conftest.$ac_objext if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 (eval $ac_compile) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest.$ac_objext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 (exit $ac_status); }; }; then break else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 fi rm -f conftest.err conftest.$ac_objext conftest.$ac_ext done rm -f conftest* if test -n "$ac_declaration"; then echo '#ifdef __cplusplus' >>confdefs.h echo $ac_declaration >>confdefs.h echo '#endif' >>confdefs.h fi else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 fi rm -f conftest.err conftest.$ac_objext conftest.$ac_ext ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu DEPDIR="${am__leading_dot}deps" ac_config_commands="$ac_config_commands depfiles" am_make=${MAKE-make} cat > confinc << 'END' am__doit: @echo done .PHONY: am__doit END # If we don't find an include directive, just comment out the code. echo "$as_me:$LINENO: checking for style of include used by $am_make" >&5 echo $ECHO_N "checking for style of include used by $am_make... $ECHO_C" >&6 am__include="#" am__quote= _am_result=none # First try GNU make style include. echo "include confinc" > confmf # We grep out `Entering directory' and `Leaving directory' # messages which can occur if `w' ends up in MAKEFLAGS. # In particular we don't look at `^make:' because GNU make might # be invoked under some other name (usually "gmake"), in which # case it prints its new name instead of `make'. if test "`$am_make -s -f confmf 2> /dev/null | grep -v 'ing directory'`" = "done"; then am__include=include am__quote= _am_result=GNU fi # Now try BSD make style include. if test "$am__include" = "#"; then echo '.include "confinc"' > confmf if test "`$am_make -s -f confmf 2> /dev/null`" = "done"; then am__include=.include am__quote="\"" _am_result=BSD fi fi echo "$as_me:$LINENO: result: $_am_result" >&5 echo "${ECHO_T}$_am_result" >&6 rm -f confinc confmf # Check whether --enable-dependency-tracking or --disable-dependency-tracking was given. if test "${enable_dependency_tracking+set}" = set; then enableval="$enable_dependency_tracking" fi; if test "x$enable_dependency_tracking" != xno; then am_depcomp="$ac_aux_dir/depcomp" AMDEPBACKSLASH='\' fi if test "x$enable_dependency_tracking" != xno; then AMDEP_TRUE= AMDEP_FALSE='#' else AMDEP_TRUE='#' AMDEP_FALSE= fi depcc="$CC" am_compiler_list= echo "$as_me:$LINENO: checking dependency style of $depcc" >&5 echo $ECHO_N "checking dependency style of $depcc... $ECHO_C" >&6 if test "${am_cv_CC_dependencies_compiler_type+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then # We make a subdir and do the tests there. Otherwise we can end up # making bogus files that we don't know about and never remove. For # instance it was reported that on HP-UX the gcc test will end up # making a dummy file named `D' -- because `-MD' means `put the output # in D'. mkdir conftest.dir # Copy depcomp to subdir because otherwise we won't find it if we're # using a relative directory. cp "$am_depcomp" conftest.dir cd conftest.dir # We will build objects and dependencies in a subdirectory because # it helps to detect inapplicable dependency modes. For instance # both Tru64's cc and ICC support -MD to output dependencies as a # side effect of compilation, but ICC will put the dependencies in # the current directory while Tru64 will put them in the object # directory. 
mkdir sub am_cv_CC_dependencies_compiler_type=none if test "$am_compiler_list" = ""; then am_compiler_list=`sed -n 's/^#*\([a-zA-Z0-9]*\))$/\1/p' < ./depcomp` fi for depmode in $am_compiler_list; do # Setup a source with many dependencies, because some compilers # like to wrap large dependency lists on column 80 (with \), and # we should not choose a depcomp mode which is confused by this. # # We need to recreate these files for each test, as the compiler may # overwrite some of them when testing with obscure command lines. # This happens at least with the AIX C compiler. : > sub/conftest.c for i in 1 2 3 4 5 6; do echo '#include "conftst'$i'.h"' >> sub/conftest.c # Using `: > sub/conftst$i.h' creates only sub/conftst1.h with # Solaris 8's {/usr,}/bin/sh. touch sub/conftst$i.h done echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf case $depmode in nosideeffect) # after this tag, mechanisms are not by side-effect, so they'll # only be used when explicitly requested if test "x$enable_dependency_tracking" = xyes; then continue else break fi ;; none) break ;; esac # We check with `-c' and `-o' for the sake of the "dashmstdout" # mode. It turns out that the SunPro C++ compiler does not properly # handle `-M -o', and we need to detect this. if depmode=$depmode \ source=sub/conftest.c object=sub/conftest.${OBJEXT-o} \ depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \ $SHELL ./depcomp $depcc -c -o sub/conftest.${OBJEXT-o} sub/conftest.c \ >/dev/null 2>conftest.err && grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 && grep sub/conftest.${OBJEXT-o} sub/conftest.Po > /dev/null 2>&1 && ${MAKE-make} -s -f confmf > /dev/null 2>&1; then # icc doesn't choke on unknown options, it will just issue warnings # or remarks (even with -Werror). So we grep stderr for any message # that says an option was ignored or not supported. # When given -MP, icc 7.0 and 7.1 complain thusly: # icc: Command line warning: ignoring option '-M'; no argument required # The diagnosis changed in icc 8.0: # icc: Command line remark: option '-MP' not supported if (grep 'ignoring option' conftest.err || grep 'not supported' conftest.err) >/dev/null 2>&1; then :; else am_cv_CC_dependencies_compiler_type=$depmode break fi fi done cd .. rm -rf conftest.dir else am_cv_CC_dependencies_compiler_type=none fi fi echo "$as_me:$LINENO: result: $am_cv_CC_dependencies_compiler_type" >&5 echo "${ECHO_T}$am_cv_CC_dependencies_compiler_type" >&6 CCDEPMODE=depmode=$am_cv_CC_dependencies_compiler_type if test "x$enable_dependency_tracking" != xno \ && test "$am_cv_CC_dependencies_compiler_type" = gcc3; then am__fastdepCC_TRUE= am__fastdepCC_FALSE='#' else am__fastdepCC_TRUE='#' am__fastdepCC_FALSE= fi echo "$as_me:$LINENO: checking for a sed that does not truncate output" >&5 echo $ECHO_N "checking for a sed that does not truncate output... $ECHO_C" >&6 if test "${lt_cv_path_SED+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else # Loop through the user's path and test for sed and gsed. # Then use that list of sed's as ones to test for truncation. as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for lt_ac_prog in sed gsed; do for ac_exec_ext in '' $ac_executable_extensions; do if $as_executable_p "$as_dir/$lt_ac_prog$ac_exec_ext"; then lt_ac_sed_list="$lt_ac_sed_list $as_dir/$lt_ac_prog$ac_exec_ext" fi done done done lt_ac_max=0 lt_ac_count=0 # Add /usr/xpg4/bin/sed as it is typically found on Solaris # along with /bin/sed that truncates output. 
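# Editor's illustration (assumed: gcc with -MD/-MF support; guarded so it
# never runs as part of configure): the dependency-style probe above drives
# the depcomp wrapper through several modes, but its core question is simply
# whether a compile can emit a .Po file that names the included headers.
if false; then
  echo '#include "conftst1.h"' >  conftest.c
  echo 'int main (void) { return 0; }' >> conftest.c
  : > conftst1.h
  if gcc -MD -MF conftest.Po -c -o conftest.o conftest.c 2>/dev/null &&
     grep conftst1.h conftest.Po >/dev/null 2>&1; then
    echo "gcc3-style dependency tracking looks usable"
  fi
  rm -f conftest.c conftest.o conftest.Po conftst1.h
fi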
for lt_ac_sed in $lt_ac_sed_list /usr/xpg4/bin/sed; do test ! -f $lt_ac_sed && break cat /dev/null > conftest.in lt_ac_count=0 echo $ECHO_N "0123456789$ECHO_C" >conftest.in # Check for GNU sed and select it if it is found. if "$lt_ac_sed" --version 2>&1 < /dev/null | grep 'GNU' > /dev/null; then lt_cv_path_SED=$lt_ac_sed break fi while true; do cat conftest.in conftest.in >conftest.tmp mv conftest.tmp conftest.in cp conftest.in conftest.nl echo >>conftest.nl $lt_ac_sed -e 's/a$//' < conftest.nl >conftest.out || break cmp -s conftest.out conftest.nl || break # 10000 chars as input seems more than enough test $lt_ac_count -gt 10 && break lt_ac_count=`expr $lt_ac_count + 1` if test $lt_ac_count -gt $lt_ac_max; then lt_ac_max=$lt_ac_count lt_cv_path_SED=$lt_ac_sed fi done done SED=$lt_cv_path_SED fi echo "$as_me:$LINENO: result: $SED" >&5 echo "${ECHO_T}$SED" >&6 # Make sure we can run config.sub. $ac_config_sub sun4 >/dev/null 2>&1 || { { echo "$as_me:$LINENO: error: cannot run $ac_config_sub" >&5 echo "$as_me: error: cannot run $ac_config_sub" >&2;} { (exit 1); exit 1; }; } echo "$as_me:$LINENO: checking build system type" >&5 echo $ECHO_N "checking build system type... $ECHO_C" >&6 if test "${ac_cv_build+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else ac_cv_build_alias=$build_alias test -z "$ac_cv_build_alias" && ac_cv_build_alias=`$ac_config_guess` test -z "$ac_cv_build_alias" && { { echo "$as_me:$LINENO: error: cannot guess build type; you must specify one" >&5 echo "$as_me: error: cannot guess build type; you must specify one" >&2;} { (exit 1); exit 1; }; } ac_cv_build=`$ac_config_sub $ac_cv_build_alias` || { { echo "$as_me:$LINENO: error: $ac_config_sub $ac_cv_build_alias failed" >&5 echo "$as_me: error: $ac_config_sub $ac_cv_build_alias failed" >&2;} { (exit 1); exit 1; }; } fi echo "$as_me:$LINENO: result: $ac_cv_build" >&5 echo "${ECHO_T}$ac_cv_build" >&6 build=$ac_cv_build build_cpu=`echo $ac_cv_build | sed 's/^\([^-]*\)-\([^-]*\)-\(.*\)$/\1/'` build_vendor=`echo $ac_cv_build | sed 's/^\([^-]*\)-\([^-]*\)-\(.*\)$/\2/'` build_os=`echo $ac_cv_build | sed 's/^\([^-]*\)-\([^-]*\)-\(.*\)$/\3/'` echo "$as_me:$LINENO: checking host system type" >&5 echo $ECHO_N "checking host system type... $ECHO_C" >&6 if test "${ac_cv_host+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else ac_cv_host_alias=$host_alias test -z "$ac_cv_host_alias" && ac_cv_host_alias=$ac_cv_build_alias ac_cv_host=`$ac_config_sub $ac_cv_host_alias` || { { echo "$as_me:$LINENO: error: $ac_config_sub $ac_cv_host_alias failed" >&5 echo "$as_me: error: $ac_config_sub $ac_cv_host_alias failed" >&2;} { (exit 1); exit 1; }; } fi echo "$as_me:$LINENO: result: $ac_cv_host" >&5 echo "${ECHO_T}$ac_cv_host" >&6 host=$ac_cv_host host_cpu=`echo $ac_cv_host | sed 's/^\([^-]*\)-\([^-]*\)-\(.*\)$/\1/'` host_vendor=`echo $ac_cv_host | sed 's/^\([^-]*\)-\([^-]*\)-\(.*\)$/\2/'` host_os=`echo $ac_cv_host | sed 's/^\([^-]*\)-\([^-]*\)-\(.*\)$/\3/'` echo "$as_me:$LINENO: checking for egrep" >&5 echo $ECHO_N "checking for egrep... $ECHO_C" >&6 if test "${ac_cv_prog_egrep+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else if echo a | (grep -E '(a|b)') >/dev/null 2>&1 then ac_cv_prog_egrep='grep -E' else ac_cv_prog_egrep='egrep' fi fi echo "$as_me:$LINENO: result: $ac_cv_prog_egrep" >&5 echo "${ECHO_T}$ac_cv_prog_egrep" >&6 EGREP=$ac_cv_prog_egrep # Check whether --with-gnu-ld or --without-gnu-ld was given. 
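# Editor's illustration (the example triplet is arbitrary; guarded so it
# never runs as part of configure): the build/host values computed above are
# plain GNU triplets, and the three sed expressions that follow them simply
# split cpu-vendor-os apart.
if false; then
  triplet=x86_64-unknown-linux-gnu
  cpu=`echo $triplet | sed 's/^\([^-]*\)-\([^-]*\)-\(.*\)$/\1/'`
  vendor=`echo $triplet | sed 's/^\([^-]*\)-\([^-]*\)-\(.*\)$/\2/'`
  os=`echo $triplet | sed 's/^\([^-]*\)-\([^-]*\)-\(.*\)$/\3/'`
  echo "$cpu / $vendor / $os"    # prints: x86_64 / unknown / linux-gnu
fi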
if test "${with_gnu_ld+set}" = set; then withval="$with_gnu_ld" test "$withval" = no || with_gnu_ld=yes else with_gnu_ld=no fi; ac_prog=ld if test "$GCC" = yes; then # Check if gcc -print-prog-name=ld gives a path. echo "$as_me:$LINENO: checking for ld used by $CC" >&5 echo $ECHO_N "checking for ld used by $CC... $ECHO_C" >&6 case $host in *-*-mingw*) # gcc leaves a trailing carriage return which upsets mingw ac_prog=`($CC -print-prog-name=ld) 2>&5 | tr -d '\015'` ;; *) ac_prog=`($CC -print-prog-name=ld) 2>&5` ;; esac case $ac_prog in # Accept absolute paths. [\\/]* | ?:[\\/]*) re_direlt='/[^/][^/]*/\.\./' # Canonicalize the pathname of ld ac_prog=`echo $ac_prog| $SED 's%\\\\%/%g'` while echo $ac_prog | grep "$re_direlt" > /dev/null 2>&1; do ac_prog=`echo $ac_prog| $SED "s%$re_direlt%/%"` done test -z "$LD" && LD="$ac_prog" ;; "") # If it fails, then pretend we aren't using GCC. ac_prog=ld ;; *) # If it is relative, then search for the first ld in PATH. with_gnu_ld=unknown ;; esac elif test "$with_gnu_ld" = yes; then echo "$as_me:$LINENO: checking for GNU ld" >&5 echo $ECHO_N "checking for GNU ld... $ECHO_C" >&6 else echo "$as_me:$LINENO: checking for non-GNU ld" >&5 echo $ECHO_N "checking for non-GNU ld... $ECHO_C" >&6 fi if test "${lt_cv_path_LD+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else if test -z "$LD"; then lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR for ac_dir in $PATH; do IFS="$lt_save_ifs" test -z "$ac_dir" && ac_dir=. if test -f "$ac_dir/$ac_prog" || test -f "$ac_dir/$ac_prog$ac_exeext"; then lt_cv_path_LD="$ac_dir/$ac_prog" # Check to see if the program is GNU ld. I'd rather use --version, # but apparently some GNU ld's only accept -v. # Break only if it was the GNU/non-GNU ld that we prefer. case `"$lt_cv_path_LD" -v 2>&1 &5 echo "${ECHO_T}$LD" >&6 else echo "$as_me:$LINENO: result: no" >&5 echo "${ECHO_T}no" >&6 fi test -z "$LD" && { { echo "$as_me:$LINENO: error: no acceptable ld found in \$PATH" >&5 echo "$as_me: error: no acceptable ld found in \$PATH" >&2;} { (exit 1); exit 1; }; } echo "$as_me:$LINENO: checking if the linker ($LD) is GNU ld" >&5 echo $ECHO_N "checking if the linker ($LD) is GNU ld... $ECHO_C" >&6 if test "${lt_cv_prog_gnu_ld+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else # I'd rather use --version here, but apparently some GNU ld's only accept -v. case `$LD -v 2>&1 &5 echo "${ECHO_T}$lt_cv_prog_gnu_ld" >&6 with_gnu_ld=$lt_cv_prog_gnu_ld # Check whether --enable-shared or --disable-shared was given. if test "${enable_shared+set}" = set; then enableval="$enable_shared" p=${PACKAGE-default} case $enableval in yes) enable_shared=yes ;; no) enable_shared=no ;; *) enable_shared=no # Look at the argument we got. We use all the common list separators. lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," for pkg in $enableval; do IFS="$lt_save_ifs" if test "X$pkg" = "X$p"; then enable_shared=yes fi done IFS="$lt_save_ifs" ;; esac else enable_shared=yes fi; # Check whether --enable-static or --disable-static was given. if test "${enable_static+set}" = set; then enableval="$enable_static" p=${PACKAGE-default} case $enableval in yes) enable_static=yes ;; no) enable_static=no ;; *) enable_static=no # Look at the argument we got. We use all the common list separators. lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," for pkg in $enableval; do IFS="$lt_save_ifs" if test "X$pkg" = "X$p"; then enable_static=yes fi done IFS="$lt_save_ifs" ;; esac else enable_static=yes fi; # Check whether --enable-fast-install or --disable-fast-install was given. 
if test "${enable_fast_install+set}" = set; then enableval="$enable_fast_install" p=${PACKAGE-default} case $enableval in yes) enable_fast_install=yes ;; no) enable_fast_install=no ;; *) enable_fast_install=no # Look at the argument we got. We use all the common list separators. lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," for pkg in $enableval; do IFS="$lt_save_ifs" if test "X$pkg" = "X$p"; then enable_fast_install=yes fi done IFS="$lt_save_ifs" ;; esac else enable_fast_install=yes fi; echo "$as_me:$LINENO: checking for $LD option to reload object files" >&5 echo $ECHO_N "checking for $LD option to reload object files... $ECHO_C" >&6 if test "${lt_cv_ld_reload_flag+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else lt_cv_ld_reload_flag='-r' fi echo "$as_me:$LINENO: result: $lt_cv_ld_reload_flag" >&5 echo "${ECHO_T}$lt_cv_ld_reload_flag" >&6 reload_flag=$lt_cv_ld_reload_flag case $reload_flag in "" | " "*) ;; *) reload_flag=" $reload_flag" ;; esac reload_cmds='$LD$reload_flag -o $output$reload_objs' echo "$as_me:$LINENO: checking for BSD-compatible nm" >&5 echo $ECHO_N "checking for BSD-compatible nm... $ECHO_C" >&6 if test "${lt_cv_path_NM+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else if test -n "$NM"; then # Let the user override the test. lt_cv_path_NM="$NM" else lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR for ac_dir in $PATH /usr/ccs/bin /usr/ucb /bin; do IFS="$lt_save_ifs" test -z "$ac_dir" && ac_dir=. tmp_nm="$ac_dir/${ac_tool_prefix}nm" if test -f "$tmp_nm" || test -f "$tmp_nm$ac_exeext" ; then # Check to see if the nm accepts a BSD-compat flag. # Adding the `sed 1q' prevents false positives on HP-UX, which says: # nm: unknown option "B" ignored # Tru64's nm complains that /dev/null is an invalid object file case `"$tmp_nm" -B /dev/null 2>&1 | sed '1q'` in */dev/null* | *'Invalid file or object type'*) lt_cv_path_NM="$tmp_nm -B" break ;; *) case `"$tmp_nm" -p /dev/null 2>&1 | sed '1q'` in */dev/null*) lt_cv_path_NM="$tmp_nm -p" break ;; *) lt_cv_path_NM=${lt_cv_path_NM="$tmp_nm"} # keep the first match, but continue # so that we can try to find one that supports BSD flags ;; esac esac fi done IFS="$lt_save_ifs" test -z "$lt_cv_path_NM" && lt_cv_path_NM=nm fi fi echo "$as_me:$LINENO: result: $lt_cv_path_NM" >&5 echo "${ECHO_T}$lt_cv_path_NM" >&6 NM="$lt_cv_path_NM" echo "$as_me:$LINENO: checking whether ln -s works" >&5 echo $ECHO_N "checking whether ln -s works... $ECHO_C" >&6 LN_S=$as_ln_s if test "$LN_S" = "ln -s"; then echo "$as_me:$LINENO: result: yes" >&5 echo "${ECHO_T}yes" >&6 else echo "$as_me:$LINENO: result: no, using $LN_S" >&5 echo "${ECHO_T}no, using $LN_S" >&6 fi echo "$as_me:$LINENO: checking how to recognise dependent libraries" >&5 echo $ECHO_N "checking how to recognise dependent libraries... $ECHO_C" >&6 if test "${lt_cv_deplibs_check_method+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else lt_cv_file_magic_cmd='$MAGIC_CMD' lt_cv_file_magic_test_file= lt_cv_deplibs_check_method='unknown' # Need to set the preceding variable on all platforms that support # interlibrary dependencies. # 'none' -- dependencies not supported. # `unknown' -- same as none, but documents that we really don't know. # 'pass_all' -- all dependencies passed with no checks. # 'test_compile' -- check by making test program. # 'file_magic [[regex]]' -- check by looking for files in library path # which responds to the $file_magic_cmd with a given extended regex. 
# If you have `file' or equivalent on your system and you're not sure # whether `pass_all' will *always* work, you probably want this one. case $host_os in aix4* | aix5*) lt_cv_deplibs_check_method=pass_all ;; beos*) lt_cv_deplibs_check_method=pass_all ;; bsdi4*) lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [ML]SB (shared object|dynamic lib)' lt_cv_file_magic_cmd='/usr/bin/file -L' lt_cv_file_magic_test_file=/shlib/libc.so ;; cygwin*) # func_win32_libid is a shell function defined in ltmain.sh lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL' lt_cv_file_magic_cmd='func_win32_libid' ;; mingw* | pw32*) # Base MSYS/MinGW do not provide the 'file' command needed by # func_win32_libid shell function, so use a weaker test based on 'objdump'. lt_cv_deplibs_check_method='file_magic file format pei*-i386(.*architecture: i386)?' lt_cv_file_magic_cmd='$OBJDUMP -f' ;; darwin* | rhapsody*) lt_cv_deplibs_check_method=pass_all ;; freebsd* | kfreebsd*-gnu) if echo __ELF__ | $CC -E - | grep __ELF__ > /dev/null; then case $host_cpu in i*86 ) # Not sure whether the presence of OpenBSD here was a mistake. # Let's accept both of them until this is cleared up. lt_cv_deplibs_check_method='file_magic (FreeBSD|OpenBSD)/i[3-9]86 (compact )?demand paged shared library' lt_cv_file_magic_cmd=/usr/bin/file lt_cv_file_magic_test_file=`echo /usr/lib/libc.so.*` ;; esac else lt_cv_deplibs_check_method=pass_all fi ;; gnu*) lt_cv_deplibs_check_method=pass_all ;; hpux10.20* | hpux11*) lt_cv_file_magic_cmd=/usr/bin/file case "$host_cpu" in ia64*) lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|ELF-[0-9][0-9]) shared object file - IA64' lt_cv_file_magic_test_file=/usr/lib/hpux32/libc.so ;; hppa*64*) lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|ELF-[0-9][0-9]) shared object file - PA-RISC [0-9].[0-9]' lt_cv_file_magic_test_file=/usr/lib/pa20_64/libc.sl ;; *) lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|PA-RISC[0-9].[0-9]) shared library' lt_cv_file_magic_test_file=/usr/lib/libc.sl ;; esac ;; irix5* | irix6* | nonstopux*) case $LD in *-32|*"-32 ") libmagic=32-bit;; *-n32|*"-n32 ") libmagic=N32;; *-64|*"-64 ") libmagic=64-bit;; *) libmagic=never-match;; esac lt_cv_deplibs_check_method=pass_all ;; # This must be Linux ELF. 
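# Editor's illustration (the paths and the ELF pattern are examples only;
# guarded so it never runs as part of configure): a `file_magic' method is
# evaluated by running $file_magic_cmd on a candidate library and grepping
# its output with the stored extended regex.
if false; then
  magic='ELF [0-9][0-9]*-bit [LM]SB (shared object|dynamic lib)'
  for lib in /lib/libc.so* /usr/lib/libc.so*; do
    test -f "$lib" || continue
    /usr/bin/file -L "$lib" 2>/dev/null | grep -E "$magic" >/dev/null &&
      echo "$lib matches the deplibs magic"
  done
fi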
linux*) case $host_cpu in alpha*|hppa*|i*86|ia64*|m68*|mips*|powerpc*|sparc*|s390*|sh*|x86_64*) lt_cv_deplibs_check_method=pass_all ;; *) # glibc up to 2.1.1 does not perform some relocations on ARM # this will be overridden with pass_all, but let us keep it just in case lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [LM]SB (shared object|dynamic lib )' ;; esac lt_cv_file_magic_test_file=`echo /lib/libc.so* /lib/libc-*.so` lt_cv_deplibs_check_method=pass_all ;; netbsd*) if echo __ELF__ | $CC -E - | grep __ELF__ > /dev/null; then lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|_pic\.a)$' else lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so|_pic\.a)$' fi ;; newos6*) lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [ML]SB (executable|dynamic lib)' lt_cv_file_magic_cmd=/usr/bin/file lt_cv_file_magic_test_file=/usr/lib/libnls.so ;; nto-qnx*) lt_cv_deplibs_check_method=unknown ;; openbsd*) lt_cv_file_magic_cmd=/usr/bin/file lt_cv_file_magic_test_file=`echo /usr/lib/libc.so.*` if test -z "`echo __ELF__ | $CC -E - | grep __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [LM]SB shared object' else lt_cv_deplibs_check_method='file_magic OpenBSD.* shared library' fi ;; osf3* | osf4* | osf5*) lt_cv_deplibs_check_method=pass_all ;; sco3.2v5*) lt_cv_deplibs_check_method=pass_all ;; solaris*) lt_cv_deplibs_check_method=pass_all ;; sysv4 | sysv4.2uw2* | sysv4.3* | sysv5*) case $host_vendor in motorola) lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [ML]SB (shared object|dynamic lib) M[0-9][0-9]* Version [0-9]' lt_cv_file_magic_test_file=`echo /usr/lib/libc.so*` ;; ncr) lt_cv_deplibs_check_method=pass_all ;; sequent) lt_cv_file_magic_cmd='/bin/file' lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [LM]SB (shared object|dynamic lib )' ;; sni) lt_cv_file_magic_cmd='/bin/file' lt_cv_deplibs_check_method="file_magic ELF [0-9][0-9]*-bit [LM]SB dynamic lib" lt_cv_file_magic_test_file=/lib/libc.so ;; siemens) lt_cv_deplibs_check_method=pass_all ;; esac ;; sysv5OpenUNIX8* | sysv5UnixWare7* | sysv5uw[78]* | unixware7* | sysv4*uw2*) lt_cv_deplibs_check_method=pass_all ;; esac fi echo "$as_me:$LINENO: result: $lt_cv_deplibs_check_method" >&5 echo "${ECHO_T}$lt_cv_deplibs_check_method" >&6 file_magic_cmd=$lt_cv_file_magic_cmd deplibs_check_method=$lt_cv_deplibs_check_method test -z "$deplibs_check_method" && deplibs_check_method=unknown # If no C compiler was specified, use CC. LTCC=${LTCC-"$CC"} # Allow CC to be a program name with arguments. compiler=$CC # Check whether --enable-libtool-lock or --disable-libtool-lock was given. if test "${enable_libtool_lock+set}" = set; then enableval="$enable_libtool_lock" fi; test "x$enable_libtool_lock" != xno && enable_libtool_lock=yes # Some flags need to be propagated to the compiler or linker for good # libtool support. case $host in ia64-*-hpux*) # Find out which ABI we are using. echo 'int i;' > conftest.$ac_ext if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 (eval $ac_compile) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; then case `/usr/bin/file conftest.$ac_objext` in *ELF-32*) HPUX_IA64_MODE="32" ;; *ELF-64*) HPUX_IA64_MODE="64" ;; esac fi rm -rf conftest* ;; *-*-irix6*) # Find out which ABI we are using. echo '#line 3668 "configure"' > conftest.$ac_ext if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 (eval $ac_compile) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 (exit $ac_status); }; then if test "$lt_cv_prog_gnu_ld" = yes; then case `/usr/bin/file conftest.$ac_objext` in *32-bit*) LD="${LD-ld} -melf32bsmip" ;; *N32*) LD="${LD-ld} -melf32bmipn32" ;; *64-bit*) LD="${LD-ld} -melf64bmip" ;; esac else case `/usr/bin/file conftest.$ac_objext` in *32-bit*) LD="${LD-ld} -32" ;; *N32*) LD="${LD-ld} -n32" ;; *64-bit*) LD="${LD-ld} -64" ;; esac fi fi rm -rf conftest* ;; x86_64-*linux*|ppc*-*linux*|powerpc*-*linux*|s390*-*linux*|sparc*-*linux*) # Find out which ABI we are using. echo 'int i;' > conftest.$ac_ext if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 (eval $ac_compile) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; then case "`/usr/bin/file conftest.o`" in *32-bit*) case $host in x86_64-*linux*) LD="${LD-ld} -m elf_i386" ;; ppc64-*linux*|powerpc64-*linux*) LD="${LD-ld} -m elf32ppclinux" ;; s390x-*linux*) LD="${LD-ld} -m elf_s390" ;; sparc64-*linux*) LD="${LD-ld} -m elf32_sparc" ;; esac ;; *64-bit*) case $host in x86_64-*linux*) LD="${LD-ld} -m elf_x86_64" ;; ppc*-*linux*|powerpc*-*linux*) LD="${LD-ld} -m elf64ppc" ;; s390*-*linux*) LD="${LD-ld} -m elf64_s390" ;; sparc*-*linux*) LD="${LD-ld} -m elf64_sparc" ;; esac ;; esac fi rm -rf conftest* ;; *-*-sco3.2v5*) # On SCO OpenServer 5, we need -belf to get full-featured binaries. SAVE_CFLAGS="$CFLAGS" CFLAGS="$CFLAGS -belf" echo "$as_me:$LINENO: checking whether the C compiler needs -belf" >&5 echo $ECHO_N "checking whether the C compiler needs -belf... $ECHO_C" >&6 if test "${lt_cv_cc_needs_belf+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ int main () { ; return 0; } _ACEOF rm -f conftest.$ac_objext conftest$ac_exeext if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 (eval $ac_link) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest$ac_exeext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 (exit $ac_status); }; }; then lt_cv_cc_needs_belf=yes else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 lt_cv_cc_needs_belf=no fi rm -f conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu fi echo "$as_me:$LINENO: result: $lt_cv_cc_needs_belf" >&5 echo "${ECHO_T}$lt_cv_cc_needs_belf" >&6 if test x"$lt_cv_cc_needs_belf" != x"yes"; then # this is probably gcc 2.8.0, egcs 1.0 or newer; no need for -belf CFLAGS="$SAVE_CFLAGS" fi ;; esac need_locks="$enable_libtool_lock" ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu echo "$as_me:$LINENO: checking how to run the C preprocessor" >&5 echo $ECHO_N "checking how to run the C preprocessor... $ECHO_C" >&6 # On Suns, sometimes $CPP names a directory. if test -n "$CPP" && test -d "$CPP"; then CPP= fi if test -z "$CPP"; then if test "${ac_cv_prog_CPP+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else # Double quotes because CPP needs to be expanded for CPP in "$CC -E" "$CC -E -traditional-cpp" "/lib/cpp" do ac_preproc_ok=false for ac_c_preproc_warn_flag in '' yes do # Use a header file that comes with gcc, so configuring glibc # with a fresh cross-compiler works. # Prefer to if __STDC__ is defined, since # exists even on freestanding compilers. # On the NeXT, cc -E runs the code through the compiler's parser, # not just through cpp. "Syntax error" is here to catch this case. cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ #ifdef __STDC__ # include #else # include #endif Syntax error _ACEOF if { (eval echo "$as_me:$LINENO: \"$ac_cpp conftest.$ac_ext\"") >&5 (eval $ac_cpp conftest.$ac_ext) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } >/dev/null; then if test -s conftest.err; then ac_cpp_err=$ac_c_preproc_warn_flag ac_cpp_err=$ac_cpp_err$ac_c_werror_flag else ac_cpp_err= fi else ac_cpp_err=yes fi if test -z "$ac_cpp_err"; then : else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 # Broken: fails on valid input. continue fi rm -f conftest.err conftest.$ac_ext # OK, works on sane cases. Now check whether non-existent headers # can be detected and how. cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ #include _ACEOF if { (eval echo "$as_me:$LINENO: \"$ac_cpp conftest.$ac_ext\"") >&5 (eval $ac_cpp conftest.$ac_ext) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } >/dev/null; then if test -s conftest.err; then ac_cpp_err=$ac_c_preproc_warn_flag ac_cpp_err=$ac_cpp_err$ac_c_werror_flag else ac_cpp_err= fi else ac_cpp_err=yes fi if test -z "$ac_cpp_err"; then # Broken: success on invalid input. continue else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 # Passes both tests. 
ac_preproc_ok=: break fi rm -f conftest.err conftest.$ac_ext done # Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. rm -f conftest.err conftest.$ac_ext if $ac_preproc_ok; then break fi done ac_cv_prog_CPP=$CPP fi CPP=$ac_cv_prog_CPP else ac_cv_prog_CPP=$CPP fi echo "$as_me:$LINENO: result: $CPP" >&5 echo "${ECHO_T}$CPP" >&6 ac_preproc_ok=false for ac_c_preproc_warn_flag in '' yes do # Use a header file that comes with gcc, so configuring glibc # with a fresh cross-compiler works. # Prefer to if __STDC__ is defined, since # exists even on freestanding compilers. # On the NeXT, cc -E runs the code through the compiler's parser, # not just through cpp. "Syntax error" is here to catch this case. cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ #ifdef __STDC__ # include #else # include #endif Syntax error _ACEOF if { (eval echo "$as_me:$LINENO: \"$ac_cpp conftest.$ac_ext\"") >&5 (eval $ac_cpp conftest.$ac_ext) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } >/dev/null; then if test -s conftest.err; then ac_cpp_err=$ac_c_preproc_warn_flag ac_cpp_err=$ac_cpp_err$ac_c_werror_flag else ac_cpp_err= fi else ac_cpp_err=yes fi if test -z "$ac_cpp_err"; then : else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 # Broken: fails on valid input. continue fi rm -f conftest.err conftest.$ac_ext # OK, works on sane cases. Now check whether non-existent headers # can be detected and how. cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ #include _ACEOF if { (eval echo "$as_me:$LINENO: \"$ac_cpp conftest.$ac_ext\"") >&5 (eval $ac_cpp conftest.$ac_ext) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } >/dev/null; then if test -s conftest.err; then ac_cpp_err=$ac_c_preproc_warn_flag ac_cpp_err=$ac_cpp_err$ac_c_werror_flag else ac_cpp_err= fi else ac_cpp_err=yes fi if test -z "$ac_cpp_err"; then # Broken: success on invalid input. continue else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 # Passes both tests. ac_preproc_ok=: break fi rm -f conftest.err conftest.$ac_ext done # Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. rm -f conftest.err conftest.$ac_ext if $ac_preproc_ok; then : else { { echo "$as_me:$LINENO: error: C preprocessor \"$CPP\" fails sanity check See \`config.log' for more details." >&5 echo "$as_me: error: C preprocessor \"$CPP\" fails sanity check See \`config.log' for more details." >&2;} { (exit 1); exit 1; }; } fi ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu echo "$as_me:$LINENO: checking for ANSI C header files" >&5 echo $ECHO_N "checking for ANSI C header files... $ECHO_C" >&6 if test "${ac_cv_header_stdc+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. 
*/ #include #include #include #include int main () { ; return 0; } _ACEOF rm -f conftest.$ac_objext if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 (eval $ac_compile) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest.$ac_objext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then ac_cv_header_stdc=yes else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_cv_header_stdc=no fi rm -f conftest.err conftest.$ac_objext conftest.$ac_ext if test $ac_cv_header_stdc = yes; then # SunOS 4.x string.h does not declare mem*, contrary to ANSI. cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ #include _ACEOF if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | $EGREP "memchr" >/dev/null 2>&1; then : else ac_cv_header_stdc=no fi rm -f conftest* fi if test $ac_cv_header_stdc = yes; then # ISC 2.0.2 stdlib.h does not declare free, contrary to ANSI. cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ #include _ACEOF if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | $EGREP "free" >/dev/null 2>&1; then : else ac_cv_header_stdc=no fi rm -f conftest* fi if test $ac_cv_header_stdc = yes; then # /bin/cc in Irix-4.0.5 gets non-ANSI ctype macros unless using -ansi. if test "$cross_compiling" = yes; then : else cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ #include #if ((' ' & 0x0FF) == 0x020) # define ISLOWER(c) ('a' <= (c) && (c) <= 'z') # define TOUPPER(c) (ISLOWER(c) ? 'A' + ((c) - 'a') : (c)) #else # define ISLOWER(c) \ (('a' <= (c) && (c) <= 'i') \ || ('j' <= (c) && (c) <= 'r') \ || ('s' <= (c) && (c) <= 'z')) # define TOUPPER(c) (ISLOWER(c) ? ((c) | 0x40) : (c)) #endif #define XOR(e, f) (((e) && !(f)) || (!(e) && (f))) int main () { int i; for (i = 0; i < 256; i++) if (XOR (islower (i), ISLOWER (i)) || toupper (i) != TOUPPER (i)) exit(2); exit (0); } _ACEOF rm -f conftest$ac_exeext if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 (eval $ac_link) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='./conftest$ac_exeext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then : else echo "$as_me: program exited with status $ac_status" >&5 echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ( exit $ac_status ) ac_cv_header_stdc=no fi rm -f core *.core gmon.out bb.out conftest$ac_exeext conftest.$ac_objext conftest.$ac_ext fi fi fi echo "$as_me:$LINENO: result: $ac_cv_header_stdc" >&5 echo "${ECHO_T}$ac_cv_header_stdc" >&6 if test $ac_cv_header_stdc = yes; then cat >>confdefs.h <<\_ACEOF #define STDC_HEADERS 1 _ACEOF fi # On IRIX 5.3, sys/types and inttypes.h are conflicting. 
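# Editor's illustration (assumed: cc in PATH; guarded so it never runs as
# part of configure): the runtime half of the ANSI-header check compiles a
# tiny program that compares islower()/toupper() against hand-rolled ASCII
# macros; any disagreement makes configure drop STDC_HEADERS.
if false; then
  cat > conftest.c <<'EOF'
#include <ctype.h>
#include <stdlib.h>
#define ISLOWER(c) ('a' <= (c) && (c) <= 'z')
#define TOUPPER(c) (ISLOWER(c) ? 'A' + ((c) - 'a') : (c))
int main (void)
{
  int i;
  for (i = 0; i < 256; i++)
    if ((islower (i) != 0) != ISLOWER (i) || toupper (i) != TOUPPER (i))
      exit (2);
  exit (0);
}
EOF
  cc -o conftest conftest.c && ./conftest && echo "ANSI C headers look sane"
  rm -f conftest conftest.c
fi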
for ac_header in sys/types.h sys/stat.h stdlib.h string.h memory.h strings.h \ inttypes.h stdint.h unistd.h do as_ac_Header=`echo "ac_cv_header_$ac_header" | $as_tr_sh` echo "$as_me:$LINENO: checking for $ac_header" >&5 echo $ECHO_N "checking for $ac_header... $ECHO_C" >&6 if eval "test \"\${$as_ac_Header+set}\" = set"; then echo $ECHO_N "(cached) $ECHO_C" >&6 else cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ $ac_includes_default #include <$ac_header> _ACEOF rm -f conftest.$ac_objext if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 (eval $ac_compile) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest.$ac_objext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then eval "$as_ac_Header=yes" else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 eval "$as_ac_Header=no" fi rm -f conftest.err conftest.$ac_objext conftest.$ac_ext fi echo "$as_me:$LINENO: result: `eval echo '${'$as_ac_Header'}'`" >&5 echo "${ECHO_T}`eval echo '${'$as_ac_Header'}'`" >&6 if test `eval echo '${'$as_ac_Header'}'` = yes; then cat >>confdefs.h <<_ACEOF #define `echo "HAVE_$ac_header" | $as_tr_cpp` 1 _ACEOF fi done for ac_header in dlfcn.h do as_ac_Header=`echo "ac_cv_header_$ac_header" | $as_tr_sh` if eval "test \"\${$as_ac_Header+set}\" = set"; then echo "$as_me:$LINENO: checking for $ac_header" >&5 echo $ECHO_N "checking for $ac_header... $ECHO_C" >&6 if eval "test \"\${$as_ac_Header+set}\" = set"; then echo $ECHO_N "(cached) $ECHO_C" >&6 fi echo "$as_me:$LINENO: result: `eval echo '${'$as_ac_Header'}'`" >&5 echo "${ECHO_T}`eval echo '${'$as_ac_Header'}'`" >&6 else # Is the header compilable? echo "$as_me:$LINENO: checking $ac_header usability" >&5 echo $ECHO_N "checking $ac_header usability... $ECHO_C" >&6 cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ $ac_includes_default #include <$ac_header> _ACEOF rm -f conftest.$ac_objext if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 (eval $ac_compile) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest.$ac_objext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then ac_header_compiler=yes else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_header_compiler=no fi rm -f conftest.err conftest.$ac_objext conftest.$ac_ext echo "$as_me:$LINENO: result: $ac_header_compiler" >&5 echo "${ECHO_T}$ac_header_compiler" >&6 # Is the header present? 
echo "$as_me:$LINENO: checking $ac_header presence" >&5 echo $ECHO_N "checking $ac_header presence... $ECHO_C" >&6 cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ #include <$ac_header> _ACEOF if { (eval echo "$as_me:$LINENO: \"$ac_cpp conftest.$ac_ext\"") >&5 (eval $ac_cpp conftest.$ac_ext) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } >/dev/null; then if test -s conftest.err; then ac_cpp_err=$ac_c_preproc_warn_flag ac_cpp_err=$ac_cpp_err$ac_c_werror_flag else ac_cpp_err= fi else ac_cpp_err=yes fi if test -z "$ac_cpp_err"; then ac_header_preproc=yes else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_header_preproc=no fi rm -f conftest.err conftest.$ac_ext echo "$as_me:$LINENO: result: $ac_header_preproc" >&5 echo "${ECHO_T}$ac_header_preproc" >&6 # So? What about this header? case $ac_header_compiler:$ac_header_preproc:$ac_c_preproc_warn_flag in yes:no: ) { echo "$as_me:$LINENO: WARNING: $ac_header: accepted by the compiler, rejected by the preprocessor!" >&5 echo "$as_me: WARNING: $ac_header: accepted by the compiler, rejected by the preprocessor!" >&2;} { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the compiler's result" >&5 echo "$as_me: WARNING: $ac_header: proceeding with the compiler's result" >&2;} ac_header_preproc=yes ;; no:yes:* ) { echo "$as_me:$LINENO: WARNING: $ac_header: present but cannot be compiled" >&5 echo "$as_me: WARNING: $ac_header: present but cannot be compiled" >&2;} { echo "$as_me:$LINENO: WARNING: $ac_header: check for missing prerequisite headers?" >&5 echo "$as_me: WARNING: $ac_header: check for missing prerequisite headers?" >&2;} { echo "$as_me:$LINENO: WARNING: $ac_header: see the Autoconf documentation" >&5 echo "$as_me: WARNING: $ac_header: see the Autoconf documentation" >&2;} { echo "$as_me:$LINENO: WARNING: $ac_header: section \"Present But Cannot Be Compiled\"" >&5 echo "$as_me: WARNING: $ac_header: section \"Present But Cannot Be Compiled\"" >&2;} { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the preprocessor's result" >&5 echo "$as_me: WARNING: $ac_header: proceeding with the preprocessor's result" >&2;} { echo "$as_me:$LINENO: WARNING: $ac_header: in the future, the compiler will take precedence" >&5 echo "$as_me: WARNING: $ac_header: in the future, the compiler will take precedence" >&2;} ( cat <<\_ASBOX ## ------------------------------------------ ## ## Report this to the libsphinxclient lists. ## ## ------------------------------------------ ## _ASBOX ) | sed "s/^/$as_me: WARNING: /" >&2 ;; esac echo "$as_me:$LINENO: checking for $ac_header" >&5 echo $ECHO_N "checking for $ac_header... 
$ECHO_C" >&6 if eval "test \"\${$as_ac_Header+set}\" = set"; then echo $ECHO_N "(cached) $ECHO_C" >&6 else eval "$as_ac_Header=\$ac_header_preproc" fi echo "$as_me:$LINENO: result: `eval echo '${'$as_ac_Header'}'`" >&5 echo "${ECHO_T}`eval echo '${'$as_ac_Header'}'`" >&6 fi if test `eval echo '${'$as_ac_Header'}'` = yes; then cat >>confdefs.h <<_ACEOF #define `echo "HAVE_$ac_header" | $as_tr_cpp` 1 _ACEOF fi done ac_ext=cc ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu echo "$as_me:$LINENO: checking how to run the C++ preprocessor" >&5 echo $ECHO_N "checking how to run the C++ preprocessor... $ECHO_C" >&6 if test -z "$CXXCPP"; then if test "${ac_cv_prog_CXXCPP+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else # Double quotes because CXXCPP needs to be expanded for CXXCPP in "$CXX -E" "/lib/cpp" do ac_preproc_ok=false for ac_cxx_preproc_warn_flag in '' yes do # Use a header file that comes with gcc, so configuring glibc # with a fresh cross-compiler works. # Prefer to if __STDC__ is defined, since # exists even on freestanding compilers. # On the NeXT, cc -E runs the code through the compiler's parser, # not just through cpp. "Syntax error" is here to catch this case. cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ #ifdef __STDC__ # include #else # include #endif Syntax error _ACEOF if { (eval echo "$as_me:$LINENO: \"$ac_cpp conftest.$ac_ext\"") >&5 (eval $ac_cpp conftest.$ac_ext) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } >/dev/null; then if test -s conftest.err; then ac_cpp_err=$ac_cxx_preproc_warn_flag ac_cpp_err=$ac_cpp_err$ac_cxx_werror_flag else ac_cpp_err= fi else ac_cpp_err=yes fi if test -z "$ac_cpp_err"; then : else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 # Broken: fails on valid input. continue fi rm -f conftest.err conftest.$ac_ext # OK, works on sane cases. Now check whether non-existent headers # can be detected and how. cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ #include _ACEOF if { (eval echo "$as_me:$LINENO: \"$ac_cpp conftest.$ac_ext\"") >&5 (eval $ac_cpp conftest.$ac_ext) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } >/dev/null; then if test -s conftest.err; then ac_cpp_err=$ac_cxx_preproc_warn_flag ac_cpp_err=$ac_cpp_err$ac_cxx_werror_flag else ac_cpp_err= fi else ac_cpp_err=yes fi if test -z "$ac_cpp_err"; then # Broken: success on invalid input. continue else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 # Passes both tests. ac_preproc_ok=: break fi rm -f conftest.err conftest.$ac_ext done # Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. 
rm -f conftest.err conftest.$ac_ext if $ac_preproc_ok; then break fi done ac_cv_prog_CXXCPP=$CXXCPP fi CXXCPP=$ac_cv_prog_CXXCPP else ac_cv_prog_CXXCPP=$CXXCPP fi echo "$as_me:$LINENO: result: $CXXCPP" >&5 echo "${ECHO_T}$CXXCPP" >&6 ac_preproc_ok=false for ac_cxx_preproc_warn_flag in '' yes do # Use a header file that comes with gcc, so configuring glibc # with a fresh cross-compiler works. # Prefer to if __STDC__ is defined, since # exists even on freestanding compilers. # On the NeXT, cc -E runs the code through the compiler's parser, # not just through cpp. "Syntax error" is here to catch this case. cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ #ifdef __STDC__ # include #else # include #endif Syntax error _ACEOF if { (eval echo "$as_me:$LINENO: \"$ac_cpp conftest.$ac_ext\"") >&5 (eval $ac_cpp conftest.$ac_ext) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } >/dev/null; then if test -s conftest.err; then ac_cpp_err=$ac_cxx_preproc_warn_flag ac_cpp_err=$ac_cpp_err$ac_cxx_werror_flag else ac_cpp_err= fi else ac_cpp_err=yes fi if test -z "$ac_cpp_err"; then : else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 # Broken: fails on valid input. continue fi rm -f conftest.err conftest.$ac_ext # OK, works on sane cases. Now check whether non-existent headers # can be detected and how. cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ #include _ACEOF if { (eval echo "$as_me:$LINENO: \"$ac_cpp conftest.$ac_ext\"") >&5 (eval $ac_cpp conftest.$ac_ext) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } >/dev/null; then if test -s conftest.err; then ac_cpp_err=$ac_cxx_preproc_warn_flag ac_cpp_err=$ac_cpp_err$ac_cxx_werror_flag else ac_cpp_err= fi else ac_cpp_err=yes fi if test -z "$ac_cpp_err"; then # Broken: success on invalid input. continue else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 # Passes both tests. ac_preproc_ok=: break fi rm -f conftest.err conftest.$ac_ext done # Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. rm -f conftest.err conftest.$ac_ext if $ac_preproc_ok; then : else { { echo "$as_me:$LINENO: error: C++ preprocessor \"$CXXCPP\" fails sanity check See \`config.log' for more details." >&5 echo "$as_me: error: C++ preprocessor \"$CXXCPP\" fails sanity check See \`config.log' for more details." >&2;} { (exit 1); exit 1; }; } fi ac_ext=cc ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu # Autoconf 2.13's AC_OBJEXT and AC_EXEEXT macros only works for C compilers! # find the maximum length of command line arguments echo "$as_me:$LINENO: checking the maximum length of command line arguments" >&5 echo $ECHO_N "checking the maximum length of command line arguments... 
$ECHO_C" >&6 if test "${lt_cv_sys_max_cmd_len+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else i=0 teststring="ABCD" case $build_os in msdosdjgpp*) # On DJGPP, this test can blow up pretty badly due to problems in libc # (any single argument exceeding 2000 bytes causes a buffer overrun # during glob expansion). Even if it were fixed, the result of this # check would be larger than it should be. lt_cv_sys_max_cmd_len=12288; # 12K is about right ;; gnu*) # Under GNU Hurd, this test is not required because there is # no limit to the length of command line arguments. # Libtool will interpret -1 as no limit whatsoever lt_cv_sys_max_cmd_len=-1; ;; cygwin* | mingw*) # On Win9x/ME, this test blows up -- it succeeds, but takes # about 5 minutes as the teststring grows exponentially. # Worse, since 9x/ME are not pre-emptively multitasking, # you end up with a "frozen" computer, even though with patience # the test eventually succeeds (with a max line length of 256k). # Instead, let's just punt: use the minimum linelength reported by # all of the supported platforms: 8192 (on NT/2K/XP). lt_cv_sys_max_cmd_len=8192; ;; amigaos*) # On AmigaOS with pdksh, this test takes hours, literally. # So we just punt and use a minimum line length of 8192. lt_cv_sys_max_cmd_len=8192; ;; *) # If test is not a shell built-in, we'll probably end up computing a # maximum length that is only half of the actual maximum length, but # we can't tell. while (test "X"`$CONFIG_SHELL $0 --fallback-echo "X$teststring" 2>/dev/null` \ = "XX$teststring") >/dev/null 2>&1 && new_result=`expr "X$teststring" : ".*" 2>&1` && lt_cv_sys_max_cmd_len=$new_result && test $i != 17 # 1/2 MB should be enough do i=`expr $i + 1` teststring=$teststring$teststring done teststring= # Add a significant safety factor because C++ compilers can tack on massive # amounts of additional arguments before passing them to the linker. # It appears as though 1/2 is a usable value. lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 2` ;; esac fi if test -n $lt_cv_sys_max_cmd_len ; then echo "$as_me:$LINENO: result: $lt_cv_sys_max_cmd_len" >&5 echo "${ECHO_T}$lt_cv_sys_max_cmd_len" >&6 else echo "$as_me:$LINENO: result: none" >&5 echo "${ECHO_T}none" >&6 fi # Check for command to grab the raw symbol name followed by C symbol from nm. echo "$as_me:$LINENO: checking command to parse $NM output from $compiler object" >&5 echo $ECHO_N "checking command to parse $NM output from $compiler object... $ECHO_C" >&6 if test "${lt_cv_sys_global_symbol_pipe+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else # These are sane defaults that work on at least a few old systems. # [They come from Ultrix. What could be older than Ultrix?!! ;)] # Character class describing NM global symbol codes. symcode='[BCDEGRST]' # Regexp to match symbols that can be accessed directly from C. sympat='\([_A-Za-z][_A-Za-z0-9]*\)' # Transform the above into a raw symbol and a C symbol. symxfrm='\1 \2\3 \3' # Transform an extracted symbol line into a proper C declaration lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^. .* \(.*\)$/extern int \1;/p'" # Transform an extracted symbol line into symbol name and symbol address lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (lt_ptr) 0},/p' -e 's/^$symcode \([^ ]*\) \([^ ]*\)$/ {\"\2\", (lt_ptr) \&\2},/p'" # Define system-specific variables. 
case $host_os in aix*) symcode='[BCDT]' ;; cygwin* | mingw* | pw32*) symcode='[ABCDGISTW]' ;; hpux*) # Its linker distinguishes data from code symbols if test "$host_cpu" = ia64; then symcode='[ABCDEGRST]' fi lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'" lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (lt_ptr) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (lt_ptr) \&\2},/p'" ;; irix* | nonstopux*) symcode='[BCDEGRST]' ;; osf*) symcode='[BCDEGQRST]' ;; solaris* | sysv5*) symcode='[BDRT]' ;; sysv4) symcode='[DFNSTU]' ;; esac # Handle CRLF in mingw tool chain opt_cr= case $build_os in mingw*) opt_cr=`echo 'x\{0,1\}' | tr x '\015'` # option cr in regexp ;; esac # If we're using GNU nm, then use its standard symbol codes. case `$NM -V 2>&1` in *GNU* | *'with BFD'*) symcode='[ABCDGIRSTW]' ;; esac # Try without a prefix undercore, then with it. for ac_symprfx in "" "_"; do # Write the raw and C identifiers. lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[ ]\($symcode$symcode*\)[ ][ ]*\($ac_symprfx\)$sympat$opt_cr$/$symxfrm/p'" # Check to see that the pipe works correctly. pipe_works=no rm -f conftest* cat > conftest.$ac_ext <&5 (eval $ac_compile) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; then # Now try to grab the symbols. nlist=conftest.nm if { (eval echo "$as_me:$LINENO: \"$NM conftest.$ac_objext \| $lt_cv_sys_global_symbol_pipe \> $nlist\"") >&5 (eval $NM conftest.$ac_objext \| $lt_cv_sys_global_symbol_pipe \> $nlist) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && test -s "$nlist"; then # Try sorting and uniquifying the output. if sort "$nlist" | uniq > "$nlist"T; then mv -f "$nlist"T "$nlist" else rm -f "$nlist"T fi # Make sure that we snagged all the symbols we need. if grep ' nm_test_var$' "$nlist" >/dev/null; then if grep ' nm_test_func$' "$nlist" >/dev/null; then cat < conftest.$ac_ext #ifdef __cplusplus extern "C" { #endif EOF # Now generate the symbol file. eval "$lt_cv_sys_global_symbol_to_cdecl"' < "$nlist" | grep -v main >> conftest.$ac_ext' cat <> conftest.$ac_ext #if defined (__STDC__) && __STDC__ # define lt_ptr_t void * #else # define lt_ptr_t char * # define const #endif /* The mapping between symbol names and symbols. */ const struct { const char *name; lt_ptr_t address; } lt_preloaded_symbols[] = { EOF $SED "s/^$symcode$symcode* \(.*\) \(.*\)$/ {\"\2\", (lt_ptr_t) \&\2},/" < "$nlist" | grep -v main >> conftest.$ac_ext cat <<\EOF >> conftest.$ac_ext {0, (lt_ptr_t) 0} }; #ifdef __cplusplus } #endif EOF # Now try linking the two files. mv conftest.$ac_objext conftstm.$ac_objext lt_save_LIBS="$LIBS" lt_save_CFLAGS="$CFLAGS" LIBS="conftstm.$ac_objext" CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag" if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 (eval $ac_link) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && test -s conftest${ac_exeext}; then pipe_works=yes fi LIBS="$lt_save_LIBS" CFLAGS="$lt_save_CFLAGS" else echo "cannot find nm_test_func in $nlist" >&5 fi else echo "cannot find nm_test_var in $nlist" >&5 fi else echo "cannot run $lt_cv_sys_global_symbol_pipe" >&5 fi else echo "$progname: failed program was:" >&5 cat conftest.$ac_ext >&5 fi rm -f conftest* conftst* # Do not use the global_symbol_pipe unless it works. 
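# (Illustrative aside, not part of the stock macro: for the small nm_test_var /
#  nm_test_func conftest used here, a working pipe ends up generating C along
#  the lines of
#      extern int nm_test_var; extern int nm_test_func;
#      const struct { const char *name; lt_ptr_t address; } lt_preloaded_symbols[] =
#        { {"nm_test_var", (lt_ptr_t) &nm_test_var},
#          {"nm_test_func", (lt_ptr_t) &nm_test_func},
#          {0, (lt_ptr_t) 0} };
#  and pipe_works is only set to yes once that generated file compiles and links
#  against conftstm.$ac_objext.)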
if test "$pipe_works" = yes; then break else lt_cv_sys_global_symbol_pipe= fi done fi if test -z "$lt_cv_sys_global_symbol_pipe"; then lt_cv_sys_global_symbol_to_cdecl= fi if test -z "$lt_cv_sys_global_symbol_pipe$lt_cv_sys_global_symbol_to_cdecl"; then echo "$as_me:$LINENO: result: failed" >&5 echo "${ECHO_T}failed" >&6 else echo "$as_me:$LINENO: result: ok" >&5 echo "${ECHO_T}ok" >&6 fi echo "$as_me:$LINENO: checking for objdir" >&5 echo $ECHO_N "checking for objdir... $ECHO_C" >&6 if test "${lt_cv_objdir+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else rm -f .libs 2>/dev/null mkdir .libs 2>/dev/null if test -d .libs; then lt_cv_objdir=.libs else # MS-DOS does not allow filenames that begin with a dot. lt_cv_objdir=_libs fi rmdir .libs 2>/dev/null fi echo "$as_me:$LINENO: result: $lt_cv_objdir" >&5 echo "${ECHO_T}$lt_cv_objdir" >&6 objdir=$lt_cv_objdir case $host_os in aix3*) # AIX sometimes has problems with the GCC collect2 program. For some # reason, if we set the COLLECT_NAMES environment variable, the problems # vanish in a puff of smoke. if test "X${COLLECT_NAMES+set}" != Xset; then COLLECT_NAMES= export COLLECT_NAMES fi ;; esac # Sed substitution that helps us do robust quoting. It backslashifies # metacharacters that are still active within double-quoted strings. Xsed='sed -e s/^X//' sed_quote_subst='s/\([\\"\\`$\\\\]\)/\\\1/g' # Same as above, but do not quote variable references. double_quote_subst='s/\([\\"\\`\\\\]\)/\\\1/g' # Sed substitution to delay expansion of an escaped shell variable in a # double_quote_subst'ed string. delay_variable_subst='s/\\\\\\\\\\\$/\\\\\\$/g' # Sed substitution to avoid accidental globbing in evaled expressions no_glob_subst='s/\*/\\\*/g' # Constants: rm="rm -f" # Global variables: default_ofile=libtool can_build_shared=yes # All known linkers require a `.a' archive for static linking (except M$VC, # which needs '.lib'). libext=a ltmain="$ac_aux_dir/ltmain.sh" ofile="$default_ofile" with_gnu_ld="$lt_cv_prog_gnu_ld" if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}ar", so it can be a program name with args. set dummy ${ac_tool_prefix}ar; ac_word=$2 echo "$as_me:$LINENO: checking for $ac_word" >&5 echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6 if test "${ac_cv_prog_AR+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else if test -n "$AR"; then ac_cv_prog_AR="$AR" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if $as_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_AR="${ac_tool_prefix}ar" echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done fi fi AR=$ac_cv_prog_AR if test -n "$AR"; then echo "$as_me:$LINENO: result: $AR" >&5 echo "${ECHO_T}$AR" >&6 else echo "$as_me:$LINENO: result: no" >&5 echo "${ECHO_T}no" >&6 fi fi if test -z "$ac_cv_prog_AR"; then ac_ct_AR=$AR # Extract the first word of "ar", so it can be a program name with args. set dummy ar; ac_word=$2 echo "$as_me:$LINENO: checking for $ac_word" >&5 echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6 if test "${ac_cv_prog_ac_ct_AR+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else if test -n "$ac_ct_AR"; then ac_cv_prog_ac_ct_AR="$ac_ct_AR" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if $as_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_AR="ar" echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done test -z "$ac_cv_prog_ac_ct_AR" && ac_cv_prog_ac_ct_AR="false" fi fi ac_ct_AR=$ac_cv_prog_ac_ct_AR if test -n "$ac_ct_AR"; then echo "$as_me:$LINENO: result: $ac_ct_AR" >&5 echo "${ECHO_T}$ac_ct_AR" >&6 else echo "$as_me:$LINENO: result: no" >&5 echo "${ECHO_T}no" >&6 fi AR=$ac_ct_AR else AR="$ac_cv_prog_AR" fi if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}ranlib", so it can be a program name with args. set dummy ${ac_tool_prefix}ranlib; ac_word=$2 echo "$as_me:$LINENO: checking for $ac_word" >&5 echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6 if test "${ac_cv_prog_RANLIB+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else if test -n "$RANLIB"; then ac_cv_prog_RANLIB="$RANLIB" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if $as_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_RANLIB="${ac_tool_prefix}ranlib" echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done fi fi RANLIB=$ac_cv_prog_RANLIB if test -n "$RANLIB"; then echo "$as_me:$LINENO: result: $RANLIB" >&5 echo "${ECHO_T}$RANLIB" >&6 else echo "$as_me:$LINENO: result: no" >&5 echo "${ECHO_T}no" >&6 fi fi if test -z "$ac_cv_prog_RANLIB"; then ac_ct_RANLIB=$RANLIB # Extract the first word of "ranlib", so it can be a program name with args. set dummy ranlib; ac_word=$2 echo "$as_me:$LINENO: checking for $ac_word" >&5 echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6 if test "${ac_cv_prog_ac_ct_RANLIB+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else if test -n "$ac_ct_RANLIB"; then ac_cv_prog_ac_ct_RANLIB="$ac_ct_RANLIB" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if $as_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_RANLIB="ranlib" echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done test -z "$ac_cv_prog_ac_ct_RANLIB" && ac_cv_prog_ac_ct_RANLIB=":" fi fi ac_ct_RANLIB=$ac_cv_prog_ac_ct_RANLIB if test -n "$ac_ct_RANLIB"; then echo "$as_me:$LINENO: result: $ac_ct_RANLIB" >&5 echo "${ECHO_T}$ac_ct_RANLIB" >&6 else echo "$as_me:$LINENO: result: no" >&5 echo "${ECHO_T}no" >&6 fi RANLIB=$ac_ct_RANLIB else RANLIB="$ac_cv_prog_RANLIB" fi if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args. set dummy ${ac_tool_prefix}strip; ac_word=$2 echo "$as_me:$LINENO: checking for $ac_word" >&5 echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6 if test "${ac_cv_prog_STRIP+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else if test -n "$STRIP"; then ac_cv_prog_STRIP="$STRIP" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if $as_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_STRIP="${ac_tool_prefix}strip" echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done fi fi STRIP=$ac_cv_prog_STRIP if test -n "$STRIP"; then echo "$as_me:$LINENO: result: $STRIP" >&5 echo "${ECHO_T}$STRIP" >&6 else echo "$as_me:$LINENO: result: no" >&5 echo "${ECHO_T}no" >&6 fi fi if test -z "$ac_cv_prog_STRIP"; then ac_ct_STRIP=$STRIP # Extract the first word of "strip", so it can be a program name with args. set dummy strip; ac_word=$2 echo "$as_me:$LINENO: checking for $ac_word" >&5 echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6 if test "${ac_cv_prog_ac_ct_STRIP+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else if test -n "$ac_ct_STRIP"; then ac_cv_prog_ac_ct_STRIP="$ac_ct_STRIP" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if $as_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_STRIP="strip" echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done test -z "$ac_cv_prog_ac_ct_STRIP" && ac_cv_prog_ac_ct_STRIP=":" fi fi ac_ct_STRIP=$ac_cv_prog_ac_ct_STRIP if test -n "$ac_ct_STRIP"; then echo "$as_me:$LINENO: result: $ac_ct_STRIP" >&5 echo "${ECHO_T}$ac_ct_STRIP" >&6 else echo "$as_me:$LINENO: result: no" >&5 echo "${ECHO_T}no" >&6 fi STRIP=$ac_ct_STRIP else STRIP="$ac_cv_prog_STRIP" fi old_CC="$CC" old_CFLAGS="$CFLAGS" # Set sane defaults for various variables test -z "$AR" && AR=ar test -z "$AR_FLAGS" && AR_FLAGS=cru test -z "$AS" && AS=as test -z "$CC" && CC=cc test -z "$LTCC" && LTCC=$CC test -z "$DLLTOOL" && DLLTOOL=dlltool test -z "$LD" && LD=ld test -z "$LN_S" && LN_S="ln -s" test -z "$MAGIC_CMD" && MAGIC_CMD=file test -z "$NM" && NM=nm test -z "$SED" && SED=sed test -z "$OBJDUMP" && OBJDUMP=objdump test -z "$RANLIB" && RANLIB=: test -z "$STRIP" && STRIP=: test -z "$ac_objext" && ac_objext=o # Determine commands to create old-style static archives. old_archive_cmds='$AR $AR_FLAGS $oldlib$oldobjs$old_deplibs' old_postinstall_cmds='chmod 644 $oldlib' old_postuninstall_cmds= if test -n "$RANLIB"; then case $host_os in openbsd*) old_postinstall_cmds="\$RANLIB -t \$oldlib~$old_postinstall_cmds" ;; *) old_postinstall_cmds="\$RANLIB \$oldlib~$old_postinstall_cmds" ;; esac old_archive_cmds="$old_archive_cmds~\$RANLIB \$oldlib" fi # Only perform the check for file, if the check method requires it case $deplibs_check_method in file_magic*) if test "$file_magic_cmd" = '$MAGIC_CMD'; then echo "$as_me:$LINENO: checking for ${ac_tool_prefix}file" >&5 echo $ECHO_N "checking for ${ac_tool_prefix}file... $ECHO_C" >&6 if test "${lt_cv_path_MAGIC_CMD+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else case $MAGIC_CMD in [\\/*] | ?:[\\/]*) lt_cv_path_MAGIC_CMD="$MAGIC_CMD" # Let the user override the test with a path. ;; *) lt_save_MAGIC_CMD="$MAGIC_CMD" lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR ac_dummy="/usr/bin$PATH_SEPARATOR$PATH" for ac_dir in $ac_dummy; do IFS="$lt_save_ifs" test -z "$ac_dir" && ac_dir=. 
if test -f $ac_dir/${ac_tool_prefix}file; then lt_cv_path_MAGIC_CMD="$ac_dir/${ac_tool_prefix}file" if test -n "$file_magic_test_file"; then case $deplibs_check_method in "file_magic "*) file_magic_regex="`expr \"$deplibs_check_method\" : \"file_magic \(.*\)\"`" MAGIC_CMD="$lt_cv_path_MAGIC_CMD" if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null | $EGREP "$file_magic_regex" > /dev/null; then : else cat <&2 *** Warning: the command libtool uses to detect shared libraries, *** $file_magic_cmd, produces output that libtool cannot recognize. *** The result is that libtool may fail to recognize shared libraries *** as such. This will affect the creation of libtool libraries that *** depend on shared libraries, but programs linked with such libtool *** libraries will work regardless of this problem. Nevertheless, you *** may want to report the problem to your system manager and/or to *** bug-libtool@gnu.org EOF fi ;; esac fi break fi done IFS="$lt_save_ifs" MAGIC_CMD="$lt_save_MAGIC_CMD" ;; esac fi MAGIC_CMD="$lt_cv_path_MAGIC_CMD" if test -n "$MAGIC_CMD"; then echo "$as_me:$LINENO: result: $MAGIC_CMD" >&5 echo "${ECHO_T}$MAGIC_CMD" >&6 else echo "$as_me:$LINENO: result: no" >&5 echo "${ECHO_T}no" >&6 fi if test -z "$lt_cv_path_MAGIC_CMD"; then if test -n "$ac_tool_prefix"; then echo "$as_me:$LINENO: checking for file" >&5 echo $ECHO_N "checking for file... $ECHO_C" >&6 if test "${lt_cv_path_MAGIC_CMD+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else case $MAGIC_CMD in [\\/*] | ?:[\\/]*) lt_cv_path_MAGIC_CMD="$MAGIC_CMD" # Let the user override the test with a path. ;; *) lt_save_MAGIC_CMD="$MAGIC_CMD" lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR ac_dummy="/usr/bin$PATH_SEPARATOR$PATH" for ac_dir in $ac_dummy; do IFS="$lt_save_ifs" test -z "$ac_dir" && ac_dir=. if test -f $ac_dir/file; then lt_cv_path_MAGIC_CMD="$ac_dir/file" if test -n "$file_magic_test_file"; then case $deplibs_check_method in "file_magic "*) file_magic_regex="`expr \"$deplibs_check_method\" : \"file_magic \(.*\)\"`" MAGIC_CMD="$lt_cv_path_MAGIC_CMD" if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null | $EGREP "$file_magic_regex" > /dev/null; then : else cat <&2 *** Warning: the command libtool uses to detect shared libraries, *** $file_magic_cmd, produces output that libtool cannot recognize. *** The result is that libtool may fail to recognize shared libraries *** as such. This will affect the creation of libtool libraries that *** depend on shared libraries, but programs linked with such libtool *** libraries will work regardless of this problem. Nevertheless, you *** may want to report the problem to your system manager and/or to *** bug-libtool@gnu.org EOF fi ;; esac fi break fi done IFS="$lt_save_ifs" MAGIC_CMD="$lt_save_MAGIC_CMD" ;; esac fi MAGIC_CMD="$lt_cv_path_MAGIC_CMD" if test -n "$MAGIC_CMD"; then echo "$as_me:$LINENO: result: $MAGIC_CMD" >&5 echo "${ECHO_T}$MAGIC_CMD" >&6 else echo "$as_me:$LINENO: result: no" >&5 echo "${ECHO_T}no" >&6 fi else MAGIC_CMD=: fi fi fi ;; esac enable_dlopen=no enable_win32_dll=no # Check whether --enable-libtool-lock or --disable-libtool-lock was given. if test "${enable_libtool_lock+set}" = set; then enableval="$enable_libtool_lock" fi; test "x$enable_libtool_lock" != xno && enable_libtool_lock=yes # Check whether --with-pic or --without-pic was given. 
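# (Illustrative sketch, not part of the generated script; the helper below is
#  never invoked and its name is made up.  It only shows how a
#  deplibs_check_method of the form "file_magic REGEXP", as probed above, is
#  applied: run $MAGIC_CMD on a candidate library and accept it when the output
#  matches the regexp.  The --with-pic handling announced above continues right
#  after this sketch.)
sphx_demo_file_magic_match ()
{
  # $1 = candidate shared library, $2 = extended regexp from deplibs_check_method
  ${MAGIC_CMD-file} "$1" 2>/dev/null | ${EGREP-grep -E} "$2" >/dev/null
}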
if test "${with_pic+set}" = set; then withval="$with_pic" pic_mode="$withval" else pic_mode=default fi; test -z "$pic_mode" && pic_mode=default # Use C for the default configuration in the libtool script tagname= lt_save_CC="$CC" ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu # Source file extension for C test sources. ac_ext=c # Object file extension for compiled C test sources. objext=o objext=$objext # Code to be used in simple compile tests lt_simple_compile_test_code="int some_variable = 0;\n" # Code to be used in simple link tests lt_simple_link_test_code='int main(){return(0);}\n' # If no C compiler was specified, use CC. LTCC=${LTCC-"$CC"} # Allow CC to be a program name with arguments. compiler=$CC # # Check for any special shared library compilation flags. # lt_prog_cc_shlib= if test "$GCC" = no; then case $host_os in sco3.2v5*) lt_prog_cc_shlib='-belf' ;; esac fi if test -n "$lt_prog_cc_shlib"; then { echo "$as_me:$LINENO: WARNING: \`$CC' requires \`$lt_prog_cc_shlib' to build shared libraries" >&5 echo "$as_me: WARNING: \`$CC' requires \`$lt_prog_cc_shlib' to build shared libraries" >&2;} if echo "$old_CC $old_CFLAGS " | grep "[ ]$lt_prog_cc_shlib[ ]" >/dev/null; then : else { echo "$as_me:$LINENO: WARNING: add \`$lt_prog_cc_shlib' to the CC or CFLAGS env variable and reconfigure" >&5 echo "$as_me: WARNING: add \`$lt_prog_cc_shlib' to the CC or CFLAGS env variable and reconfigure" >&2;} lt_cv_prog_cc_can_build_shared=no fi fi # # Check to make sure the static flag actually works. # echo "$as_me:$LINENO: checking if $compiler static flag $lt_prog_compiler_static works" >&5 echo $ECHO_N "checking if $compiler static flag $lt_prog_compiler_static works... $ECHO_C" >&6 if test "${lt_prog_compiler_static_works+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else lt_prog_compiler_static_works=no save_LDFLAGS="$LDFLAGS" LDFLAGS="$LDFLAGS $lt_prog_compiler_static" printf "$lt_simple_link_test_code" > conftest.$ac_ext if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then # The compiler can only warn and ignore the option if not recognized # So say no if there are warnings if test -s conftest.err; then # Append any errors to the config.log. cat conftest.err 1>&5 else lt_prog_compiler_static_works=yes fi fi $rm conftest* LDFLAGS="$save_LDFLAGS" fi echo "$as_me:$LINENO: result: $lt_prog_compiler_static_works" >&5 echo "${ECHO_T}$lt_prog_compiler_static_works" >&6 if test x"$lt_prog_compiler_static_works" = xyes; then : else lt_prog_compiler_static= fi lt_prog_compiler_no_builtin_flag= if test "$GCC" = yes; then lt_prog_compiler_no_builtin_flag=' -fno-builtin' echo "$as_me:$LINENO: checking if $compiler supports -fno-rtti -fno-exceptions" >&5 echo $ECHO_N "checking if $compiler supports -fno-rtti -fno-exceptions... $ECHO_C" >&6 if test "${lt_cv_prog_compiler_rtti_exceptions+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else lt_cv_prog_compiler_rtti_exceptions=no ac_outfile=conftest.$ac_objext printf "$lt_simple_compile_test_code" > conftest.$ac_ext lt_compiler_flag="-fno-rtti -fno-exceptions" # Insert the option either (1) after the last *FLAGS variable, or # (2) before a word containing "conftest.", or (3) at the end. # Note that $ac_compile itself does not contain backslashes and begins # with a dollar sign (not a hyphen), so the echo should work correctly. 
# The option is referenced via a variable to avoid confusing sed. lt_compile=`echo "$ac_compile" | $SED \ -e 's:.*FLAGS}? :&$lt_compiler_flag :; t' \ -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ -e 's:$: $lt_compiler_flag:'` (eval echo "\"\$as_me:5576: $lt_compile\"" >&5) (eval "$lt_compile" 2>conftest.err) ac_status=$? cat conftest.err >&5 echo "$as_me:5580: \$? = $ac_status" >&5 if (exit $ac_status) && test -s "$ac_outfile"; then # The compiler can only warn and ignore the option if not recognized # So say no if there are warnings if test ! -s conftest.err; then lt_cv_prog_compiler_rtti_exceptions=yes fi fi $rm conftest* fi echo "$as_me:$LINENO: result: $lt_cv_prog_compiler_rtti_exceptions" >&5 echo "${ECHO_T}$lt_cv_prog_compiler_rtti_exceptions" >&6 if test x"$lt_cv_prog_compiler_rtti_exceptions" = xyes; then lt_prog_compiler_no_builtin_flag="$lt_prog_compiler_no_builtin_flag -fno-rtti -fno-exceptions" else : fi fi lt_prog_compiler_wl= lt_prog_compiler_pic= lt_prog_compiler_static= echo "$as_me:$LINENO: checking for $compiler option to produce PIC" >&5 echo $ECHO_N "checking for $compiler option to produce PIC... $ECHO_C" >&6 if test "$GCC" = yes; then lt_prog_compiler_wl='-Wl,' lt_prog_compiler_static='-static' case $host_os in aix*) # All AIX code is PIC. if test "$host_cpu" = ia64; then # AIX 5 now supports IA64 processor lt_prog_compiler_static='-Bstatic' fi ;; amigaos*) # FIXME: we need at least 68020 code to build shared libraries, but # adding the `-m68020' flag to GCC prevents building anything better, # like `-m68040'. lt_prog_compiler_pic='-m68020 -resident32 -malways-restore-a4' ;; beos* | cygwin* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*) # PIC is the default for these OSes. ;; mingw* | pw32* | os2*) # This hack is so that the source file can tell whether it is being # built for inclusion in a dll (and should export symbols for example). lt_prog_compiler_pic='-DDLL_EXPORT' ;; darwin* | rhapsody*) # PIC is the default on this platform # Common symbols not allowed in MH_DYLIB files lt_prog_compiler_pic='-fno-common' ;; msdosdjgpp*) # Just because we use GCC doesn't mean we suddenly get shared libraries # on systems that don't support them. lt_prog_compiler_can_build_shared=no enable_shared=no ;; sysv4*MP*) if test -d /usr/nec; then lt_prog_compiler_pic=-Kconform_pic fi ;; hpux*) # PIC is the default for IA64 HP-UX and 64-bit HP-UX, but # not for PA HP-UX. case "$host_cpu" in hppa*64*|ia64*) # +Z the default ;; *) lt_prog_compiler_pic='-fPIC' ;; esac ;; *) lt_prog_compiler_pic='-fPIC' ;; esac else # PORTME Check for flag to pass linker flags through the system compiler. case $host_os in aix*) lt_prog_compiler_wl='-Wl,' if test "$host_cpu" = ia64; then # AIX 5 now supports IA64 processor lt_prog_compiler_static='-Bstatic' else lt_prog_compiler_static='-bnso -bI:/lib/syscalls.exp' fi ;; mingw* | pw32* | os2*) # This hack is so that the source file can tell whether it is being # built for inclusion in a dll (and should export symbols for example). lt_prog_compiler_pic='-DDLL_EXPORT' ;; hpux9* | hpux10* | hpux11*) lt_prog_compiler_wl='-Wl,' # PIC is the default for IA64 HP-UX and 64-bit HP-UX, but # not for PA HP-UX. case "$host_cpu" in hppa*64*|ia64*) # +Z the default ;; *) lt_prog_compiler_pic='+Z' ;; esac # Is there a better lt_prog_compiler_static that works with the bundled CC? lt_prog_compiler_static='${wl}-a ${wl}archive' ;; irix5* | irix6* | nonstopux*) lt_prog_compiler_wl='-Wl,' # PIC (with -KPIC) is the default. 
lt_prog_compiler_static='-non_shared' ;; newsos6) lt_prog_compiler_pic='-KPIC' lt_prog_compiler_static='-Bstatic' ;; linux*) case $CC in icc* | ecc*) lt_prog_compiler_wl='-Wl,' lt_prog_compiler_pic='-KPIC' lt_prog_compiler_static='-static' ;; ccc*) lt_prog_compiler_wl='-Wl,' # All Alpha code is PIC. lt_prog_compiler_static='-non_shared' ;; esac ;; osf3* | osf4* | osf5*) lt_prog_compiler_wl='-Wl,' # All OSF/1 code is PIC. lt_prog_compiler_static='-non_shared' ;; sco3.2v5*) lt_prog_compiler_pic='-Kpic' lt_prog_compiler_static='-dn' ;; solaris*) lt_prog_compiler_wl='-Wl,' lt_prog_compiler_pic='-KPIC' lt_prog_compiler_static='-Bstatic' ;; sunos4*) lt_prog_compiler_wl='-Qoption ld ' lt_prog_compiler_pic='-PIC' lt_prog_compiler_static='-Bstatic' ;; sysv4 | sysv4.2uw2* | sysv4.3* | sysv5*) lt_prog_compiler_wl='-Wl,' lt_prog_compiler_pic='-KPIC' lt_prog_compiler_static='-Bstatic' ;; sysv4*MP*) if test -d /usr/nec ;then lt_prog_compiler_pic='-Kconform_pic' lt_prog_compiler_static='-Bstatic' fi ;; uts4*) lt_prog_compiler_pic='-pic' lt_prog_compiler_static='-Bstatic' ;; *) lt_prog_compiler_can_build_shared=no ;; esac fi echo "$as_me:$LINENO: result: $lt_prog_compiler_pic" >&5 echo "${ECHO_T}$lt_prog_compiler_pic" >&6 # # Check to make sure the PIC flag actually works. # if test -n "$lt_prog_compiler_pic"; then echo "$as_me:$LINENO: checking if $compiler PIC flag $lt_prog_compiler_pic works" >&5 echo $ECHO_N "checking if $compiler PIC flag $lt_prog_compiler_pic works... $ECHO_C" >&6 if test "${lt_prog_compiler_pic_works+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else lt_prog_compiler_pic_works=no ac_outfile=conftest.$ac_objext printf "$lt_simple_compile_test_code" > conftest.$ac_ext lt_compiler_flag="$lt_prog_compiler_pic -DPIC" # Insert the option either (1) after the last *FLAGS variable, or # (2) before a word containing "conftest.", or (3) at the end. # Note that $ac_compile itself does not contain backslashes and begins # with a dollar sign (not a hyphen), so the echo should work correctly. # The option is referenced via a variable to avoid confusing sed. lt_compile=`echo "$ac_compile" | $SED \ -e 's:.*FLAGS}? :&$lt_compiler_flag :; t' \ -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ -e 's:$: $lt_compiler_flag:'` (eval echo "\"\$as_me:5809: $lt_compile\"" >&5) (eval "$lt_compile" 2>conftest.err) ac_status=$? cat conftest.err >&5 echo "$as_me:5813: \$? = $ac_status" >&5 if (exit $ac_status) && test -s "$ac_outfile"; then # The compiler can only warn and ignore the option if not recognized # So say no if there are warnings if test ! -s conftest.err; then lt_prog_compiler_pic_works=yes fi fi $rm conftest* fi echo "$as_me:$LINENO: result: $lt_prog_compiler_pic_works" >&5 echo "${ECHO_T}$lt_prog_compiler_pic_works" >&6 if test x"$lt_prog_compiler_pic_works" = xyes; then case $lt_prog_compiler_pic in "" | " "*) ;; *) lt_prog_compiler_pic=" $lt_prog_compiler_pic" ;; esac else lt_prog_compiler_pic= lt_prog_compiler_can_build_shared=no fi fi case "$host_os" in # For platforms which do not support PIC, -DPIC is meaningless: *djgpp*) lt_prog_compiler_pic= ;; *) lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC" ;; esac echo "$as_me:$LINENO: checking if $compiler supports -c -o file.$ac_objext" >&5 echo $ECHO_N "checking if $compiler supports -c -o file.$ac_objext... 
$ECHO_C" >&6 if test "${lt_cv_prog_compiler_c_o+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else lt_cv_prog_compiler_c_o=no $rm -r conftest 2>/dev/null mkdir conftest cd conftest mkdir out printf "$lt_simple_compile_test_code" > conftest.$ac_ext lt_compiler_flag="-o out/conftest2.$ac_objext" # Insert the option either (1) after the last *FLAGS variable, or # (2) before a word containing "conftest.", or (3) at the end. # Note that $ac_compile itself does not contain backslashes and begins # with a dollar sign (not a hyphen), so the echo should work correctly. lt_compile=`echo "$ac_compile" | $SED \ -e 's:.*FLAGS}? :&$lt_compiler_flag :; t' \ -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ -e 's:$: $lt_compiler_flag:'` (eval echo "\"\$as_me:5869: $lt_compile\"" >&5) (eval "$lt_compile" 2>out/conftest.err) ac_status=$? cat out/conftest.err >&5 echo "$as_me:5873: \$? = $ac_status" >&5 if (exit $ac_status) && test -s out/conftest2.$ac_objext then # The compiler can only warn and ignore the option if not recognized # So say no if there are warnings if test ! -s out/conftest.err; then lt_cv_prog_compiler_c_o=yes fi fi chmod u+w . $rm conftest* # SGI C++ compiler will create directory out/ii_files/ for # template instantiation test -d out/ii_files && $rm out/ii_files/* && rmdir out/ii_files $rm out/* && rmdir out cd .. rmdir conftest $rm conftest* fi echo "$as_me:$LINENO: result: $lt_cv_prog_compiler_c_o" >&5 echo "${ECHO_T}$lt_cv_prog_compiler_c_o" >&6 hard_links="nottested" if test "$lt_cv_prog_compiler_c_o" = no && test "$need_locks" != no; then # do not overwrite the value of need_locks provided by the user echo "$as_me:$LINENO: checking if we can lock with hard links" >&5 echo $ECHO_N "checking if we can lock with hard links... $ECHO_C" >&6 hard_links=yes $rm conftest* ln conftest.a conftest.b 2>/dev/null && hard_links=no touch conftest.a ln conftest.a conftest.b 2>&5 || hard_links=no ln conftest.a conftest.b 2>/dev/null && hard_links=no echo "$as_me:$LINENO: result: $hard_links" >&5 echo "${ECHO_T}$hard_links" >&6 if test "$hard_links" = no; then { echo "$as_me:$LINENO: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&5 echo "$as_me: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&2;} need_locks=warn fi else need_locks=no fi echo "$as_me:$LINENO: checking whether the $compiler linker ($LD) supports shared libraries" >&5 echo $ECHO_N "checking whether the $compiler linker ($LD) supports shared libraries... $ECHO_C" >&6 runpath_var= allow_undefined_flag= enable_shared_with_static_runtimes=no archive_cmds= archive_expsym_cmds= old_archive_From_new_cmds= old_archive_from_expsyms_cmds= export_dynamic_flag_spec= whole_archive_flag_spec= thread_safe_flag_spec= hardcode_libdir_flag_spec= hardcode_libdir_flag_spec_ld= hardcode_libdir_separator= hardcode_direct=no hardcode_minus_L=no hardcode_shlibpath_var=unsupported link_all_deplibs=unknown hardcode_automatic=no module_cmds= module_expsym_cmds= always_export_symbols=no export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' # include_expsyms should be a list of space-separated symbols to be *always* # included in the symbol list include_expsyms= # exclude_expsyms can be an extended regexp of symbols to exclude # it will be wrapped by ` (' and `)$', so one must not match beginning or # end of line. Example: `a|bc|.*d.*' will exclude the symbols `a' and `bc', # as well as any symbol that contains `d'. 
exclude_expsyms="_GLOBAL_OFFSET_TABLE_" # Although _GLOBAL_OFFSET_TABLE_ is a valid symbol C name, most a.out # platforms (ab)use it in PIC code, but their linkers get confused if # the symbol is explicitly referenced. Since portable code cannot # rely on this symbol name, it's probably fine to never include it in # preloaded symbol tables. extract_expsyms_cmds= case $host_os in cygwin* | mingw* | pw32*) # FIXME: the MSVC++ port hasn't been tested in a loooong time # When not using gcc, we currently assume that we are using # Microsoft Visual C++. if test "$GCC" != yes; then with_gnu_ld=no fi ;; openbsd*) with_gnu_ld=no ;; esac ld_shlibs=yes if test "$with_gnu_ld" = yes; then # If archive_cmds runs LD, not CC, wlarc should be empty wlarc='${wl}' # See if GNU ld supports shared libraries. case $host_os in aix3* | aix4* | aix5*) # On AIX/PPC, the GNU linker is very broken if test "$host_cpu" != ia64; then ld_shlibs=no cat <&2 *** Warning: the GNU linker, at least up to release 2.9.1, is reported *** to be unable to reliably create shared libraries on AIX. *** Therefore, libtool is disabling shared libraries support. If you *** really care for shared libraries, you may want to modify your PATH *** so that a non-GNU linker is found, and then restart. EOF fi ;; amigaos*) archive_cmds='$rm $output_objdir/a2ixlibrary.data~$echo "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$echo "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$echo "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$echo "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)' hardcode_libdir_flag_spec='-L$libdir' hardcode_minus_L=yes # Samuel A. Falvo II reports # that the semantics of dynamic libraries on AmigaOS, at least up # to version 4, is to share data among multiple programs linked # with the same dynamic library. Since this doesn't match the # behavior of shared libraries on other platforms, we can't use # them. ld_shlibs=no ;; beos*) if $LD --help 2>&1 | grep ': supported targets:.* elf' > /dev/null; then allow_undefined_flag=unsupported # Joseph Beckenbach says some releases of gcc # support --undefined. This deserves some investigation. FIXME archive_cmds='$CC -nostart $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' else ld_shlibs=no fi ;; cygwin* | mingw* | pw32*) # _LT_AC_TAGVAR(hardcode_libdir_flag_spec, ) is actually meaningless, # as there is no search path for DLLs. hardcode_libdir_flag_spec='-L$libdir' allow_undefined_flag=unsupported always_export_symbols=no enable_shared_with_static_runtimes=yes export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGS] /s/.* \([^ ]*\)/\1 DATA/'\'' | $SED -e '\''/^[AITW] /s/.* //'\'' | sort | uniq > $export_symbols' if $LD --help 2>&1 | grep 'auto-import' > /dev/null; then archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--image-base=0x10000000 ${wl}--out-implib,$lib' # If the export-symbols file already is a .def file (1st line # is EXPORTS), use it as is; otherwise, prepend... 
archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then cp $export_symbols $output_objdir/$soname.def; else echo EXPORTS > $output_objdir/$soname.def; cat $export_symbols >> $output_objdir/$soname.def; fi~ $CC -shared $output_objdir/$soname.def $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--image-base=0x10000000 ${wl}--out-implib,$lib' else ld_shlibs=no fi ;; netbsd*) if echo __ELF__ | $CC -E - | grep __ELF__ >/dev/null; then archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib' wlarc= else archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' fi ;; solaris* | sysv5*) if $LD -v 2>&1 | grep 'BFD 2\.8' > /dev/null; then ld_shlibs=no cat <&2 *** Warning: The releases 2.8.* of the GNU linker cannot reliably *** create shared libraries on Solaris systems. Therefore, libtool *** is disabling shared libraries support. We urge you to upgrade GNU *** binutils to release 2.9.1 or newer. Another option is to modify *** your PATH or compiler configuration so that the native linker is *** used, and then restart. EOF elif $LD --help 2>&1 | grep ': supported targets:.* elf' > /dev/null; then archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' else ld_shlibs=no fi ;; sunos4*) archive_cmds='$LD -assert pure-text -Bshareable -o $lib $libobjs $deplibs $linker_flags' wlarc= hardcode_direct=yes hardcode_shlibpath_var=no ;; linux*) if $LD --help 2>&1 | grep ': supported targets:.* elf' > /dev/null; then tmp_archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' archive_cmds="$tmp_archive_cmds" supports_anon_versioning=no case `$LD -v 2>/dev/null` in *\ 01.* | *\ 2.[0-9].* | *\ 2.10.*) ;; # catch versions < 2.11 *\ 2.11.93.0.2\ *) supports_anon_versioning=yes ;; # RH7.3 ... *\ 2.11.92.0.12\ *) supports_anon_versioning=yes ;; # Mandrake 8.2 ... *\ 2.11.*) ;; # other 2.11 versions *) supports_anon_versioning=yes ;; esac if test $supports_anon_versioning = yes; then archive_expsym_cmds='$echo "{ global:" > $output_objdir/$libname.ver~ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ $echo "local: *; };" >> $output_objdir/$libname.ver~ $CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-version-script ${wl}$output_objdir/$libname.ver -o $lib' else archive_expsym_cmds="$tmp_archive_cmds" fi else ld_shlibs=no fi ;; *) if $LD --help 2>&1 | grep ': supported targets:.* elf' > /dev/null; then archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' else ld_shlibs=no fi ;; esac if test "$ld_shlibs" = yes; then runpath_var=LD_RUN_PATH hardcode_libdir_flag_spec='${wl}--rpath ${wl}$libdir' export_dynamic_flag_spec='${wl}--export-dynamic' # ancient GNU ld didn't support --whole-archive et. al. 
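# (Illustrative aside, not part of the stock macro: the probe just below merely
#  greps the linker's help text, e.g. `$LD --help 2>&1 | grep no-whole-archive`;
#  when it matches, convenience archives are wrapped as
#      ${wl}--whole-archive$convenience ${wl}--no-whole-archive
#  so that every object inside them is pulled into the shared library, rather
#  than only the members that happen to resolve a currently undefined symbol.)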
if $LD --help 2>&1 | grep 'no-whole-archive' > /dev/null; then whole_archive_flag_spec="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive' else whole_archive_flag_spec= fi fi else # PORTME fill in a description of your system's linker (not GNU ld) case $host_os in aix3*) allow_undefined_flag=unsupported always_export_symbols=yes archive_expsym_cmds='$LD -o $output_objdir/$soname $libobjs $deplibs $linker_flags -bE:$export_symbols -T512 -H512 -bM:SRE~$AR $AR_FLAGS $lib $output_objdir/$soname' # Note: this linker hardcodes the directories in LIBPATH if there # are no directories specified by -L. hardcode_minus_L=yes if test "$GCC" = yes && test -z "$link_static_flag"; then # Neither direct hardcoding nor static linking is supported with a # broken collect2. hardcode_direct=unsupported fi ;; aix4* | aix5*) if test "$host_cpu" = ia64; then # On IA64, the linker does run time linking by default, so we don't # have to do anything special. aix_use_runtimelinking=no exp_sym_flag='-Bexport' no_entry_flag="" else # If we're using GNU nm, then we don't want the "-C" option. # -C means demangle to AIX nm, but means don't demangle with GNU nm if $NM -V 2>&1 | grep 'GNU' > /dev/null; then export_symbols_cmds='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$2 == "T") || (\$2 == "D") || (\$2 == "B")) && (substr(\$3,1,1) != ".")) { print \$3 } }'\'' | sort -u > $export_symbols' else export_symbols_cmds='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$2 == "T") || (\$2 == "D") || (\$2 == "B")) && (substr(\$3,1,1) != ".")) { print \$3 } }'\'' | sort -u > $export_symbols' fi aix_use_runtimelinking=no # Test if we are trying to use run time linking or normal # AIX style linking. If -brtl is somewhere in LDFLAGS, we # need to do runtime linking. case $host_os in aix4.[23]|aix4.[23].*|aix5*) for ld_flag in $LDFLAGS; do if (test $ld_flag = "-brtl" || test $ld_flag = "-Wl,-brtl"); then aix_use_runtimelinking=yes break fi done esac exp_sym_flag='-bexport' no_entry_flag='-bnoentry' fi # When large executables or shared objects are built, AIX ld can # have problems creating the table of contents. If linking a library # or program results in "error TOC overflow" add -mminimal-toc to # CXXFLAGS/CFLAGS for g++/gcc. In the cases where that is not # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS. archive_cmds='' hardcode_direct=yes hardcode_libdir_separator=':' link_all_deplibs=yes if test "$GCC" = yes; then case $host_os in aix4.012|aix4.012.*) # We only want to do this on AIX 4.2 and lower, the check # below for broken collect2 doesn't work under 4.3+ collect2name=`${CC} -print-prog-name=collect2` if test -f "$collect2name" && \ strings "$collect2name" | grep resolve_lib_name >/dev/null then # We have reworked collect2 hardcode_direct=yes else # We have old collect2 hardcode_direct=unsupported # It fails to find uninstalled libraries when the uninstalled # path is not listed in the libpath. Setting hardcode_minus_L # to unsupported forces relinking hardcode_minus_L=yes hardcode_libdir_flag_spec='-L$libdir' hardcode_libdir_separator= fi esac shared_flag='-shared' else # not using gcc if test "$host_cpu" = ia64; then # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release # chokes on -Wl,-G. 
The following line is correct: shared_flag='-G' else if test "$aix_use_runtimelinking" = yes; then shared_flag='${wl}-G' else shared_flag='${wl}-bM:SRE' fi fi fi # It seems that -bexpall does not export symbols beginning with # underscore (_), so it is better to generate a list of symbols to export. always_export_symbols=yes if test "$aix_use_runtimelinking" = yes; then # Warning - without using the other runtime loading flags (-brtl), # -berok will link without error, but may produce a broken library. allow_undefined_flag='-berok' # Determine the default libpath from the value encoded in an empty executable. cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ int main () { ; return 0; } _ACEOF rm -f conftest.$ac_objext conftest$ac_exeext if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 (eval $ac_link) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest$ac_exeext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e '/Import File Strings/,/^$/ { /^0/ { s/^0 *\(.*\)$/\1/; p; } }'` # Check for a 64-bit object if we didn't find anything. if test -z "$aix_libpath"; then aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e '/Import File Strings/,/^$/ { /^0/ { s/^0 *\(.*\)$/\1/; p; } }'`; fi else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 fi rm -f conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" archive_expsym_cmds="\$CC"' -o $output_objdir/$soname $libobjs $deplibs $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then echo "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$no_entry_flag \${wl}$exp_sym_flag:\$export_symbols $shared_flag" else if test "$host_cpu" = ia64; then hardcode_libdir_flag_spec='${wl}-R $libdir:/usr/lib:/lib' allow_undefined_flag="-z nodefs" archive_expsym_cmds="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$no_entry_flag \${wl}$exp_sym_flag:\$export_symbols" else # Determine the default libpath from the value encoded in an empty executable. cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ int main () { ; return 0; } _ACEOF rm -f conftest.$ac_objext conftest$ac_exeext if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 (eval $ac_link) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest$ac_exeext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e '/Import File Strings/,/^$/ { /^0/ { s/^0 *\(.*\)$/\1/; p; } }'` # Check for a 64-bit object if we didn't find anything. if test -z "$aix_libpath"; then aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e '/Import File Strings/,/^$/ { /^0/ { s/^0 *\(.*\)$/\1/; p; } }'`; fi else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 fi rm -f conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" # Warning - without using the other run time loading flags, # -berok will link without error, but may produce a broken library. no_undefined_flag=' ${wl}-bernotok' allow_undefined_flag=' ${wl}-berok' # -bexpall does not export symbols beginning with underscore (_) always_export_symbols=yes # Exported symbols can be pulled into shared objects from archives whole_archive_flag_spec=' ' archive_cmds_need_lc=yes # This is similar to how AIX traditionally builds it's shared libraries. archive_expsym_cmds="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs $compiler_flags ${wl}-bE:$export_symbols ${wl}-bnoentry${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname' fi fi ;; amigaos*) archive_cmds='$rm $output_objdir/a2ixlibrary.data~$echo "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$echo "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$echo "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$echo "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)' hardcode_libdir_flag_spec='-L$libdir' hardcode_minus_L=yes # see comment about different semantics on the GNU ld section ld_shlibs=no ;; bsdi4*) export_dynamic_flag_spec=-rdynamic ;; cygwin* | mingw* | pw32*) # When not using gcc, we currently assume that we are using # Microsoft Visual C++. # hardcode_libdir_flag_spec is actually meaningless, as there is # no search path for DLLs. hardcode_libdir_flag_spec=' ' allow_undefined_flag=unsupported # Tell ltmain to make .lib files, not .a files. libext=lib # Tell ltmain to make .dll files, not .so files. shrext_cmds=".dll" # FIXME: Setting linknames here is a bad hack. archive_cmds='$CC -o $lib $libobjs $compiler_flags `echo "$deplibs" | $SED -e '\''s/ -lc$//'\''` -link -dll~linknames=' # The linker will automatically build a .lib file if we build a DLL. old_archive_From_new_cmds='true' # FIXME: Should let the user specify the lib program. 
old_archive_cmds='lib /OUT:$oldlib$oldobjs$old_deplibs' fix_srcfile_path='`cygpath -w "$srcfile"`' enable_shared_with_static_runtimes=yes ;; darwin* | rhapsody*) if test "$GXX" = yes ; then archive_cmds_need_lc=no case "$host_os" in rhapsody* | darwin1.[012]) allow_undefined_flag='-undefined suppress' ;; *) # Darwin 1.3 on if test -z ${MACOSX_DEPLOYMENT_TARGET} ; then allow_undefined_flag='-flat_namespace -undefined suppress' else case ${MACOSX_DEPLOYMENT_TARGET} in 10.[012]) allow_undefined_flag='-flat_namespace -undefined suppress' ;; 10.*) allow_undefined_flag='-undefined dynamic_lookup' ;; esac fi ;; esac lt_int_apple_cc_single_mod=no output_verbose_link_cmd='echo' if $CC -dumpspecs 2>&1 | grep 'single_module' >/dev/null ; then lt_int_apple_cc_single_mod=yes fi if test "X$lt_int_apple_cc_single_mod" = Xyes ; then archive_cmds='$CC -dynamiclib -single_module $allow_undefined_flag -o $lib $libobjs $deplibs $compiler_flags -install_name $rpath/$soname $verstring' else archive_cmds='$CC -r ${wl}-bind_at_load -keep_private_externs -nostdlib -o ${lib}-master.o $libobjs~$CC -dynamiclib $allow_undefined_flag -o $lib ${lib}-master.o $deplibs $compiler_flags -install_name $rpath/$soname $verstring' fi module_cmds='$CC ${wl}-bind_at_load $allow_undefined_flag -o $lib -bundle $libobjs $deplibs$compiler_flags' # Don't fix this by using the ld -exported_symbols_list flag, it doesn't exist in older darwin ld's if test "X$lt_int_apple_cc_single_mod" = Xyes ; then archive_expsym_cmds='sed -e "s,#.*,," -e "s,^[ ]*,," -e "s,^\(..*\),_&," < $export_symbols > $output_objdir/${libname}-symbols.expsym~$CC -dynamiclib -single_module $allow_undefined_flag -o $lib $libobjs $deplibs $compiler_flags -install_name $rpath/$soname $verstring~nmedit -s $output_objdir/${libname}-symbols.expsym ${lib}' else archive_expsym_cmds='sed -e "s,#.*,," -e "s,^[ ]*,," -e "s,^\(..*\),_&," < $export_symbols > $output_objdir/${libname}-symbols.expsym~$CC -r ${wl}-bind_at_load -keep_private_externs -nostdlib -o ${lib}-master.o $libobjs~$CC -dynamiclib $allow_undefined_flag -o $lib ${lib}-master.o $deplibs $compiler_flags -install_name $rpath/$soname $verstring~nmedit -s $output_objdir/${libname}-symbols.expsym ${lib}' fi module_expsym_cmds='sed -e "s,#.*,," -e "s,^[ ]*,," -e "s,^\(..*\),_&," < $export_symbols > $output_objdir/${libname}-symbols.expsym~$CC $allow_undefined_flag -o $lib -bundle $libobjs $deplibs$compiler_flags~nmedit -s $output_objdir/${libname}-symbols.expsym ${lib}' hardcode_direct=no hardcode_automatic=yes hardcode_shlibpath_var=unsupported whole_archive_flag_spec='-all_load $convenience' link_all_deplibs=yes else ld_shlibs=no fi ;; dgux*) archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_libdir_flag_spec='-L$libdir' hardcode_shlibpath_var=no ;; freebsd1*) ld_shlibs=no ;; # FreeBSD 2.2.[012] allows us to include c++rt0.o to get C++ constructor # support. Future versions do this automatically, but an explicit c++rt0.o # does not break anything, and helps significantly (at the cost of a little # extra space). freebsd2.2*) archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags /usr/lib/c++rt0.o' hardcode_libdir_flag_spec='-R$libdir' hardcode_direct=yes hardcode_shlibpath_var=no ;; # Unfortunately, older versions of FreeBSD 2 do not have this feature. freebsd2*) archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' hardcode_direct=yes hardcode_minus_L=yes hardcode_shlibpath_var=no ;; # FreeBSD 3 and greater uses gcc -shared to do shared libraries. 
freebsd* | kfreebsd*-gnu) archive_cmds='$CC -shared -o $lib $libobjs $deplibs $compiler_flags' hardcode_libdir_flag_spec='-R$libdir' hardcode_direct=yes hardcode_shlibpath_var=no ;; hpux9*) if test "$GCC" = yes; then archive_cmds='$rm $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' else archive_cmds='$rm $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' fi hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir' hardcode_libdir_separator=: hardcode_direct=yes # hardcode_minus_L: Not really in the search PATH, # but as the default location of the library. hardcode_minus_L=yes export_dynamic_flag_spec='${wl}-E' ;; hpux10* | hpux11*) if test "$GCC" = yes -a "$with_gnu_ld" = no; then case "$host_cpu" in hppa*64*|ia64*) archive_cmds='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' ;; *) archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' ;; esac else case "$host_cpu" in hppa*64*|ia64*) archive_cmds='$LD -b +h $soname -o $lib $libobjs $deplibs $linker_flags' ;; *) archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' ;; esac fi if test "$with_gnu_ld" = no; then case "$host_cpu" in hppa*64*) hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir' hardcode_libdir_flag_spec_ld='+b $libdir' hardcode_libdir_separator=: hardcode_direct=no hardcode_shlibpath_var=no ;; ia64*) hardcode_libdir_flag_spec='-L$libdir' hardcode_direct=no hardcode_shlibpath_var=no # hardcode_minus_L: Not really in the search PATH, # but as the default location of the library. hardcode_minus_L=yes ;; *) hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir' hardcode_libdir_separator=: hardcode_direct=yes export_dynamic_flag_spec='${wl}-E' # hardcode_minus_L: Not really in the search PATH, # but as the default location of the library. 
hardcode_minus_L=yes ;; esac fi ;; irix5* | irix6* | nonstopux*) if test "$GCC" = yes; then archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && echo ${wl}-set_version ${wl}$verstring` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' else archive_cmds='$LD -shared $libobjs $deplibs $linker_flags -soname $soname `test -n "$verstring" && echo -set_version $verstring` -update_registry ${output_objdir}/so_locations -o $lib' hardcode_libdir_flag_spec_ld='-rpath $libdir' fi hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' hardcode_libdir_separator=: link_all_deplibs=yes ;; netbsd*) if echo __ELF__ | $CC -E - | grep __ELF__ >/dev/null; then archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' # a.out else archive_cmds='$LD -shared -o $lib $libobjs $deplibs $linker_flags' # ELF fi hardcode_libdir_flag_spec='-R$libdir' hardcode_direct=yes hardcode_shlibpath_var=no ;; newsos6) archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_direct=yes hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' hardcode_libdir_separator=: hardcode_shlibpath_var=no ;; openbsd*) hardcode_direct=yes hardcode_shlibpath_var=no if test -z "`echo __ELF__ | $CC -E - | grep __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' hardcode_libdir_flag_spec='${wl}-rpath,$libdir' export_dynamic_flag_spec='${wl}-E' else case $host_os in openbsd[01].* | openbsd2.[0-7] | openbsd2.[0-7].*) archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' hardcode_libdir_flag_spec='-R$libdir' ;; *) archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' hardcode_libdir_flag_spec='${wl}-rpath,$libdir' ;; esac fi ;; os2*) hardcode_libdir_flag_spec='-L$libdir' hardcode_minus_L=yes allow_undefined_flag=unsupported archive_cmds='$echo "LIBRARY $libname INITINSTANCE" > $output_objdir/$libname.def~$echo "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~$echo DATA >> $output_objdir/$libname.def~$echo " SINGLE NONSHARED" >> $output_objdir/$libname.def~$echo EXPORTS >> $output_objdir/$libname.def~emxexp $libobjs >> $output_objdir/$libname.def~$CC -Zdll -Zcrtdll -o $lib $libobjs $deplibs $compiler_flags $output_objdir/$libname.def' old_archive_From_new_cmds='emximp -o $output_objdir/$libname.a $output_objdir/$libname.def' ;; osf3*) if test "$GCC" = yes; then allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*' archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && echo ${wl}-set_version ${wl}$verstring` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' else allow_undefined_flag=' -expect_unresolved \*' archive_cmds='$LD -shared${allow_undefined_flag} $libobjs $deplibs $linker_flags -soname $soname `test -n "$verstring" && echo -set_version $verstring` -update_registry ${output_objdir}/so_locations -o $lib' fi hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' hardcode_libdir_separator=: ;; osf4* | osf5*) # as osf3* with the addition of -msym flag if test "$GCC" = yes; then allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*' archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && echo ${wl}-set_version ${wl}$verstring` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' 
hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' else allow_undefined_flag=' -expect_unresolved \*' archive_cmds='$LD -shared${allow_undefined_flag} $libobjs $deplibs $linker_flags -msym -soname $soname `test -n "$verstring" && echo -set_version $verstring` -update_registry ${output_objdir}/so_locations -o $lib' archive_expsym_cmds='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done; echo "-hidden">> $lib.exp~ $LD -shared${allow_undefined_flag} -input $lib.exp $linker_flags $libobjs $deplibs -soname $soname `test -n "$verstring" && echo -set_version $verstring` -update_registry ${objdir}/so_locations -o $lib~$rm $lib.exp' # Both c and cxx compiler support -rpath directly hardcode_libdir_flag_spec='-rpath $libdir' fi hardcode_libdir_separator=: ;; sco3.2v5*) archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_shlibpath_var=no export_dynamic_flag_spec='${wl}-Bexport' runpath_var=LD_RUN_PATH hardcode_runpath_var=yes ;; solaris*) no_undefined_flag=' -z text' if test "$GCC" = yes; then archive_cmds='$CC -shared ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' archive_expsym_cmds='$echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~$echo "local: *; };" >> $lib.exp~ $CC -shared ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$rm $lib.exp' else archive_cmds='$LD -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $linker_flags' archive_expsym_cmds='$echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~$echo "local: *; };" >> $lib.exp~ $LD -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $linker_flags~$rm $lib.exp' fi hardcode_libdir_flag_spec='-R$libdir' hardcode_shlibpath_var=no case $host_os in solaris2.[0-5] | solaris2.[0-5].*) ;; *) # Supported since Solaris 2.6 (maybe 2.5.1?) whole_archive_flag_spec='-z allextract$convenience -z defaultextract' ;; esac link_all_deplibs=yes ;; sunos4*) if test "x$host_vendor" = xsequent; then # Use $CC to link under sequent, because it throws in some extra .o # files that make .init and .fini sections work. archive_cmds='$CC -G ${wl}-h $soname -o $lib $libobjs $deplibs $compiler_flags' else archive_cmds='$LD -assert pure-text -Bstatic -o $lib $libobjs $deplibs $linker_flags' fi hardcode_libdir_flag_spec='-L$libdir' hardcode_direct=yes hardcode_minus_L=yes hardcode_shlibpath_var=no ;; sysv4) case $host_vendor in sni) archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_direct=yes # is this really true??? ;; siemens) ## LD is ld it makes a PLAMLIB ## CC just makes a GrossModule. 
archive_cmds='$LD -G -o $lib $libobjs $deplibs $linker_flags' reload_cmds='$CC -r -o $output$reload_objs' hardcode_direct=no ;; motorola) archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_direct=no #Motorola manual says yes, but my tests say they lie ;; esac runpath_var='LD_RUN_PATH' hardcode_shlibpath_var=no ;; sysv4.3*) archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_shlibpath_var=no export_dynamic_flag_spec='-Bexport' ;; sysv4*MP*) if test -d /usr/nec; then archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_shlibpath_var=no runpath_var=LD_RUN_PATH hardcode_runpath_var=yes ld_shlibs=yes fi ;; sysv4.2uw2*) archive_cmds='$LD -G -o $lib $libobjs $deplibs $linker_flags' hardcode_direct=yes hardcode_minus_L=no hardcode_shlibpath_var=no hardcode_runpath_var=yes runpath_var=LD_RUN_PATH ;; sysv5OpenUNIX8* | sysv5UnixWare7* | sysv5uw[78]* | unixware7*) no_undefined_flag='${wl}-z ${wl}text' if test "$GCC" = yes; then archive_cmds='$CC -shared ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' else archive_cmds='$CC -G ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' fi runpath_var='LD_RUN_PATH' hardcode_shlibpath_var=no ;; sysv5*) no_undefined_flag=' -z text' # $CC -shared without GNU ld will not create a library from C++ # object files and a static libstdc++, better avoid it by now archive_cmds='$LD -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $linker_flags' archive_expsym_cmds='$echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~$echo "local: *; };" >> $lib.exp~ $LD -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $linker_flags~$rm $lib.exp' hardcode_libdir_flag_spec= hardcode_shlibpath_var=no runpath_var='LD_RUN_PATH' ;; uts4*) archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_libdir_flag_spec='-L$libdir' hardcode_shlibpath_var=no ;; *) ld_shlibs=no ;; esac fi echo "$as_me:$LINENO: result: $ld_shlibs" >&5 echo "${ECHO_T}$ld_shlibs" >&6 test "$ld_shlibs" = no && can_build_shared=no variables_saved_for_relink="PATH $shlibpath_var $runpath_var" if test "$GCC" = yes; then variables_saved_for_relink="$variables_saved_for_relink GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH" fi # # Do we need to explicitly link libc? # case "x$archive_cmds_need_lc" in x|xyes) # Assume -lc should be added archive_cmds_need_lc=yes if test "$enable_shared" = yes && test "$GCC" = yes; then case $archive_cmds in *'~'*) # FIXME: we may have to deal with multi-command sequences. ;; '$CC '*) # Test whether the compiler implicitly links with -lc since on some # systems, -lgcc has to come before -lc. If gcc already passes -lc # to ld, don't add -lc before -lgcc. echo "$as_me:$LINENO: checking whether -lc should be explicitly linked in" >&5 echo $ECHO_N "checking whether -lc should be explicitly linked in... $ECHO_C" >&6 $rm conftest* printf "$lt_simple_compile_test_code" > conftest.$ac_ext if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 (eval $ac_compile) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } 2>conftest.err; then soname=conftest lib=conftest libobjs=conftest.$ac_objext deplibs= wl=$lt_prog_compiler_wl compiler_flags=-v linker_flags=-v verstring= output_objdir=. 
libname=conftest lt_save_allow_undefined_flag=$allow_undefined_flag allow_undefined_flag= if { (eval echo "$as_me:$LINENO: \"$archive_cmds 2\>\&1 \| grep \" -lc \" \>/dev/null 2\>\&1\"") >&5 (eval $archive_cmds 2\>\&1 \| grep \" -lc \" \>/dev/null 2\>\&1) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } then archive_cmds_need_lc=no else archive_cmds_need_lc=yes fi allow_undefined_flag=$lt_save_allow_undefined_flag else cat conftest.err 1>&5 fi $rm conftest* echo "$as_me:$LINENO: result: $archive_cmds_need_lc" >&5 echo "${ECHO_T}$archive_cmds_need_lc" >&6 ;; esac fi ;; esac echo "$as_me:$LINENO: checking dynamic linker characteristics" >&5 echo $ECHO_N "checking dynamic linker characteristics... $ECHO_C" >&6 library_names_spec= libname_spec='lib$name' soname_spec= shrext_cmds=".so" postinstall_cmds= postuninstall_cmds= finish_cmds= finish_eval= shlibpath_var= shlibpath_overrides_runpath=unknown version_type=none dynamic_linker="$host_os ld.so" sys_lib_dlsearch_path_spec="/lib /usr/lib" if test "$GCC" = yes; then sys_lib_search_path_spec=`$CC -print-search-dirs | grep "^libraries:" | $SED -e "s/^libraries://" -e "s,=/,/,g"` if echo "$sys_lib_search_path_spec" | grep ';' >/dev/null ; then # if the path contains ";" then we assume it to be the separator # otherwise default to the standard path separator (i.e. ":") - it is # assumed that no part of a normal pathname contains ";" but that should # okay in the real world where ";" in dirpaths is itself problematic. sys_lib_search_path_spec=`echo "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` else sys_lib_search_path_spec=`echo "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` fi else sys_lib_search_path_spec="/lib /usr/lib /usr/local/lib" fi need_lib_prefix=unknown hardcode_into_libs=no # when you set need_version to no, make sure it does not cause -set_version # flags to be left without arguments need_version=unknown case $host_os in aix3*) version_type=linux library_names_spec='${libname}${release}${shared_ext}$versuffix $libname.a' shlibpath_var=LIBPATH # AIX 3 has no versioning support, so we append a major version to the name. soname_spec='${libname}${release}${shared_ext}$major' ;; aix4* | aix5*) version_type=linux need_lib_prefix=no need_version=no hardcode_into_libs=yes if test "$host_cpu" = ia64; then # AIX 5 supports IA64 library_names_spec='${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext}$versuffix $libname${shared_ext}' shlibpath_var=LD_LIBRARY_PATH else # With GCC up to 2.95.x, collect2 would create an import file # for dependence libraries. The import file would start with # the line `#! .'. This would cause the generated library to # depend on `.', always an invalid library. This was fixed in # development snapshots of GCC prior to 3.0. case $host_os in aix4 | aix4.[01] | aix4.[01].*) if { echo '#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 97)' echo ' yes ' echo '#endif'; } | ${CC} -E - | grep yes > /dev/null; then : else can_build_shared=no fi ;; esac # AIX (on Power*) has no versioning support, so currently we can not hardcode correct # soname into executable. Probably we can add versioning support to # collect2, so additional links can be useful in future. if test "$aix_use_runtimelinking" = yes; then # If using run time linking (on AIX 4.2 or later) use lib.so # instead of lib.a to let people know that these are not # typical AIX shared libraries. 
library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' else # We preserve .a as extension for shared libraries through AIX4.2 # and later when we are not doing run time linking. library_names_spec='${libname}${release}.a $libname.a' soname_spec='${libname}${release}${shared_ext}$major' fi shlibpath_var=LIBPATH fi ;; amigaos*) library_names_spec='$libname.ixlibrary $libname.a' # Create ${libname}_ixlibrary.a entries in /sys/libs. finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`$echo "X$lib" | $Xsed -e '\''s%^.*/\([^/]*\)\.ixlibrary$%\1%'\''`; test $rm /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done' ;; beos*) library_names_spec='${libname}${shared_ext}' dynamic_linker="$host_os ld.so" shlibpath_var=LIBRARY_PATH ;; bsdi4*) version_type=linux need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' finish_cmds='PATH="\$PATH:/sbin" ldconfig $libdir' shlibpath_var=LD_LIBRARY_PATH sys_lib_search_path_spec="/shlib /usr/lib /usr/X11/lib /usr/contrib/lib /lib /usr/local/lib" sys_lib_dlsearch_path_spec="/shlib /usr/lib /usr/local/lib" # the default ld.so.conf also contains /usr/contrib/lib and # /usr/X11R6/lib (/usr/X11 is a link to /usr/X11R6), but let us allow # libtool to hard-code these into programs ;; cygwin* | mingw* | pw32*) version_type=windows shrext_cmds=".dll" need_version=no need_lib_prefix=no case $GCC,$host_os in yes,cygwin* | yes,mingw* | yes,pw32*) library_names_spec='$libname.dll.a' # DLL is installed to $(libdir)/../bin by postinstall_cmds postinstall_cmds='base_file=`basename \${file}`~ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i;echo \$dlname'\''`~ dldir=$destdir/`dirname \$dlpath`~ test -d \$dldir || mkdir -p \$dldir~ $install_prog $dir/$dlname \$dldir/$dlname' postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~ dlpath=$dir/\$dldll~ $rm \$dlpath' shlibpath_overrides_runpath=yes case $host_os in cygwin*) # Cygwin DLLs use 'cyg' prefix rather than 'lib' soname_spec='`echo ${libname} | sed -e 's/^lib/cyg/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' sys_lib_search_path_spec="/usr/lib /lib/w32api /lib /usr/local/lib" ;; mingw*) # MinGW DLLs use traditional 'lib' prefix soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' sys_lib_search_path_spec=`$CC -print-search-dirs | grep "^libraries:" | $SED -e "s/^libraries://" -e "s,=/,/,g"` if echo "$sys_lib_search_path_spec" | grep ';[c-zC-Z]:/' >/dev/null; then # It is most probably a Windows format PATH printed by # mingw gcc, but we are running on Cygwin. Gcc prints its search # path with ; separators, and with drive letters. We can handle the # drive letters (cygwin fileutils understands them), so leave them, # especially as we might pass files found there to a mingw objdump, # which wouldn't understand a cygwinified path. Ahh. 
sys_lib_search_path_spec=`echo "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` else sys_lib_search_path_spec=`echo "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` fi ;; pw32*) # pw32 DLLs use 'pw' prefix rather than 'lib' library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/./-/g'`${versuffix}${shared_ext}' ;; esac ;; *) library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib' ;; esac dynamic_linker='Win32 ld.exe' # FIXME: first we should search . and the directory the executable is in shlibpath_var=PATH ;; darwin* | rhapsody*) dynamic_linker="$host_os dyld" version_type=darwin need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${versuffix}$shared_ext ${libname}${release}${major}$shared_ext ${libname}$shared_ext' soname_spec='${libname}${release}${major}$shared_ext' shlibpath_overrides_runpath=yes shlibpath_var=DYLD_LIBRARY_PATH shrext_cmds='$(test .$module = .yes && echo .so || echo .dylib)' # Apple's gcc prints 'gcc -print-search-dirs' doesn't operate the same. if test "$GCC" = yes; then sys_lib_search_path_spec=`$CC -print-search-dirs | tr "\n" "$PATH_SEPARATOR" | sed -e 's/libraries:/@libraries:/' | tr "@" "\n" | grep "^libraries:" | sed -e "s/^libraries://" -e "s,=/,/,g" -e "s,$PATH_SEPARATOR, ,g" -e "s,.*,& /lib /usr/lib /usr/local/lib,g"` else sys_lib_search_path_spec='/lib /usr/lib /usr/local/lib' fi sys_lib_dlsearch_path_spec='/usr/local/lib /lib /usr/lib' ;; dgux*) version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname$shared_ext' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH ;; freebsd1*) dynamic_linker=no ;; kfreebsd*-gnu) version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes dynamic_linker='GNU ld.so' ;; freebsd*) objformat=`test -x /usr/bin/objformat && /usr/bin/objformat || echo aout` version_type=freebsd-$objformat case $version_type in freebsd-elf*) library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}' need_version=no need_lib_prefix=no ;; freebsd-*) library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix' need_version=yes ;; esac shlibpath_var=LD_LIBRARY_PATH case $host_os in freebsd2*) shlibpath_overrides_runpath=yes ;; freebsd3.01* | freebsdelf3.01*) shlibpath_overrides_runpath=yes hardcode_into_libs=yes ;; *) # from 3.2 on shlibpath_overrides_runpath=no hardcode_into_libs=yes ;; esac ;; gnu*) version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH hardcode_into_libs=yes ;; hpux9* | hpux10* | hpux11*) # Give a soname corresponding to the major version so that dld.sl refuses to # link against other versions. 
version_type=sunos need_lib_prefix=no need_version=no case "$host_cpu" in ia64*) shrext_cmds='.so' hardcode_into_libs=yes dynamic_linker="$host_os dld.so" shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' if test "X$HPUX_IA64_MODE" = X32; then sys_lib_search_path_spec="/usr/lib/hpux32 /usr/local/lib/hpux32 /usr/local/lib" else sys_lib_search_path_spec="/usr/lib/hpux64 /usr/local/lib/hpux64" fi sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec ;; hppa*64*) shrext_cmds='.sl' hardcode_into_libs=yes dynamic_linker="$host_os dld.sl" shlibpath_var=LD_LIBRARY_PATH # How should we handle SHLIB_PATH shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' sys_lib_search_path_spec="/usr/lib/pa20_64 /usr/ccs/lib/pa20_64" sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec ;; *) shrext_cmds='.sl' dynamic_linker="$host_os dld.sl" shlibpath_var=SHLIB_PATH shlibpath_overrides_runpath=no # +s is required to enable SHLIB_PATH library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' ;; esac # HP-UX runs *really* slowly unless shared libraries are mode 555. postinstall_cmds='chmod 555 $lib' ;; irix5* | irix6* | nonstopux*) case $host_os in nonstopux*) version_type=nonstopux ;; *) if test "$lt_cv_prog_gnu_ld" = yes; then version_type=linux else version_type=irix fi ;; esac need_lib_prefix=no need_version=no soname_spec='${libname}${release}${shared_ext}$major' library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext} $libname${shared_ext}' case $host_os in irix5* | nonstopux*) libsuff= shlibsuff= ;; *) case $LD in # libtool.m4 will add one of these switches to LD *-32|*"-32 "|*-melf32bsmip|*"-melf32bsmip ") libsuff= shlibsuff= libmagic=32-bit;; *-n32|*"-n32 "|*-melf32bmipn32|*"-melf32bmipn32 ") libsuff=32 shlibsuff=N32 libmagic=N32;; *-64|*"-64 "|*-melf64bmip|*"-melf64bmip ") libsuff=64 shlibsuff=64 libmagic=64-bit;; *) libsuff= shlibsuff= libmagic=never-match;; esac ;; esac shlibpath_var=LD_LIBRARY${shlibsuff}_PATH shlibpath_overrides_runpath=no sys_lib_search_path_spec="/usr/lib${libsuff} /lib${libsuff} /usr/local/lib${libsuff}" sys_lib_dlsearch_path_spec="/usr/lib${libsuff} /lib${libsuff}" hardcode_into_libs=yes ;; # No shared lib support for Linux oldld, aout, or coff. linux*oldld* | linux*aout* | linux*coff*) dynamic_linker=no ;; # This must be Linux ELF. linux*) version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no # This implies no fast_install, which is unacceptable. # Some rework will be needed to allow for fast_install # before this can be enabled. 
hardcode_into_libs=yes # find out which ABI we are using libsuff= case "$host_cpu" in x86_64*|s390x*|powerpc64*) echo '#line 7203 "configure"' > conftest.$ac_ext if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 (eval $ac_compile) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; then case `/usr/bin/file conftest.$ac_objext` in *64-bit*) libsuff=64 sys_lib_search_path_spec="/lib${libsuff} /usr/lib${libsuff} /usr/local/lib${libsuff}" ;; esac fi rm -rf conftest* ;; esac # Append ld.so.conf contents to the search path if test -f /etc/ld.so.conf; then lt_ld_extra=`$SED -e 's/:,\t/ /g;s/=^=*$//;s/=^= * / /g' /etc/ld.so.conf | tr '\n' ' '` sys_lib_dlsearch_path_spec="/lib${libsuff} /usr/lib${libsuff} $lt_ld_extra" fi # We used to test for /lib/ld.so.1 and disable shared libraries on # powerpc, because MkLinux only supported shared libraries with the # GNU dynamic linker. Since this was broken with cross compilers, # most powerpc-linux boxes support dynamic linking these days and # people can always --disable-shared, the test was removed, and we # assume the GNU/Linux dynamic linker is in use. dynamic_linker='GNU/Linux ld.so' ;; knetbsd*-gnu) version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes dynamic_linker='GNU ld.so' ;; netbsd*) version_type=sunos need_lib_prefix=no need_version=no if echo __ELF__ | $CC -E - | grep __ELF__ >/dev/null; then library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' dynamic_linker='NetBSD (a.out) ld.so' else library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' dynamic_linker='NetBSD ld.elf_so' fi shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes hardcode_into_libs=yes ;; newsos6) version_type=linux library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes ;; nto-qnx*) version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes ;; openbsd*) version_type=sunos need_lib_prefix=no need_version=yes library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' shlibpath_var=LD_LIBRARY_PATH if test -z "`echo __ELF__ | $CC -E - | grep __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then case $host_os in openbsd2.[89] | openbsd2.[89].*) shlibpath_overrides_runpath=no ;; *) shlibpath_overrides_runpath=yes ;; esac else shlibpath_overrides_runpath=yes fi ;; os2*) libname_spec='$name' shrext_cmds=".dll" need_lib_prefix=no library_names_spec='$libname${shared_ext} $libname.a' dynamic_linker='OS/2 ld.exe' shlibpath_var=LIBPATH ;; osf3* | osf4* | osf5*) version_type=osf need_lib_prefix=no need_version=no soname_spec='${libname}${release}${shared_ext}$major' 
library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' shlibpath_var=LD_LIBRARY_PATH sys_lib_search_path_spec="/usr/shlib /usr/ccs/lib /usr/lib/cmplrs/cc /usr/lib /usr/local/lib /var/shlib" sys_lib_dlsearch_path_spec="$sys_lib_search_path_spec" ;; sco3.2v5*) version_type=osf soname_spec='${libname}${release}${shared_ext}$major' library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' shlibpath_var=LD_LIBRARY_PATH ;; solaris*) version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes hardcode_into_libs=yes # ldd complains unless libraries are executable postinstall_cmds='chmod +x $lib' ;; sunos4*) version_type=sunos library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' finish_cmds='PATH="\$PATH:/usr/etc" ldconfig $libdir' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes if test "$with_gnu_ld" = yes; then need_lib_prefix=no fi need_version=yes ;; sysv4 | sysv4.2uw2* | sysv4.3* | sysv5*) version_type=linux library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH case $host_vendor in sni) shlibpath_overrides_runpath=no need_lib_prefix=no export_dynamic_flag_spec='${wl}-Blargedynsym' runpath_var=LD_RUN_PATH ;; siemens) need_lib_prefix=no ;; motorola) need_lib_prefix=no need_version=no shlibpath_overrides_runpath=no sys_lib_search_path_spec='/lib /usr/lib /usr/ccs/lib' ;; esac ;; sysv4*MP*) if test -d /usr/nec ;then version_type=linux library_names_spec='$libname${shared_ext}.$versuffix $libname${shared_ext}.$major $libname${shared_ext}' soname_spec='$libname${shared_ext}.$major' shlibpath_var=LD_LIBRARY_PATH fi ;; uts4*) version_type=linux library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH ;; *) dynamic_linker=no ;; esac echo "$as_me:$LINENO: result: $dynamic_linker" >&5 echo "${ECHO_T}$dynamic_linker" >&6 test "$dynamic_linker" = no && can_build_shared=no echo "$as_me:$LINENO: checking how to hardcode library paths into programs" >&5 echo $ECHO_N "checking how to hardcode library paths into programs... $ECHO_C" >&6 hardcode_action= if test -n "$hardcode_libdir_flag_spec" || \ test -n "$runpath_var " || \ test "X$hardcode_automatic"="Xyes" ; then # We can hardcode non-existant directories. if test "$hardcode_direct" != no && # If the only mechanism to avoid hardcoding is shlibpath_var, we # have to relink, otherwise we might link with an installed library # when we should be linking with a yet-to-be-installed one ## test "$_LT_AC_TAGVAR(hardcode_shlibpath_var, )" != no && test "$hardcode_minus_L" != no; then # Linking always hardcodes the temporary library directory. hardcode_action=relink else # We can link without hardcoding, and we can hardcode nonexisting dirs. hardcode_action=immediate fi else # We cannot hardcode anything, or else we can only hardcode existing # directories. 
hardcode_action=unsupported fi echo "$as_me:$LINENO: result: $hardcode_action" >&5 echo "${ECHO_T}$hardcode_action" >&6 if test "$hardcode_action" = relink; then # Fast installation is not supported enable_fast_install=no elif test "$shlibpath_overrides_runpath" = yes || test "$enable_shared" = no; then # Fast installation is not necessary enable_fast_install=needless fi striplib= old_striplib= echo "$as_me:$LINENO: checking whether stripping libraries is possible" >&5 echo $ECHO_N "checking whether stripping libraries is possible... $ECHO_C" >&6 if test -n "$STRIP" && $STRIP -V 2>&1 | grep "GNU strip" >/dev/null; then test -z "$old_striplib" && old_striplib="$STRIP --strip-debug" test -z "$striplib" && striplib="$STRIP --strip-unneeded" echo "$as_me:$LINENO: result: yes" >&5 echo "${ECHO_T}yes" >&6 else # FIXME - insert some real tests, host_os isn't really good enough case $host_os in darwin*) if test -n "$STRIP" ; then striplib="$STRIP -x" echo "$as_me:$LINENO: result: yes" >&5 echo "${ECHO_T}yes" >&6 else echo "$as_me:$LINENO: result: no" >&5 echo "${ECHO_T}no" >&6 fi ;; *) echo "$as_me:$LINENO: result: no" >&5 echo "${ECHO_T}no" >&6 ;; esac fi if test "x$enable_dlopen" != xyes; then enable_dlopen=unknown enable_dlopen_self=unknown enable_dlopen_self_static=unknown else lt_cv_dlopen=no lt_cv_dlopen_libs= case $host_os in beos*) lt_cv_dlopen="load_add_on" lt_cv_dlopen_libs= lt_cv_dlopen_self=yes ;; mingw* | pw32*) lt_cv_dlopen="LoadLibrary" lt_cv_dlopen_libs= ;; cygwin*) lt_cv_dlopen="dlopen" lt_cv_dlopen_libs= ;; darwin*) # if libdl is installed we need to link against it echo "$as_me:$LINENO: checking for dlopen in -ldl" >&5 echo $ECHO_N "checking for dlopen in -ldl... $ECHO_C" >&6 if test "${ac_cv_lib_dl_dlopen+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-ldl $LIBS" cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ /* Override any gcc2 internal prototype to avoid an error. */ #ifdef __cplusplus extern "C" #endif /* We use char because int might match the return type of a gcc2 builtin and then its argument prototype would still apply. */ char dlopen (); int main () { dlopen (); ; return 0; } _ACEOF rm -f conftest.$ac_objext conftest$ac_exeext if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 (eval $ac_link) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest$ac_exeext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 (exit $ac_status); }; }; then ac_cv_lib_dl_dlopen=yes else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_cv_lib_dl_dlopen=no fi rm -f conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi echo "$as_me:$LINENO: result: $ac_cv_lib_dl_dlopen" >&5 echo "${ECHO_T}$ac_cv_lib_dl_dlopen" >&6 if test $ac_cv_lib_dl_dlopen = yes; then lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl" else lt_cv_dlopen="dyld" lt_cv_dlopen_libs= lt_cv_dlopen_self=yes fi ;; *) echo "$as_me:$LINENO: checking for shl_load" >&5 echo $ECHO_N "checking for shl_load... $ECHO_C" >&6 if test "${ac_cv_func_shl_load+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ /* Define shl_load to an innocuous variant, in case declares shl_load. For example, HP-UX 11i declares gettimeofday. */ #define shl_load innocuous_shl_load /* System header to define __stub macros and hopefully few prototypes, which can conflict with char shl_load (); below. Prefer to if __STDC__ is defined, since exists even on freestanding compilers. */ #ifdef __STDC__ # include #else # include #endif #undef shl_load /* Override any gcc2 internal prototype to avoid an error. */ #ifdef __cplusplus extern "C" { #endif /* We use char because int might match the return type of a gcc2 builtin and then its argument prototype would still apply. */ char shl_load (); /* The GNU C library defines this for functions which it implements to always fail with ENOSYS. Some functions are actually named something starting with __ and the normal name is an alias. */ #if defined (__stub_shl_load) || defined (__stub___shl_load) choke me #else char (*f) () = shl_load; #endif #ifdef __cplusplus } #endif int main () { return f != shl_load; ; return 0; } _ACEOF rm -f conftest.$ac_objext conftest$ac_exeext if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 (eval $ac_link) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest$ac_exeext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then ac_cv_func_shl_load=yes else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_cv_func_shl_load=no fi rm -f conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext fi echo "$as_me:$LINENO: result: $ac_cv_func_shl_load" >&5 echo "${ECHO_T}$ac_cv_func_shl_load" >&6 if test $ac_cv_func_shl_load = yes; then lt_cv_dlopen="shl_load" else echo "$as_me:$LINENO: checking for shl_load in -ldld" >&5 echo $ECHO_N "checking for shl_load in -ldld... $ECHO_C" >&6 if test "${ac_cv_lib_dld_shl_load+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-ldld $LIBS" cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ /* Override any gcc2 internal prototype to avoid an error. 
*/ #ifdef __cplusplus extern "C" #endif /* We use char because int might match the return type of a gcc2 builtin and then its argument prototype would still apply. */ char shl_load (); int main () { shl_load (); ; return 0; } _ACEOF rm -f conftest.$ac_objext conftest$ac_exeext if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 (eval $ac_link) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest$ac_exeext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then ac_cv_lib_dld_shl_load=yes else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_cv_lib_dld_shl_load=no fi rm -f conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi echo "$as_me:$LINENO: result: $ac_cv_lib_dld_shl_load" >&5 echo "${ECHO_T}$ac_cv_lib_dld_shl_load" >&6 if test $ac_cv_lib_dld_shl_load = yes; then lt_cv_dlopen="shl_load" lt_cv_dlopen_libs="-dld" else echo "$as_me:$LINENO: checking for dlopen" >&5 echo $ECHO_N "checking for dlopen... $ECHO_C" >&6 if test "${ac_cv_func_dlopen+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ /* Define dlopen to an innocuous variant, in case <limits.h> declares dlopen. For example, HP-UX 11i <limits.h> declares gettimeofday. */ #define dlopen innocuous_dlopen /* System header to define __stub macros and hopefully few prototypes, which can conflict with char dlopen (); below. Prefer <limits.h> to <assert.h> if __STDC__ is defined, since <limits.h> exists even on freestanding compilers. */ #ifdef __STDC__ # include <limits.h> #else # include <assert.h> #endif #undef dlopen /* Override any gcc2 internal prototype to avoid an error. */ #ifdef __cplusplus extern "C" { #endif /* We use char because int might match the return type of a gcc2 builtin and then its argument prototype would still apply. */ char dlopen (); /* The GNU C library defines this for functions which it implements to always fail with ENOSYS. Some functions are actually named something starting with __ and the normal name is an alias. */ #if defined (__stub_dlopen) || defined (__stub___dlopen) choke me #else char (*f) () = dlopen; #endif #ifdef __cplusplus } #endif int main () { return f != dlopen; ; return 0; } _ACEOF rm -f conftest.$ac_objext conftest$ac_exeext if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 (eval $ac_link) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest$ac_exeext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 (exit $ac_status); }; }; then ac_cv_func_dlopen=yes else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_cv_func_dlopen=no fi rm -f conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext fi echo "$as_me:$LINENO: result: $ac_cv_func_dlopen" >&5 echo "${ECHO_T}$ac_cv_func_dlopen" >&6 if test $ac_cv_func_dlopen = yes; then lt_cv_dlopen="dlopen" else echo "$as_me:$LINENO: checking for dlopen in -ldl" >&5 echo $ECHO_N "checking for dlopen in -ldl... $ECHO_C" >&6 if test "${ac_cv_lib_dl_dlopen+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-ldl $LIBS" cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ /* Override any gcc2 internal prototype to avoid an error. */ #ifdef __cplusplus extern "C" #endif /* We use char because int might match the return type of a gcc2 builtin and then its argument prototype would still apply. */ char dlopen (); int main () { dlopen (); ; return 0; } _ACEOF rm -f conftest.$ac_objext conftest$ac_exeext if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 (eval $ac_link) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest$ac_exeext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then ac_cv_lib_dl_dlopen=yes else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_cv_lib_dl_dlopen=no fi rm -f conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi echo "$as_me:$LINENO: result: $ac_cv_lib_dl_dlopen" >&5 echo "${ECHO_T}$ac_cv_lib_dl_dlopen" >&6 if test $ac_cv_lib_dl_dlopen = yes; then lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl" else echo "$as_me:$LINENO: checking for dlopen in -lsvld" >&5 echo $ECHO_N "checking for dlopen in -lsvld... $ECHO_C" >&6 if test "${ac_cv_lib_svld_dlopen+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lsvld $LIBS" cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ /* Override any gcc2 internal prototype to avoid an error. */ #ifdef __cplusplus extern "C" #endif /* We use char because int might match the return type of a gcc2 builtin and then its argument prototype would still apply. */ char dlopen (); int main () { dlopen (); ; return 0; } _ACEOF rm -f conftest.$ac_objext conftest$ac_exeext if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 (eval $ac_link) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest$ac_exeext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? 
echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then ac_cv_lib_svld_dlopen=yes else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_cv_lib_svld_dlopen=no fi rm -f conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi echo "$as_me:$LINENO: result: $ac_cv_lib_svld_dlopen" >&5 echo "${ECHO_T}$ac_cv_lib_svld_dlopen" >&6 if test $ac_cv_lib_svld_dlopen = yes; then lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-lsvld" else echo "$as_me:$LINENO: checking for dld_link in -ldld" >&5 echo $ECHO_N "checking for dld_link in -ldld... $ECHO_C" >&6 if test "${ac_cv_lib_dld_dld_link+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-ldld $LIBS" cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ /* Override any gcc2 internal prototype to avoid an error. */ #ifdef __cplusplus extern "C" #endif /* We use char because int might match the return type of a gcc2 builtin and then its argument prototype would still apply. */ char dld_link (); int main () { dld_link (); ; return 0; } _ACEOF rm -f conftest.$ac_objext conftest$ac_exeext if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 (eval $ac_link) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest$ac_exeext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then ac_cv_lib_dld_dld_link=yes else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_cv_lib_dld_dld_link=no fi rm -f conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi echo "$as_me:$LINENO: result: $ac_cv_lib_dld_dld_link" >&5 echo "${ECHO_T}$ac_cv_lib_dld_dld_link" >&6 if test $ac_cv_lib_dld_dld_link = yes; then lt_cv_dlopen="dld_link" lt_cv_dlopen_libs="-dld" fi fi fi fi fi fi ;; esac if test "x$lt_cv_dlopen" != xno; then enable_dlopen=yes else enable_dlopen=no fi case $lt_cv_dlopen in dlopen) save_CPPFLAGS="$CPPFLAGS" test "x$ac_cv_header_dlfcn_h" = xyes && CPPFLAGS="$CPPFLAGS -DHAVE_DLFCN_H" save_LDFLAGS="$LDFLAGS" eval LDFLAGS=\"\$LDFLAGS $export_dynamic_flag_spec\" save_LIBS="$LIBS" LIBS="$lt_cv_dlopen_libs $LIBS" echo "$as_me:$LINENO: checking whether a program can dlopen itself" >&5 echo $ECHO_N "checking whether a program can dlopen itself... $ECHO_C" >&6 if test "${lt_cv_dlopen_self+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else if test "$cross_compiling" = yes; then : lt_cv_dlopen_self=cross else lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2 lt_status=$lt_dlunknown cat > conftest.$ac_ext < #endif #include #ifdef RTLD_GLOBAL # define LT_DLGLOBAL RTLD_GLOBAL #else # ifdef DL_GLOBAL # define LT_DLGLOBAL DL_GLOBAL # else # define LT_DLGLOBAL 0 # endif #endif /* We may have to define LT_DLLAZY_OR_NOW in the command line if we find out it does not work in some platform. 
*/ #ifndef LT_DLLAZY_OR_NOW # ifdef RTLD_LAZY # define LT_DLLAZY_OR_NOW RTLD_LAZY # else # ifdef DL_LAZY # define LT_DLLAZY_OR_NOW DL_LAZY # else # ifdef RTLD_NOW # define LT_DLLAZY_OR_NOW RTLD_NOW # else # ifdef DL_NOW # define LT_DLLAZY_OR_NOW DL_NOW # else # define LT_DLLAZY_OR_NOW 0 # endif # endif # endif # endif #endif #ifdef __cplusplus extern "C" void exit (int); #endif void fnord() { int i=42;} int main () { void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); int status = $lt_dlunknown; if (self) { if (dlsym (self,"fnord")) status = $lt_dlno_uscore; else if (dlsym( self,"_fnord")) status = $lt_dlneed_uscore; /* dlclose (self); */ } exit (status); } EOF if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 (eval $ac_link) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && test -s conftest${ac_exeext} 2>/dev/null; then (./conftest; exit; ) 2>/dev/null lt_status=$? case x$lt_status in x$lt_dlno_uscore) lt_cv_dlopen_self=yes ;; x$lt_dlneed_uscore) lt_cv_dlopen_self=yes ;; x$lt_unknown|x*) lt_cv_dlopen_self=no ;; esac else : # compilation failed lt_cv_dlopen_self=no fi fi rm -fr conftest* fi echo "$as_me:$LINENO: result: $lt_cv_dlopen_self" >&5 echo "${ECHO_T}$lt_cv_dlopen_self" >&6 if test "x$lt_cv_dlopen_self" = xyes; then LDFLAGS="$LDFLAGS $link_static_flag" echo "$as_me:$LINENO: checking whether a statically linked program can dlopen itself" >&5 echo $ECHO_N "checking whether a statically linked program can dlopen itself... $ECHO_C" >&6 if test "${lt_cv_dlopen_self_static+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else if test "$cross_compiling" = yes; then : lt_cv_dlopen_self_static=cross else lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2 lt_status=$lt_dlunknown cat > conftest.$ac_ext < #endif #include #ifdef RTLD_GLOBAL # define LT_DLGLOBAL RTLD_GLOBAL #else # ifdef DL_GLOBAL # define LT_DLGLOBAL DL_GLOBAL # else # define LT_DLGLOBAL 0 # endif #endif /* We may have to define LT_DLLAZY_OR_NOW in the command line if we find out it does not work in some platform. */ #ifndef LT_DLLAZY_OR_NOW # ifdef RTLD_LAZY # define LT_DLLAZY_OR_NOW RTLD_LAZY # else # ifdef DL_LAZY # define LT_DLLAZY_OR_NOW DL_LAZY # else # ifdef RTLD_NOW # define LT_DLLAZY_OR_NOW RTLD_NOW # else # ifdef DL_NOW # define LT_DLLAZY_OR_NOW DL_NOW # else # define LT_DLLAZY_OR_NOW 0 # endif # endif # endif # endif #endif #ifdef __cplusplus extern "C" void exit (int); #endif void fnord() { int i=42;} int main () { void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); int status = $lt_dlunknown; if (self) { if (dlsym (self,"fnord")) status = $lt_dlno_uscore; else if (dlsym( self,"_fnord")) status = $lt_dlneed_uscore; /* dlclose (self); */ } exit (status); } EOF if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 (eval $ac_link) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && test -s conftest${ac_exeext} 2>/dev/null; then (./conftest; exit; ) 2>/dev/null lt_status=$? 
case x$lt_status in x$lt_dlno_uscore) lt_cv_dlopen_self_static=yes ;; x$lt_dlneed_uscore) lt_cv_dlopen_self_static=yes ;; x$lt_unknown|x*) lt_cv_dlopen_self_static=no ;; esac else : # compilation failed lt_cv_dlopen_self_static=no fi fi rm -fr conftest* fi echo "$as_me:$LINENO: result: $lt_cv_dlopen_self_static" >&5 echo "${ECHO_T}$lt_cv_dlopen_self_static" >&6 fi CPPFLAGS="$save_CPPFLAGS" LDFLAGS="$save_LDFLAGS" LIBS="$save_LIBS" ;; esac case $lt_cv_dlopen_self in yes|no) enable_dlopen_self=$lt_cv_dlopen_self ;; *) enable_dlopen_self=unknown ;; esac case $lt_cv_dlopen_self_static in yes|no) enable_dlopen_self_static=$lt_cv_dlopen_self_static ;; *) enable_dlopen_self_static=unknown ;; esac fi # Report which librarie types wil actually be built echo "$as_me:$LINENO: checking if libtool supports shared libraries" >&5 echo $ECHO_N "checking if libtool supports shared libraries... $ECHO_C" >&6 echo "$as_me:$LINENO: result: $can_build_shared" >&5 echo "${ECHO_T}$can_build_shared" >&6 echo "$as_me:$LINENO: checking whether to build shared libraries" >&5 echo $ECHO_N "checking whether to build shared libraries... $ECHO_C" >&6 test "$can_build_shared" = "no" && enable_shared=no # On AIX, shared libraries and static libraries use the same namespace, and # are all built from PIC. case "$host_os" in aix3*) test "$enable_shared" = yes && enable_static=no if test -n "$RANLIB"; then archive_cmds="$archive_cmds~\$RANLIB \$lib" postinstall_cmds='$RANLIB $lib' fi ;; aix4* | aix5*) if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then test "$enable_shared" = yes && enable_static=no fi ;; darwin* | rhapsody*) if test "$GCC" = yes; then archive_cmds_need_lc=no case "$host_os" in rhapsody* | darwin1.[012]) allow_undefined_flag='-undefined suppress' ;; *) # Darwin 1.3 on if test -z ${MACOSX_DEPLOYMENT_TARGET} ; then allow_undefined_flag='-flat_namespace -undefined suppress' else case ${MACOSX_DEPLOYMENT_TARGET} in 10.[012]) allow_undefined_flag='-flat_namespace -undefined suppress' ;; 10.*) allow_undefined_flag='-undefined dynamic_lookup' ;; esac fi ;; esac output_verbose_link_cmd='echo' archive_cmds='$CC -dynamiclib $allow_undefined_flag -o $lib $libobjs $deplibs$compiler_flags -install_name $rpath/$soname $verstring' module_cmds='$CC $allow_undefined_flag -o $lib -bundle $libobjs $deplibs$compiler_flags' # Don't fix this by using the ld -exported_symbols_list flag, it doesn't exist in older darwin ld's archive_expsym_cmds='sed -e "s,#.*,," -e "s,^[ ]*,," -e "s,^\(..*\),_&," < $export_symbols > $output_objdir/${libname}-symbols.expsym~$CC -dynamiclib $allow_undefined_flag -o $lib $libobjs $deplibs$compiler_flags -install_name $rpath/$soname $verstring~nmedit -s $output_objdir/${libname}-symbols.expsym ${lib}' module_expsym_cmds='sed -e "s,#.*,," -e "s,^[ ]*,," -e "s,^\(..*\),_&," < $export_symbols > $output_objdir/${libname}-symbols.expsym~$CC $allow_undefined_flag -o $lib -bundle $libobjs $deplibs$compiler_flags~nmedit -s $output_objdir/${libname}-symbols.expsym ${lib}' hardcode_direct=no hardcode_automatic=yes hardcode_shlibpath_var=unsupported whole_archive_flag_spec='-all_load $convenience' link_all_deplibs=yes else ld_shlibs=no fi ;; esac echo "$as_me:$LINENO: result: $enable_shared" >&5 echo "${ECHO_T}$enable_shared" >&6 echo "$as_me:$LINENO: checking whether to build static libraries" >&5 echo $ECHO_N "checking whether to build static libraries... $ECHO_C" >&6 # Make sure either enable_shared or enable_static is yes. 
test "$enable_shared" = yes || enable_static=yes echo "$as_me:$LINENO: result: $enable_static" >&5 echo "${ECHO_T}$enable_static" >&6 # The else clause should only fire when bootstrapping the # libtool distribution, otherwise you forgot to ship ltmain.sh # with your package, and you will get complaints that there are # no rules to generate ltmain.sh. if test -f "$ltmain"; then # See if we are running on zsh, and set the options which allow our commands through # without removal of \ escapes. if test -n "${ZSH_VERSION+set}" ; then setopt NO_GLOB_SUBST fi # Now quote all the things that may contain metacharacters while being # careful not to overquote the AC_SUBSTed values. We take copies of the # variables and quote the copies for generation of the libtool script. for var in echo old_CC old_CFLAGS AR AR_FLAGS EGREP RANLIB LN_S LTCC NM \ SED SHELL STRIP \ libname_spec library_names_spec soname_spec extract_expsyms_cmds \ old_striplib striplib file_magic_cmd finish_cmds finish_eval \ deplibs_check_method reload_flag reload_cmds need_locks \ lt_cv_sys_global_symbol_pipe lt_cv_sys_global_symbol_to_cdecl \ lt_cv_sys_global_symbol_to_c_name_address \ sys_lib_search_path_spec sys_lib_dlsearch_path_spec \ old_postinstall_cmds old_postuninstall_cmds \ compiler \ CC \ LD \ lt_prog_compiler_wl \ lt_prog_compiler_pic \ lt_prog_compiler_static \ lt_prog_compiler_no_builtin_flag \ export_dynamic_flag_spec \ thread_safe_flag_spec \ whole_archive_flag_spec \ enable_shared_with_static_runtimes \ old_archive_cmds \ old_archive_from_new_cmds \ predep_objects \ postdep_objects \ predeps \ postdeps \ compiler_lib_search_path \ archive_cmds \ archive_expsym_cmds \ postinstall_cmds \ postuninstall_cmds \ old_archive_from_expsyms_cmds \ allow_undefined_flag \ no_undefined_flag \ export_symbols_cmds \ hardcode_libdir_flag_spec \ hardcode_libdir_flag_spec_ld \ hardcode_libdir_separator \ hardcode_automatic \ module_cmds \ module_expsym_cmds \ lt_cv_prog_compiler_c_o \ exclude_expsyms \ include_expsyms; do case $var in old_archive_cmds | \ old_archive_from_new_cmds | \ archive_cmds | \ archive_expsym_cmds | \ module_cmds | \ module_expsym_cmds | \ old_archive_from_expsyms_cmds | \ export_symbols_cmds | \ extract_expsyms_cmds | reload_cmds | finish_cmds | \ postinstall_cmds | postuninstall_cmds | \ old_postinstall_cmds | old_postuninstall_cmds | \ sys_lib_search_path_spec | sys_lib_dlsearch_path_spec) # Double-quote double-evaled strings. eval "lt_$var=\\\"\`\$echo \"X\$$var\" | \$Xsed -e \"\$double_quote_subst\" -e \"\$sed_quote_subst\" -e \"\$delay_variable_subst\"\`\\\"" ;; *) eval "lt_$var=\\\"\`\$echo \"X\$$var\" | \$Xsed -e \"\$sed_quote_subst\"\`\\\"" ;; esac done case $lt_echo in *'\$0 --fallback-echo"') lt_echo=`$echo "X$lt_echo" | $Xsed -e 's/\\\\\\\$0 --fallback-echo"$/$0 --fallback-echo"/'` ;; esac cfgfile="${ofile}T" trap "$rm \"$cfgfile\"; exit 1" 1 2 15 $rm -f "$cfgfile" { echo "$as_me:$LINENO: creating $ofile" >&5 echo "$as_me: creating $ofile" >&6;} cat <<__EOF__ >> "$cfgfile" #! $SHELL # `$echo "$cfgfile" | sed 's%^.*/%%'` - Provide generalized library-building support services. # Generated automatically by $PROGRAM (GNU $PACKAGE $VERSION$TIMESTAMP) # NOTE: Changes made to this file will be lost: look at ltmain.sh. # # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001 # Free Software Foundation, Inc. 
# # This file is part of GNU Libtool: # Originally by Gordon Matzigkeit , 1996 # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. # # As a special exception to the GNU General Public License, if you # distribute this file as part of a program that contains a # configuration script generated by Autoconf, you may include it under # the same distribution terms that you use for the rest of that program. # A sed program that does not truncate output. SED=$lt_SED # Sed that helps us avoid accidentally triggering echo(1) options like -n. Xsed="$SED -e s/^X//" # The HP-UX ksh and POSIX shell print the target directory to stdout # if CDPATH is set. if test "X\${CDPATH+set}" = Xset; then CDPATH=:; export CDPATH; fi # The names of the tagged configurations supported by this script. available_tags= # ### BEGIN LIBTOOL CONFIG # Libtool was configured on host `(hostname || uname -n) 2>/dev/null | sed 1q`: # Shell to use when invoking shell scripts. SHELL=$lt_SHELL # Whether or not to build shared libraries. build_libtool_libs=$enable_shared # Whether or not to build static libraries. build_old_libs=$enable_static # Whether or not to add -lc for building shared libraries. build_libtool_need_lc=$archive_cmds_need_lc # Whether or not to disallow shared libs when runtime libs are static allow_libtool_libs_with_static_runtimes=$enable_shared_with_static_runtimes # Whether or not to optimize for fast installation. fast_install=$enable_fast_install # The host system. host_alias=$host_alias host=$host # An echo program that does not interpret backslashes. echo=$lt_echo # The archiver. AR=$lt_AR AR_FLAGS=$lt_AR_FLAGS # A C compiler. LTCC=$lt_LTCC # A language-specific compiler. CC=$lt_compiler # Is the compiler the GNU C compiler? with_gcc=$GCC # An ERE matcher. EGREP=$lt_EGREP # The linker used to build libraries. LD=$lt_LD # Whether we need hard or soft links. LN_S=$lt_LN_S # A BSD-compatible nm program. NM=$lt_NM # A symbol stripping program STRIP=$lt_STRIP # Used to examine libraries when file_magic_cmd begins "file" MAGIC_CMD=$MAGIC_CMD # Used on cygwin: DLL creation program. DLLTOOL="$DLLTOOL" # Used on cygwin: object dumper. OBJDUMP="$OBJDUMP" # Used on cygwin: assembler. AS="$AS" # The name of the directory that contains temporary libtool files. objdir=$objdir # How to create reloadable object files. reload_flag=$lt_reload_flag reload_cmds=$lt_reload_cmds # How to pass a linker flag through the compiler. wl=$lt_lt_prog_compiler_wl # Object file suffix (normally "o"). objext="$ac_objext" # Old archive suffix (normally "a"). libext="$libext" # Shared library suffix (normally ".so"). shrext_cmds='$shrext_cmds' # Executable file suffix (normally ""). exeext="$exeext" # Additional compiler flags for building library objects. pic_flag=$lt_lt_prog_compiler_pic pic_mode=$pic_mode # What is the maximum length of a command? 
max_cmd_len=$lt_cv_sys_max_cmd_len # Does compiler simultaneously support -c and -o options? compiler_c_o=$lt_lt_cv_prog_compiler_c_o # Must we lock files when doing compilation ? need_locks=$lt_need_locks # Do we need the lib prefix for modules? need_lib_prefix=$need_lib_prefix # Do we need a version for libraries? need_version=$need_version # Whether dlopen is supported. dlopen_support=$enable_dlopen # Whether dlopen of programs is supported. dlopen_self=$enable_dlopen_self # Whether dlopen of statically linked programs is supported. dlopen_self_static=$enable_dlopen_self_static # Compiler flag to prevent dynamic linking. link_static_flag=$lt_lt_prog_compiler_static # Compiler flag to turn off builtin functions. no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag # Compiler flag to allow reflexive dlopens. export_dynamic_flag_spec=$lt_export_dynamic_flag_spec # Compiler flag to generate shared objects directly from archives. whole_archive_flag_spec=$lt_whole_archive_flag_spec # Compiler flag to generate thread-safe objects. thread_safe_flag_spec=$lt_thread_safe_flag_spec # Library versioning type. version_type=$version_type # Format of library name prefix. libname_spec=$lt_libname_spec # List of archive names. First name is the real one, the rest are links. # The last name is the one that the linker finds with -lNAME. library_names_spec=$lt_library_names_spec # The coded name of the library, if different from the real name. soname_spec=$lt_soname_spec # Commands used to build and install an old-style archive. RANLIB=$lt_RANLIB old_archive_cmds=$lt_old_archive_cmds old_postinstall_cmds=$lt_old_postinstall_cmds old_postuninstall_cmds=$lt_old_postuninstall_cmds # Create an old-style archive from a shared archive. old_archive_from_new_cmds=$lt_old_archive_from_new_cmds # Create a temporary old-style archive to link instead of a shared archive. old_archive_from_expsyms_cmds=$lt_old_archive_from_expsyms_cmds # Commands used to build and install a shared archive. archive_cmds=$lt_archive_cmds archive_expsym_cmds=$lt_archive_expsym_cmds postinstall_cmds=$lt_postinstall_cmds postuninstall_cmds=$lt_postuninstall_cmds # Commands used to build a loadable module (assumed same as above if empty) module_cmds=$lt_module_cmds module_expsym_cmds=$lt_module_expsym_cmds # Commands to strip libraries. old_striplib=$lt_old_striplib striplib=$lt_striplib # Dependencies to place before the objects being linked to create a # shared library. predep_objects=$lt_predep_objects # Dependencies to place after the objects being linked to create a # shared library. postdep_objects=$lt_postdep_objects # Dependencies to place before the objects being linked to create a # shared library. predeps=$lt_predeps # Dependencies to place after the objects being linked to create a # shared library. postdeps=$lt_postdeps # The library search path used internally by the compiler when linking # a shared library. compiler_lib_search_path=$lt_compiler_lib_search_path # Method to check whether dependent libraries are shared objects. deplibs_check_method=$lt_deplibs_check_method # Command to use when deplibs_check_method == file_magic. file_magic_cmd=$lt_file_magic_cmd # Flag that allows shared libraries with undefined symbols to be built. allow_undefined_flag=$lt_allow_undefined_flag # Flag that forces no undefined symbols. no_undefined_flag=$lt_no_undefined_flag # Commands used to finish a libtool library installation in a directory. finish_cmds=$lt_finish_cmds # Same as above, but a single script fragment to be evaled but not shown. 
finish_eval=$lt_finish_eval # Take the output of nm and produce a listing of raw symbols and C names. global_symbol_pipe=$lt_lt_cv_sys_global_symbol_pipe # Transform the output of nm in a proper C declaration global_symbol_to_cdecl=$lt_lt_cv_sys_global_symbol_to_cdecl # Transform the output of nm in a C name address pair global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address # This is the shared library runtime path variable. runpath_var=$runpath_var # This is the shared library path variable. shlibpath_var=$shlibpath_var # Is shlibpath searched before the hard-coded library search path? shlibpath_overrides_runpath=$shlibpath_overrides_runpath # How to hardcode a shared library path into an executable. hardcode_action=$hardcode_action # Whether we should hardcode library paths into libraries. hardcode_into_libs=$hardcode_into_libs # Flag to hardcode \$libdir into a binary during linking. # This must work even if \$libdir does not exist. hardcode_libdir_flag_spec=$lt_hardcode_libdir_flag_spec # If ld is used when linking, flag to hardcode \$libdir into # a binary during linking. This must work even if \$libdir does # not exist. hardcode_libdir_flag_spec_ld=$lt_hardcode_libdir_flag_spec_ld # Whether we need a single -rpath flag with a separated argument. hardcode_libdir_separator=$lt_hardcode_libdir_separator # Set to yes if using DIR/libNAME${shared_ext} during linking hardcodes DIR into the # resulting binary. hardcode_direct=$hardcode_direct # Set to yes if using the -LDIR flag during linking hardcodes DIR into the # resulting binary. hardcode_minus_L=$hardcode_minus_L # Set to yes if using SHLIBPATH_VAR=DIR during linking hardcodes DIR into # the resulting binary. hardcode_shlibpath_var=$hardcode_shlibpath_var # Set to yes if building a shared library automatically hardcodes DIR into the library # and all subsequent libraries and executables linked against it. hardcode_automatic=$hardcode_automatic # Variables whose values should be saved in libtool wrapper scripts and # restored at relink time. variables_saved_for_relink="$variables_saved_for_relink" # Whether libtool must link a program against all its dependency libraries. link_all_deplibs=$link_all_deplibs # Compile-time system search path for libraries sys_lib_search_path_spec=$lt_sys_lib_search_path_spec # Run-time system search path for libraries sys_lib_dlsearch_path_spec=$lt_sys_lib_dlsearch_path_spec # Fix the shell variable \$srcfile for the compiler. fix_srcfile_path="$fix_srcfile_path" # Set to yes if exported symbols are required. always_export_symbols=$always_export_symbols # The commands to list exported symbols. export_symbols_cmds=$lt_export_symbols_cmds # The commands to extract the exported symbol list from a shared archive. extract_expsyms_cmds=$lt_extract_expsyms_cmds # Symbols that should not be listed in the preloaded symbols. exclude_expsyms=$lt_exclude_expsyms # Symbols that must always be exported. include_expsyms=$lt_include_expsyms # ### END LIBTOOL CONFIG __EOF__ case $host_os in aix3*) cat <<\EOF >> "$cfgfile" # AIX sometimes has problems with the GCC collect2 program. For some # reason, if we set the COLLECT_NAMES environment variable, the problems # vanish in a puff of smoke. if test "X${COLLECT_NAMES+set}" != Xset; then COLLECT_NAMES= export COLLECT_NAMES fi EOF ;; esac # We use sed instead of cat because bash on DJGPP gets confused if # if finds mixed CR/LF and LF-only lines. Since sed operates in # text mode, it properly converts lines to CR/LF. 
This bash problem # is reportedly fixed, but why not run on old versions too? sed '$q' "$ltmain" >> "$cfgfile" || (rm -f "$cfgfile"; exit 1) mv -f "$cfgfile" "$ofile" || \ (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile") chmod +x "$ofile" else # If there is no Makefile yet, we rely on a make rule to execute # `config.status --recheck' to rerun these tests and create the # libtool script then. ltmain_in=`echo $ltmain | sed -e 's/\.sh$/.in/'` if test -f "$ltmain_in"; then test -f Makefile && make "$ltmain" fi fi ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu CC="$lt_save_CC" # Check whether --with-tags or --without-tags was given. if test "${with_tags+set}" = set; then withval="$with_tags" tagnames="$withval" fi; if test -f "$ltmain" && test -n "$tagnames"; then if test ! -f "${ofile}"; then { echo "$as_me:$LINENO: WARNING: output file \`$ofile' does not exist" >&5 echo "$as_me: WARNING: output file \`$ofile' does not exist" >&2;} fi if test -z "$LTCC"; then eval "`$SHELL ${ofile} --config | grep '^LTCC='`" if test -z "$LTCC"; then { echo "$as_me:$LINENO: WARNING: output file \`$ofile' does not look like a libtool script" >&5 echo "$as_me: WARNING: output file \`$ofile' does not look like a libtool script" >&2;} else { echo "$as_me:$LINENO: WARNING: using \`LTCC=$LTCC', extracted from \`$ofile'" >&5 echo "$as_me: WARNING: using \`LTCC=$LTCC', extracted from \`$ofile'" >&2;} fi fi # Extract list of available tagged configurations in $ofile. # Note that this assumes the entire list is on one line. available_tags=`grep "^available_tags=" "${ofile}" | $SED -e 's/available_tags=\(.*$\)/\1/' -e 's/\"//g'` lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," for tagname in $tagnames; do IFS="$lt_save_ifs" # Check whether tagname contains only valid characters case `$echo "X$tagname" | $Xsed -e 's:[-_ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz1234567890,/]::g'` in "") ;; *) { { echo "$as_me:$LINENO: error: invalid tag name: $tagname" >&5 echo "$as_me: error: invalid tag name: $tagname" >&2;} { (exit 1); exit 1; }; } ;; esac if grep "^# ### BEGIN LIBTOOL TAG CONFIG: $tagname$" < "${ofile}" > /dev/null then { { echo "$as_me:$LINENO: error: tag name \"$tagname\" already exists" >&5 echo "$as_me: error: tag name \"$tagname\" already exists" >&2;} { (exit 1); exit 1; }; } fi # Update the list of available tags. 
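# (Added note, assuming standard libtool behaviour: each tag accepted by
# the loop below is appended to $ofile as a delimited block, which is
# what the grep above checks for, roughly of the form
#   # ### BEGIN LIBTOOL TAG CONFIG: CXX
#   ...per-language settings...
#   # ### END LIBTOOL TAG CONFIG: CXX
# so re-running configure refuses to append a duplicate tag.)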
if test -n "$tagname"; then echo appending configuration tag \"$tagname\" to $ofile case $tagname in CXX) if test -n "$CXX" && test "X$CXX" != "Xno"; then ac_ext=cc ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu archive_cmds_need_lc_CXX=no allow_undefined_flag_CXX= always_export_symbols_CXX=no archive_expsym_cmds_CXX= export_dynamic_flag_spec_CXX= hardcode_direct_CXX=no hardcode_libdir_flag_spec_CXX= hardcode_libdir_flag_spec_ld_CXX= hardcode_libdir_separator_CXX= hardcode_minus_L_CXX=no hardcode_automatic_CXX=no module_cmds_CXX= module_expsym_cmds_CXX= link_all_deplibs_CXX=unknown old_archive_cmds_CXX=$old_archive_cmds no_undefined_flag_CXX= whole_archive_flag_spec_CXX= enable_shared_with_static_runtimes_CXX=no # Dependencies to place before and after the object being linked: predep_objects_CXX= postdep_objects_CXX= predeps_CXX= postdeps_CXX= compiler_lib_search_path_CXX= # Source file extension for C++ test sources. ac_ext=cc # Object file extension for compiled C++ test sources. objext=o objext_CXX=$objext # Code to be used in simple compile tests lt_simple_compile_test_code="int some_variable = 0;\n" # Code to be used in simple link tests lt_simple_link_test_code='int main(int, char *) { return(0); }\n' # ltmain only uses $CC for tagged configurations so make sure $CC is set. # If no C compiler was specified, use CC. LTCC=${LTCC-"$CC"} # Allow CC to be a program name with arguments. compiler=$CC # Allow CC to be a program name with arguments. lt_save_CC=$CC lt_save_LD=$LD lt_save_GCC=$GCC GCC=$GXX lt_save_with_gnu_ld=$with_gnu_ld lt_save_path_LD=$lt_cv_path_LD if test -n "${lt_cv_prog_gnu_ldcxx+set}"; then lt_cv_prog_gnu_ld=$lt_cv_prog_gnu_ldcxx else unset lt_cv_prog_gnu_ld fi if test -n "${lt_cv_path_LDCXX+set}"; then lt_cv_path_LD=$lt_cv_path_LDCXX else unset lt_cv_path_LD fi test -z "${LDCXX+set}" || LD=$LDCXX CC=${CXX-"c++"} compiler=$CC compiler_CXX=$CC cc_basename=`$echo X"$compiler" | $Xsed -e 's%^.*/%%'` # We don't want -fno-exception wen compiling C++ code, so set the # no_builtin_flag separately if test "$GXX" = yes; then lt_prog_compiler_no_builtin_flag_CXX=' -fno-builtin' else lt_prog_compiler_no_builtin_flag_CXX= fi if test "$GXX" = yes; then # Set up default GNU C++ configuration # Check whether --with-gnu-ld or --without-gnu-ld was given. if test "${with_gnu_ld+set}" = set; then withval="$with_gnu_ld" test "$withval" = no || with_gnu_ld=yes else with_gnu_ld=no fi; ac_prog=ld if test "$GCC" = yes; then # Check if gcc -print-prog-name=ld gives a path. echo "$as_me:$LINENO: checking for ld used by $CC" >&5 echo $ECHO_N "checking for ld used by $CC... $ECHO_C" >&6 case $host in *-*-mingw*) # gcc leaves a trailing carriage return which upsets mingw ac_prog=`($CC -print-prog-name=ld) 2>&5 | tr -d '\015'` ;; *) ac_prog=`($CC -print-prog-name=ld) 2>&5` ;; esac case $ac_prog in # Accept absolute paths. [\\/]* | ?:[\\/]*) re_direlt='/[^/][^/]*/\.\./' # Canonicalize the pathname of ld ac_prog=`echo $ac_prog| $SED 's%\\\\%/%g'` while echo $ac_prog | grep "$re_direlt" > /dev/null 2>&1; do ac_prog=`echo $ac_prog| $SED "s%$re_direlt%/%"` done test -z "$LD" && LD="$ac_prog" ;; "") # If it fails, then pretend we aren't using GCC. ac_prog=ld ;; *) # If it is relative, then search for the first ld in PATH. 
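# (For an absolute ld path, the loop above has already collapsed any
# "dir/../" components, e.g. /usr/bin/../bin/ld reduces to /usr/bin/ld.
# A relative name cannot be resolved here, so $LD stays unset and the
# PATH search further below picks the first matching ld instead.)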
with_gnu_ld=unknown ;; esac elif test "$with_gnu_ld" = yes; then echo "$as_me:$LINENO: checking for GNU ld" >&5 echo $ECHO_N "checking for GNU ld... $ECHO_C" >&6 else echo "$as_me:$LINENO: checking for non-GNU ld" >&5 echo $ECHO_N "checking for non-GNU ld... $ECHO_C" >&6 fi if test "${lt_cv_path_LD+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else if test -z "$LD"; then lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR for ac_dir in $PATH; do IFS="$lt_save_ifs" test -z "$ac_dir" && ac_dir=. if test -f "$ac_dir/$ac_prog" || test -f "$ac_dir/$ac_prog$ac_exeext"; then lt_cv_path_LD="$ac_dir/$ac_prog" # Check to see if the program is GNU ld. I'd rather use --version, # but apparently some GNU ld's only accept -v. # Break only if it was the GNU/non-GNU ld that we prefer. case `"$lt_cv_path_LD" -v 2>&1 &5 echo "${ECHO_T}$LD" >&6 else echo "$as_me:$LINENO: result: no" >&5 echo "${ECHO_T}no" >&6 fi test -z "$LD" && { { echo "$as_me:$LINENO: error: no acceptable ld found in \$PATH" >&5 echo "$as_me: error: no acceptable ld found in \$PATH" >&2;} { (exit 1); exit 1; }; } echo "$as_me:$LINENO: checking if the linker ($LD) is GNU ld" >&5 echo $ECHO_N "checking if the linker ($LD) is GNU ld... $ECHO_C" >&6 if test "${lt_cv_prog_gnu_ld+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else # I'd rather use --version here, but apparently some GNU ld's only accept -v. case `$LD -v 2>&1 &5 echo "${ECHO_T}$lt_cv_prog_gnu_ld" >&6 with_gnu_ld=$lt_cv_prog_gnu_ld # Check if GNU C++ uses GNU ld as the underlying linker, since the # archiving commands below assume that GNU ld is being used. if test "$with_gnu_ld" = yes; then archive_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib' archive_expsym_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' hardcode_libdir_flag_spec_CXX='${wl}--rpath ${wl}$libdir' export_dynamic_flag_spec_CXX='${wl}--export-dynamic' # If archive_cmds runs LD, not CC, wlarc should be empty # XXX I think wlarc can be eliminated in ltcf-cxx, but I need to # investigate it a little bit more. (MM) wlarc='${wl}' # ancient GNU ld didn't support --whole-archive et. al. if eval "`$CC -print-prog-name=ld` --help 2>&1" | \ grep 'no-whole-archive' > /dev/null; then whole_archive_flag_spec_CXX="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive' else whole_archive_flag_spec_CXX= fi else with_gnu_ld=no wlarc= # A generic and very simple default shared library creation # command for GNU C++ for the case where it uses the native # linker, instead of GNU ld. If possible, this setting should # overridden to take advantage of the native linker features on # the platform it is being used on. archive_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $lib' fi # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | grep "\-L"' else GXX=no with_gnu_ld=no wlarc= fi # PORTME: fill in a description of your system's C++ link characteristics echo "$as_me:$LINENO: checking whether the $compiler linker ($LD) supports shared libraries" >&5 echo $ECHO_N "checking whether the $compiler linker ($LD) supports shared libraries... 
$ECHO_C" >&6 ld_shlibs_CXX=yes case $host_os in aix3*) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; aix4* | aix5*) if test "$host_cpu" = ia64; then # On IA64, the linker does run time linking by default, so we don't # have to do anything special. aix_use_runtimelinking=no exp_sym_flag='-Bexport' no_entry_flag="" else aix_use_runtimelinking=no # Test if we are trying to use run time linking or normal # AIX style linking. If -brtl is somewhere in LDFLAGS, we # need to do runtime linking. case $host_os in aix4.[23]|aix4.[23].*|aix5*) for ld_flag in $LDFLAGS; do case $ld_flag in *-brtl*) aix_use_runtimelinking=yes break ;; esac done esac exp_sym_flag='-bexport' no_entry_flag='-bnoentry' fi # When large executables or shared objects are built, AIX ld can # have problems creating the table of contents. If linking a library # or program results in "error TOC overflow" add -mminimal-toc to # CXXFLAGS/CFLAGS for g++/gcc. In the cases where that is not # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS. archive_cmds_CXX='' hardcode_direct_CXX=yes hardcode_libdir_separator_CXX=':' link_all_deplibs_CXX=yes if test "$GXX" = yes; then case $host_os in aix4.012|aix4.012.*) # We only want to do this on AIX 4.2 and lower, the check # below for broken collect2 doesn't work under 4.3+ collect2name=`${CC} -print-prog-name=collect2` if test -f "$collect2name" && \ strings "$collect2name" | grep resolve_lib_name >/dev/null then # We have reworked collect2 hardcode_direct_CXX=yes else # We have old collect2 hardcode_direct_CXX=unsupported # It fails to find uninstalled libraries when the uninstalled # path is not listed in the libpath. Setting hardcode_minus_L # to unsupported forces relinking hardcode_minus_L_CXX=yes hardcode_libdir_flag_spec_CXX='-L$libdir' hardcode_libdir_separator_CXX= fi esac shared_flag='-shared' else # not using gcc if test "$host_cpu" = ia64; then # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release # chokes on -Wl,-G. The following line is correct: shared_flag='-G' else if test "$aix_use_runtimelinking" = yes; then shared_flag='${wl}-G' else shared_flag='${wl}-bM:SRE' fi fi fi # It seems that -bexpall does not export symbols beginning with # underscore (_), so it is better to generate a list of symbols to export. always_export_symbols_CXX=yes if test "$aix_use_runtimelinking" = yes; then # Warning - without using the other runtime loading flags (-brtl), # -berok will link without error, but may produce a broken library. allow_undefined_flag_CXX='-berok' # Determine the default libpath from the value encoded in an empty executable. cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ int main () { ; return 0; } _ACEOF rm -f conftest.$ac_objext conftest$ac_exeext if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 (eval $ac_link) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_cxx_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest$ac_exeext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 (exit $ac_status); }; }; then aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e '/Import File Strings/,/^$/ { /^0/ { s/^0 *\(.*\)$/\1/; p; } }'` # Check for a 64-bit object if we didn't find anything. if test -z "$aix_libpath"; then aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e '/Import File Strings/,/^$/ { /^0/ { s/^0 *\(.*\)$/\1/; p; } }'`; fi else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 fi rm -f conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi hardcode_libdir_flag_spec_CXX='${wl}-blibpath:$libdir:'"$aix_libpath" archive_expsym_cmds_CXX="\$CC"' -o $output_objdir/$soname $libobjs $deplibs $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then echo "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$no_entry_flag \${wl}$exp_sym_flag:\$export_symbols $shared_flag" else if test "$host_cpu" = ia64; then hardcode_libdir_flag_spec_CXX='${wl}-R $libdir:/usr/lib:/lib' allow_undefined_flag_CXX="-z nodefs" archive_expsym_cmds_CXX="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$no_entry_flag \${wl}$exp_sym_flag:\$export_symbols" else # Determine the default libpath from the value encoded in an empty executable. cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ int main () { ; return 0; } _ACEOF rm -f conftest.$ac_objext conftest$ac_exeext if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 (eval $ac_link) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_cxx_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest$ac_exeext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e '/Import File Strings/,/^$/ { /^0/ { s/^0 *\(.*\)$/\1/; p; } }'` # Check for a 64-bit object if we didn't find anything. if test -z "$aix_libpath"; then aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e '/Import File Strings/,/^$/ { /^0/ { s/^0 *\(.*\)$/\1/; p; } }'`; fi else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 fi rm -f conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi hardcode_libdir_flag_spec_CXX='${wl}-blibpath:$libdir:'"$aix_libpath" # Warning - without using the other run time loading flags, # -berok will link without error, but may produce a broken library. no_undefined_flag_CXX=' ${wl}-bernotok' allow_undefined_flag_CXX=' ${wl}-berok' # -bexpall does not export symbols beginning with underscore (_) always_export_symbols_CXX=yes # Exported symbols can be pulled into shared objects from archives whole_archive_flag_spec_CXX=' ' archive_cmds_need_lc_CXX=yes # This is similar to how AIX traditionally builds it's shared libraries. 
archive_expsym_cmds_CXX="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs $compiler_flags ${wl}-bE:$export_symbols ${wl}-bnoentry${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname' fi fi ;; chorus*) case $cc_basename in *) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; esac ;; cygwin* | mingw* | pw32*) # _LT_AC_TAGVAR(hardcode_libdir_flag_spec, CXX) is actually meaningless, # as there is no search path for DLLs. hardcode_libdir_flag_spec_CXX='-L$libdir' allow_undefined_flag_CXX=unsupported always_export_symbols_CXX=no enable_shared_with_static_runtimes_CXX=yes if $LD --help 2>&1 | grep 'auto-import' > /dev/null; then archive_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--image-base=0x10000000 ${wl}--out-implib,$lib' # If the export-symbols file already is a .def file (1st line # is EXPORTS), use it as is; otherwise, prepend... archive_expsym_cmds_CXX='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then cp $export_symbols $output_objdir/$soname.def; else echo EXPORTS > $output_objdir/$soname.def; cat $export_symbols >> $output_objdir/$soname.def; fi~ $CC -shared -nostdlib $output_objdir/$soname.def $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--image-base=0x10000000 ${wl}--out-implib,$lib' else ld_shlibs_CXX=no fi ;; darwin* | rhapsody*) if test "$GXX" = yes; then archive_cmds_need_lc_CXX=no case "$host_os" in rhapsody* | darwin1.[012]) allow_undefined_flag_CXX='-undefined suppress' ;; *) # Darwin 1.3 on if test -z ${MACOSX_DEPLOYMENT_TARGET} ; then allow_undefined_flag_CXX='-flat_namespace -undefined suppress' else case ${MACOSX_DEPLOYMENT_TARGET} in 10.[012]) allow_undefined_flag_CXX='-flat_namespace -undefined suppress' ;; 10.*) allow_undefined_flag_CXX='-undefined dynamic_lookup' ;; esac fi ;; esac lt_int_apple_cc_single_mod=no output_verbose_link_cmd='echo' if $CC -dumpspecs 2>&1 | grep 'single_module' >/dev/null ; then lt_int_apple_cc_single_mod=yes fi if test "X$lt_int_apple_cc_single_mod" = Xyes ; then archive_cmds_CXX='$CC -dynamiclib -single_module $allow_undefined_flag -o $lib $libobjs $deplibs $compiler_flags -install_name $rpath/$soname $verstring' else archive_cmds_CXX='$CC -r ${wl}-bind_at_load -keep_private_externs -nostdlib -o ${lib}-master.o $libobjs~$CC -dynamiclib $allow_undefined_flag -o $lib ${lib}-master.o $deplibs $compiler_flags -install_name $rpath/$soname $verstring' fi module_cmds_CXX='$CC ${wl}-bind_at_load $allow_undefined_flag -o $lib -bundle $libobjs $deplibs$compiler_flags' # Don't fix this by using the ld -exported_symbols_list flag, it doesn't exist in older darwin ld's if test "X$lt_int_apple_cc_single_mod" = Xyes ; then archive_expsym_cmds_CXX='sed -e "s,#.*,," -e "s,^[ ]*,," -e "s,^\(..*\),_&," < $export_symbols > $output_objdir/${libname}-symbols.expsym~$CC -dynamiclib -single_module $allow_undefined_flag -o $lib $libobjs $deplibs $compiler_flags -install_name $rpath/$soname $verstring~nmedit -s $output_objdir/${libname}-symbols.expsym ${lib}' else archive_expsym_cmds_CXX='sed -e "s,#.*,," -e "s,^[ ]*,," -e "s,^\(..*\),_&," < $export_symbols > $output_objdir/${libname}-symbols.expsym~$CC -r ${wl}-bind_at_load -keep_private_externs -nostdlib -o ${lib}-master.o $libobjs~$CC -dynamiclib $allow_undefined_flag -o $lib ${lib}-master.o $deplibs $compiler_flags -install_name $rpath/$soname $verstring~nmedit -s 
$output_objdir/${libname}-symbols.expsym ${lib}' fi module_expsym_cmds_CXX='sed -e "s,#.*,," -e "s,^[ ]*,," -e "s,^\(..*\),_&," < $export_symbols > $output_objdir/${libname}-symbols.expsym~$CC $allow_undefined_flag -o $lib -bundle $libobjs $deplibs$compiler_flags~nmedit -s $output_objdir/${libname}-symbols.expsym ${lib}' hardcode_direct_CXX=no hardcode_automatic_CXX=yes hardcode_shlibpath_var_CXX=unsupported whole_archive_flag_spec_CXX='-all_load $convenience' link_all_deplibs_CXX=yes else ld_shlibs_CXX=no fi ;; dgux*) case $cc_basename in ec++) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; ghcx) # Green Hills C++ Compiler # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; *) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; esac ;; freebsd12*) # C++ shared libraries reported to be fairly broken before switch to ELF ld_shlibs_CXX=no ;; freebsd-elf*) archive_cmds_need_lc_CXX=no ;; freebsd* | kfreebsd*-gnu) # FreeBSD 3 and later use GNU C++ and GNU ld with standard ELF # conventions ld_shlibs_CXX=yes ;; gnu*) ;; hpux9*) hardcode_libdir_flag_spec_CXX='${wl}+b ${wl}$libdir' hardcode_libdir_separator_CXX=: export_dynamic_flag_spec_CXX='${wl}-E' hardcode_direct_CXX=yes hardcode_minus_L_CXX=yes # Not in the search PATH, # but as the default # location of the library. case $cc_basename in CC) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; aCC) archive_cmds_CXX='$rm $output_objdir/$soname~$CC -b ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. # # There doesn't appear to be a way to prevent this compiler from # explicitly linking system object files so we need to strip them # from the output so that they don't get included in the library # dependencies. output_verbose_link_cmd='templist=`($CC -b $CFLAGS -v conftest.$objext 2>&1) | grep "-L"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; echo $list' ;; *) if test "$GXX" = yes; then archive_cmds_CXX='$rm $output_objdir/$soname~$CC -shared -nostdlib -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' else # FIXME: insert proper C++ library support ld_shlibs_CXX=no fi ;; esac ;; hpux10*|hpux11*) if test $with_gnu_ld = no; then case "$host_cpu" in hppa*64*) hardcode_libdir_flag_spec_CXX='${wl}+b ${wl}$libdir' hardcode_libdir_flag_spec_ld_CXX='+b $libdir' hardcode_libdir_separator_CXX=: ;; ia64*) hardcode_libdir_flag_spec_CXX='-L$libdir' ;; *) hardcode_libdir_flag_spec_CXX='${wl}+b ${wl}$libdir' hardcode_libdir_separator_CXX=: export_dynamic_flag_spec_CXX='${wl}-E' ;; esac fi case "$host_cpu" in hppa*64*) hardcode_direct_CXX=no hardcode_shlibpath_var_CXX=no ;; ia64*) hardcode_direct_CXX=no hardcode_shlibpath_var_CXX=no hardcode_minus_L_CXX=yes # Not in the search PATH, # but as the default # location of the library. ;; *) hardcode_direct_CXX=yes hardcode_minus_L_CXX=yes # Not in the search PATH, # but as the default # location of the library. 
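# (These per-CPU choices mirror the hardcode_* variables written into the
# main libtool config above: whether a library path gets baked into the
# binary directly, via -L, or via the shared-library path variable.)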
;; esac case $cc_basename in CC) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; aCC) case "$host_cpu" in hppa*64*|ia64*) archive_cmds_CXX='$LD -b +h $soname -o $lib $linker_flags $libobjs $deplibs' ;; *) archive_cmds_CXX='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; esac # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. # # There doesn't appear to be a way to prevent this compiler from # explicitly linking system object files so we need to strip them # from the output so that they don't get included in the library # dependencies. output_verbose_link_cmd='templist=`($CC -b $CFLAGS -v conftest.$objext 2>&1) | grep "\-L"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; echo $list' ;; *) if test "$GXX" = yes; then if test $with_gnu_ld = no; then case "$host_cpu" in ia64*|hppa*64*) archive_cmds_CXX='$LD -b +h $soname -o $lib $linker_flags $libobjs $deplibs' ;; *) archive_cmds_CXX='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; esac fi else # FIXME: insert proper C++ library support ld_shlibs_CXX=no fi ;; esac ;; irix5* | irix6*) case $cc_basename in CC) # SGI C++ archive_cmds_CXX='$CC -shared -all -multigot $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -soname $soname `test -n "$verstring" && echo -set_version $verstring` -update_registry ${objdir}/so_locations -o $lib' # Archives containing C++ object files must be created using # "CC -ar", where "CC" is the IRIX C++ compiler. This is # necessary to make sure instantiated templates are included # in the archive. old_archive_cmds_CXX='$CC -ar -WR,-u -o $oldlib $oldobjs' ;; *) if test "$GXX" = yes; then if test "$with_gnu_ld" = no; then archive_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && echo ${wl}-set_version ${wl}$verstring` ${wl}-update_registry ${wl}${objdir}/so_locations -o $lib' else archive_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && echo ${wl}-set_version ${wl}$verstring` -o $lib' fi fi link_all_deplibs_CXX=yes ;; esac hardcode_libdir_flag_spec_CXX='${wl}-rpath ${wl}$libdir' hardcode_libdir_separator_CXX=: ;; linux*) case $cc_basename in KCC) # Kuck and Associates, Inc. (KAI) C++ Compiler # KCC will only create a shared library if the output file # ends with ".so" (or ".sl" for HP-UX), so rename the library # to its proper name (with version) after linking. 
archive_cmds_CXX='tempext=`echo $shared_ext | $SED -e '\''s/\([^()0-9A-Za-z{}]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED -e "s/\${tempext}\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib; mv \$templib $lib' archive_expsym_cmds_CXX='tempext=`echo $shared_ext | $SED -e '\''s/\([^()0-9A-Za-z{}]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED -e "s/\${tempext}\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib ${wl}-retain-symbols-file,$export_symbols; mv \$templib $lib' # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. # # There doesn't appear to be a way to prevent this compiler from # explicitly linking system object files so we need to strip them # from the output so that they don't get included in the library # dependencies. output_verbose_link_cmd='templist=`$CC $CFLAGS -v conftest.$objext -o libconftest$shared_ext 2>&1 | grep "ld"`; rm -f libconftest$shared_ext; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; echo $list' hardcode_libdir_flag_spec_CXX='${wl}--rpath,$libdir' export_dynamic_flag_spec_CXX='${wl}--export-dynamic' # Archives containing C++ object files must be created using # "CC -Bstatic", where "CC" is the KAI C++ compiler. old_archive_cmds_CXX='$CC -Bstatic -o $oldlib $oldobjs' ;; icpc) # Intel C++ with_gnu_ld=yes archive_cmds_need_lc_CXX=no archive_cmds_CXX='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib' archive_expsym_cmds_CXX='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' hardcode_libdir_flag_spec_CXX='${wl}-rpath,$libdir' export_dynamic_flag_spec_CXX='${wl}--export-dynamic' whole_archive_flag_spec_CXX='${wl}--whole-archive$convenience ${wl}--no-whole-archive' ;; cxx) # Compaq C++ archive_cmds_CXX='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib' archive_expsym_cmds_CXX='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib ${wl}-retain-symbols-file $wl$export_symbols' runpath_var=LD_RUN_PATH hardcode_libdir_flag_spec_CXX='-rpath $libdir' hardcode_libdir_separator_CXX=: # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. # # There doesn't appear to be a way to prevent this compiler from # explicitly linking system object files so we need to strip them # from the output so that they don't get included in the library # dependencies. 
output_verbose_link_cmd='templist=`$CC -shared $CFLAGS -v conftest.$objext 2>&1 | grep "ld"`; templist=`echo $templist | $SED "s/\(^.*ld.*\)\( .*ld .*$\)/\1/"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; echo $list' ;; esac ;; lynxos*) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; m88k*) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; mvs*) case $cc_basename in cxx) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; *) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; esac ;; netbsd*) if echo __ELF__ | $CC -E - | grep __ELF__ >/dev/null; then archive_cmds_CXX='$LD -Bshareable -o $lib $predep_objects $libobjs $deplibs $postdep_objects $linker_flags' wlarc= hardcode_libdir_flag_spec_CXX='-R$libdir' hardcode_direct_CXX=yes hardcode_shlibpath_var_CXX=no fi # Workaround some broken pre-1.5 toolchains output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | grep conftest.$objext | $SED -e "s:-lgcc -lc -lgcc::"' ;; osf3*) case $cc_basename in KCC) # Kuck and Associates, Inc. (KAI) C++ Compiler # KCC will only create a shared library if the output file # ends with ".so" (or ".sl" for HP-UX), so rename the library # to its proper name (with version) after linking. archive_cmds_CXX='tempext=`echo $shared_ext | $SED -e '\''s/\([^()0-9A-Za-z{}]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED -e "s/\${tempext}\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib; mv \$templib $lib' hardcode_libdir_flag_spec_CXX='${wl}-rpath,$libdir' hardcode_libdir_separator_CXX=: # Archives containing C++ object files must be created using # "CC -Bstatic", where "CC" is the KAI C++ compiler. old_archive_cmds_CXX='$CC -Bstatic -o $oldlib $oldobjs' ;; RCC) # Rational C++ 2.4.1 # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; cxx) allow_undefined_flag_CXX=' ${wl}-expect_unresolved ${wl}\*' archive_cmds_CXX='$CC -shared${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $soname `test -n "$verstring" && echo ${wl}-set_version $verstring` -update_registry ${objdir}/so_locations -o $lib' hardcode_libdir_flag_spec_CXX='${wl}-rpath ${wl}$libdir' hardcode_libdir_separator_CXX=: # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. # # There doesn't appear to be a way to prevent this compiler from # explicitly linking system object files so we need to strip them # from the output so that they don't get included in the library # dependencies. 
output_verbose_link_cmd='templist=`$CC -shared $CFLAGS -v conftest.$objext 2>&1 | grep "ld" | grep -v "ld:"`; templist=`echo $templist | $SED "s/\(^.*ld.*\)\( .*ld.*$\)/\1/"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; echo $list' ;; *) if test "$GXX" = yes && test "$with_gnu_ld" = no; then allow_undefined_flag_CXX=' ${wl}-expect_unresolved ${wl}\*' archive_cmds_CXX='$CC -shared -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && echo ${wl}-set_version ${wl}$verstring` ${wl}-update_registry ${wl}${objdir}/so_locations -o $lib' hardcode_libdir_flag_spec_CXX='${wl}-rpath ${wl}$libdir' hardcode_libdir_separator_CXX=: # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | grep "\-L"' else # FIXME: insert proper C++ library support ld_shlibs_CXX=no fi ;; esac ;; osf4* | osf5*) case $cc_basename in KCC) # Kuck and Associates, Inc. (KAI) C++ Compiler # KCC will only create a shared library if the output file # ends with ".so" (or ".sl" for HP-UX), so rename the library # to its proper name (with version) after linking. archive_cmds_CXX='tempext=`echo $shared_ext | $SED -e '\''s/\([^()0-9A-Za-z{}]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED -e "s/\${tempext}\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib; mv \$templib $lib' hardcode_libdir_flag_spec_CXX='${wl}-rpath,$libdir' hardcode_libdir_separator_CXX=: # Archives containing C++ object files must be created using # the KAI C++ compiler. old_archive_cmds_CXX='$CC -o $oldlib $oldobjs' ;; RCC) # Rational C++ 2.4.1 # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; cxx) allow_undefined_flag_CXX=' -expect_unresolved \*' archive_cmds_CXX='$CC -shared${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -msym -soname $soname `test -n "$verstring" && echo -set_version $verstring` -update_registry ${objdir}/so_locations -o $lib' archive_expsym_cmds_CXX='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done~ echo "-hidden">> $lib.exp~ $CC -shared$allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -msym -soname $soname -Wl,-input -Wl,$lib.exp `test -n "$verstring" && echo -set_version $verstring` -update_registry $objdir/so_locations -o $lib~ $rm $lib.exp' hardcode_libdir_flag_spec_CXX='-rpath $libdir' hardcode_libdir_separator_CXX=: # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. # # There doesn't appear to be a way to prevent this compiler from # explicitly linking system object files so we need to strip them # from the output so that they don't get included in the library # dependencies. 
output_verbose_link_cmd='templist=`$CC -shared $CFLAGS -v conftest.$objext 2>&1 | grep "ld" | grep -v "ld:"`; templist=`echo $templist | $SED "s/\(^.*ld.*\)\( .*ld.*$\)/\1/"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; echo $list' ;; *) if test "$GXX" = yes && test "$with_gnu_ld" = no; then allow_undefined_flag_CXX=' ${wl}-expect_unresolved ${wl}\*' archive_cmds_CXX='$CC -shared -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && echo ${wl}-set_version ${wl}$verstring` ${wl}-update_registry ${wl}${objdir}/so_locations -o $lib' hardcode_libdir_flag_spec_CXX='${wl}-rpath ${wl}$libdir' hardcode_libdir_separator_CXX=: # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | grep "\-L"' else # FIXME: insert proper C++ library support ld_shlibs_CXX=no fi ;; esac ;; psos*) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; sco*) archive_cmds_need_lc_CXX=no case $cc_basename in CC) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; *) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; esac ;; sunos4*) case $cc_basename in CC) # Sun C++ 4.x # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; lcc) # Lucid # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; *) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; esac ;; solaris*) case $cc_basename in CC) # Sun C++ 4.2, 5.x and Centerline C++ no_undefined_flag_CXX=' -zdefs' archive_cmds_CXX='$CC -G${allow_undefined_flag} -nolib -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' archive_expsym_cmds_CXX='$echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~$echo "local: *; };" >> $lib.exp~ $CC -G${allow_undefined_flag} -nolib ${wl}-M ${wl}$lib.exp -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$rm $lib.exp' hardcode_libdir_flag_spec_CXX='-R$libdir' hardcode_shlibpath_var_CXX=no case $host_os in solaris2.0-5 | solaris2.0-5.*) ;; *) # The C++ compiler is used as linker so we must use $wl # flag to pass the commands to the underlying system # linker. # Supported since Solaris 2.6 (maybe 2.5.1?) whole_archive_flag_spec_CXX='${wl}-z ${wl}allextract$convenience ${wl}-z ${wl}defaultextract' ;; esac link_all_deplibs_CXX=yes # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. # # There doesn't appear to be a way to prevent this compiler from # explicitly linking system object files so we need to strip them # from the output so that they don't get included in the library # dependencies. output_verbose_link_cmd='templist=`$CC -G $CFLAGS -v conftest.$objext 2>&1 | grep "\-[LR]"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; echo $list' # Archives containing C++ object files must be created using # "CC -xar", where "CC" is the Sun C++ compiler. This is # necessary to make sure instantiated templates are included # in the archive. 
old_archive_cmds_CXX='$CC -xar -o $oldlib $oldobjs' ;; gcx) # Green Hills C++ Compiler archive_cmds_CXX='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib' # The C++ compiler must be used to create the archive. old_archive_cmds_CXX='$CC $LDFLAGS -archive -o $oldlib $oldobjs' ;; *) # GNU C++ compiler with Solaris linker if test "$GXX" = yes && test "$with_gnu_ld" = no; then no_undefined_flag_CXX=' ${wl}-z ${wl}defs' if $CC --version | grep -v '^2\.7' > /dev/null; then archive_cmds_CXX='$CC -shared -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib' archive_expsym_cmds_CXX='$echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~$echo "local: *; };" >> $lib.exp~ $CC -shared -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$rm $lib.exp' # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. output_verbose_link_cmd="$CC -shared $CFLAGS -v conftest.$objext 2>&1 | grep \"\-L\"" else # g++ 2.7 appears to require `-G' NOT `-shared' on this # platform. archive_cmds_CXX='$CC -G -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib' archive_expsym_cmds_CXX='$echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~$echo "local: *; };" >> $lib.exp~ $CC -G -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$rm $lib.exp' # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. output_verbose_link_cmd="$CC -G $CFLAGS -v conftest.$objext 2>&1 | grep \"\-L\"" fi hardcode_libdir_flag_spec_CXX='${wl}-R $wl$libdir' fi ;; esac ;; sysv5OpenUNIX8* | sysv5UnixWare7* | sysv5uw[78]* | unixware7*) archive_cmds_need_lc_CXX=no ;; tandem*) case $cc_basename in NCC) # NonStop-UX NCC 3.20 # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; *) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; esac ;; vxworks*) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; *) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; esac echo "$as_me:$LINENO: result: $ld_shlibs_CXX" >&5 echo "${ECHO_T}$ld_shlibs_CXX" >&6 test "$ld_shlibs_CXX" = no && can_build_shared=no GCC_CXX="$GXX" LD_CXX="$LD" cat > conftest.$ac_ext <&5 (eval $ac_compile) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; then # Parse the compiler output and extract the necessary # objects, libraries and library flags. # Sentinel used to keep track of whether or not we are before # the conftest object file. pre_test_object_deps_done=no # The `*' in the case matches for architectures that use `case' in # $output_verbose_cmd can trigger glob expansion during the loop # eval without this substitution. output_verbose_link_cmd="`$echo \"X$output_verbose_link_cmd\" | $Xsed -e \"$no_glob_subst\"`" for p in `eval $output_verbose_link_cmd`; do case $p in -L* | -R* | -l*) # Some compilers place space between "-{L,R}" and the path. # Remove the space. 
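# For example, a verbose link line may print "-L /some/dir" as two
# separate words; the bare "-L"/"-R" is remembered in $prev and glued
# back onto the following word before it is classified below.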
if test $p = "-L" \ || test $p = "-R"; then prev=$p continue else prev= fi if test "$pre_test_object_deps_done" = no; then case $p in -L* | -R*) # Internal compiler library paths should come after those # provided the user. The postdeps already come after the # user supplied libs so there is no need to process them. if test -z "$compiler_lib_search_path_CXX"; then compiler_lib_search_path_CXX="${prev}${p}" else compiler_lib_search_path_CXX="${compiler_lib_search_path_CXX} ${prev}${p}" fi ;; # The "-l" case would never come before the object being # linked, so don't bother handling this case. esac else if test -z "$postdeps_CXX"; then postdeps_CXX="${prev}${p}" else postdeps_CXX="${postdeps_CXX} ${prev}${p}" fi fi ;; *.$objext) # This assumes that the test object file only shows up # once in the compiler output. if test "$p" = "conftest.$objext"; then pre_test_object_deps_done=yes continue fi if test "$pre_test_object_deps_done" = no; then if test -z "$predep_objects_CXX"; then predep_objects_CXX="$p" else predep_objects_CXX="$predep_objects_CXX $p" fi else if test -z "$postdep_objects_CXX"; then postdep_objects_CXX="$p" else postdep_objects_CXX="$postdep_objects_CXX $p" fi fi ;; *) ;; # Ignore the rest. esac done # Clean up. rm -f a.out a.exe else echo "libtool.m4: error: problem compiling CXX test program" fi $rm -f confest.$objext case " $postdeps_CXX " in *" -lc "*) archive_cmds_need_lc_CXX=no ;; esac lt_prog_compiler_wl_CXX= lt_prog_compiler_pic_CXX= lt_prog_compiler_static_CXX= echo "$as_me:$LINENO: checking for $compiler option to produce PIC" >&5 echo $ECHO_N "checking for $compiler option to produce PIC... $ECHO_C" >&6 # C++ specific cases for pic, static, wl, etc. if test "$GXX" = yes; then lt_prog_compiler_wl_CXX='-Wl,' lt_prog_compiler_static_CXX='-static' case $host_os in aix*) # All AIX code is PIC. if test "$host_cpu" = ia64; then # AIX 5 now supports IA64 processor lt_prog_compiler_static_CXX='-Bstatic' fi ;; amigaos*) # FIXME: we need at least 68020 code to build shared libraries, but # adding the `-m68020' flag to GCC prevents building anything better, # like `-m68040'. lt_prog_compiler_pic_CXX='-m68020 -resident32 -malways-restore-a4' ;; beos* | cygwin* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*) # PIC is the default for these OSes. ;; mingw* | os2* | pw32*) # This hack is so that the source file can tell whether it is being # built for inclusion in a dll (and should export symbols for example). lt_prog_compiler_pic_CXX='-DDLL_EXPORT' ;; darwin* | rhapsody*) # PIC is the default on this platform # Common symbols not allowed in MH_DYLIB files lt_prog_compiler_pic_CXX='-fno-common' ;; *djgpp*) # DJGPP does not support shared libraries at all lt_prog_compiler_pic_CXX= ;; sysv4*MP*) if test -d /usr/nec; then lt_prog_compiler_pic_CXX=-Kconform_pic fi ;; hpux*) # PIC is the default for IA64 HP-UX and 64-bit HP-UX, but # not for PA HP-UX. case "$host_cpu" in hppa*64*|ia64*) ;; *) lt_prog_compiler_pic_CXX='-fPIC' ;; esac ;; *) lt_prog_compiler_pic_CXX='-fPIC' ;; esac else case $host_os in aix4* | aix5*) # All AIX code is PIC. 
if test "$host_cpu" = ia64; then # AIX 5 now supports IA64 processor lt_prog_compiler_static_CXX='-Bstatic' else lt_prog_compiler_static_CXX='-bnso -bI:/lib/syscalls.exp' fi ;; chorus*) case $cc_basename in cxch68) # Green Hills C++ Compiler # _LT_AC_TAGVAR(lt_prog_compiler_static, CXX)="--no_auto_instantiation -u __main -u __premain -u _abort -r $COOL_DIR/lib/libOrb.a $MVME_DIR/lib/CC/libC.a $MVME_DIR/lib/classix/libcx.s.a" ;; esac ;; dgux*) case $cc_basename in ec++) lt_prog_compiler_pic_CXX='-KPIC' ;; ghcx) # Green Hills C++ Compiler lt_prog_compiler_pic_CXX='-pic' ;; *) ;; esac ;; freebsd* | kfreebsd*-gnu) # FreeBSD uses GNU C++ ;; hpux9* | hpux10* | hpux11*) case $cc_basename in CC) lt_prog_compiler_wl_CXX='-Wl,' lt_prog_compiler_static_CXX="${ac_cv_prog_cc_wl}-a ${ac_cv_prog_cc_wl}archive" if test "$host_cpu" != ia64; then lt_prog_compiler_pic_CXX='+Z' fi ;; aCC) lt_prog_compiler_wl_CXX='-Wl,' lt_prog_compiler_static_CXX="${ac_cv_prog_cc_wl}-a ${ac_cv_prog_cc_wl}archive" case "$host_cpu" in hppa*64*|ia64*) # +Z the default ;; *) lt_prog_compiler_pic_CXX='+Z' ;; esac ;; *) ;; esac ;; irix5* | irix6* | nonstopux*) case $cc_basename in CC) lt_prog_compiler_wl_CXX='-Wl,' lt_prog_compiler_static_CXX='-non_shared' # CC pic flag -KPIC is the default. ;; *) ;; esac ;; linux*) case $cc_basename in KCC) # KAI C++ Compiler lt_prog_compiler_wl_CXX='--backend -Wl,' lt_prog_compiler_pic_CXX='-fPIC' ;; icpc) # Intel C++ lt_prog_compiler_wl_CXX='-Wl,' lt_prog_compiler_pic_CXX='-KPIC' lt_prog_compiler_static_CXX='-static' ;; cxx) # Compaq C++ # Make sure the PIC flag is empty. It appears that all Alpha # Linux and Compaq Tru64 Unix objects are PIC. lt_prog_compiler_pic_CXX= lt_prog_compiler_static_CXX='-non_shared' ;; *) ;; esac ;; lynxos*) ;; m88k*) ;; mvs*) case $cc_basename in cxx) lt_prog_compiler_pic_CXX='-W c,exportall' ;; *) ;; esac ;; netbsd*) ;; osf3* | osf4* | osf5*) case $cc_basename in KCC) lt_prog_compiler_wl_CXX='--backend -Wl,' ;; RCC) # Rational C++ 2.4.1 lt_prog_compiler_pic_CXX='-pic' ;; cxx) # Digital/Compaq C++ lt_prog_compiler_wl_CXX='-Wl,' # Make sure the PIC flag is empty. It appears that all Alpha # Linux and Compaq Tru64 Unix objects are PIC. lt_prog_compiler_pic_CXX= lt_prog_compiler_static_CXX='-non_shared' ;; *) ;; esac ;; psos*) ;; sco*) case $cc_basename in CC) lt_prog_compiler_pic_CXX='-fPIC' ;; *) ;; esac ;; solaris*) case $cc_basename in CC) # Sun C++ 4.2, 5.x and Centerline C++ lt_prog_compiler_pic_CXX='-KPIC' lt_prog_compiler_static_CXX='-Bstatic' lt_prog_compiler_wl_CXX='-Qoption ld ' ;; gcx) # Green Hills C++ Compiler lt_prog_compiler_pic_CXX='-PIC' ;; *) ;; esac ;; sunos4*) case $cc_basename in CC) # Sun C++ 4.x lt_prog_compiler_pic_CXX='-pic' lt_prog_compiler_static_CXX='-Bstatic' ;; lcc) # Lucid lt_prog_compiler_pic_CXX='-pic' ;; *) ;; esac ;; tandem*) case $cc_basename in NCC) # NonStop-UX NCC 3.20 lt_prog_compiler_pic_CXX='-KPIC' ;; *) ;; esac ;; unixware*) ;; vxworks*) ;; *) lt_prog_compiler_can_build_shared_CXX=no ;; esac fi echo "$as_me:$LINENO: result: $lt_prog_compiler_pic_CXX" >&5 echo "${ECHO_T}$lt_prog_compiler_pic_CXX" >&6 # # Check to make sure the PIC flag actually works. # if test -n "$lt_prog_compiler_pic_CXX"; then echo "$as_me:$LINENO: checking if $compiler PIC flag $lt_prog_compiler_pic_CXX works" >&5 echo $ECHO_N "checking if $compiler PIC flag $lt_prog_compiler_pic_CXX works... 
$ECHO_C" >&6 if test "${lt_prog_compiler_pic_works_CXX+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else lt_prog_compiler_pic_works_CXX=no ac_outfile=conftest.$ac_objext printf "$lt_simple_compile_test_code" > conftest.$ac_ext lt_compiler_flag="$lt_prog_compiler_pic_CXX -DPIC" # Insert the option either (1) after the last *FLAGS variable, or # (2) before a word containing "conftest.", or (3) at the end. # Note that $ac_compile itself does not contain backslashes and begins # with a dollar sign (not a hyphen), so the echo should work correctly. # The option is referenced via a variable to avoid confusing sed. lt_compile=`echo "$ac_compile" | $SED \ -e 's:.*FLAGS}? :&$lt_compiler_flag :; t' \ -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ -e 's:$: $lt_compiler_flag:'` (eval echo "\"\$as_me:10351: $lt_compile\"" >&5) (eval "$lt_compile" 2>conftest.err) ac_status=$? cat conftest.err >&5 echo "$as_me:10355: \$? = $ac_status" >&5 if (exit $ac_status) && test -s "$ac_outfile"; then # The compiler can only warn and ignore the option if not recognized # So say no if there are warnings if test ! -s conftest.err; then lt_prog_compiler_pic_works_CXX=yes fi fi $rm conftest* fi echo "$as_me:$LINENO: result: $lt_prog_compiler_pic_works_CXX" >&5 echo "${ECHO_T}$lt_prog_compiler_pic_works_CXX" >&6 if test x"$lt_prog_compiler_pic_works_CXX" = xyes; then case $lt_prog_compiler_pic_CXX in "" | " "*) ;; *) lt_prog_compiler_pic_CXX=" $lt_prog_compiler_pic_CXX" ;; esac else lt_prog_compiler_pic_CXX= lt_prog_compiler_can_build_shared_CXX=no fi fi case "$host_os" in # For platforms which do not support PIC, -DPIC is meaningless: *djgpp*) lt_prog_compiler_pic_CXX= ;; *) lt_prog_compiler_pic_CXX="$lt_prog_compiler_pic_CXX -DPIC" ;; esac echo "$as_me:$LINENO: checking if $compiler supports -c -o file.$ac_objext" >&5 echo $ECHO_N "checking if $compiler supports -c -o file.$ac_objext... $ECHO_C" >&6 if test "${lt_cv_prog_compiler_c_o_CXX+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else lt_cv_prog_compiler_c_o_CXX=no $rm -r conftest 2>/dev/null mkdir conftest cd conftest mkdir out printf "$lt_simple_compile_test_code" > conftest.$ac_ext lt_compiler_flag="-o out/conftest2.$ac_objext" # Insert the option either (1) after the last *FLAGS variable, or # (2) before a word containing "conftest.", or (3) at the end. # Note that $ac_compile itself does not contain backslashes and begins # with a dollar sign (not a hyphen), so the echo should work correctly. lt_compile=`echo "$ac_compile" | $SED \ -e 's:.*FLAGS}? :&$lt_compiler_flag :; t' \ -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ -e 's:$: $lt_compiler_flag:'` (eval echo "\"\$as_me:10411: $lt_compile\"" >&5) (eval "$lt_compile" 2>out/conftest.err) ac_status=$? cat out/conftest.err >&5 echo "$as_me:10415: \$? = $ac_status" >&5 if (exit $ac_status) && test -s out/conftest2.$ac_objext then # The compiler can only warn and ignore the option if not recognized # So say no if there are warnings if test ! -s out/conftest.err; then lt_cv_prog_compiler_c_o_CXX=yes fi fi chmod u+w . $rm conftest* # SGI C++ compiler will create directory out/ii_files/ for # template instantiation test -d out/ii_files && $rm out/ii_files/* && rmdir out/ii_files $rm out/* && rmdir out cd .. 
rmdir conftest $rm conftest* fi echo "$as_me:$LINENO: result: $lt_cv_prog_compiler_c_o_CXX" >&5 echo "${ECHO_T}$lt_cv_prog_compiler_c_o_CXX" >&6 hard_links="nottested" if test "$lt_cv_prog_compiler_c_o_CXX" = no && test "$need_locks" != no; then # do not overwrite the value of need_locks provided by the user echo "$as_me:$LINENO: checking if we can lock with hard links" >&5 echo $ECHO_N "checking if we can lock with hard links... $ECHO_C" >&6 hard_links=yes $rm conftest* ln conftest.a conftest.b 2>/dev/null && hard_links=no touch conftest.a ln conftest.a conftest.b 2>&5 || hard_links=no ln conftest.a conftest.b 2>/dev/null && hard_links=no echo "$as_me:$LINENO: result: $hard_links" >&5 echo "${ECHO_T}$hard_links" >&6 if test "$hard_links" = no; then { echo "$as_me:$LINENO: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&5 echo "$as_me: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&2;} need_locks=warn fi else need_locks=no fi echo "$as_me:$LINENO: checking whether the $compiler linker ($LD) supports shared libraries" >&5 echo $ECHO_N "checking whether the $compiler linker ($LD) supports shared libraries... $ECHO_C" >&6 export_symbols_cmds_CXX='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' case $host_os in aix4* | aix5*) # If we're using GNU nm, then we don't want the "-C" option. # -C means demangle to AIX nm, but means don't demangle with GNU nm if $NM -V 2>&1 | grep 'GNU' > /dev/null; then export_symbols_cmds_CXX='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$2 == "T") || (\$2 == "D") || (\$2 == "B")) && (substr(\$3,1,1) != ".")) { print \$3 } }'\'' | sort -u > $export_symbols' else export_symbols_cmds_CXX='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$2 == "T") || (\$2 == "D") || (\$2 == "B")) && (substr(\$3,1,1) != ".")) { print \$3 } }'\'' | sort -u > $export_symbols' fi ;; pw32*) export_symbols_cmds_CXX="$ltdll_cmds" ;; cygwin* | mingw*) export_symbols_cmds_CXX='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGS] /s/.* \([^ ]*\)/\1 DATA/'\'' | $SED -e '\''/^[AITW] /s/.* //'\'' | sort | uniq > $export_symbols' ;; *) export_symbols_cmds_CXX='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' ;; esac echo "$as_me:$LINENO: result: $ld_shlibs_CXX" >&5 echo "${ECHO_T}$ld_shlibs_CXX" >&6 test "$ld_shlibs_CXX" = no && can_build_shared=no variables_saved_for_relink="PATH $shlibpath_var $runpath_var" if test "$GCC" = yes; then variables_saved_for_relink="$variables_saved_for_relink GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH" fi # # Do we need to explicitly link libc? # case "x$archive_cmds_need_lc_CXX" in x|xyes) # Assume -lc should be added archive_cmds_need_lc_CXX=yes if test "$enable_shared" = yes && test "$GCC" = yes; then case $archive_cmds_CXX in *'~'*) # FIXME: we may have to deal with multi-command sequences. ;; '$CC '*) # Test whether the compiler implicitly links with -lc since on some # systems, -lgcc has to come before -lc. If gcc already passes -lc # to ld, don't add -lc before -lgcc. echo "$as_me:$LINENO: checking whether -lc should be explicitly linked in" >&5 echo $ECHO_N "checking whether -lc should be explicitly linked in... $ECHO_C" >&6 $rm conftest* printf "$lt_simple_compile_test_code" > conftest.$ac_ext if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 (eval $ac_compile) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 (exit $ac_status); } 2>conftest.err; then soname=conftest lib=conftest libobjs=conftest.$ac_objext deplibs= wl=$lt_prog_compiler_wl_CXX compiler_flags=-v linker_flags=-v verstring= output_objdir=. libname=conftest lt_save_allow_undefined_flag=$allow_undefined_flag_CXX allow_undefined_flag_CXX= if { (eval echo "$as_me:$LINENO: \"$archive_cmds_CXX 2\>\&1 \| grep \" -lc \" \>/dev/null 2\>\&1\"") >&5 (eval $archive_cmds_CXX 2\>\&1 \| grep \" -lc \" \>/dev/null 2\>\&1) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } then archive_cmds_need_lc_CXX=no else archive_cmds_need_lc_CXX=yes fi allow_undefined_flag_CXX=$lt_save_allow_undefined_flag else cat conftest.err 1>&5 fi $rm conftest* echo "$as_me:$LINENO: result: $archive_cmds_need_lc_CXX" >&5 echo "${ECHO_T}$archive_cmds_need_lc_CXX" >&6 ;; esac fi ;; esac echo "$as_me:$LINENO: checking dynamic linker characteristics" >&5 echo $ECHO_N "checking dynamic linker characteristics... $ECHO_C" >&6 library_names_spec= libname_spec='lib$name' soname_spec= shrext_cmds=".so" postinstall_cmds= postuninstall_cmds= finish_cmds= finish_eval= shlibpath_var= shlibpath_overrides_runpath=unknown version_type=none dynamic_linker="$host_os ld.so" sys_lib_dlsearch_path_spec="/lib /usr/lib" if test "$GCC" = yes; then sys_lib_search_path_spec=`$CC -print-search-dirs | grep "^libraries:" | $SED -e "s/^libraries://" -e "s,=/,/,g"` if echo "$sys_lib_search_path_spec" | grep ';' >/dev/null ; then # if the path contains ";" then we assume it to be the separator # otherwise default to the standard path separator (i.e. ":") - it is # assumed that no part of a normal pathname contains ";" but that should # okay in the real world where ";" in dirpaths is itself problematic. sys_lib_search_path_spec=`echo "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` else sys_lib_search_path_spec=`echo "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` fi else sys_lib_search_path_spec="/lib /usr/lib /usr/local/lib" fi need_lib_prefix=unknown hardcode_into_libs=no # when you set need_version to no, make sure it does not cause -set_version # flags to be left without arguments need_version=unknown case $host_os in aix3*) version_type=linux library_names_spec='${libname}${release}${shared_ext}$versuffix $libname.a' shlibpath_var=LIBPATH # AIX 3 has no versioning support, so we append a major version to the name. soname_spec='${libname}${release}${shared_ext}$major' ;; aix4* | aix5*) version_type=linux need_lib_prefix=no need_version=no hardcode_into_libs=yes if test "$host_cpu" = ia64; then # AIX 5 supports IA64 library_names_spec='${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext}$versuffix $libname${shared_ext}' shlibpath_var=LD_LIBRARY_PATH else # With GCC up to 2.95.x, collect2 would create an import file # for dependence libraries. The import file would start with # the line `#! .'. This would cause the generated library to # depend on `.', always an invalid library. This was fixed in # development snapshots of GCC prior to 3.0. case $host_os in aix4 | aix4.[01] | aix4.[01].*) if { echo '#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 97)' echo ' yes ' echo '#endif'; } | ${CC} -E - | grep yes > /dev/null; then : else can_build_shared=no fi ;; esac # AIX (on Power*) has no versioning support, so currently we can not hardcode correct # soname into executable. Probably we can add versioning support to # collect2, so additional links can be useful in future. 
if test "$aix_use_runtimelinking" = yes; then # If using run time linking (on AIX 4.2 or later) use lib.so # instead of lib.a to let people know that these are not # typical AIX shared libraries. library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' else # We preserve .a as extension for shared libraries through AIX4.2 # and later when we are not doing run time linking. library_names_spec='${libname}${release}.a $libname.a' soname_spec='${libname}${release}${shared_ext}$major' fi shlibpath_var=LIBPATH fi ;; amigaos*) library_names_spec='$libname.ixlibrary $libname.a' # Create ${libname}_ixlibrary.a entries in /sys/libs. finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`$echo "X$lib" | $Xsed -e '\''s%^.*/\([^/]*\)\.ixlibrary$%\1%'\''`; test $rm /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done' ;; beos*) library_names_spec='${libname}${shared_ext}' dynamic_linker="$host_os ld.so" shlibpath_var=LIBRARY_PATH ;; bsdi4*) version_type=linux need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' finish_cmds='PATH="\$PATH:/sbin" ldconfig $libdir' shlibpath_var=LD_LIBRARY_PATH sys_lib_search_path_spec="/shlib /usr/lib /usr/X11/lib /usr/contrib/lib /lib /usr/local/lib" sys_lib_dlsearch_path_spec="/shlib /usr/lib /usr/local/lib" # the default ld.so.conf also contains /usr/contrib/lib and # /usr/X11R6/lib (/usr/X11 is a link to /usr/X11R6), but let us allow # libtool to hard-code these into programs ;; cygwin* | mingw* | pw32*) version_type=windows shrext_cmds=".dll" need_version=no need_lib_prefix=no case $GCC,$host_os in yes,cygwin* | yes,mingw* | yes,pw32*) library_names_spec='$libname.dll.a' # DLL is installed to $(libdir)/../bin by postinstall_cmds postinstall_cmds='base_file=`basename \${file}`~ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i;echo \$dlname'\''`~ dldir=$destdir/`dirname \$dlpath`~ test -d \$dldir || mkdir -p \$dldir~ $install_prog $dir/$dlname \$dldir/$dlname' postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~ dlpath=$dir/\$dldll~ $rm \$dlpath' shlibpath_overrides_runpath=yes case $host_os in cygwin*) # Cygwin DLLs use 'cyg' prefix rather than 'lib' soname_spec='`echo ${libname} | sed -e 's/^lib/cyg/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' sys_lib_search_path_spec="/usr/lib /lib/w32api /lib /usr/local/lib" ;; mingw*) # MinGW DLLs use traditional 'lib' prefix soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' sys_lib_search_path_spec=`$CC -print-search-dirs | grep "^libraries:" | $SED -e "s/^libraries://" -e "s,=/,/,g"` if echo "$sys_lib_search_path_spec" | grep ';[c-zC-Z]:/' >/dev/null; then # It is most probably a Windows format PATH printed by # mingw gcc, but we are running on Cygwin. Gcc prints its search # path with ; separators, and with drive letters. We can handle the # drive letters (cygwin fileutils understands them), so leave them, # especially as we might pass files found there to a mingw objdump, # which wouldn't understand a cygwinified path. Ahh. 
sys_lib_search_path_spec=`echo "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` else sys_lib_search_path_spec=`echo "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` fi ;; pw32*) # pw32 DLLs use 'pw' prefix rather than 'lib' library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/./-/g'`${versuffix}${shared_ext}' ;; esac ;; *) library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib' ;; esac dynamic_linker='Win32 ld.exe' # FIXME: first we should search . and the directory the executable is in shlibpath_var=PATH ;; darwin* | rhapsody*) dynamic_linker="$host_os dyld" version_type=darwin need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${versuffix}$shared_ext ${libname}${release}${major}$shared_ext ${libname}$shared_ext' soname_spec='${libname}${release}${major}$shared_ext' shlibpath_overrides_runpath=yes shlibpath_var=DYLD_LIBRARY_PATH shrext_cmds='$(test .$module = .yes && echo .so || echo .dylib)' # Apple's gcc prints 'gcc -print-search-dirs' doesn't operate the same. if test "$GCC" = yes; then sys_lib_search_path_spec=`$CC -print-search-dirs | tr "\n" "$PATH_SEPARATOR" | sed -e 's/libraries:/@libraries:/' | tr "@" "\n" | grep "^libraries:" | sed -e "s/^libraries://" -e "s,=/,/,g" -e "s,$PATH_SEPARATOR, ,g" -e "s,.*,& /lib /usr/lib /usr/local/lib,g"` else sys_lib_search_path_spec='/lib /usr/lib /usr/local/lib' fi sys_lib_dlsearch_path_spec='/usr/local/lib /lib /usr/lib' ;; dgux*) version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname$shared_ext' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH ;; freebsd1*) dynamic_linker=no ;; kfreebsd*-gnu) version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes dynamic_linker='GNU ld.so' ;; freebsd*) objformat=`test -x /usr/bin/objformat && /usr/bin/objformat || echo aout` version_type=freebsd-$objformat case $version_type in freebsd-elf*) library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}' need_version=no need_lib_prefix=no ;; freebsd-*) library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix' need_version=yes ;; esac shlibpath_var=LD_LIBRARY_PATH case $host_os in freebsd2*) shlibpath_overrides_runpath=yes ;; freebsd3.01* | freebsdelf3.01*) shlibpath_overrides_runpath=yes hardcode_into_libs=yes ;; *) # from 3.2 on shlibpath_overrides_runpath=no hardcode_into_libs=yes ;; esac ;; gnu*) version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH hardcode_into_libs=yes ;; hpux9* | hpux10* | hpux11*) # Give a soname corresponding to the major version so that dld.sl refuses to # link against other versions. 
version_type=sunos need_lib_prefix=no need_version=no case "$host_cpu" in ia64*) shrext_cmds='.so' hardcode_into_libs=yes dynamic_linker="$host_os dld.so" shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' if test "X$HPUX_IA64_MODE" = X32; then sys_lib_search_path_spec="/usr/lib/hpux32 /usr/local/lib/hpux32 /usr/local/lib" else sys_lib_search_path_spec="/usr/lib/hpux64 /usr/local/lib/hpux64" fi sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec ;; hppa*64*) shrext_cmds='.sl' hardcode_into_libs=yes dynamic_linker="$host_os dld.sl" shlibpath_var=LD_LIBRARY_PATH # How should we handle SHLIB_PATH shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' sys_lib_search_path_spec="/usr/lib/pa20_64 /usr/ccs/lib/pa20_64" sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec ;; *) shrext_cmds='.sl' dynamic_linker="$host_os dld.sl" shlibpath_var=SHLIB_PATH shlibpath_overrides_runpath=no # +s is required to enable SHLIB_PATH library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' ;; esac # HP-UX runs *really* slowly unless shared libraries are mode 555. postinstall_cmds='chmod 555 $lib' ;; irix5* | irix6* | nonstopux*) case $host_os in nonstopux*) version_type=nonstopux ;; *) if test "$lt_cv_prog_gnu_ld" = yes; then version_type=linux else version_type=irix fi ;; esac need_lib_prefix=no need_version=no soname_spec='${libname}${release}${shared_ext}$major' library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext} $libname${shared_ext}' case $host_os in irix5* | nonstopux*) libsuff= shlibsuff= ;; *) case $LD in # libtool.m4 will add one of these switches to LD *-32|*"-32 "|*-melf32bsmip|*"-melf32bsmip ") libsuff= shlibsuff= libmagic=32-bit;; *-n32|*"-n32 "|*-melf32bmipn32|*"-melf32bmipn32 ") libsuff=32 shlibsuff=N32 libmagic=N32;; *-64|*"-64 "|*-melf64bmip|*"-melf64bmip ") libsuff=64 shlibsuff=64 libmagic=64-bit;; *) libsuff= shlibsuff= libmagic=never-match;; esac ;; esac shlibpath_var=LD_LIBRARY${shlibsuff}_PATH shlibpath_overrides_runpath=no sys_lib_search_path_spec="/usr/lib${libsuff} /lib${libsuff} /usr/local/lib${libsuff}" sys_lib_dlsearch_path_spec="/usr/lib${libsuff} /lib${libsuff}" hardcode_into_libs=yes ;; # No shared lib support for Linux oldld, aout, or coff. linux*oldld* | linux*aout* | linux*coff*) dynamic_linker=no ;; # This must be Linux ELF. linux*) version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no # This implies no fast_install, which is unacceptable. # Some rework will be needed to allow for fast_install # before this can be enabled. 
hardcode_into_libs=yes # find out which ABI we are using libsuff= case "$host_cpu" in x86_64*|s390x*|powerpc64*) echo '#line 10922 "configure"' > conftest.$ac_ext if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 (eval $ac_compile) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; then case `/usr/bin/file conftest.$ac_objext` in *64-bit*) libsuff=64 sys_lib_search_path_spec="/lib${libsuff} /usr/lib${libsuff} /usr/local/lib${libsuff}" ;; esac fi rm -rf conftest* ;; esac # Append ld.so.conf contents to the search path if test -f /etc/ld.so.conf; then lt_ld_extra=`$SED -e 's/:,\t/ /g;s/=^=*$//;s/=^= * / /g' /etc/ld.so.conf | tr '\n' ' '` sys_lib_dlsearch_path_spec="/lib${libsuff} /usr/lib${libsuff} $lt_ld_extra" fi # We used to test for /lib/ld.so.1 and disable shared libraries on # powerpc, because MkLinux only supported shared libraries with the # GNU dynamic linker. Since this was broken with cross compilers, # most powerpc-linux boxes support dynamic linking these days and # people can always --disable-shared, the test was removed, and we # assume the GNU/Linux dynamic linker is in use. dynamic_linker='GNU/Linux ld.so' ;; knetbsd*-gnu) version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes dynamic_linker='GNU ld.so' ;; netbsd*) version_type=sunos need_lib_prefix=no need_version=no if echo __ELF__ | $CC -E - | grep __ELF__ >/dev/null; then library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' dynamic_linker='NetBSD (a.out) ld.so' else library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' dynamic_linker='NetBSD ld.elf_so' fi shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes hardcode_into_libs=yes ;; newsos6) version_type=linux library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes ;; nto-qnx*) version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes ;; openbsd*) version_type=sunos need_lib_prefix=no need_version=yes library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' shlibpath_var=LD_LIBRARY_PATH if test -z "`echo __ELF__ | $CC -E - | grep __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then case $host_os in openbsd2.[89] | openbsd2.[89].*) shlibpath_overrides_runpath=no ;; *) shlibpath_overrides_runpath=yes ;; esac else shlibpath_overrides_runpath=yes fi ;; os2*) libname_spec='$name' shrext_cmds=".dll" need_lib_prefix=no library_names_spec='$libname${shared_ext} $libname.a' dynamic_linker='OS/2 ld.exe' shlibpath_var=LIBPATH ;; osf3* | osf4* | osf5*) version_type=osf need_lib_prefix=no need_version=no soname_spec='${libname}${release}${shared_ext}$major' 
library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' shlibpath_var=LD_LIBRARY_PATH sys_lib_search_path_spec="/usr/shlib /usr/ccs/lib /usr/lib/cmplrs/cc /usr/lib /usr/local/lib /var/shlib" sys_lib_dlsearch_path_spec="$sys_lib_search_path_spec" ;; sco3.2v5*) version_type=osf soname_spec='${libname}${release}${shared_ext}$major' library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' shlibpath_var=LD_LIBRARY_PATH ;; solaris*) version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes hardcode_into_libs=yes # ldd complains unless libraries are executable postinstall_cmds='chmod +x $lib' ;; sunos4*) version_type=sunos library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' finish_cmds='PATH="\$PATH:/usr/etc" ldconfig $libdir' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes if test "$with_gnu_ld" = yes; then need_lib_prefix=no fi need_version=yes ;; sysv4 | sysv4.2uw2* | sysv4.3* | sysv5*) version_type=linux library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH case $host_vendor in sni) shlibpath_overrides_runpath=no need_lib_prefix=no export_dynamic_flag_spec='${wl}-Blargedynsym' runpath_var=LD_RUN_PATH ;; siemens) need_lib_prefix=no ;; motorola) need_lib_prefix=no need_version=no shlibpath_overrides_runpath=no sys_lib_search_path_spec='/lib /usr/lib /usr/ccs/lib' ;; esac ;; sysv4*MP*) if test -d /usr/nec ;then version_type=linux library_names_spec='$libname${shared_ext}.$versuffix $libname${shared_ext}.$major $libname${shared_ext}' soname_spec='$libname${shared_ext}.$major' shlibpath_var=LD_LIBRARY_PATH fi ;; uts4*) version_type=linux library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH ;; *) dynamic_linker=no ;; esac echo "$as_me:$LINENO: result: $dynamic_linker" >&5 echo "${ECHO_T}$dynamic_linker" >&6 test "$dynamic_linker" = no && can_build_shared=no echo "$as_me:$LINENO: checking how to hardcode library paths into programs" >&5 echo $ECHO_N "checking how to hardcode library paths into programs... $ECHO_C" >&6 hardcode_action_CXX= if test -n "$hardcode_libdir_flag_spec_CXX" || \ test -n "$runpath_var CXX" || \ test "X$hardcode_automatic_CXX"="Xyes" ; then # We can hardcode non-existant directories. if test "$hardcode_direct_CXX" != no && # If the only mechanism to avoid hardcoding is shlibpath_var, we # have to relink, otherwise we might link with an installed library # when we should be linking with a yet-to-be-installed one ## test "$_LT_AC_TAGVAR(hardcode_shlibpath_var, CXX)" != no && test "$hardcode_minus_L_CXX" != no; then # Linking always hardcodes the temporary library directory. hardcode_action_CXX=relink else # We can link without hardcoding, and we can hardcode nonexisting dirs. hardcode_action_CXX=immediate fi else # We cannot hardcode anything, or else we can only hardcode existing # directories. 
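# Added summary note: hardcode_action_CXX ends up as 'relink' when linking
# always bakes the temporary library directory into the binary, 'immediate'
# when not-yet-existing install directories can be hardcoded safely, and
# 'unsupported' here, where no usable hardcoding mechanism exists at all.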
hardcode_action_CXX=unsupported fi echo "$as_me:$LINENO: result: $hardcode_action_CXX" >&5 echo "${ECHO_T}$hardcode_action_CXX" >&6 if test "$hardcode_action_CXX" = relink; then # Fast installation is not supported enable_fast_install=no elif test "$shlibpath_overrides_runpath" = yes || test "$enable_shared" = no; then # Fast installation is not necessary enable_fast_install=needless fi striplib= old_striplib= echo "$as_me:$LINENO: checking whether stripping libraries is possible" >&5 echo $ECHO_N "checking whether stripping libraries is possible... $ECHO_C" >&6 if test -n "$STRIP" && $STRIP -V 2>&1 | grep "GNU strip" >/dev/null; then test -z "$old_striplib" && old_striplib="$STRIP --strip-debug" test -z "$striplib" && striplib="$STRIP --strip-unneeded" echo "$as_me:$LINENO: result: yes" >&5 echo "${ECHO_T}yes" >&6 else # FIXME - insert some real tests, host_os isn't really good enough case $host_os in darwin*) if test -n "$STRIP" ; then striplib="$STRIP -x" echo "$as_me:$LINENO: result: yes" >&5 echo "${ECHO_T}yes" >&6 else echo "$as_me:$LINENO: result: no" >&5 echo "${ECHO_T}no" >&6 fi ;; *) echo "$as_me:$LINENO: result: no" >&5 echo "${ECHO_T}no" >&6 ;; esac fi if test "x$enable_dlopen" != xyes; then enable_dlopen=unknown enable_dlopen_self=unknown enable_dlopen_self_static=unknown else lt_cv_dlopen=no lt_cv_dlopen_libs= case $host_os in beos*) lt_cv_dlopen="load_add_on" lt_cv_dlopen_libs= lt_cv_dlopen_self=yes ;; mingw* | pw32*) lt_cv_dlopen="LoadLibrary" lt_cv_dlopen_libs= ;; cygwin*) lt_cv_dlopen="dlopen" lt_cv_dlopen_libs= ;; darwin*) # if libdl is installed we need to link against it echo "$as_me:$LINENO: checking for dlopen in -ldl" >&5 echo $ECHO_N "checking for dlopen in -ldl... $ECHO_C" >&6 if test "${ac_cv_lib_dl_dlopen+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-ldl $LIBS" cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ /* Override any gcc2 internal prototype to avoid an error. */ #ifdef __cplusplus extern "C" #endif /* We use char because int might match the return type of a gcc2 builtin and then its argument prototype would still apply. */ char dlopen (); int main () { dlopen (); ; return 0; } _ACEOF rm -f conftest.$ac_objext conftest$ac_exeext if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 (eval $ac_link) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_cxx_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest$ac_exeext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 (exit $ac_status); }; }; then ac_cv_lib_dl_dlopen=yes else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_cv_lib_dl_dlopen=no fi rm -f conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi echo "$as_me:$LINENO: result: $ac_cv_lib_dl_dlopen" >&5 echo "${ECHO_T}$ac_cv_lib_dl_dlopen" >&6 if test $ac_cv_lib_dl_dlopen = yes; then lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl" else lt_cv_dlopen="dyld" lt_cv_dlopen_libs= lt_cv_dlopen_self=yes fi ;; *) echo "$as_me:$LINENO: checking for shl_load" >&5 echo $ECHO_N "checking for shl_load... $ECHO_C" >&6 if test "${ac_cv_func_shl_load+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ /* Define shl_load to an innocuous variant, in case declares shl_load. For example, HP-UX 11i declares gettimeofday. */ #define shl_load innocuous_shl_load /* System header to define __stub macros and hopefully few prototypes, which can conflict with char shl_load (); below. Prefer to if __STDC__ is defined, since exists even on freestanding compilers. */ #ifdef __STDC__ # include #else # include #endif #undef shl_load /* Override any gcc2 internal prototype to avoid an error. */ #ifdef __cplusplus extern "C" { #endif /* We use char because int might match the return type of a gcc2 builtin and then its argument prototype would still apply. */ char shl_load (); /* The GNU C library defines this for functions which it implements to always fail with ENOSYS. Some functions are actually named something starting with __ and the normal name is an alias. */ #if defined (__stub_shl_load) || defined (__stub___shl_load) choke me #else char (*f) () = shl_load; #endif #ifdef __cplusplus } #endif int main () { return f != shl_load; ; return 0; } _ACEOF rm -f conftest.$ac_objext conftest$ac_exeext if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 (eval $ac_link) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_cxx_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest$ac_exeext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then ac_cv_func_shl_load=yes else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_cv_func_shl_load=no fi rm -f conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext fi echo "$as_me:$LINENO: result: $ac_cv_func_shl_load" >&5 echo "${ECHO_T}$ac_cv_func_shl_load" >&6 if test $ac_cv_func_shl_load = yes; then lt_cv_dlopen="shl_load" else echo "$as_me:$LINENO: checking for shl_load in -ldld" >&5 echo $ECHO_N "checking for shl_load in -ldld... $ECHO_C" >&6 if test "${ac_cv_lib_dld_shl_load+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-ldld $LIBS" cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ /* Override any gcc2 internal prototype to avoid an error. 
*/ #ifdef __cplusplus extern "C" #endif /* We use char because int might match the return type of a gcc2 builtin and then its argument prototype would still apply. */ char shl_load (); int main () { shl_load (); ; return 0; } _ACEOF rm -f conftest.$ac_objext conftest$ac_exeext if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 (eval $ac_link) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_cxx_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest$ac_exeext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then ac_cv_lib_dld_shl_load=yes else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_cv_lib_dld_shl_load=no fi rm -f conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi echo "$as_me:$LINENO: result: $ac_cv_lib_dld_shl_load" >&5 echo "${ECHO_T}$ac_cv_lib_dld_shl_load" >&6 if test $ac_cv_lib_dld_shl_load = yes; then lt_cv_dlopen="shl_load" lt_cv_dlopen_libs="-dld" else echo "$as_me:$LINENO: checking for dlopen" >&5 echo $ECHO_N "checking for dlopen... $ECHO_C" >&6 if test "${ac_cv_func_dlopen+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ /* Define dlopen to an innocuous variant, in case declares dlopen. For example, HP-UX 11i declares gettimeofday. */ #define dlopen innocuous_dlopen /* System header to define __stub macros and hopefully few prototypes, which can conflict with char dlopen (); below. Prefer to if __STDC__ is defined, since exists even on freestanding compilers. */ #ifdef __STDC__ # include #else # include #endif #undef dlopen /* Override any gcc2 internal prototype to avoid an error. */ #ifdef __cplusplus extern "C" { #endif /* We use char because int might match the return type of a gcc2 builtin and then its argument prototype would still apply. */ char dlopen (); /* The GNU C library defines this for functions which it implements to always fail with ENOSYS. Some functions are actually named something starting with __ and the normal name is an alias. */ #if defined (__stub_dlopen) || defined (__stub___dlopen) choke me #else char (*f) () = dlopen; #endif #ifdef __cplusplus } #endif int main () { return f != dlopen; ; return 0; } _ACEOF rm -f conftest.$ac_objext conftest$ac_exeext if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 (eval $ac_link) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_cxx_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest$ac_exeext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 (exit $ac_status); }; }; then ac_cv_func_dlopen=yes else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_cv_func_dlopen=no fi rm -f conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext fi echo "$as_me:$LINENO: result: $ac_cv_func_dlopen" >&5 echo "${ECHO_T}$ac_cv_func_dlopen" >&6 if test $ac_cv_func_dlopen = yes; then lt_cv_dlopen="dlopen" else echo "$as_me:$LINENO: checking for dlopen in -ldl" >&5 echo $ECHO_N "checking for dlopen in -ldl... $ECHO_C" >&6 if test "${ac_cv_lib_dl_dlopen+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-ldl $LIBS" cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ /* Override any gcc2 internal prototype to avoid an error. */ #ifdef __cplusplus extern "C" #endif /* We use char because int might match the return type of a gcc2 builtin and then its argument prototype would still apply. */ char dlopen (); int main () { dlopen (); ; return 0; } _ACEOF rm -f conftest.$ac_objext conftest$ac_exeext if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 (eval $ac_link) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_cxx_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest$ac_exeext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then ac_cv_lib_dl_dlopen=yes else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_cv_lib_dl_dlopen=no fi rm -f conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi echo "$as_me:$LINENO: result: $ac_cv_lib_dl_dlopen" >&5 echo "${ECHO_T}$ac_cv_lib_dl_dlopen" >&6 if test $ac_cv_lib_dl_dlopen = yes; then lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl" else echo "$as_me:$LINENO: checking for dlopen in -lsvld" >&5 echo $ECHO_N "checking for dlopen in -lsvld... $ECHO_C" >&6 if test "${ac_cv_lib_svld_dlopen+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lsvld $LIBS" cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ /* Override any gcc2 internal prototype to avoid an error. */ #ifdef __cplusplus extern "C" #endif /* We use char because int might match the return type of a gcc2 builtin and then its argument prototype would still apply. */ char dlopen (); int main () { dlopen (); ; return 0; } _ACEOF rm -f conftest.$ac_objext conftest$ac_exeext if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 (eval $ac_link) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_cxx_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest$ac_exeext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? 
echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then ac_cv_lib_svld_dlopen=yes else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_cv_lib_svld_dlopen=no fi rm -f conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi echo "$as_me:$LINENO: result: $ac_cv_lib_svld_dlopen" >&5 echo "${ECHO_T}$ac_cv_lib_svld_dlopen" >&6 if test $ac_cv_lib_svld_dlopen = yes; then lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-lsvld" else echo "$as_me:$LINENO: checking for dld_link in -ldld" >&5 echo $ECHO_N "checking for dld_link in -ldld... $ECHO_C" >&6 if test "${ac_cv_lib_dld_dld_link+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-ldld $LIBS" cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ /* Override any gcc2 internal prototype to avoid an error. */ #ifdef __cplusplus extern "C" #endif /* We use char because int might match the return type of a gcc2 builtin and then its argument prototype would still apply. */ char dld_link (); int main () { dld_link (); ; return 0; } _ACEOF rm -f conftest.$ac_objext conftest$ac_exeext if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 (eval $ac_link) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_cxx_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest$ac_exeext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then ac_cv_lib_dld_dld_link=yes else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_cv_lib_dld_dld_link=no fi rm -f conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi echo "$as_me:$LINENO: result: $ac_cv_lib_dld_dld_link" >&5 echo "${ECHO_T}$ac_cv_lib_dld_dld_link" >&6 if test $ac_cv_lib_dld_dld_link = yes; then lt_cv_dlopen="dld_link" lt_cv_dlopen_libs="-dld" fi fi fi fi fi fi ;; esac if test "x$lt_cv_dlopen" != xno; then enable_dlopen=yes else enable_dlopen=no fi case $lt_cv_dlopen in dlopen) save_CPPFLAGS="$CPPFLAGS" test "x$ac_cv_header_dlfcn_h" = xyes && CPPFLAGS="$CPPFLAGS -DHAVE_DLFCN_H" save_LDFLAGS="$LDFLAGS" eval LDFLAGS=\"\$LDFLAGS $export_dynamic_flag_spec\" save_LIBS="$LIBS" LIBS="$lt_cv_dlopen_libs $LIBS" echo "$as_me:$LINENO: checking whether a program can dlopen itself" >&5 echo $ECHO_N "checking whether a program can dlopen itself... $ECHO_C" >&6 if test "${lt_cv_dlopen_self+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else if test "$cross_compiling" = yes; then : lt_cv_dlopen_self=cross else lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2 lt_status=$lt_dlunknown cat > conftest.$ac_ext < #endif #include #ifdef RTLD_GLOBAL # define LT_DLGLOBAL RTLD_GLOBAL #else # ifdef DL_GLOBAL # define LT_DLGLOBAL DL_GLOBAL # else # define LT_DLGLOBAL 0 # endif #endif /* We may have to define LT_DLLAZY_OR_NOW in the command line if we find out it does not work in some platform. 
*/ #ifndef LT_DLLAZY_OR_NOW # ifdef RTLD_LAZY # define LT_DLLAZY_OR_NOW RTLD_LAZY # else # ifdef DL_LAZY # define LT_DLLAZY_OR_NOW DL_LAZY # else # ifdef RTLD_NOW # define LT_DLLAZY_OR_NOW RTLD_NOW # else # ifdef DL_NOW # define LT_DLLAZY_OR_NOW DL_NOW # else # define LT_DLLAZY_OR_NOW 0 # endif # endif # endif # endif #endif #ifdef __cplusplus extern "C" void exit (int); #endif void fnord() { int i=42;} int main () { void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); int status = $lt_dlunknown; if (self) { if (dlsym (self,"fnord")) status = $lt_dlno_uscore; else if (dlsym( self,"_fnord")) status = $lt_dlneed_uscore; /* dlclose (self); */ } exit (status); } EOF if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 (eval $ac_link) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && test -s conftest${ac_exeext} 2>/dev/null; then (./conftest; exit; ) 2>/dev/null lt_status=$? case x$lt_status in x$lt_dlno_uscore) lt_cv_dlopen_self=yes ;; x$lt_dlneed_uscore) lt_cv_dlopen_self=yes ;; x$lt_unknown|x*) lt_cv_dlopen_self=no ;; esac else : # compilation failed lt_cv_dlopen_self=no fi fi rm -fr conftest* fi echo "$as_me:$LINENO: result: $lt_cv_dlopen_self" >&5 echo "${ECHO_T}$lt_cv_dlopen_self" >&6 if test "x$lt_cv_dlopen_self" = xyes; then LDFLAGS="$LDFLAGS $link_static_flag" echo "$as_me:$LINENO: checking whether a statically linked program can dlopen itself" >&5 echo $ECHO_N "checking whether a statically linked program can dlopen itself... $ECHO_C" >&6 if test "${lt_cv_dlopen_self_static+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else if test "$cross_compiling" = yes; then : lt_cv_dlopen_self_static=cross else lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2 lt_status=$lt_dlunknown cat > conftest.$ac_ext < #endif #include #ifdef RTLD_GLOBAL # define LT_DLGLOBAL RTLD_GLOBAL #else # ifdef DL_GLOBAL # define LT_DLGLOBAL DL_GLOBAL # else # define LT_DLGLOBAL 0 # endif #endif /* We may have to define LT_DLLAZY_OR_NOW in the command line if we find out it does not work in some platform. */ #ifndef LT_DLLAZY_OR_NOW # ifdef RTLD_LAZY # define LT_DLLAZY_OR_NOW RTLD_LAZY # else # ifdef DL_LAZY # define LT_DLLAZY_OR_NOW DL_LAZY # else # ifdef RTLD_NOW # define LT_DLLAZY_OR_NOW RTLD_NOW # else # ifdef DL_NOW # define LT_DLLAZY_OR_NOW DL_NOW # else # define LT_DLLAZY_OR_NOW 0 # endif # endif # endif # endif #endif #ifdef __cplusplus extern "C" void exit (int); #endif void fnord() { int i=42;} int main () { void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); int status = $lt_dlunknown; if (self) { if (dlsym (self,"fnord")) status = $lt_dlno_uscore; else if (dlsym( self,"_fnord")) status = $lt_dlneed_uscore; /* dlclose (self); */ } exit (status); } EOF if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 (eval $ac_link) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && test -s conftest${ac_exeext} 2>/dev/null; then (./conftest; exit; ) 2>/dev/null lt_status=$? 
case x$lt_status in x$lt_dlno_uscore) lt_cv_dlopen_self_static=yes ;; x$lt_dlneed_uscore) lt_cv_dlopen_self_static=yes ;; x$lt_unknown|x*) lt_cv_dlopen_self_static=no ;; esac else : # compilation failed lt_cv_dlopen_self_static=no fi fi rm -fr conftest* fi echo "$as_me:$LINENO: result: $lt_cv_dlopen_self_static" >&5 echo "${ECHO_T}$lt_cv_dlopen_self_static" >&6 fi CPPFLAGS="$save_CPPFLAGS" LDFLAGS="$save_LDFLAGS" LIBS="$save_LIBS" ;; esac case $lt_cv_dlopen_self in yes|no) enable_dlopen_self=$lt_cv_dlopen_self ;; *) enable_dlopen_self=unknown ;; esac case $lt_cv_dlopen_self_static in yes|no) enable_dlopen_self_static=$lt_cv_dlopen_self_static ;; *) enable_dlopen_self_static=unknown ;; esac fi # The else clause should only fire when bootstrapping the # libtool distribution, otherwise you forgot to ship ltmain.sh # with your package, and you will get complaints that there are # no rules to generate ltmain.sh. if test -f "$ltmain"; then # See if we are running on zsh, and set the options which allow our commands through # without removal of \ escapes. if test -n "${ZSH_VERSION+set}" ; then setopt NO_GLOB_SUBST fi # Now quote all the things that may contain metacharacters while being # careful not to overquote the AC_SUBSTed values. We take copies of the # variables and quote the copies for generation of the libtool script. for var in echo old_CC old_CFLAGS AR AR_FLAGS EGREP RANLIB LN_S LTCC NM \ SED SHELL STRIP \ libname_spec library_names_spec soname_spec extract_expsyms_cmds \ old_striplib striplib file_magic_cmd finish_cmds finish_eval \ deplibs_check_method reload_flag reload_cmds need_locks \ lt_cv_sys_global_symbol_pipe lt_cv_sys_global_symbol_to_cdecl \ lt_cv_sys_global_symbol_to_c_name_address \ sys_lib_search_path_spec sys_lib_dlsearch_path_spec \ old_postinstall_cmds old_postuninstall_cmds \ compiler_CXX \ CC_CXX \ LD_CXX \ lt_prog_compiler_wl_CXX \ lt_prog_compiler_pic_CXX \ lt_prog_compiler_static_CXX \ lt_prog_compiler_no_builtin_flag_CXX \ export_dynamic_flag_spec_CXX \ thread_safe_flag_spec_CXX \ whole_archive_flag_spec_CXX \ enable_shared_with_static_runtimes_CXX \ old_archive_cmds_CXX \ old_archive_from_new_cmds_CXX \ predep_objects_CXX \ postdep_objects_CXX \ predeps_CXX \ postdeps_CXX \ compiler_lib_search_path_CXX \ archive_cmds_CXX \ archive_expsym_cmds_CXX \ postinstall_cmds_CXX \ postuninstall_cmds_CXX \ old_archive_from_expsyms_cmds_CXX \ allow_undefined_flag_CXX \ no_undefined_flag_CXX \ export_symbols_cmds_CXX \ hardcode_libdir_flag_spec_CXX \ hardcode_libdir_flag_spec_ld_CXX \ hardcode_libdir_separator_CXX \ hardcode_automatic_CXX \ module_cmds_CXX \ module_expsym_cmds_CXX \ lt_cv_prog_compiler_c_o_CXX \ exclude_expsyms_CXX \ include_expsyms_CXX; do case $var in old_archive_cmds_CXX | \ old_archive_from_new_cmds_CXX | \ archive_cmds_CXX | \ archive_expsym_cmds_CXX | \ module_cmds_CXX | \ module_expsym_cmds_CXX | \ old_archive_from_expsyms_cmds_CXX | \ export_symbols_cmds_CXX | \ extract_expsyms_cmds | reload_cmds | finish_cmds | \ postinstall_cmds | postuninstall_cmds | \ old_postinstall_cmds | old_postuninstall_cmds | \ sys_lib_search_path_spec | sys_lib_dlsearch_path_spec) # Double-quote double-evaled strings. 
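# Added note (simplified): command variables such as archive_cmds_CXX are
# evaluated a second time when the generated libtool script runs them, so
# the branch below adds an extra layer of escaping for quotes, backslashes,
# dollar signs and backquotes; plain variables (the *) branch) only get the
# single sed_quote_subst layer.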
eval "lt_$var=\\\"\`\$echo \"X\$$var\" | \$Xsed -e \"\$double_quote_subst\" -e \"\$sed_quote_subst\" -e \"\$delay_variable_subst\"\`\\\"" ;; *) eval "lt_$var=\\\"\`\$echo \"X\$$var\" | \$Xsed -e \"\$sed_quote_subst\"\`\\\"" ;; esac done case $lt_echo in *'\$0 --fallback-echo"') lt_echo=`$echo "X$lt_echo" | $Xsed -e 's/\\\\\\\$0 --fallback-echo"$/$0 --fallback-echo"/'` ;; esac cfgfile="$ofile" cat <<__EOF__ >> "$cfgfile" # ### BEGIN LIBTOOL TAG CONFIG: $tagname # Libtool was configured on host `(hostname || uname -n) 2>/dev/null | sed 1q`: # Shell to use when invoking shell scripts. SHELL=$lt_SHELL # Whether or not to build shared libraries. build_libtool_libs=$enable_shared # Whether or not to build static libraries. build_old_libs=$enable_static # Whether or not to add -lc for building shared libraries. build_libtool_need_lc=$archive_cmds_need_lc_CXX # Whether or not to disallow shared libs when runtime libs are static allow_libtool_libs_with_static_runtimes=$enable_shared_with_static_runtimes_CXX # Whether or not to optimize for fast installation. fast_install=$enable_fast_install # The host system. host_alias=$host_alias host=$host # An echo program that does not interpret backslashes. echo=$lt_echo # The archiver. AR=$lt_AR AR_FLAGS=$lt_AR_FLAGS # A C compiler. LTCC=$lt_LTCC # A language-specific compiler. CC=$lt_compiler_CXX # Is the compiler the GNU C compiler? with_gcc=$GCC_CXX # An ERE matcher. EGREP=$lt_EGREP # The linker used to build libraries. LD=$lt_LD_CXX # Whether we need hard or soft links. LN_S=$lt_LN_S # A BSD-compatible nm program. NM=$lt_NM # A symbol stripping program STRIP=$lt_STRIP # Used to examine libraries when file_magic_cmd begins "file" MAGIC_CMD=$MAGIC_CMD # Used on cygwin: DLL creation program. DLLTOOL="$DLLTOOL" # Used on cygwin: object dumper. OBJDUMP="$OBJDUMP" # Used on cygwin: assembler. AS="$AS" # The name of the directory that contains temporary libtool files. objdir=$objdir # How to create reloadable object files. reload_flag=$lt_reload_flag reload_cmds=$lt_reload_cmds # How to pass a linker flag through the compiler. wl=$lt_lt_prog_compiler_wl_CXX # Object file suffix (normally "o"). objext="$ac_objext" # Old archive suffix (normally "a"). libext="$libext" # Shared library suffix (normally ".so"). shrext_cmds='$shrext_cmds' # Executable file suffix (normally ""). exeext="$exeext" # Additional compiler flags for building library objects. pic_flag=$lt_lt_prog_compiler_pic_CXX pic_mode=$pic_mode # What is the maximum length of a command? max_cmd_len=$lt_cv_sys_max_cmd_len # Does compiler simultaneously support -c and -o options? compiler_c_o=$lt_lt_cv_prog_compiler_c_o_CXX # Must we lock files when doing compilation ? need_locks=$lt_need_locks # Do we need the lib prefix for modules? need_lib_prefix=$need_lib_prefix # Do we need a version for libraries? need_version=$need_version # Whether dlopen is supported. dlopen_support=$enable_dlopen # Whether dlopen of programs is supported. dlopen_self=$enable_dlopen_self # Whether dlopen of statically linked programs is supported. dlopen_self_static=$enable_dlopen_self_static # Compiler flag to prevent dynamic linking. link_static_flag=$lt_lt_prog_compiler_static_CXX # Compiler flag to turn off builtin functions. no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag_CXX # Compiler flag to allow reflexive dlopens. export_dynamic_flag_spec=$lt_export_dynamic_flag_spec_CXX # Compiler flag to generate shared objects directly from archives. 
whole_archive_flag_spec=$lt_whole_archive_flag_spec_CXX # Compiler flag to generate thread-safe objects. thread_safe_flag_spec=$lt_thread_safe_flag_spec_CXX # Library versioning type. version_type=$version_type # Format of library name prefix. libname_spec=$lt_libname_spec # List of archive names. First name is the real one, the rest are links. # The last name is the one that the linker finds with -lNAME. library_names_spec=$lt_library_names_spec # The coded name of the library, if different from the real name. soname_spec=$lt_soname_spec # Commands used to build and install an old-style archive. RANLIB=$lt_RANLIB old_archive_cmds=$lt_old_archive_cmds_CXX old_postinstall_cmds=$lt_old_postinstall_cmds old_postuninstall_cmds=$lt_old_postuninstall_cmds # Create an old-style archive from a shared archive. old_archive_from_new_cmds=$lt_old_archive_from_new_cmds_CXX # Create a temporary old-style archive to link instead of a shared archive. old_archive_from_expsyms_cmds=$lt_old_archive_from_expsyms_cmds_CXX # Commands used to build and install a shared archive. archive_cmds=$lt_archive_cmds_CXX archive_expsym_cmds=$lt_archive_expsym_cmds_CXX postinstall_cmds=$lt_postinstall_cmds postuninstall_cmds=$lt_postuninstall_cmds # Commands used to build a loadable module (assumed same as above if empty) module_cmds=$lt_module_cmds_CXX module_expsym_cmds=$lt_module_expsym_cmds_CXX # Commands to strip libraries. old_striplib=$lt_old_striplib striplib=$lt_striplib # Dependencies to place before the objects being linked to create a # shared library. predep_objects=$lt_predep_objects_CXX # Dependencies to place after the objects being linked to create a # shared library. postdep_objects=$lt_postdep_objects_CXX # Dependencies to place before the objects being linked to create a # shared library. predeps=$lt_predeps_CXX # Dependencies to place after the objects being linked to create a # shared library. postdeps=$lt_postdeps_CXX # The library search path used internally by the compiler when linking # a shared library. compiler_lib_search_path=$lt_compiler_lib_search_path_CXX # Method to check whether dependent libraries are shared objects. deplibs_check_method=$lt_deplibs_check_method # Command to use when deplibs_check_method == file_magic. file_magic_cmd=$lt_file_magic_cmd # Flag that allows shared libraries with undefined symbols to be built. allow_undefined_flag=$lt_allow_undefined_flag_CXX # Flag that forces no undefined symbols. no_undefined_flag=$lt_no_undefined_flag_CXX # Commands used to finish a libtool library installation in a directory. finish_cmds=$lt_finish_cmds # Same as above, but a single script fragment to be evaled but not shown. finish_eval=$lt_finish_eval # Take the output of nm and produce a listing of raw symbols and C names. global_symbol_pipe=$lt_lt_cv_sys_global_symbol_pipe # Transform the output of nm in a proper C declaration global_symbol_to_cdecl=$lt_lt_cv_sys_global_symbol_to_cdecl # Transform the output of nm in a C name address pair global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address # This is the shared library runtime path variable. runpath_var=$runpath_var # This is the shared library path variable. shlibpath_var=$shlibpath_var # Is shlibpath searched before the hard-coded library search path? shlibpath_overrides_runpath=$shlibpath_overrides_runpath # How to hardcode a shared library path into an executable. hardcode_action=$hardcode_action_CXX # Whether we should hardcode library paths into libraries. 
hardcode_into_libs=$hardcode_into_libs # Flag to hardcode \$libdir into a binary during linking. # This must work even if \$libdir does not exist. hardcode_libdir_flag_spec=$lt_hardcode_libdir_flag_spec_CXX # If ld is used when linking, flag to hardcode \$libdir into # a binary during linking. This must work even if \$libdir does # not exist. hardcode_libdir_flag_spec_ld=$lt_hardcode_libdir_flag_spec_ld_CXX # Whether we need a single -rpath flag with a separated argument. hardcode_libdir_separator=$lt_hardcode_libdir_separator_CXX # Set to yes if using DIR/libNAME${shared_ext} during linking hardcodes DIR into the # resulting binary. hardcode_direct=$hardcode_direct_CXX # Set to yes if using the -LDIR flag during linking hardcodes DIR into the # resulting binary. hardcode_minus_L=$hardcode_minus_L_CXX # Set to yes if using SHLIBPATH_VAR=DIR during linking hardcodes DIR into # the resulting binary. hardcode_shlibpath_var=$hardcode_shlibpath_var_CXX # Set to yes if building a shared library automatically hardcodes DIR into the library # and all subsequent libraries and executables linked against it. hardcode_automatic=$hardcode_automatic_CXX # Variables whose values should be saved in libtool wrapper scripts and # restored at relink time. variables_saved_for_relink="$variables_saved_for_relink" # Whether libtool must link a program against all its dependency libraries. link_all_deplibs=$link_all_deplibs_CXX # Compile-time system search path for libraries sys_lib_search_path_spec=$lt_sys_lib_search_path_spec # Run-time system search path for libraries sys_lib_dlsearch_path_spec=$lt_sys_lib_dlsearch_path_spec # Fix the shell variable \$srcfile for the compiler. fix_srcfile_path="$fix_srcfile_path_CXX" # Set to yes if exported symbols are required. always_export_symbols=$always_export_symbols_CXX # The commands to list exported symbols. export_symbols_cmds=$lt_export_symbols_cmds_CXX # The commands to extract the exported symbol list from a shared archive. extract_expsyms_cmds=$lt_extract_expsyms_cmds # Symbols that should not be listed in the preloaded symbols. exclude_expsyms=$lt_exclude_expsyms_CXX # Symbols that must always be exported. include_expsyms=$lt_include_expsyms_CXX # ### END LIBTOOL TAG CONFIG: $tagname __EOF__ else # If there is no Makefile yet, we rely on a make rule to execute # `config.status --recheck' to rerun these tests and create the # libtool script then. 
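# Added note: the fallback below only matters when bootstrapping libtool
# itself; it derives ltmain.sh.in from $ltmain and, if a Makefile already
# exists, runs make to regenerate ltmain.sh. A packaged release ships
# ltmain.sh, so the tag configuration above is appended directly to the
# generated libtool script.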
ltmain_in=`echo $ltmain | sed -e 's/\.sh$/.in/'` if test -f "$ltmain_in"; then test -f Makefile && make "$ltmain" fi fi ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu CC=$lt_save_CC LDCXX=$LD LD=$lt_save_LD GCC=$lt_save_GCC with_gnu_ldcxx=$with_gnu_ld with_gnu_ld=$lt_save_with_gnu_ld lt_cv_path_LDCXX=$lt_cv_path_LD lt_cv_path_LD=$lt_save_path_LD lt_cv_prog_gnu_ldcxx=$lt_cv_prog_gnu_ld lt_cv_prog_gnu_ld=$lt_save_with_gnu_ld else tagname="" fi ;; F77) if test -n "$F77" && test "X$F77" != "Xno"; then ac_ext=f ac_compile='$F77 -c $FFLAGS conftest.$ac_ext >&5' ac_link='$F77 -o conftest$ac_exeext $FFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_f77_compiler_gnu archive_cmds_need_lc_F77=no allow_undefined_flag_F77= always_export_symbols_F77=no archive_expsym_cmds_F77= export_dynamic_flag_spec_F77= hardcode_direct_F77=no hardcode_libdir_flag_spec_F77= hardcode_libdir_flag_spec_ld_F77= hardcode_libdir_separator_F77= hardcode_minus_L_F77=no hardcode_automatic_F77=no module_cmds_F77= module_expsym_cmds_F77= link_all_deplibs_F77=unknown old_archive_cmds_F77=$old_archive_cmds no_undefined_flag_F77= whole_archive_flag_spec_F77= enable_shared_with_static_runtimes_F77=no # Source file extension for f77 test sources. ac_ext=f # Object file extension for compiled f77 test sources. objext=o objext_F77=$objext # Code to be used in simple compile tests lt_simple_compile_test_code=" subroutine t\n return\n end\n" # Code to be used in simple link tests lt_simple_link_test_code=" program t\n end\n" # ltmain only uses $CC for tagged configurations so make sure $CC is set. # If no C compiler was specified, use CC. LTCC=${LTCC-"$CC"} # Allow CC to be a program name with arguments. compiler=$CC # Allow CC to be a program name with arguments. lt_save_CC="$CC" CC=${F77-"f77"} compiler=$CC compiler_F77=$CC cc_basename=`$echo X"$compiler" | $Xsed -e 's%^.*/%%'` echo "$as_me:$LINENO: checking if libtool supports shared libraries" >&5 echo $ECHO_N "checking if libtool supports shared libraries... $ECHO_C" >&6 echo "$as_me:$LINENO: result: $can_build_shared" >&5 echo "${ECHO_T}$can_build_shared" >&6 echo "$as_me:$LINENO: checking whether to build shared libraries" >&5 echo $ECHO_N "checking whether to build shared libraries... $ECHO_C" >&6 test "$can_build_shared" = "no" && enable_shared=no # On AIX, shared libraries and static libraries use the same namespace, and # are all built from PIC. case "$host_os" in aix3*) test "$enable_shared" = yes && enable_static=no if test -n "$RANLIB"; then archive_cmds="$archive_cmds~\$RANLIB \$lib" postinstall_cmds='$RANLIB $lib' fi ;; aix4* | aix5*) test "$enable_shared" = yes && enable_static=no ;; esac echo "$as_me:$LINENO: result: $enable_shared" >&5 echo "${ECHO_T}$enable_shared" >&6 echo "$as_me:$LINENO: checking whether to build static libraries" >&5 echo $ECHO_N "checking whether to build static libraries... $ECHO_C" >&6 # Make sure either enable_shared or enable_static is yes. 
test "$enable_shared" = yes || enable_static=yes echo "$as_me:$LINENO: result: $enable_static" >&5 echo "${ECHO_T}$enable_static" >&6 test "$ld_shlibs_F77" = no && can_build_shared=no GCC_F77="$G77" LD_F77="$LD" lt_prog_compiler_wl_F77= lt_prog_compiler_pic_F77= lt_prog_compiler_static_F77= echo "$as_me:$LINENO: checking for $compiler option to produce PIC" >&5 echo $ECHO_N "checking for $compiler option to produce PIC... $ECHO_C" >&6 if test "$GCC" = yes; then lt_prog_compiler_wl_F77='-Wl,' lt_prog_compiler_static_F77='-static' case $host_os in aix*) # All AIX code is PIC. if test "$host_cpu" = ia64; then # AIX 5 now supports IA64 processor lt_prog_compiler_static_F77='-Bstatic' fi ;; amigaos*) # FIXME: we need at least 68020 code to build shared libraries, but # adding the `-m68020' flag to GCC prevents building anything better, # like `-m68040'. lt_prog_compiler_pic_F77='-m68020 -resident32 -malways-restore-a4' ;; beos* | cygwin* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*) # PIC is the default for these OSes. ;; mingw* | pw32* | os2*) # This hack is so that the source file can tell whether it is being # built for inclusion in a dll (and should export symbols for example). lt_prog_compiler_pic_F77='-DDLL_EXPORT' ;; darwin* | rhapsody*) # PIC is the default on this platform # Common symbols not allowed in MH_DYLIB files lt_prog_compiler_pic_F77='-fno-common' ;; msdosdjgpp*) # Just because we use GCC doesn't mean we suddenly get shared libraries # on systems that don't support them. lt_prog_compiler_can_build_shared_F77=no enable_shared=no ;; sysv4*MP*) if test -d /usr/nec; then lt_prog_compiler_pic_F77=-Kconform_pic fi ;; hpux*) # PIC is the default for IA64 HP-UX and 64-bit HP-UX, but # not for PA HP-UX. case "$host_cpu" in hppa*64*|ia64*) # +Z the default ;; *) lt_prog_compiler_pic_F77='-fPIC' ;; esac ;; *) lt_prog_compiler_pic_F77='-fPIC' ;; esac else # PORTME Check for flag to pass linker flags through the system compiler. case $host_os in aix*) lt_prog_compiler_wl_F77='-Wl,' if test "$host_cpu" = ia64; then # AIX 5 now supports IA64 processor lt_prog_compiler_static_F77='-Bstatic' else lt_prog_compiler_static_F77='-bnso -bI:/lib/syscalls.exp' fi ;; mingw* | pw32* | os2*) # This hack is so that the source file can tell whether it is being # built for inclusion in a dll (and should export symbols for example). lt_prog_compiler_pic_F77='-DDLL_EXPORT' ;; hpux9* | hpux10* | hpux11*) lt_prog_compiler_wl_F77='-Wl,' # PIC is the default for IA64 HP-UX and 64-bit HP-UX, but # not for PA HP-UX. case "$host_cpu" in hppa*64*|ia64*) # +Z the default ;; *) lt_prog_compiler_pic_F77='+Z' ;; esac # Is there a better lt_prog_compiler_static that works with the bundled CC? lt_prog_compiler_static_F77='${wl}-a ${wl}archive' ;; irix5* | irix6* | nonstopux*) lt_prog_compiler_wl_F77='-Wl,' # PIC (with -KPIC) is the default. lt_prog_compiler_static_F77='-non_shared' ;; newsos6) lt_prog_compiler_pic_F77='-KPIC' lt_prog_compiler_static_F77='-Bstatic' ;; linux*) case $CC in icc* | ecc*) lt_prog_compiler_wl_F77='-Wl,' lt_prog_compiler_pic_F77='-KPIC' lt_prog_compiler_static_F77='-static' ;; ccc*) lt_prog_compiler_wl_F77='-Wl,' # All Alpha code is PIC. lt_prog_compiler_static_F77='-non_shared' ;; esac ;; osf3* | osf4* | osf5*) lt_prog_compiler_wl_F77='-Wl,' # All OSF/1 code is PIC. 
lt_prog_compiler_static_F77='-non_shared' ;; sco3.2v5*) lt_prog_compiler_pic_F77='-Kpic' lt_prog_compiler_static_F77='-dn' ;; solaris*) lt_prog_compiler_wl_F77='-Wl,' lt_prog_compiler_pic_F77='-KPIC' lt_prog_compiler_static_F77='-Bstatic' ;; sunos4*) lt_prog_compiler_wl_F77='-Qoption ld ' lt_prog_compiler_pic_F77='-PIC' lt_prog_compiler_static_F77='-Bstatic' ;; sysv4 | sysv4.2uw2* | sysv4.3* | sysv5*) lt_prog_compiler_wl_F77='-Wl,' lt_prog_compiler_pic_F77='-KPIC' lt_prog_compiler_static_F77='-Bstatic' ;; sysv4*MP*) if test -d /usr/nec ;then lt_prog_compiler_pic_F77='-Kconform_pic' lt_prog_compiler_static_F77='-Bstatic' fi ;; uts4*) lt_prog_compiler_pic_F77='-pic' lt_prog_compiler_static_F77='-Bstatic' ;; *) lt_prog_compiler_can_build_shared_F77=no ;; esac fi echo "$as_me:$LINENO: result: $lt_prog_compiler_pic_F77" >&5 echo "${ECHO_T}$lt_prog_compiler_pic_F77" >&6 # # Check to make sure the PIC flag actually works. # if test -n "$lt_prog_compiler_pic_F77"; then echo "$as_me:$LINENO: checking if $compiler PIC flag $lt_prog_compiler_pic_F77 works" >&5 echo $ECHO_N "checking if $compiler PIC flag $lt_prog_compiler_pic_F77 works... $ECHO_C" >&6 if test "${lt_prog_compiler_pic_works_F77+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else lt_prog_compiler_pic_works_F77=no ac_outfile=conftest.$ac_objext printf "$lt_simple_compile_test_code" > conftest.$ac_ext lt_compiler_flag="$lt_prog_compiler_pic_F77" # Insert the option either (1) after the last *FLAGS variable, or # (2) before a word containing "conftest.", or (3) at the end. # Note that $ac_compile itself does not contain backslashes and begins # with a dollar sign (not a hyphen), so the echo should work correctly. # The option is referenced via a variable to avoid confusing sed. lt_compile=`echo "$ac_compile" | $SED \ -e 's:.*FLAGS}? :&$lt_compiler_flag :; t' \ -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ -e 's:$: $lt_compiler_flag:'` (eval echo "\"\$as_me:12718: $lt_compile\"" >&5) (eval "$lt_compile" 2>conftest.err) ac_status=$? cat conftest.err >&5 echo "$as_me:12722: \$? = $ac_status" >&5 if (exit $ac_status) && test -s "$ac_outfile"; then # The compiler can only warn and ignore the option if not recognized # So say no if there are warnings if test ! -s conftest.err; then lt_prog_compiler_pic_works_F77=yes fi fi $rm conftest* fi echo "$as_me:$LINENO: result: $lt_prog_compiler_pic_works_F77" >&5 echo "${ECHO_T}$lt_prog_compiler_pic_works_F77" >&6 if test x"$lt_prog_compiler_pic_works_F77" = xyes; then case $lt_prog_compiler_pic_F77 in "" | " "*) ;; *) lt_prog_compiler_pic_F77=" $lt_prog_compiler_pic_F77" ;; esac else lt_prog_compiler_pic_F77= lt_prog_compiler_can_build_shared_F77=no fi fi case "$host_os" in # For platforms which do not support PIC, -DPIC is meaningless: *djgpp*) lt_prog_compiler_pic_F77= ;; *) lt_prog_compiler_pic_F77="$lt_prog_compiler_pic_F77" ;; esac echo "$as_me:$LINENO: checking if $compiler supports -c -o file.$ac_objext" >&5 echo $ECHO_N "checking if $compiler supports -c -o file.$ac_objext... $ECHO_C" >&6 if test "${lt_cv_prog_compiler_c_o_F77+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else lt_cv_prog_compiler_c_o_F77=no $rm -r conftest 2>/dev/null mkdir conftest cd conftest mkdir out printf "$lt_simple_compile_test_code" > conftest.$ac_ext lt_compiler_flag="-o out/conftest2.$ac_objext" # Insert the option either (1) after the last *FLAGS variable, or # (2) before a word containing "conftest.", or (3) at the end. 
# Note that $ac_compile itself does not contain backslashes and begins # with a dollar sign (not a hyphen), so the echo should work correctly. lt_compile=`echo "$ac_compile" | $SED \ -e 's:.*FLAGS}? :&$lt_compiler_flag :; t' \ -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ -e 's:$: $lt_compiler_flag:'` (eval echo "\"\$as_me:12778: $lt_compile\"" >&5) (eval "$lt_compile" 2>out/conftest.err) ac_status=$? cat out/conftest.err >&5 echo "$as_me:12782: \$? = $ac_status" >&5 if (exit $ac_status) && test -s out/conftest2.$ac_objext then # The compiler can only warn and ignore the option if not recognized # So say no if there are warnings if test ! -s out/conftest.err; then lt_cv_prog_compiler_c_o_F77=yes fi fi chmod u+w . $rm conftest* # SGI C++ compiler will create directory out/ii_files/ for # template instantiation test -d out/ii_files && $rm out/ii_files/* && rmdir out/ii_files $rm out/* && rmdir out cd .. rmdir conftest $rm conftest* fi echo "$as_me:$LINENO: result: $lt_cv_prog_compiler_c_o_F77" >&5 echo "${ECHO_T}$lt_cv_prog_compiler_c_o_F77" >&6 hard_links="nottested" if test "$lt_cv_prog_compiler_c_o_F77" = no && test "$need_locks" != no; then # do not overwrite the value of need_locks provided by the user echo "$as_me:$LINENO: checking if we can lock with hard links" >&5 echo $ECHO_N "checking if we can lock with hard links... $ECHO_C" >&6 hard_links=yes $rm conftest* ln conftest.a conftest.b 2>/dev/null && hard_links=no touch conftest.a ln conftest.a conftest.b 2>&5 || hard_links=no ln conftest.a conftest.b 2>/dev/null && hard_links=no echo "$as_me:$LINENO: result: $hard_links" >&5 echo "${ECHO_T}$hard_links" >&6 if test "$hard_links" = no; then { echo "$as_me:$LINENO: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&5 echo "$as_me: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&2;} need_locks=warn fi else need_locks=no fi echo "$as_me:$LINENO: checking whether the $compiler linker ($LD) supports shared libraries" >&5 echo $ECHO_N "checking whether the $compiler linker ($LD) supports shared libraries... $ECHO_C" >&6 runpath_var= allow_undefined_flag_F77= enable_shared_with_static_runtimes_F77=no archive_cmds_F77= archive_expsym_cmds_F77= old_archive_From_new_cmds_F77= old_archive_from_expsyms_cmds_F77= export_dynamic_flag_spec_F77= whole_archive_flag_spec_F77= thread_safe_flag_spec_F77= hardcode_libdir_flag_spec_F77= hardcode_libdir_flag_spec_ld_F77= hardcode_libdir_separator_F77= hardcode_direct_F77=no hardcode_minus_L_F77=no hardcode_shlibpath_var_F77=unsupported link_all_deplibs_F77=unknown hardcode_automatic_F77=no module_cmds_F77= module_expsym_cmds_F77= always_export_symbols_F77=no export_symbols_cmds_F77='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' # include_expsyms should be a list of space-separated symbols to be *always* # included in the symbol list include_expsyms_F77= # exclude_expsyms can be an extended regexp of symbols to exclude # it will be wrapped by ` (' and `)$', so one must not match beginning or # end of line. Example: `a|bc|.*d.*' will exclude the symbols `a' and `bc', # as well as any symbol that contains `d'. exclude_expsyms_F77="_GLOBAL_OFFSET_TABLE_" # Although _GLOBAL_OFFSET_TABLE_ is a valid symbol C name, most a.out # platforms (ab)use it in PIC code, but their linkers get confused if # the symbol is explicitly referenced. Since portable code cannot # rely on this symbol name, it's probably fine to never include it in # preloaded symbol tables. 
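# A minimal sketch of how an export-symbol list such as the one built by
# export_symbols_cmds_F77 above can be produced by hand. The demo.o and
# demo.exp names are hypothetical, and the real command additionally runs
# the nm output through $global_symbol_pipe before stripping everything
# except the symbol names.
if test -f demo.o; then
  nm demo.o | sed -e 's/.* //' | sort | uniq > demo.exp
fi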
extract_expsyms_cmds= case $host_os in cygwin* | mingw* | pw32*) # FIXME: the MSVC++ port hasn't been tested in a loooong time # When not using gcc, we currently assume that we are using # Microsoft Visual C++. if test "$GCC" != yes; then with_gnu_ld=no fi ;; openbsd*) with_gnu_ld=no ;; esac ld_shlibs_F77=yes if test "$with_gnu_ld" = yes; then # If archive_cmds runs LD, not CC, wlarc should be empty wlarc='${wl}' # See if GNU ld supports shared libraries. case $host_os in aix3* | aix4* | aix5*) # On AIX/PPC, the GNU linker is very broken if test "$host_cpu" != ia64; then ld_shlibs_F77=no cat <&2 *** Warning: the GNU linker, at least up to release 2.9.1, is reported *** to be unable to reliably create shared libraries on AIX. *** Therefore, libtool is disabling shared libraries support. If you *** really care for shared libraries, you may want to modify your PATH *** so that a non-GNU linker is found, and then restart. EOF fi ;; amigaos*) archive_cmds_F77='$rm $output_objdir/a2ixlibrary.data~$echo "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$echo "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$echo "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$echo "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)' hardcode_libdir_flag_spec_F77='-L$libdir' hardcode_minus_L_F77=yes # Samuel A. Falvo II reports # that the semantics of dynamic libraries on AmigaOS, at least up # to version 4, is to share data among multiple programs linked # with the same dynamic library. Since this doesn't match the # behavior of shared libraries on other platforms, we can't use # them. ld_shlibs_F77=no ;; beos*) if $LD --help 2>&1 | grep ': supported targets:.* elf' > /dev/null; then allow_undefined_flag_F77=unsupported # Joseph Beckenbach says some releases of gcc # support --undefined. This deserves some investigation. FIXME archive_cmds_F77='$CC -nostart $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' else ld_shlibs_F77=no fi ;; cygwin* | mingw* | pw32*) # _LT_AC_TAGVAR(hardcode_libdir_flag_spec, F77) is actually meaningless, # as there is no search path for DLLs. hardcode_libdir_flag_spec_F77='-L$libdir' allow_undefined_flag_F77=unsupported always_export_symbols_F77=no enable_shared_with_static_runtimes_F77=yes export_symbols_cmds_F77='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGS] /s/.* \([^ ]*\)/\1 DATA/'\'' | $SED -e '\''/^[AITW] /s/.* //'\'' | sort | uniq > $export_symbols' if $LD --help 2>&1 | grep 'auto-import' > /dev/null; then archive_cmds_F77='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--image-base=0x10000000 ${wl}--out-implib,$lib' # If the export-symbols file already is a .def file (1st line # is EXPORTS), use it as is; otherwise, prepend... 
archive_expsym_cmds_F77='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then cp $export_symbols $output_objdir/$soname.def; else echo EXPORTS > $output_objdir/$soname.def; cat $export_symbols >> $output_objdir/$soname.def; fi~ $CC -shared $output_objdir/$soname.def $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--image-base=0x10000000 ${wl}--out-implib,$lib' else ld_shlibs=no fi ;; netbsd*) if echo __ELF__ | $CC -E - | grep __ELF__ >/dev/null; then archive_cmds_F77='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib' wlarc= else archive_cmds_F77='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' archive_expsym_cmds_F77='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' fi ;; solaris* | sysv5*) if $LD -v 2>&1 | grep 'BFD 2\.8' > /dev/null; then ld_shlibs_F77=no cat <&2 *** Warning: The releases 2.8.* of the GNU linker cannot reliably *** create shared libraries on Solaris systems. Therefore, libtool *** is disabling shared libraries support. We urge you to upgrade GNU *** binutils to release 2.9.1 or newer. Another option is to modify *** your PATH or compiler configuration so that the native linker is *** used, and then restart. EOF elif $LD --help 2>&1 | grep ': supported targets:.* elf' > /dev/null; then archive_cmds_F77='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' archive_expsym_cmds_F77='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' else ld_shlibs_F77=no fi ;; sunos4*) archive_cmds_F77='$LD -assert pure-text -Bshareable -o $lib $libobjs $deplibs $linker_flags' wlarc= hardcode_direct_F77=yes hardcode_shlibpath_var_F77=no ;; linux*) if $LD --help 2>&1 | grep ': supported targets:.* elf' > /dev/null; then tmp_archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' archive_cmds_F77="$tmp_archive_cmds" supports_anon_versioning=no case `$LD -v 2>/dev/null` in *\ 01.* | *\ 2.[0-9].* | *\ 2.10.*) ;; # catch versions < 2.11 *\ 2.11.93.0.2\ *) supports_anon_versioning=yes ;; # RH7.3 ... *\ 2.11.92.0.12\ *) supports_anon_versioning=yes ;; # Mandrake 8.2 ... *\ 2.11.*) ;; # other 2.11 versions *) supports_anon_versioning=yes ;; esac if test $supports_anon_versioning = yes; then archive_expsym_cmds_F77='$echo "{ global:" > $output_objdir/$libname.ver~ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ $echo "local: *; };" >> $output_objdir/$libname.ver~ $CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-version-script ${wl}$output_objdir/$libname.ver -o $lib' else archive_expsym_cmds_F77="$tmp_archive_cmds" fi else ld_shlibs_F77=no fi ;; *) if $LD --help 2>&1 | grep ': supported targets:.* elf' > /dev/null; then archive_cmds_F77='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' archive_expsym_cmds_F77='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' else ld_shlibs_F77=no fi ;; esac if test "$ld_shlibs_F77" = yes; then runpath_var=LD_RUN_PATH hardcode_libdir_flag_spec_F77='${wl}--rpath ${wl}$libdir' export_dynamic_flag_spec_F77='${wl}--export-dynamic' # ancient GNU ld didn't support --whole-archive et. al. 
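# A minimal sketch of the version script that the anonymous-versioning branch
# of archive_expsym_cmds_F77 above writes out; demo_lib.ver and the exported
# symbol name are purely illustrative.
echo '{ global:'       >  demo_lib.ver
echo 'my_entry_point;' >> demo_lib.ver
echo 'local: *; };'    >> demo_lib.ver
# the shared object is then linked roughly as:
#   $CC -shared ... ${wl}-version-script ${wl}demo_lib.ver -o $lib
rm -f demo_lib.ver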
if $LD --help 2>&1 | grep 'no-whole-archive' > /dev/null; then whole_archive_flag_spec_F77="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive' else whole_archive_flag_spec_F77= fi fi else # PORTME fill in a description of your system's linker (not GNU ld) case $host_os in aix3*) allow_undefined_flag_F77=unsupported always_export_symbols_F77=yes archive_expsym_cmds_F77='$LD -o $output_objdir/$soname $libobjs $deplibs $linker_flags -bE:$export_symbols -T512 -H512 -bM:SRE~$AR $AR_FLAGS $lib $output_objdir/$soname' # Note: this linker hardcodes the directories in LIBPATH if there # are no directories specified by -L. hardcode_minus_L_F77=yes if test "$GCC" = yes && test -z "$link_static_flag"; then # Neither direct hardcoding nor static linking is supported with a # broken collect2. hardcode_direct_F77=unsupported fi ;; aix4* | aix5*) if test "$host_cpu" = ia64; then # On IA64, the linker does run time linking by default, so we don't # have to do anything special. aix_use_runtimelinking=no exp_sym_flag='-Bexport' no_entry_flag="" else # If we're using GNU nm, then we don't want the "-C" option. # -C means demangle to AIX nm, but means don't demangle with GNU nm if $NM -V 2>&1 | grep 'GNU' > /dev/null; then export_symbols_cmds_F77='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$2 == "T") || (\$2 == "D") || (\$2 == "B")) && (substr(\$3,1,1) != ".")) { print \$3 } }'\'' | sort -u > $export_symbols' else export_symbols_cmds_F77='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$2 == "T") || (\$2 == "D") || (\$2 == "B")) && (substr(\$3,1,1) != ".")) { print \$3 } }'\'' | sort -u > $export_symbols' fi aix_use_runtimelinking=no # Test if we are trying to use run time linking or normal # AIX style linking. If -brtl is somewhere in LDFLAGS, we # need to do runtime linking. case $host_os in aix4.[23]|aix4.[23].*|aix5*) for ld_flag in $LDFLAGS; do if (test $ld_flag = "-brtl" || test $ld_flag = "-Wl,-brtl"); then aix_use_runtimelinking=yes break fi done esac exp_sym_flag='-bexport' no_entry_flag='-bnoentry' fi # When large executables or shared objects are built, AIX ld can # have problems creating the table of contents. If linking a library # or program results in "error TOC overflow" add -mminimal-toc to # CXXFLAGS/CFLAGS for g++/gcc. In the cases where that is not # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS. archive_cmds_F77='' hardcode_direct_F77=yes hardcode_libdir_separator_F77=':' link_all_deplibs_F77=yes if test "$GCC" = yes; then case $host_os in aix4.012|aix4.012.*) # We only want to do this on AIX 4.2 and lower, the check # below for broken collect2 doesn't work under 4.3+ collect2name=`${CC} -print-prog-name=collect2` if test -f "$collect2name" && \ strings "$collect2name" | grep resolve_lib_name >/dev/null then # We have reworked collect2 hardcode_direct_F77=yes else # We have old collect2 hardcode_direct_F77=unsupported # It fails to find uninstalled libraries when the uninstalled # path is not listed in the libpath. Setting hardcode_minus_L # to unsupported forces relinking hardcode_minus_L_F77=yes hardcode_libdir_flag_spec_F77='-L$libdir' hardcode_libdir_separator_F77= fi esac shared_flag='-shared' else # not using gcc if test "$host_cpu" = ia64; then # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release # chokes on -Wl,-G. 
The following line is correct: shared_flag='-G' else if test "$aix_use_runtimelinking" = yes; then shared_flag='${wl}-G' else shared_flag='${wl}-bM:SRE' fi fi fi # It seems that -bexpall does not export symbols beginning with # underscore (_), so it is better to generate a list of symbols to export. always_export_symbols_F77=yes if test "$aix_use_runtimelinking" = yes; then # Warning - without using the other runtime loading flags (-brtl), # -berok will link without error, but may produce a broken library. allow_undefined_flag_F77='-berok' # Determine the default libpath from the value encoded in an empty executable. cat >conftest.$ac_ext <<_ACEOF program main end _ACEOF rm -f conftest.$ac_objext conftest$ac_exeext if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 (eval $ac_link) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_f77_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest$ac_exeext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e '/Import File Strings/,/^$/ { /^0/ { s/^0 *\(.*\)$/\1/; p; } }'` # Check for a 64-bit object if we didn't find anything. if test -z "$aix_libpath"; then aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e '/Import File Strings/,/^$/ { /^0/ { s/^0 *\(.*\)$/\1/; p; } }'`; fi else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 fi rm -f conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi hardcode_libdir_flag_spec_F77='${wl}-blibpath:$libdir:'"$aix_libpath" archive_expsym_cmds_F77="\$CC"' -o $output_objdir/$soname $libobjs $deplibs $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then echo "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$no_entry_flag \${wl}$exp_sym_flag:\$export_symbols $shared_flag" else if test "$host_cpu" = ia64; then hardcode_libdir_flag_spec_F77='${wl}-R $libdir:/usr/lib:/lib' allow_undefined_flag_F77="-z nodefs" archive_expsym_cmds_F77="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$no_entry_flag \${wl}$exp_sym_flag:\$export_symbols" else # Determine the default libpath from the value encoded in an empty executable. cat >conftest.$ac_ext <<_ACEOF program main end _ACEOF rm -f conftest.$ac_objext conftest$ac_exeext if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 (eval $ac_link) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_f77_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest$ac_exeext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 (exit $ac_status); }; }; then aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e '/Import File Strings/,/^$/ { /^0/ { s/^0 *\(.*\)$/\1/; p; } }'` # Check for a 64-bit object if we didn't find anything. if test -z "$aix_libpath"; then aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e '/Import File Strings/,/^$/ { /^0/ { s/^0 *\(.*\)$/\1/; p; } }'`; fi else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 fi rm -f conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi hardcode_libdir_flag_spec_F77='${wl}-blibpath:$libdir:'"$aix_libpath" # Warning - without using the other run time loading flags, # -berok will link without error, but may produce a broken library. no_undefined_flag_F77=' ${wl}-bernotok' allow_undefined_flag_F77=' ${wl}-berok' # -bexpall does not export symbols beginning with underscore (_) always_export_symbols_F77=yes # Exported symbols can be pulled into shared objects from archives whole_archive_flag_spec_F77=' ' archive_cmds_need_lc_F77=yes # This is similar to how AIX traditionally builds it's shared libraries. archive_expsym_cmds_F77="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs $compiler_flags ${wl}-bE:$export_symbols ${wl}-bnoentry${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname' fi fi ;; amigaos*) archive_cmds_F77='$rm $output_objdir/a2ixlibrary.data~$echo "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$echo "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$echo "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$echo "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)' hardcode_libdir_flag_spec_F77='-L$libdir' hardcode_minus_L_F77=yes # see comment about different semantics on the GNU ld section ld_shlibs_F77=no ;; bsdi4*) export_dynamic_flag_spec_F77=-rdynamic ;; cygwin* | mingw* | pw32*) # When not using gcc, we currently assume that we are using # Microsoft Visual C++. # hardcode_libdir_flag_spec is actually meaningless, as there is # no search path for DLLs. hardcode_libdir_flag_spec_F77=' ' allow_undefined_flag_F77=unsupported # Tell ltmain to make .lib files, not .a files. libext=lib # Tell ltmain to make .dll files, not .so files. shrext_cmds=".dll" # FIXME: Setting linknames here is a bad hack. archive_cmds_F77='$CC -o $lib $libobjs $compiler_flags `echo "$deplibs" | $SED -e '\''s/ -lc$//'\''` -link -dll~linknames=' # The linker will automatically build a .lib file if we build a DLL. old_archive_From_new_cmds_F77='true' # FIXME: Should let the user specify the lib program. 
old_archive_cmds_F77='lib /OUT:$oldlib$oldobjs$old_deplibs' fix_srcfile_path='`cygpath -w "$srcfile"`' enable_shared_with_static_runtimes_F77=yes ;; darwin* | rhapsody*) if test "$GXX" = yes ; then archive_cmds_need_lc_F77=no case "$host_os" in rhapsody* | darwin1.[012]) allow_undefined_flag_F77='-undefined suppress' ;; *) # Darwin 1.3 on if test -z ${MACOSX_DEPLOYMENT_TARGET} ; then allow_undefined_flag_F77='-flat_namespace -undefined suppress' else case ${MACOSX_DEPLOYMENT_TARGET} in 10.[012]) allow_undefined_flag_F77='-flat_namespace -undefined suppress' ;; 10.*) allow_undefined_flag_F77='-undefined dynamic_lookup' ;; esac fi ;; esac lt_int_apple_cc_single_mod=no output_verbose_link_cmd='echo' if $CC -dumpspecs 2>&1 | grep 'single_module' >/dev/null ; then lt_int_apple_cc_single_mod=yes fi if test "X$lt_int_apple_cc_single_mod" = Xyes ; then archive_cmds_F77='$CC -dynamiclib -single_module $allow_undefined_flag -o $lib $libobjs $deplibs $compiler_flags -install_name $rpath/$soname $verstring' else archive_cmds_F77='$CC -r ${wl}-bind_at_load -keep_private_externs -nostdlib -o ${lib}-master.o $libobjs~$CC -dynamiclib $allow_undefined_flag -o $lib ${lib}-master.o $deplibs $compiler_flags -install_name $rpath/$soname $verstring' fi module_cmds_F77='$CC ${wl}-bind_at_load $allow_undefined_flag -o $lib -bundle $libobjs $deplibs$compiler_flags' # Don't fix this by using the ld -exported_symbols_list flag, it doesn't exist in older darwin ld's if test "X$lt_int_apple_cc_single_mod" = Xyes ; then archive_expsym_cmds_F77='sed -e "s,#.*,," -e "s,^[ ]*,," -e "s,^\(..*\),_&," < $export_symbols > $output_objdir/${libname}-symbols.expsym~$CC -dynamiclib -single_module $allow_undefined_flag -o $lib $libobjs $deplibs $compiler_flags -install_name $rpath/$soname $verstring~nmedit -s $output_objdir/${libname}-symbols.expsym ${lib}' else archive_expsym_cmds_F77='sed -e "s,#.*,," -e "s,^[ ]*,," -e "s,^\(..*\),_&," < $export_symbols > $output_objdir/${libname}-symbols.expsym~$CC -r ${wl}-bind_at_load -keep_private_externs -nostdlib -o ${lib}-master.o $libobjs~$CC -dynamiclib $allow_undefined_flag -o $lib ${lib}-master.o $deplibs $compiler_flags -install_name $rpath/$soname $verstring~nmedit -s $output_objdir/${libname}-symbols.expsym ${lib}' fi module_expsym_cmds_F77='sed -e "s,#.*,," -e "s,^[ ]*,," -e "s,^\(..*\),_&," < $export_symbols > $output_objdir/${libname}-symbols.expsym~$CC $allow_undefined_flag -o $lib -bundle $libobjs $deplibs$compiler_flags~nmedit -s $output_objdir/${libname}-symbols.expsym ${lib}' hardcode_direct_F77=no hardcode_automatic_F77=yes hardcode_shlibpath_var_F77=unsupported whole_archive_flag_spec_F77='-all_load $convenience' link_all_deplibs_F77=yes else ld_shlibs_F77=no fi ;; dgux*) archive_cmds_F77='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_libdir_flag_spec_F77='-L$libdir' hardcode_shlibpath_var_F77=no ;; freebsd1*) ld_shlibs_F77=no ;; # FreeBSD 2.2.[012] allows us to include c++rt0.o to get C++ constructor # support. Future versions do this automatically, but an explicit c++rt0.o # does not break anything, and helps significantly (at the cost of a little # extra space). freebsd2.2*) archive_cmds_F77='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags /usr/lib/c++rt0.o' hardcode_libdir_flag_spec_F77='-R$libdir' hardcode_direct_F77=yes hardcode_shlibpath_var_F77=no ;; # Unfortunately, older versions of FreeBSD 2 do not have this feature. 
freebsd2*) archive_cmds_F77='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' hardcode_direct_F77=yes hardcode_minus_L_F77=yes hardcode_shlibpath_var_F77=no ;; # FreeBSD 3 and greater uses gcc -shared to do shared libraries. freebsd* | kfreebsd*-gnu) archive_cmds_F77='$CC -shared -o $lib $libobjs $deplibs $compiler_flags' hardcode_libdir_flag_spec_F77='-R$libdir' hardcode_direct_F77=yes hardcode_shlibpath_var_F77=no ;; hpux9*) if test "$GCC" = yes; then archive_cmds_F77='$rm $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' else archive_cmds_F77='$rm $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' fi hardcode_libdir_flag_spec_F77='${wl}+b ${wl}$libdir' hardcode_libdir_separator_F77=: hardcode_direct_F77=yes # hardcode_minus_L: Not really in the search PATH, # but as the default location of the library. hardcode_minus_L_F77=yes export_dynamic_flag_spec_F77='${wl}-E' ;; hpux10* | hpux11*) if test "$GCC" = yes -a "$with_gnu_ld" = no; then case "$host_cpu" in hppa*64*|ia64*) archive_cmds_F77='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' ;; *) archive_cmds_F77='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' ;; esac else case "$host_cpu" in hppa*64*|ia64*) archive_cmds_F77='$LD -b +h $soname -o $lib $libobjs $deplibs $linker_flags' ;; *) archive_cmds_F77='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' ;; esac fi if test "$with_gnu_ld" = no; then case "$host_cpu" in hppa*64*) hardcode_libdir_flag_spec_F77='${wl}+b ${wl}$libdir' hardcode_libdir_flag_spec_ld_F77='+b $libdir' hardcode_libdir_separator_F77=: hardcode_direct_F77=no hardcode_shlibpath_var_F77=no ;; ia64*) hardcode_libdir_flag_spec_F77='-L$libdir' hardcode_direct_F77=no hardcode_shlibpath_var_F77=no # hardcode_minus_L: Not really in the search PATH, # but as the default location of the library. hardcode_minus_L_F77=yes ;; *) hardcode_libdir_flag_spec_F77='${wl}+b ${wl}$libdir' hardcode_libdir_separator_F77=: hardcode_direct_F77=yes export_dynamic_flag_spec_F77='${wl}-E' # hardcode_minus_L: Not really in the search PATH, # but as the default location of the library. 
hardcode_minus_L_F77=yes ;; esac fi ;; irix5* | irix6* | nonstopux*) if test "$GCC" = yes; then archive_cmds_F77='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && echo ${wl}-set_version ${wl}$verstring` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' else archive_cmds_F77='$LD -shared $libobjs $deplibs $linker_flags -soname $soname `test -n "$verstring" && echo -set_version $verstring` -update_registry ${output_objdir}/so_locations -o $lib' hardcode_libdir_flag_spec_ld_F77='-rpath $libdir' fi hardcode_libdir_flag_spec_F77='${wl}-rpath ${wl}$libdir' hardcode_libdir_separator_F77=: link_all_deplibs_F77=yes ;; netbsd*) if echo __ELF__ | $CC -E - | grep __ELF__ >/dev/null; then archive_cmds_F77='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' # a.out else archive_cmds_F77='$LD -shared -o $lib $libobjs $deplibs $linker_flags' # ELF fi hardcode_libdir_flag_spec_F77='-R$libdir' hardcode_direct_F77=yes hardcode_shlibpath_var_F77=no ;; newsos6) archive_cmds_F77='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_direct_F77=yes hardcode_libdir_flag_spec_F77='${wl}-rpath ${wl}$libdir' hardcode_libdir_separator_F77=: hardcode_shlibpath_var_F77=no ;; openbsd*) hardcode_direct_F77=yes hardcode_shlibpath_var_F77=no if test -z "`echo __ELF__ | $CC -E - | grep __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then archive_cmds_F77='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' hardcode_libdir_flag_spec_F77='${wl}-rpath,$libdir' export_dynamic_flag_spec_F77='${wl}-E' else case $host_os in openbsd[01].* | openbsd2.[0-7] | openbsd2.[0-7].*) archive_cmds_F77='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' hardcode_libdir_flag_spec_F77='-R$libdir' ;; *) archive_cmds_F77='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' hardcode_libdir_flag_spec_F77='${wl}-rpath,$libdir' ;; esac fi ;; os2*) hardcode_libdir_flag_spec_F77='-L$libdir' hardcode_minus_L_F77=yes allow_undefined_flag_F77=unsupported archive_cmds_F77='$echo "LIBRARY $libname INITINSTANCE" > $output_objdir/$libname.def~$echo "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~$echo DATA >> $output_objdir/$libname.def~$echo " SINGLE NONSHARED" >> $output_objdir/$libname.def~$echo EXPORTS >> $output_objdir/$libname.def~emxexp $libobjs >> $output_objdir/$libname.def~$CC -Zdll -Zcrtdll -o $lib $libobjs $deplibs $compiler_flags $output_objdir/$libname.def' old_archive_From_new_cmds_F77='emximp -o $output_objdir/$libname.a $output_objdir/$libname.def' ;; osf3*) if test "$GCC" = yes; then allow_undefined_flag_F77=' ${wl}-expect_unresolved ${wl}\*' archive_cmds_F77='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && echo ${wl}-set_version ${wl}$verstring` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' else allow_undefined_flag_F77=' -expect_unresolved \*' archive_cmds_F77='$LD -shared${allow_undefined_flag} $libobjs $deplibs $linker_flags -soname $soname `test -n "$verstring" && echo -set_version $verstring` -update_registry ${output_objdir}/so_locations -o $lib' fi hardcode_libdir_flag_spec_F77='${wl}-rpath ${wl}$libdir' hardcode_libdir_separator_F77=: ;; osf4* | osf5*) # as osf3* with the addition of -msym flag if test "$GCC" = yes; then allow_undefined_flag_F77=' ${wl}-expect_unresolved ${wl}\*' archive_cmds_F77='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname 
`test -n "$verstring" && echo ${wl}-set_version ${wl}$verstring` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' hardcode_libdir_flag_spec_F77='${wl}-rpath ${wl}$libdir' else allow_undefined_flag_F77=' -expect_unresolved \*' archive_cmds_F77='$LD -shared${allow_undefined_flag} $libobjs $deplibs $linker_flags -msym -soname $soname `test -n "$verstring" && echo -set_version $verstring` -update_registry ${output_objdir}/so_locations -o $lib' archive_expsym_cmds_F77='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done; echo "-hidden">> $lib.exp~ $LD -shared${allow_undefined_flag} -input $lib.exp $linker_flags $libobjs $deplibs -soname $soname `test -n "$verstring" && echo -set_version $verstring` -update_registry ${objdir}/so_locations -o $lib~$rm $lib.exp' # Both c and cxx compiler support -rpath directly hardcode_libdir_flag_spec_F77='-rpath $libdir' fi hardcode_libdir_separator_F77=: ;; sco3.2v5*) archive_cmds_F77='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_shlibpath_var_F77=no export_dynamic_flag_spec_F77='${wl}-Bexport' runpath_var=LD_RUN_PATH hardcode_runpath_var=yes ;; solaris*) no_undefined_flag_F77=' -z text' if test "$GCC" = yes; then archive_cmds_F77='$CC -shared ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' archive_expsym_cmds_F77='$echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~$echo "local: *; };" >> $lib.exp~ $CC -shared ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$rm $lib.exp' else archive_cmds_F77='$LD -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $linker_flags' archive_expsym_cmds_F77='$echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~$echo "local: *; };" >> $lib.exp~ $LD -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $linker_flags~$rm $lib.exp' fi hardcode_libdir_flag_spec_F77='-R$libdir' hardcode_shlibpath_var_F77=no case $host_os in solaris2.[0-5] | solaris2.[0-5].*) ;; *) # Supported since Solaris 2.6 (maybe 2.5.1?) whole_archive_flag_spec_F77='-z allextract$convenience -z defaultextract' ;; esac link_all_deplibs_F77=yes ;; sunos4*) if test "x$host_vendor" = xsequent; then # Use $CC to link under sequent, because it throws in some extra .o # files that make .init and .fini sections work. archive_cmds_F77='$CC -G ${wl}-h $soname -o $lib $libobjs $deplibs $compiler_flags' else archive_cmds_F77='$LD -assert pure-text -Bstatic -o $lib $libobjs $deplibs $linker_flags' fi hardcode_libdir_flag_spec_F77='-L$libdir' hardcode_direct_F77=yes hardcode_minus_L_F77=yes hardcode_shlibpath_var_F77=no ;; sysv4) case $host_vendor in sni) archive_cmds_F77='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_direct_F77=yes # is this really true??? ;; siemens) ## LD is ld it makes a PLAMLIB ## CC just makes a GrossModule. 
archive_cmds_F77='$LD -G -o $lib $libobjs $deplibs $linker_flags' reload_cmds_F77='$CC -r -o $output$reload_objs' hardcode_direct_F77=no ;; motorola) archive_cmds_F77='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_direct_F77=no #Motorola manual says yes, but my tests say they lie ;; esac runpath_var='LD_RUN_PATH' hardcode_shlibpath_var_F77=no ;; sysv4.3*) archive_cmds_F77='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_shlibpath_var_F77=no export_dynamic_flag_spec_F77='-Bexport' ;; sysv4*MP*) if test -d /usr/nec; then archive_cmds_F77='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_shlibpath_var_F77=no runpath_var=LD_RUN_PATH hardcode_runpath_var=yes ld_shlibs_F77=yes fi ;; sysv4.2uw2*) archive_cmds_F77='$LD -G -o $lib $libobjs $deplibs $linker_flags' hardcode_direct_F77=yes hardcode_minus_L_F77=no hardcode_shlibpath_var_F77=no hardcode_runpath_var=yes runpath_var=LD_RUN_PATH ;; sysv5OpenUNIX8* | sysv5UnixWare7* | sysv5uw[78]* | unixware7*) no_undefined_flag_F77='${wl}-z ${wl}text' if test "$GCC" = yes; then archive_cmds_F77='$CC -shared ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' else archive_cmds_F77='$CC -G ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' fi runpath_var='LD_RUN_PATH' hardcode_shlibpath_var_F77=no ;; sysv5*) no_undefined_flag_F77=' -z text' # $CC -shared without GNU ld will not create a library from C++ # object files and a static libstdc++, better avoid it by now archive_cmds_F77='$LD -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $linker_flags' archive_expsym_cmds_F77='$echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~$echo "local: *; };" >> $lib.exp~ $LD -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $linker_flags~$rm $lib.exp' hardcode_libdir_flag_spec_F77= hardcode_shlibpath_var_F77=no runpath_var='LD_RUN_PATH' ;; uts4*) archive_cmds_F77='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_libdir_flag_spec_F77='-L$libdir' hardcode_shlibpath_var_F77=no ;; *) ld_shlibs_F77=no ;; esac fi echo "$as_me:$LINENO: result: $ld_shlibs_F77" >&5 echo "${ECHO_T}$ld_shlibs_F77" >&6 test "$ld_shlibs_F77" = no && can_build_shared=no variables_saved_for_relink="PATH $shlibpath_var $runpath_var" if test "$GCC" = yes; then variables_saved_for_relink="$variables_saved_for_relink GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH" fi # # Do we need to explicitly link libc? # case "x$archive_cmds_need_lc_F77" in x|xyes) # Assume -lc should be added archive_cmds_need_lc_F77=yes if test "$enable_shared" = yes && test "$GCC" = yes; then case $archive_cmds_F77 in *'~'*) # FIXME: we may have to deal with multi-command sequences. ;; '$CC '*) # Test whether the compiler implicitly links with -lc since on some # systems, -lgcc has to come before -lc. If gcc already passes -lc # to ld, don't add -lc before -lgcc. echo "$as_me:$LINENO: checking whether -lc should be explicitly linked in" >&5 echo $ECHO_N "checking whether -lc should be explicitly linked in... $ECHO_C" >&6 $rm conftest* printf "$lt_simple_compile_test_code" > conftest.$ac_ext if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 (eval $ac_compile) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } 2>conftest.err; then soname=conftest lib=conftest libobjs=conftest.$ac_objext deplibs= wl=$lt_prog_compiler_wl_F77 compiler_flags=-v linker_flags=-v verstring= output_objdir=. 
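# A minimal sketch (hypothetical demo_* names) of the "-lc needed?" probe
# being prepared here: link a throwaway object verbosely and check whether
# the compiler driver already passes -lc to the linker by itself.
if test -f demo_conftest.o; then
  if $CC -shared demo_conftest.o -v -o demo_conftest.so 2>&1 | grep ' -lc ' >/dev/null; then
    demo_need_lc=no    # the driver adds -lc on its own
  else
    demo_need_lc=yes   # libtool would have to append -lc explicitly
  fi
  rm -f demo_conftest.so
fi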
libname=conftest lt_save_allow_undefined_flag=$allow_undefined_flag_F77 allow_undefined_flag_F77= if { (eval echo "$as_me:$LINENO: \"$archive_cmds_F77 2\>\&1 \| grep \" -lc \" \>/dev/null 2\>\&1\"") >&5 (eval $archive_cmds_F77 2\>\&1 \| grep \" -lc \" \>/dev/null 2\>\&1) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } then archive_cmds_need_lc_F77=no else archive_cmds_need_lc_F77=yes fi allow_undefined_flag_F77=$lt_save_allow_undefined_flag else cat conftest.err 1>&5 fi $rm conftest* echo "$as_me:$LINENO: result: $archive_cmds_need_lc_F77" >&5 echo "${ECHO_T}$archive_cmds_need_lc_F77" >&6 ;; esac fi ;; esac echo "$as_me:$LINENO: checking dynamic linker characteristics" >&5 echo $ECHO_N "checking dynamic linker characteristics... $ECHO_C" >&6 library_names_spec= libname_spec='lib$name' soname_spec= shrext_cmds=".so" postinstall_cmds= postuninstall_cmds= finish_cmds= finish_eval= shlibpath_var= shlibpath_overrides_runpath=unknown version_type=none dynamic_linker="$host_os ld.so" sys_lib_dlsearch_path_spec="/lib /usr/lib" if test "$GCC" = yes; then sys_lib_search_path_spec=`$CC -print-search-dirs | grep "^libraries:" | $SED -e "s/^libraries://" -e "s,=/,/,g"` if echo "$sys_lib_search_path_spec" | grep ';' >/dev/null ; then # if the path contains ";" then we assume it to be the separator # otherwise default to the standard path separator (i.e. ":") - it is # assumed that no part of a normal pathname contains ";" but that should # okay in the real world where ";" in dirpaths is itself problematic. sys_lib_search_path_spec=`echo "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` else sys_lib_search_path_spec=`echo "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` fi else sys_lib_search_path_spec="/lib /usr/lib /usr/local/lib" fi need_lib_prefix=unknown hardcode_into_libs=no # when you set need_version to no, make sure it does not cause -set_version # flags to be left without arguments need_version=unknown case $host_os in aix3*) version_type=linux library_names_spec='${libname}${release}${shared_ext}$versuffix $libname.a' shlibpath_var=LIBPATH # AIX 3 has no versioning support, so we append a major version to the name. soname_spec='${libname}${release}${shared_ext}$major' ;; aix4* | aix5*) version_type=linux need_lib_prefix=no need_version=no hardcode_into_libs=yes if test "$host_cpu" = ia64; then # AIX 5 supports IA64 library_names_spec='${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext}$versuffix $libname${shared_ext}' shlibpath_var=LD_LIBRARY_PATH else # With GCC up to 2.95.x, collect2 would create an import file # for dependence libraries. The import file would start with # the line `#! .'. This would cause the generated library to # depend on `.', always an invalid library. This was fixed in # development snapshots of GCC prior to 3.0. case $host_os in aix4 | aix4.[01] | aix4.[01].*) if { echo '#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 97)' echo ' yes ' echo '#endif'; } | ${CC} -E - | grep yes > /dev/null; then : else can_build_shared=no fi ;; esac # AIX (on Power*) has no versioning support, so currently we can not hardcode correct # soname into executable. Probably we can add versioning support to # collect2, so additional links can be useful in future. if test "$aix_use_runtimelinking" = yes; then # If using run time linking (on AIX 4.2 or later) use lib.so # instead of lib.a to let people know that these are not # typical AIX shared libraries. 
library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' else # We preserve .a as extension for shared libraries through AIX4.2 # and later when we are not doing run time linking. library_names_spec='${libname}${release}.a $libname.a' soname_spec='${libname}${release}${shared_ext}$major' fi shlibpath_var=LIBPATH fi ;; amigaos*) library_names_spec='$libname.ixlibrary $libname.a' # Create ${libname}_ixlibrary.a entries in /sys/libs. finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`$echo "X$lib" | $Xsed -e '\''s%^.*/\([^/]*\)\.ixlibrary$%\1%'\''`; test $rm /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done' ;; beos*) library_names_spec='${libname}${shared_ext}' dynamic_linker="$host_os ld.so" shlibpath_var=LIBRARY_PATH ;; bsdi4*) version_type=linux need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' finish_cmds='PATH="\$PATH:/sbin" ldconfig $libdir' shlibpath_var=LD_LIBRARY_PATH sys_lib_search_path_spec="/shlib /usr/lib /usr/X11/lib /usr/contrib/lib /lib /usr/local/lib" sys_lib_dlsearch_path_spec="/shlib /usr/lib /usr/local/lib" # the default ld.so.conf also contains /usr/contrib/lib and # /usr/X11R6/lib (/usr/X11 is a link to /usr/X11R6), but let us allow # libtool to hard-code these into programs ;; cygwin* | mingw* | pw32*) version_type=windows shrext_cmds=".dll" need_version=no need_lib_prefix=no case $GCC,$host_os in yes,cygwin* | yes,mingw* | yes,pw32*) library_names_spec='$libname.dll.a' # DLL is installed to $(libdir)/../bin by postinstall_cmds postinstall_cmds='base_file=`basename \${file}`~ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i;echo \$dlname'\''`~ dldir=$destdir/`dirname \$dlpath`~ test -d \$dldir || mkdir -p \$dldir~ $install_prog $dir/$dlname \$dldir/$dlname' postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~ dlpath=$dir/\$dldll~ $rm \$dlpath' shlibpath_overrides_runpath=yes case $host_os in cygwin*) # Cygwin DLLs use 'cyg' prefix rather than 'lib' soname_spec='`echo ${libname} | sed -e 's/^lib/cyg/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' sys_lib_search_path_spec="/usr/lib /lib/w32api /lib /usr/local/lib" ;; mingw*) # MinGW DLLs use traditional 'lib' prefix soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' sys_lib_search_path_spec=`$CC -print-search-dirs | grep "^libraries:" | $SED -e "s/^libraries://" -e "s,=/,/,g"` if echo "$sys_lib_search_path_spec" | grep ';[c-zC-Z]:/' >/dev/null; then # It is most probably a Windows format PATH printed by # mingw gcc, but we are running on Cygwin. Gcc prints its search # path with ; separators, and with drive letters. We can handle the # drive letters (cygwin fileutils understands them), so leave them, # especially as we might pass files found there to a mingw objdump, # which wouldn't understand a cygwinified path. Ahh. 
sys_lib_search_path_spec=`echo "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` else sys_lib_search_path_spec=`echo "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` fi ;; pw32*) # pw32 DLLs use 'pw' prefix rather than 'lib' library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/./-/g'`${versuffix}${shared_ext}' ;; esac ;; *) library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib' ;; esac dynamic_linker='Win32 ld.exe' # FIXME: first we should search . and the directory the executable is in shlibpath_var=PATH ;; darwin* | rhapsody*) dynamic_linker="$host_os dyld" version_type=darwin need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${versuffix}$shared_ext ${libname}${release}${major}$shared_ext ${libname}$shared_ext' soname_spec='${libname}${release}${major}$shared_ext' shlibpath_overrides_runpath=yes shlibpath_var=DYLD_LIBRARY_PATH shrext_cmds='$(test .$module = .yes && echo .so || echo .dylib)' # Apple's gcc prints 'gcc -print-search-dirs' doesn't operate the same. if test "$GCC" = yes; then sys_lib_search_path_spec=`$CC -print-search-dirs | tr "\n" "$PATH_SEPARATOR" | sed -e 's/libraries:/@libraries:/' | tr "@" "\n" | grep "^libraries:" | sed -e "s/^libraries://" -e "s,=/,/,g" -e "s,$PATH_SEPARATOR, ,g" -e "s,.*,& /lib /usr/lib /usr/local/lib,g"` else sys_lib_search_path_spec='/lib /usr/lib /usr/local/lib' fi sys_lib_dlsearch_path_spec='/usr/local/lib /lib /usr/lib' ;; dgux*) version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname$shared_ext' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH ;; freebsd1*) dynamic_linker=no ;; kfreebsd*-gnu) version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes dynamic_linker='GNU ld.so' ;; freebsd*) objformat=`test -x /usr/bin/objformat && /usr/bin/objformat || echo aout` version_type=freebsd-$objformat case $version_type in freebsd-elf*) library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}' need_version=no need_lib_prefix=no ;; freebsd-*) library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix' need_version=yes ;; esac shlibpath_var=LD_LIBRARY_PATH case $host_os in freebsd2*) shlibpath_overrides_runpath=yes ;; freebsd3.01* | freebsdelf3.01*) shlibpath_overrides_runpath=yes hardcode_into_libs=yes ;; *) # from 3.2 on shlibpath_overrides_runpath=no hardcode_into_libs=yes ;; esac ;; gnu*) version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH hardcode_into_libs=yes ;; hpux9* | hpux10* | hpux11*) # Give a soname corresponding to the major version so that dld.sl refuses to # link against other versions. 
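# A minimal sketch of how specs such as library_names_spec and soname_spec
# are expanded later on. All demo_* values are made up; demo_major and
# demo_versuffix are assumed to carry their leading dots, as libtool
# composes them.
demo_libname=libdemo
demo_release=
demo_shared_ext=.so
demo_major=.2
demo_versuffix=.2.0.4
demo_spec='${demo_libname}${demo_release}${demo_shared_ext}$demo_versuffix ${demo_libname}${demo_release}${demo_shared_ext}$demo_major $demo_libname$demo_shared_ext'
eval "demo_names=\"$demo_spec\""
# demo_names is now "libdemo.so.2.0.4 libdemo.so.2 libdemo.so": the fully
# versioned file, the soname, and the bare linker name.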
version_type=sunos need_lib_prefix=no need_version=no case "$host_cpu" in ia64*) shrext_cmds='.so' hardcode_into_libs=yes dynamic_linker="$host_os dld.so" shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' if test "X$HPUX_IA64_MODE" = X32; then sys_lib_search_path_spec="/usr/lib/hpux32 /usr/local/lib/hpux32 /usr/local/lib" else sys_lib_search_path_spec="/usr/lib/hpux64 /usr/local/lib/hpux64" fi sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec ;; hppa*64*) shrext_cmds='.sl' hardcode_into_libs=yes dynamic_linker="$host_os dld.sl" shlibpath_var=LD_LIBRARY_PATH # How should we handle SHLIB_PATH shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' sys_lib_search_path_spec="/usr/lib/pa20_64 /usr/ccs/lib/pa20_64" sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec ;; *) shrext_cmds='.sl' dynamic_linker="$host_os dld.sl" shlibpath_var=SHLIB_PATH shlibpath_overrides_runpath=no # +s is required to enable SHLIB_PATH library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' ;; esac # HP-UX runs *really* slowly unless shared libraries are mode 555. postinstall_cmds='chmod 555 $lib' ;; irix5* | irix6* | nonstopux*) case $host_os in nonstopux*) version_type=nonstopux ;; *) if test "$lt_cv_prog_gnu_ld" = yes; then version_type=linux else version_type=irix fi ;; esac need_lib_prefix=no need_version=no soname_spec='${libname}${release}${shared_ext}$major' library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext} $libname${shared_ext}' case $host_os in irix5* | nonstopux*) libsuff= shlibsuff= ;; *) case $LD in # libtool.m4 will add one of these switches to LD *-32|*"-32 "|*-melf32bsmip|*"-melf32bsmip ") libsuff= shlibsuff= libmagic=32-bit;; *-n32|*"-n32 "|*-melf32bmipn32|*"-melf32bmipn32 ") libsuff=32 shlibsuff=N32 libmagic=N32;; *-64|*"-64 "|*-melf64bmip|*"-melf64bmip ") libsuff=64 shlibsuff=64 libmagic=64-bit;; *) libsuff= shlibsuff= libmagic=never-match;; esac ;; esac shlibpath_var=LD_LIBRARY${shlibsuff}_PATH shlibpath_overrides_runpath=no sys_lib_search_path_spec="/usr/lib${libsuff} /lib${libsuff} /usr/local/lib${libsuff}" sys_lib_dlsearch_path_spec="/usr/lib${libsuff} /lib${libsuff}" hardcode_into_libs=yes ;; # No shared lib support for Linux oldld, aout, or coff. linux*oldld* | linux*aout* | linux*coff*) dynamic_linker=no ;; # This must be Linux ELF. linux*) version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no # This implies no fast_install, which is unacceptable. # Some rework will be needed to allow for fast_install # before this can be enabled. 
hardcode_into_libs=yes # find out which ABI we are using libsuff= case "$host_cpu" in x86_64*|s390x*|powerpc64*) echo '#line 14092 "configure"' > conftest.$ac_ext if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 (eval $ac_compile) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; then case `/usr/bin/file conftest.$ac_objext` in *64-bit*) libsuff=64 sys_lib_search_path_spec="/lib${libsuff} /usr/lib${libsuff} /usr/local/lib${libsuff}" ;; esac fi rm -rf conftest* ;; esac # Append ld.so.conf contents to the search path if test -f /etc/ld.so.conf; then lt_ld_extra=`$SED -e 's/:,\t/ /g;s/=^=*$//;s/=^= * / /g' /etc/ld.so.conf | tr '\n' ' '` sys_lib_dlsearch_path_spec="/lib${libsuff} /usr/lib${libsuff} $lt_ld_extra" fi # We used to test for /lib/ld.so.1 and disable shared libraries on # powerpc, because MkLinux only supported shared libraries with the # GNU dynamic linker. Since this was broken with cross compilers, # most powerpc-linux boxes support dynamic linking these days and # people can always --disable-shared, the test was removed, and we # assume the GNU/Linux dynamic linker is in use. dynamic_linker='GNU/Linux ld.so' ;; knetbsd*-gnu) version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes dynamic_linker='GNU ld.so' ;; netbsd*) version_type=sunos need_lib_prefix=no need_version=no if echo __ELF__ | $CC -E - | grep __ELF__ >/dev/null; then library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' dynamic_linker='NetBSD (a.out) ld.so' else library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' dynamic_linker='NetBSD ld.elf_so' fi shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes hardcode_into_libs=yes ;; newsos6) version_type=linux library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes ;; nto-qnx*) version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes ;; openbsd*) version_type=sunos need_lib_prefix=no need_version=yes library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' shlibpath_var=LD_LIBRARY_PATH if test -z "`echo __ELF__ | $CC -E - | grep __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then case $host_os in openbsd2.[89] | openbsd2.[89].*) shlibpath_overrides_runpath=no ;; *) shlibpath_overrides_runpath=yes ;; esac else shlibpath_overrides_runpath=yes fi ;; os2*) libname_spec='$name' shrext_cmds=".dll" need_lib_prefix=no library_names_spec='$libname${shared_ext} $libname.a' dynamic_linker='OS/2 ld.exe' shlibpath_var=LIBPATH ;; osf3* | osf4* | osf5*) version_type=osf need_lib_prefix=no need_version=no soname_spec='${libname}${release}${shared_ext}$major' 
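# A minimal sketch (demo_* names are illustrative) of the 64-bit ABI probe in
# the linux*) case above: compile something, ask file(1) what kind of object
# came out, and pick the matching lib directory suffix.
demo_libsuff=
if test -f demo_conftest.o && test -x /usr/bin/file; then
  case `/usr/bin/file demo_conftest.o` in
    *64-bit*) demo_libsuff=64 ;;
  esac
fi
# a 64-bit object turns the default search path into /lib64 /usr/lib64 ...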
library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' shlibpath_var=LD_LIBRARY_PATH sys_lib_search_path_spec="/usr/shlib /usr/ccs/lib /usr/lib/cmplrs/cc /usr/lib /usr/local/lib /var/shlib" sys_lib_dlsearch_path_spec="$sys_lib_search_path_spec" ;; sco3.2v5*) version_type=osf soname_spec='${libname}${release}${shared_ext}$major' library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' shlibpath_var=LD_LIBRARY_PATH ;; solaris*) version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes hardcode_into_libs=yes # ldd complains unless libraries are executable postinstall_cmds='chmod +x $lib' ;; sunos4*) version_type=sunos library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' finish_cmds='PATH="\$PATH:/usr/etc" ldconfig $libdir' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes if test "$with_gnu_ld" = yes; then need_lib_prefix=no fi need_version=yes ;; sysv4 | sysv4.2uw2* | sysv4.3* | sysv5*) version_type=linux library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH case $host_vendor in sni) shlibpath_overrides_runpath=no need_lib_prefix=no export_dynamic_flag_spec='${wl}-Blargedynsym' runpath_var=LD_RUN_PATH ;; siemens) need_lib_prefix=no ;; motorola) need_lib_prefix=no need_version=no shlibpath_overrides_runpath=no sys_lib_search_path_spec='/lib /usr/lib /usr/ccs/lib' ;; esac ;; sysv4*MP*) if test -d /usr/nec ;then version_type=linux library_names_spec='$libname${shared_ext}.$versuffix $libname${shared_ext}.$major $libname${shared_ext}' soname_spec='$libname${shared_ext}.$major' shlibpath_var=LD_LIBRARY_PATH fi ;; uts4*) version_type=linux library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH ;; *) dynamic_linker=no ;; esac echo "$as_me:$LINENO: result: $dynamic_linker" >&5 echo "${ECHO_T}$dynamic_linker" >&6 test "$dynamic_linker" = no && can_build_shared=no echo "$as_me:$LINENO: checking how to hardcode library paths into programs" >&5 echo $ECHO_N "checking how to hardcode library paths into programs... $ECHO_C" >&6 hardcode_action_F77= if test -n "$hardcode_libdir_flag_spec_F77" || \ test -n "$runpath_var F77" || \ test "X$hardcode_automatic_F77"="Xyes" ; then # We can hardcode non-existant directories. if test "$hardcode_direct_F77" != no && # If the only mechanism to avoid hardcoding is shlibpath_var, we # have to relink, otherwise we might link with an installed library # when we should be linking with a yet-to-be-installed one ## test "$_LT_AC_TAGVAR(hardcode_shlibpath_var, F77)" != no && test "$hardcode_minus_L_F77" != no; then # Linking always hardcodes the temporary library directory. hardcode_action_F77=relink else # We can link without hardcoding, and we can hardcode nonexisting dirs. hardcode_action_F77=immediate fi else # We cannot hardcode anything, or else we can only hardcode existing # directories. 
hardcode_action_F77=unsupported fi echo "$as_me:$LINENO: result: $hardcode_action_F77" >&5 echo "${ECHO_T}$hardcode_action_F77" >&6 if test "$hardcode_action_F77" = relink; then # Fast installation is not supported enable_fast_install=no elif test "$shlibpath_overrides_runpath" = yes || test "$enable_shared" = no; then # Fast installation is not necessary enable_fast_install=needless fi striplib= old_striplib= echo "$as_me:$LINENO: checking whether stripping libraries is possible" >&5 echo $ECHO_N "checking whether stripping libraries is possible... $ECHO_C" >&6 if test -n "$STRIP" && $STRIP -V 2>&1 | grep "GNU strip" >/dev/null; then test -z "$old_striplib" && old_striplib="$STRIP --strip-debug" test -z "$striplib" && striplib="$STRIP --strip-unneeded" echo "$as_me:$LINENO: result: yes" >&5 echo "${ECHO_T}yes" >&6 else # FIXME - insert some real tests, host_os isn't really good enough case $host_os in darwin*) if test -n "$STRIP" ; then striplib="$STRIP -x" echo "$as_me:$LINENO: result: yes" >&5 echo "${ECHO_T}yes" >&6 else echo "$as_me:$LINENO: result: no" >&5 echo "${ECHO_T}no" >&6 fi ;; *) echo "$as_me:$LINENO: result: no" >&5 echo "${ECHO_T}no" >&6 ;; esac fi # The else clause should only fire when bootstrapping the # libtool distribution, otherwise you forgot to ship ltmain.sh # with your package, and you will get complaints that there are # no rules to generate ltmain.sh. if test -f "$ltmain"; then # See if we are running on zsh, and set the options which allow our commands through # without removal of \ escapes. if test -n "${ZSH_VERSION+set}" ; then setopt NO_GLOB_SUBST fi # Now quote all the things that may contain metacharacters while being # careful not to overquote the AC_SUBSTed values. We take copies of the # variables and quote the copies for generation of the libtool script. 
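# --- editor's sketch (not emitted by configure; the regex is a simplified stand-in) ---
# The quoting loop that follows relies on sed substitutions of roughly this
# shape to backslash-escape shell metacharacters before each value is written
# into the generated libtool script:
probe_quote='s/\([`"$\\]\)/\\\1/g'
probe_val='echo "$HOME" `date`'
probe_quoted=`printf '%s\n' "X$probe_val" | sed -e '1s/^X//' -e "$probe_quote"`
# probe_quoted is now: echo \"\$HOME\" \`date\`  -- safe to re-embed inside a
# double-quoted assignment in the generated script.
# --- end editor's sketch ---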
for var in echo old_CC old_CFLAGS AR AR_FLAGS EGREP RANLIB LN_S LTCC NM \ SED SHELL STRIP \ libname_spec library_names_spec soname_spec extract_expsyms_cmds \ old_striplib striplib file_magic_cmd finish_cmds finish_eval \ deplibs_check_method reload_flag reload_cmds need_locks \ lt_cv_sys_global_symbol_pipe lt_cv_sys_global_symbol_to_cdecl \ lt_cv_sys_global_symbol_to_c_name_address \ sys_lib_search_path_spec sys_lib_dlsearch_path_spec \ old_postinstall_cmds old_postuninstall_cmds \ compiler_F77 \ CC_F77 \ LD_F77 \ lt_prog_compiler_wl_F77 \ lt_prog_compiler_pic_F77 \ lt_prog_compiler_static_F77 \ lt_prog_compiler_no_builtin_flag_F77 \ export_dynamic_flag_spec_F77 \ thread_safe_flag_spec_F77 \ whole_archive_flag_spec_F77 \ enable_shared_with_static_runtimes_F77 \ old_archive_cmds_F77 \ old_archive_from_new_cmds_F77 \ predep_objects_F77 \ postdep_objects_F77 \ predeps_F77 \ postdeps_F77 \ compiler_lib_search_path_F77 \ archive_cmds_F77 \ archive_expsym_cmds_F77 \ postinstall_cmds_F77 \ postuninstall_cmds_F77 \ old_archive_from_expsyms_cmds_F77 \ allow_undefined_flag_F77 \ no_undefined_flag_F77 \ export_symbols_cmds_F77 \ hardcode_libdir_flag_spec_F77 \ hardcode_libdir_flag_spec_ld_F77 \ hardcode_libdir_separator_F77 \ hardcode_automatic_F77 \ module_cmds_F77 \ module_expsym_cmds_F77 \ lt_cv_prog_compiler_c_o_F77 \ exclude_expsyms_F77 \ include_expsyms_F77; do case $var in old_archive_cmds_F77 | \ old_archive_from_new_cmds_F77 | \ archive_cmds_F77 | \ archive_expsym_cmds_F77 | \ module_cmds_F77 | \ module_expsym_cmds_F77 | \ old_archive_from_expsyms_cmds_F77 | \ export_symbols_cmds_F77 | \ extract_expsyms_cmds | reload_cmds | finish_cmds | \ postinstall_cmds | postuninstall_cmds | \ old_postinstall_cmds | old_postuninstall_cmds | \ sys_lib_search_path_spec | sys_lib_dlsearch_path_spec) # Double-quote double-evaled strings. eval "lt_$var=\\\"\`\$echo \"X\$$var\" | \$Xsed -e \"\$double_quote_subst\" -e \"\$sed_quote_subst\" -e \"\$delay_variable_subst\"\`\\\"" ;; *) eval "lt_$var=\\\"\`\$echo \"X\$$var\" | \$Xsed -e \"\$sed_quote_subst\"\`\\\"" ;; esac done case $lt_echo in *'\$0 --fallback-echo"') lt_echo=`$echo "X$lt_echo" | $Xsed -e 's/\\\\\\\$0 --fallback-echo"$/$0 --fallback-echo"/'` ;; esac cfgfile="$ofile" cat <<__EOF__ >> "$cfgfile" # ### BEGIN LIBTOOL TAG CONFIG: $tagname # Libtool was configured on host `(hostname || uname -n) 2>/dev/null | sed 1q`: # Shell to use when invoking shell scripts. SHELL=$lt_SHELL # Whether or not to build shared libraries. build_libtool_libs=$enable_shared # Whether or not to build static libraries. build_old_libs=$enable_static # Whether or not to add -lc for building shared libraries. build_libtool_need_lc=$archive_cmds_need_lc_F77 # Whether or not to disallow shared libs when runtime libs are static allow_libtool_libs_with_static_runtimes=$enable_shared_with_static_runtimes_F77 # Whether or not to optimize for fast installation. fast_install=$enable_fast_install # The host system. host_alias=$host_alias host=$host # An echo program that does not interpret backslashes. echo=$lt_echo # The archiver. AR=$lt_AR AR_FLAGS=$lt_AR_FLAGS # A C compiler. LTCC=$lt_LTCC # A language-specific compiler. CC=$lt_compiler_F77 # Is the compiler the GNU C compiler? with_gcc=$GCC_F77 # An ERE matcher. EGREP=$lt_EGREP # The linker used to build libraries. LD=$lt_LD_F77 # Whether we need hard or soft links. LN_S=$lt_LN_S # A BSD-compatible nm program. 
NM=$lt_NM # A symbol stripping program STRIP=$lt_STRIP # Used to examine libraries when file_magic_cmd begins "file" MAGIC_CMD=$MAGIC_CMD # Used on cygwin: DLL creation program. DLLTOOL="$DLLTOOL" # Used on cygwin: object dumper. OBJDUMP="$OBJDUMP" # Used on cygwin: assembler. AS="$AS" # The name of the directory that contains temporary libtool files. objdir=$objdir # How to create reloadable object files. reload_flag=$lt_reload_flag reload_cmds=$lt_reload_cmds # How to pass a linker flag through the compiler. wl=$lt_lt_prog_compiler_wl_F77 # Object file suffix (normally "o"). objext="$ac_objext" # Old archive suffix (normally "a"). libext="$libext" # Shared library suffix (normally ".so"). shrext_cmds='$shrext_cmds' # Executable file suffix (normally ""). exeext="$exeext" # Additional compiler flags for building library objects. pic_flag=$lt_lt_prog_compiler_pic_F77 pic_mode=$pic_mode # What is the maximum length of a command? max_cmd_len=$lt_cv_sys_max_cmd_len # Does compiler simultaneously support -c and -o options? compiler_c_o=$lt_lt_cv_prog_compiler_c_o_F77 # Must we lock files when doing compilation ? need_locks=$lt_need_locks # Do we need the lib prefix for modules? need_lib_prefix=$need_lib_prefix # Do we need a version for libraries? need_version=$need_version # Whether dlopen is supported. dlopen_support=$enable_dlopen # Whether dlopen of programs is supported. dlopen_self=$enable_dlopen_self # Whether dlopen of statically linked programs is supported. dlopen_self_static=$enable_dlopen_self_static # Compiler flag to prevent dynamic linking. link_static_flag=$lt_lt_prog_compiler_static_F77 # Compiler flag to turn off builtin functions. no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag_F77 # Compiler flag to allow reflexive dlopens. export_dynamic_flag_spec=$lt_export_dynamic_flag_spec_F77 # Compiler flag to generate shared objects directly from archives. whole_archive_flag_spec=$lt_whole_archive_flag_spec_F77 # Compiler flag to generate thread-safe objects. thread_safe_flag_spec=$lt_thread_safe_flag_spec_F77 # Library versioning type. version_type=$version_type # Format of library name prefix. libname_spec=$lt_libname_spec # List of archive names. First name is the real one, the rest are links. # The last name is the one that the linker finds with -lNAME. library_names_spec=$lt_library_names_spec # The coded name of the library, if different from the real name. soname_spec=$lt_soname_spec # Commands used to build and install an old-style archive. RANLIB=$lt_RANLIB old_archive_cmds=$lt_old_archive_cmds_F77 old_postinstall_cmds=$lt_old_postinstall_cmds old_postuninstall_cmds=$lt_old_postuninstall_cmds # Create an old-style archive from a shared archive. old_archive_from_new_cmds=$lt_old_archive_from_new_cmds_F77 # Create a temporary old-style archive to link instead of a shared archive. old_archive_from_expsyms_cmds=$lt_old_archive_from_expsyms_cmds_F77 # Commands used to build and install a shared archive. archive_cmds=$lt_archive_cmds_F77 archive_expsym_cmds=$lt_archive_expsym_cmds_F77 postinstall_cmds=$lt_postinstall_cmds postuninstall_cmds=$lt_postuninstall_cmds # Commands used to build a loadable module (assumed same as above if empty) module_cmds=$lt_module_cmds_F77 module_expsym_cmds=$lt_module_expsym_cmds_F77 # Commands to strip libraries. old_striplib=$lt_old_striplib striplib=$lt_striplib # Dependencies to place before the objects being linked to create a # shared library. 
predep_objects=$lt_predep_objects_F77 # Dependencies to place after the objects being linked to create a # shared library. postdep_objects=$lt_postdep_objects_F77 # Dependencies to place before the objects being linked to create a # shared library. predeps=$lt_predeps_F77 # Dependencies to place after the objects being linked to create a # shared library. postdeps=$lt_postdeps_F77 # The library search path used internally by the compiler when linking # a shared library. compiler_lib_search_path=$lt_compiler_lib_search_path_F77 # Method to check whether dependent libraries are shared objects. deplibs_check_method=$lt_deplibs_check_method # Command to use when deplibs_check_method == file_magic. file_magic_cmd=$lt_file_magic_cmd # Flag that allows shared libraries with undefined symbols to be built. allow_undefined_flag=$lt_allow_undefined_flag_F77 # Flag that forces no undefined symbols. no_undefined_flag=$lt_no_undefined_flag_F77 # Commands used to finish a libtool library installation in a directory. finish_cmds=$lt_finish_cmds # Same as above, but a single script fragment to be evaled but not shown. finish_eval=$lt_finish_eval # Take the output of nm and produce a listing of raw symbols and C names. global_symbol_pipe=$lt_lt_cv_sys_global_symbol_pipe # Transform the output of nm in a proper C declaration global_symbol_to_cdecl=$lt_lt_cv_sys_global_symbol_to_cdecl # Transform the output of nm in a C name address pair global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address # This is the shared library runtime path variable. runpath_var=$runpath_var # This is the shared library path variable. shlibpath_var=$shlibpath_var # Is shlibpath searched before the hard-coded library search path? shlibpath_overrides_runpath=$shlibpath_overrides_runpath # How to hardcode a shared library path into an executable. hardcode_action=$hardcode_action_F77 # Whether we should hardcode library paths into libraries. hardcode_into_libs=$hardcode_into_libs # Flag to hardcode \$libdir into a binary during linking. # This must work even if \$libdir does not exist. hardcode_libdir_flag_spec=$lt_hardcode_libdir_flag_spec_F77 # If ld is used when linking, flag to hardcode \$libdir into # a binary during linking. This must work even if \$libdir does # not exist. hardcode_libdir_flag_spec_ld=$lt_hardcode_libdir_flag_spec_ld_F77 # Whether we need a single -rpath flag with a separated argument. hardcode_libdir_separator=$lt_hardcode_libdir_separator_F77 # Set to yes if using DIR/libNAME${shared_ext} during linking hardcodes DIR into the # resulting binary. hardcode_direct=$hardcode_direct_F77 # Set to yes if using the -LDIR flag during linking hardcodes DIR into the # resulting binary. hardcode_minus_L=$hardcode_minus_L_F77 # Set to yes if using SHLIBPATH_VAR=DIR during linking hardcodes DIR into # the resulting binary. hardcode_shlibpath_var=$hardcode_shlibpath_var_F77 # Set to yes if building a shared library automatically hardcodes DIR into the library # and all subsequent libraries and executables linked against it. hardcode_automatic=$hardcode_automatic_F77 # Variables whose values should be saved in libtool wrapper scripts and # restored at relink time. variables_saved_for_relink="$variables_saved_for_relink" # Whether libtool must link a program against all its dependency libraries. 
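# --- editor's sketch (an aside; the probe_* names are hypothetical) ---
# Everything between the BEGIN/END "LIBTOOL TAG CONFIG" markers written here
# is pulled back out of the generated libtool script by ltmain, roughly like:
probe_tag=F77
sed -n -e "/^# ### BEGIN LIBTOOL TAG CONFIG: $probe_tag\$/,/^# ### END LIBTOOL TAG CONFIG: $probe_tag\$/p" libtool > probe_tagcfg
# probe_tagcfg then holds just the F77 variable block being emitted here.
# --- end editor's sketch ---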
link_all_deplibs=$link_all_deplibs_F77 # Compile-time system search path for libraries sys_lib_search_path_spec=$lt_sys_lib_search_path_spec # Run-time system search path for libraries sys_lib_dlsearch_path_spec=$lt_sys_lib_dlsearch_path_spec # Fix the shell variable \$srcfile for the compiler. fix_srcfile_path="$fix_srcfile_path_F77" # Set to yes if exported symbols are required. always_export_symbols=$always_export_symbols_F77 # The commands to list exported symbols. export_symbols_cmds=$lt_export_symbols_cmds_F77 # The commands to extract the exported symbol list from a shared archive. extract_expsyms_cmds=$lt_extract_expsyms_cmds # Symbols that should not be listed in the preloaded symbols. exclude_expsyms=$lt_exclude_expsyms_F77 # Symbols that must always be exported. include_expsyms=$lt_include_expsyms_F77 # ### END LIBTOOL TAG CONFIG: $tagname __EOF__ else # If there is no Makefile yet, we rely on a make rule to execute # `config.status --recheck' to rerun these tests and create the # libtool script then. ltmain_in=`echo $ltmain | sed -e 's/\.sh$/.in/'` if test -f "$ltmain_in"; then test -f Makefile && make "$ltmain" fi fi ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu CC="$lt_save_CC" else tagname="" fi ;; GCJ) if test -n "$GCJ" && test "X$GCJ" != "Xno"; then # Source file extension for Java test sources. ac_ext=java # Object file extension for compiled Java test sources. objext=o objext_GCJ=$objext # Code to be used in simple compile tests lt_simple_compile_test_code="class foo {}\n" # Code to be used in simple link tests lt_simple_link_test_code='public class conftest { public static void main(String argv) {}; }\n' # ltmain only uses $CC for tagged configurations so make sure $CC is set. # If no C compiler was specified, use CC. LTCC=${LTCC-"$CC"} # Allow CC to be a program name with arguments. compiler=$CC # Allow CC to be a program name with arguments. lt_save_CC="$CC" CC=${GCJ-"gcj"} compiler=$CC compiler_GCJ=$CC # GCJ did not exist at the time GCC didn't implicitly link libc in. archive_cmds_need_lc_GCJ=no lt_prog_compiler_no_builtin_flag_GCJ= if test "$GCC" = yes; then lt_prog_compiler_no_builtin_flag_GCJ=' -fno-builtin' echo "$as_me:$LINENO: checking if $compiler supports -fno-rtti -fno-exceptions" >&5 echo $ECHO_N "checking if $compiler supports -fno-rtti -fno-exceptions... $ECHO_C" >&6 if test "${lt_cv_prog_compiler_rtti_exceptions+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else lt_cv_prog_compiler_rtti_exceptions=no ac_outfile=conftest.$ac_objext printf "$lt_simple_compile_test_code" > conftest.$ac_ext lt_compiler_flag="-fno-rtti -fno-exceptions" # Insert the option either (1) after the last *FLAGS variable, or # (2) before a word containing "conftest.", or (3) at the end. # Note that $ac_compile itself does not contain backslashes and begins # with a dollar sign (not a hyphen), so the echo should work correctly. # The option is referenced via a variable to avoid confusing sed. lt_compile=`echo "$ac_compile" | $SED \ -e 's:.*FLAGS}? :&$lt_compiler_flag :; t' \ -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ -e 's:$: $lt_compiler_flag:'` (eval echo "\"\$as_me:14833: $lt_compile\"" >&5) (eval "$lt_compile" 2>conftest.err) ac_status=$? cat conftest.err >&5 echo "$as_me:14837: \$? 
= $ac_status" >&5 if (exit $ac_status) && test -s "$ac_outfile"; then # The compiler can only warn and ignore the option if not recognized # So say no if there are warnings if test ! -s conftest.err; then lt_cv_prog_compiler_rtti_exceptions=yes fi fi $rm conftest* fi echo "$as_me:$LINENO: result: $lt_cv_prog_compiler_rtti_exceptions" >&5 echo "${ECHO_T}$lt_cv_prog_compiler_rtti_exceptions" >&6 if test x"$lt_cv_prog_compiler_rtti_exceptions" = xyes; then lt_prog_compiler_no_builtin_flag_GCJ="$lt_prog_compiler_no_builtin_flag_GCJ -fno-rtti -fno-exceptions" else : fi fi lt_prog_compiler_wl_GCJ= lt_prog_compiler_pic_GCJ= lt_prog_compiler_static_GCJ= echo "$as_me:$LINENO: checking for $compiler option to produce PIC" >&5 echo $ECHO_N "checking for $compiler option to produce PIC... $ECHO_C" >&6 if test "$GCC" = yes; then lt_prog_compiler_wl_GCJ='-Wl,' lt_prog_compiler_static_GCJ='-static' case $host_os in aix*) # All AIX code is PIC. if test "$host_cpu" = ia64; then # AIX 5 now supports IA64 processor lt_prog_compiler_static_GCJ='-Bstatic' fi ;; amigaos*) # FIXME: we need at least 68020 code to build shared libraries, but # adding the `-m68020' flag to GCC prevents building anything better, # like `-m68040'. lt_prog_compiler_pic_GCJ='-m68020 -resident32 -malways-restore-a4' ;; beos* | cygwin* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*) # PIC is the default for these OSes. ;; mingw* | pw32* | os2*) # This hack is so that the source file can tell whether it is being # built for inclusion in a dll (and should export symbols for example). lt_prog_compiler_pic_GCJ='-DDLL_EXPORT' ;; darwin* | rhapsody*) # PIC is the default on this platform # Common symbols not allowed in MH_DYLIB files lt_prog_compiler_pic_GCJ='-fno-common' ;; msdosdjgpp*) # Just because we use GCC doesn't mean we suddenly get shared libraries # on systems that don't support them. lt_prog_compiler_can_build_shared_GCJ=no enable_shared=no ;; sysv4*MP*) if test -d /usr/nec; then lt_prog_compiler_pic_GCJ=-Kconform_pic fi ;; hpux*) # PIC is the default for IA64 HP-UX and 64-bit HP-UX, but # not for PA HP-UX. case "$host_cpu" in hppa*64*|ia64*) # +Z the default ;; *) lt_prog_compiler_pic_GCJ='-fPIC' ;; esac ;; *) lt_prog_compiler_pic_GCJ='-fPIC' ;; esac else # PORTME Check for flag to pass linker flags through the system compiler. case $host_os in aix*) lt_prog_compiler_wl_GCJ='-Wl,' if test "$host_cpu" = ia64; then # AIX 5 now supports IA64 processor lt_prog_compiler_static_GCJ='-Bstatic' else lt_prog_compiler_static_GCJ='-bnso -bI:/lib/syscalls.exp' fi ;; mingw* | pw32* | os2*) # This hack is so that the source file can tell whether it is being # built for inclusion in a dll (and should export symbols for example). lt_prog_compiler_pic_GCJ='-DDLL_EXPORT' ;; hpux9* | hpux10* | hpux11*) lt_prog_compiler_wl_GCJ='-Wl,' # PIC is the default for IA64 HP-UX and 64-bit HP-UX, but # not for PA HP-UX. case "$host_cpu" in hppa*64*|ia64*) # +Z the default ;; *) lt_prog_compiler_pic_GCJ='+Z' ;; esac # Is there a better lt_prog_compiler_static that works with the bundled CC? lt_prog_compiler_static_GCJ='${wl}-a ${wl}archive' ;; irix5* | irix6* | nonstopux*) lt_prog_compiler_wl_GCJ='-Wl,' # PIC (with -KPIC) is the default. 
lt_prog_compiler_static_GCJ='-non_shared' ;; newsos6) lt_prog_compiler_pic_GCJ='-KPIC' lt_prog_compiler_static_GCJ='-Bstatic' ;; linux*) case $CC in icc* | ecc*) lt_prog_compiler_wl_GCJ='-Wl,' lt_prog_compiler_pic_GCJ='-KPIC' lt_prog_compiler_static_GCJ='-static' ;; ccc*) lt_prog_compiler_wl_GCJ='-Wl,' # All Alpha code is PIC. lt_prog_compiler_static_GCJ='-non_shared' ;; esac ;; osf3* | osf4* | osf5*) lt_prog_compiler_wl_GCJ='-Wl,' # All OSF/1 code is PIC. lt_prog_compiler_static_GCJ='-non_shared' ;; sco3.2v5*) lt_prog_compiler_pic_GCJ='-Kpic' lt_prog_compiler_static_GCJ='-dn' ;; solaris*) lt_prog_compiler_wl_GCJ='-Wl,' lt_prog_compiler_pic_GCJ='-KPIC' lt_prog_compiler_static_GCJ='-Bstatic' ;; sunos4*) lt_prog_compiler_wl_GCJ='-Qoption ld ' lt_prog_compiler_pic_GCJ='-PIC' lt_prog_compiler_static_GCJ='-Bstatic' ;; sysv4 | sysv4.2uw2* | sysv4.3* | sysv5*) lt_prog_compiler_wl_GCJ='-Wl,' lt_prog_compiler_pic_GCJ='-KPIC' lt_prog_compiler_static_GCJ='-Bstatic' ;; sysv4*MP*) if test -d /usr/nec ;then lt_prog_compiler_pic_GCJ='-Kconform_pic' lt_prog_compiler_static_GCJ='-Bstatic' fi ;; uts4*) lt_prog_compiler_pic_GCJ='-pic' lt_prog_compiler_static_GCJ='-Bstatic' ;; *) lt_prog_compiler_can_build_shared_GCJ=no ;; esac fi echo "$as_me:$LINENO: result: $lt_prog_compiler_pic_GCJ" >&5 echo "${ECHO_T}$lt_prog_compiler_pic_GCJ" >&6 # # Check to make sure the PIC flag actually works. # if test -n "$lt_prog_compiler_pic_GCJ"; then echo "$as_me:$LINENO: checking if $compiler PIC flag $lt_prog_compiler_pic_GCJ works" >&5 echo $ECHO_N "checking if $compiler PIC flag $lt_prog_compiler_pic_GCJ works... $ECHO_C" >&6 if test "${lt_prog_compiler_pic_works_GCJ+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else lt_prog_compiler_pic_works_GCJ=no ac_outfile=conftest.$ac_objext printf "$lt_simple_compile_test_code" > conftest.$ac_ext lt_compiler_flag="$lt_prog_compiler_pic_GCJ" # Insert the option either (1) after the last *FLAGS variable, or # (2) before a word containing "conftest.", or (3) at the end. # Note that $ac_compile itself does not contain backslashes and begins # with a dollar sign (not a hyphen), so the echo should work correctly. # The option is referenced via a variable to avoid confusing sed. lt_compile=`echo "$ac_compile" | $SED \ -e 's:.*FLAGS}? :&$lt_compiler_flag :; t' \ -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ -e 's:$: $lt_compiler_flag:'` (eval echo "\"\$as_me:15066: $lt_compile\"" >&5) (eval "$lt_compile" 2>conftest.err) ac_status=$? cat conftest.err >&5 echo "$as_me:15070: \$? = $ac_status" >&5 if (exit $ac_status) && test -s "$ac_outfile"; then # The compiler can only warn and ignore the option if not recognized # So say no if there are warnings if test ! -s conftest.err; then lt_prog_compiler_pic_works_GCJ=yes fi fi $rm conftest* fi echo "$as_me:$LINENO: result: $lt_prog_compiler_pic_works_GCJ" >&5 echo "${ECHO_T}$lt_prog_compiler_pic_works_GCJ" >&6 if test x"$lt_prog_compiler_pic_works_GCJ" = xyes; then case $lt_prog_compiler_pic_GCJ in "" | " "*) ;; *) lt_prog_compiler_pic_GCJ=" $lt_prog_compiler_pic_GCJ" ;; esac else lt_prog_compiler_pic_GCJ= lt_prog_compiler_can_build_shared_GCJ=no fi fi case "$host_os" in # For platforms which do not support PIC, -DPIC is meaningless: *djgpp*) lt_prog_compiler_pic_GCJ= ;; *) lt_prog_compiler_pic_GCJ="$lt_prog_compiler_pic_GCJ" ;; esac echo "$as_me:$LINENO: checking if $compiler supports -c -o file.$ac_objext" >&5 echo $ECHO_N "checking if $compiler supports -c -o file.$ac_objext... 
$ECHO_C" >&6 if test "${lt_cv_prog_compiler_c_o_GCJ+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else lt_cv_prog_compiler_c_o_GCJ=no $rm -r conftest 2>/dev/null mkdir conftest cd conftest mkdir out printf "$lt_simple_compile_test_code" > conftest.$ac_ext lt_compiler_flag="-o out/conftest2.$ac_objext" # Insert the option either (1) after the last *FLAGS variable, or # (2) before a word containing "conftest.", or (3) at the end. # Note that $ac_compile itself does not contain backslashes and begins # with a dollar sign (not a hyphen), so the echo should work correctly. lt_compile=`echo "$ac_compile" | $SED \ -e 's:.*FLAGS}? :&$lt_compiler_flag :; t' \ -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ -e 's:$: $lt_compiler_flag:'` (eval echo "\"\$as_me:15126: $lt_compile\"" >&5) (eval "$lt_compile" 2>out/conftest.err) ac_status=$? cat out/conftest.err >&5 echo "$as_me:15130: \$? = $ac_status" >&5 if (exit $ac_status) && test -s out/conftest2.$ac_objext then # The compiler can only warn and ignore the option if not recognized # So say no if there are warnings if test ! -s out/conftest.err; then lt_cv_prog_compiler_c_o_GCJ=yes fi fi chmod u+w . $rm conftest* # SGI C++ compiler will create directory out/ii_files/ for # template instantiation test -d out/ii_files && $rm out/ii_files/* && rmdir out/ii_files $rm out/* && rmdir out cd .. rmdir conftest $rm conftest* fi echo "$as_me:$LINENO: result: $lt_cv_prog_compiler_c_o_GCJ" >&5 echo "${ECHO_T}$lt_cv_prog_compiler_c_o_GCJ" >&6 hard_links="nottested" if test "$lt_cv_prog_compiler_c_o_GCJ" = no && test "$need_locks" != no; then # do not overwrite the value of need_locks provided by the user echo "$as_me:$LINENO: checking if we can lock with hard links" >&5 echo $ECHO_N "checking if we can lock with hard links... $ECHO_C" >&6 hard_links=yes $rm conftest* ln conftest.a conftest.b 2>/dev/null && hard_links=no touch conftest.a ln conftest.a conftest.b 2>&5 || hard_links=no ln conftest.a conftest.b 2>/dev/null && hard_links=no echo "$as_me:$LINENO: result: $hard_links" >&5 echo "${ECHO_T}$hard_links" >&6 if test "$hard_links" = no; then { echo "$as_me:$LINENO: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&5 echo "$as_me: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&2;} need_locks=warn fi else need_locks=no fi echo "$as_me:$LINENO: checking whether the $compiler linker ($LD) supports shared libraries" >&5 echo $ECHO_N "checking whether the $compiler linker ($LD) supports shared libraries... $ECHO_C" >&6 runpath_var= allow_undefined_flag_GCJ= enable_shared_with_static_runtimes_GCJ=no archive_cmds_GCJ= archive_expsym_cmds_GCJ= old_archive_From_new_cmds_GCJ= old_archive_from_expsyms_cmds_GCJ= export_dynamic_flag_spec_GCJ= whole_archive_flag_spec_GCJ= thread_safe_flag_spec_GCJ= hardcode_libdir_flag_spec_GCJ= hardcode_libdir_flag_spec_ld_GCJ= hardcode_libdir_separator_GCJ= hardcode_direct_GCJ=no hardcode_minus_L_GCJ=no hardcode_shlibpath_var_GCJ=unsupported link_all_deplibs_GCJ=unknown hardcode_automatic_GCJ=no module_cmds_GCJ= module_expsym_cmds_GCJ= always_export_symbols_GCJ=no export_symbols_cmds_GCJ='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' # include_expsyms should be a list of space-separated symbols to be *always* # included in the symbol list include_expsyms_GCJ= # exclude_expsyms can be an extended regexp of symbols to exclude # it will be wrapped by ` (' and `)$', so one must not match beginning or # end of line. 
Example: `a|bc|.*d.*' will exclude the symbols `a' and `bc', # as well as any symbol that contains `d'. exclude_expsyms_GCJ="_GLOBAL_OFFSET_TABLE_" # Although _GLOBAL_OFFSET_TABLE_ is a valid symbol C name, most a.out # platforms (ab)use it in PIC code, but their linkers get confused if # the symbol is explicitly referenced. Since portable code cannot # rely on this symbol name, it's probably fine to never include it in # preloaded symbol tables. extract_expsyms_cmds= case $host_os in cygwin* | mingw* | pw32*) # FIXME: the MSVC++ port hasn't been tested in a loooong time # When not using gcc, we currently assume that we are using # Microsoft Visual C++. if test "$GCC" != yes; then with_gnu_ld=no fi ;; openbsd*) with_gnu_ld=no ;; esac ld_shlibs_GCJ=yes if test "$with_gnu_ld" = yes; then # If archive_cmds runs LD, not CC, wlarc should be empty wlarc='${wl}' # See if GNU ld supports shared libraries. case $host_os in aix3* | aix4* | aix5*) # On AIX/PPC, the GNU linker is very broken if test "$host_cpu" != ia64; then ld_shlibs_GCJ=no cat <&2 *** Warning: the GNU linker, at least up to release 2.9.1, is reported *** to be unable to reliably create shared libraries on AIX. *** Therefore, libtool is disabling shared libraries support. If you *** really care for shared libraries, you may want to modify your PATH *** so that a non-GNU linker is found, and then restart. EOF fi ;; amigaos*) archive_cmds_GCJ='$rm $output_objdir/a2ixlibrary.data~$echo "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$echo "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$echo "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$echo "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)' hardcode_libdir_flag_spec_GCJ='-L$libdir' hardcode_minus_L_GCJ=yes # Samuel A. Falvo II reports # that the semantics of dynamic libraries on AmigaOS, at least up # to version 4, is to share data among multiple programs linked # with the same dynamic library. Since this doesn't match the # behavior of shared libraries on other platforms, we can't use # them. ld_shlibs_GCJ=no ;; beos*) if $LD --help 2>&1 | grep ': supported targets:.* elf' > /dev/null; then allow_undefined_flag_GCJ=unsupported # Joseph Beckenbach says some releases of gcc # support --undefined. This deserves some investigation. FIXME archive_cmds_GCJ='$CC -nostart $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' else ld_shlibs_GCJ=no fi ;; cygwin* | mingw* | pw32*) # _LT_AC_TAGVAR(hardcode_libdir_flag_spec, GCJ) is actually meaningless, # as there is no search path for DLLs. hardcode_libdir_flag_spec_GCJ='-L$libdir' allow_undefined_flag_GCJ=unsupported always_export_symbols_GCJ=no enable_shared_with_static_runtimes_GCJ=yes export_symbols_cmds_GCJ='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGS] /s/.* \([^ ]*\)/\1 DATA/'\'' | $SED -e '\''/^[AITW] /s/.* //'\'' | sort | uniq > $export_symbols' if $LD --help 2>&1 | grep 'auto-import' > /dev/null; then archive_cmds_GCJ='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--image-base=0x10000000 ${wl}--out-implib,$lib' # If the export-symbols file already is a .def file (1st line # is EXPORTS), use it as is; otherwise, prepend... 
archive_expsym_cmds_GCJ='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then cp $export_symbols $output_objdir/$soname.def; else echo EXPORTS > $output_objdir/$soname.def; cat $export_symbols >> $output_objdir/$soname.def; fi~ $CC -shared $output_objdir/$soname.def $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--image-base=0x10000000 ${wl}--out-implib,$lib' else ld_shlibs=no fi ;; netbsd*) if echo __ELF__ | $CC -E - | grep __ELF__ >/dev/null; then archive_cmds_GCJ='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib' wlarc= else archive_cmds_GCJ='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' archive_expsym_cmds_GCJ='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' fi ;; solaris* | sysv5*) if $LD -v 2>&1 | grep 'BFD 2\.8' > /dev/null; then ld_shlibs_GCJ=no cat <&2 *** Warning: The releases 2.8.* of the GNU linker cannot reliably *** create shared libraries on Solaris systems. Therefore, libtool *** is disabling shared libraries support. We urge you to upgrade GNU *** binutils to release 2.9.1 or newer. Another option is to modify *** your PATH or compiler configuration so that the native linker is *** used, and then restart. EOF elif $LD --help 2>&1 | grep ': supported targets:.* elf' > /dev/null; then archive_cmds_GCJ='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' archive_expsym_cmds_GCJ='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' else ld_shlibs_GCJ=no fi ;; sunos4*) archive_cmds_GCJ='$LD -assert pure-text -Bshareable -o $lib $libobjs $deplibs $linker_flags' wlarc= hardcode_direct_GCJ=yes hardcode_shlibpath_var_GCJ=no ;; linux*) if $LD --help 2>&1 | grep ': supported targets:.* elf' > /dev/null; then tmp_archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' archive_cmds_GCJ="$tmp_archive_cmds" supports_anon_versioning=no case `$LD -v 2>/dev/null` in *\ 01.* | *\ 2.[0-9].* | *\ 2.10.*) ;; # catch versions < 2.11 *\ 2.11.93.0.2\ *) supports_anon_versioning=yes ;; # RH7.3 ... *\ 2.11.92.0.12\ *) supports_anon_versioning=yes ;; # Mandrake 8.2 ... *\ 2.11.*) ;; # other 2.11 versions *) supports_anon_versioning=yes ;; esac if test $supports_anon_versioning = yes; then archive_expsym_cmds_GCJ='$echo "{ global:" > $output_objdir/$libname.ver~ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ $echo "local: *; };" >> $output_objdir/$libname.ver~ $CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-version-script ${wl}$output_objdir/$libname.ver -o $lib' else archive_expsym_cmds_GCJ="$tmp_archive_cmds" fi else ld_shlibs_GCJ=no fi ;; *) if $LD --help 2>&1 | grep ': supported targets:.* elf' > /dev/null; then archive_cmds_GCJ='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' archive_expsym_cmds_GCJ='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' else ld_shlibs_GCJ=no fi ;; esac if test "$ld_shlibs_GCJ" = yes; then runpath_var=LD_RUN_PATH hardcode_libdir_flag_spec_GCJ='${wl}--rpath ${wl}$libdir' export_dynamic_flag_spec_GCJ='${wl}--export-dynamic' # ancient GNU ld didn't support --whole-archive et. al. 
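# --- editor's sketch (probe_* file names are hypothetical) ---
# The anonymous-versioning branch a few lines up builds a GNU ld version
# script on the fly from the export list; the same transformation, standalone:
printf '%s\n' sphinx_init sphinx_query > probe.sym     # exported symbols, one per line
{ echo '{ global:'
  sed -e 's/\(.*\)/\1;/' probe.sym
  echo 'local: *; };'
} > probe.ver
# probe.ver is what the ${wl}-version-script fragment above hands to GNU ld.
# --- end editor's sketch ---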
if $LD --help 2>&1 | grep 'no-whole-archive' > /dev/null; then whole_archive_flag_spec_GCJ="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive' else whole_archive_flag_spec_GCJ= fi fi else # PORTME fill in a description of your system's linker (not GNU ld) case $host_os in aix3*) allow_undefined_flag_GCJ=unsupported always_export_symbols_GCJ=yes archive_expsym_cmds_GCJ='$LD -o $output_objdir/$soname $libobjs $deplibs $linker_flags -bE:$export_symbols -T512 -H512 -bM:SRE~$AR $AR_FLAGS $lib $output_objdir/$soname' # Note: this linker hardcodes the directories in LIBPATH if there # are no directories specified by -L. hardcode_minus_L_GCJ=yes if test "$GCC" = yes && test -z "$link_static_flag"; then # Neither direct hardcoding nor static linking is supported with a # broken collect2. hardcode_direct_GCJ=unsupported fi ;; aix4* | aix5*) if test "$host_cpu" = ia64; then # On IA64, the linker does run time linking by default, so we don't # have to do anything special. aix_use_runtimelinking=no exp_sym_flag='-Bexport' no_entry_flag="" else # If we're using GNU nm, then we don't want the "-C" option. # -C means demangle to AIX nm, but means don't demangle with GNU nm if $NM -V 2>&1 | grep 'GNU' > /dev/null; then export_symbols_cmds_GCJ='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$2 == "T") || (\$2 == "D") || (\$2 == "B")) && (substr(\$3,1,1) != ".")) { print \$3 } }'\'' | sort -u > $export_symbols' else export_symbols_cmds_GCJ='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$2 == "T") || (\$2 == "D") || (\$2 == "B")) && (substr(\$3,1,1) != ".")) { print \$3 } }'\'' | sort -u > $export_symbols' fi aix_use_runtimelinking=no # Test if we are trying to use run time linking or normal # AIX style linking. If -brtl is somewhere in LDFLAGS, we # need to do runtime linking. case $host_os in aix4.[23]|aix4.[23].*|aix5*) for ld_flag in $LDFLAGS; do if (test $ld_flag = "-brtl" || test $ld_flag = "-Wl,-brtl"); then aix_use_runtimelinking=yes break fi done esac exp_sym_flag='-bexport' no_entry_flag='-bnoentry' fi # When large executables or shared objects are built, AIX ld can # have problems creating the table of contents. If linking a library # or program results in "error TOC overflow" add -mminimal-toc to # CXXFLAGS/CFLAGS for g++/gcc. In the cases where that is not # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS. archive_cmds_GCJ='' hardcode_direct_GCJ=yes hardcode_libdir_separator_GCJ=':' link_all_deplibs_GCJ=yes if test "$GCC" = yes; then case $host_os in aix4.012|aix4.012.*) # We only want to do this on AIX 4.2 and lower, the check # below for broken collect2 doesn't work under 4.3+ collect2name=`${CC} -print-prog-name=collect2` if test -f "$collect2name" && \ strings "$collect2name" | grep resolve_lib_name >/dev/null then # We have reworked collect2 hardcode_direct_GCJ=yes else # We have old collect2 hardcode_direct_GCJ=unsupported # It fails to find uninstalled libraries when the uninstalled # path is not listed in the libpath. Setting hardcode_minus_L # to unsupported forces relinking hardcode_minus_L_GCJ=yes hardcode_libdir_flag_spec_GCJ='-L$libdir' hardcode_libdir_separator_GCJ= fi esac shared_flag='-shared' else # not using gcc if test "$host_cpu" = ia64; then # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release # chokes on -Wl,-G. 
The following line is correct: shared_flag='-G' else if test "$aix_use_runtimelinking" = yes; then shared_flag='${wl}-G' else shared_flag='${wl}-bM:SRE' fi fi fi # It seems that -bexpall does not export symbols beginning with # underscore (_), so it is better to generate a list of symbols to export. always_export_symbols_GCJ=yes if test "$aix_use_runtimelinking" = yes; then # Warning - without using the other runtime loading flags (-brtl), # -berok will link without error, but may produce a broken library. allow_undefined_flag_GCJ='-berok' # Determine the default libpath from the value encoded in an empty executable. cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ int main () { ; return 0; } _ACEOF rm -f conftest.$ac_objext conftest$ac_exeext if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 (eval $ac_link) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest$ac_exeext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e '/Import File Strings/,/^$/ { /^0/ { s/^0 *\(.*\)$/\1/; p; } }'` # Check for a 64-bit object if we didn't find anything. if test -z "$aix_libpath"; then aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e '/Import File Strings/,/^$/ { /^0/ { s/^0 *\(.*\)$/\1/; p; } }'`; fi else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 fi rm -f conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi hardcode_libdir_flag_spec_GCJ='${wl}-blibpath:$libdir:'"$aix_libpath" archive_expsym_cmds_GCJ="\$CC"' -o $output_objdir/$soname $libobjs $deplibs $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then echo "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$no_entry_flag \${wl}$exp_sym_flag:\$export_symbols $shared_flag" else if test "$host_cpu" = ia64; then hardcode_libdir_flag_spec_GCJ='${wl}-R $libdir:/usr/lib:/lib' allow_undefined_flag_GCJ="-z nodefs" archive_expsym_cmds_GCJ="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$no_entry_flag \${wl}$exp_sym_flag:\$export_symbols" else # Determine the default libpath from the value encoded in an empty executable. cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ int main () { ; return 0; } _ACEOF rm -f conftest.$ac_objext conftest$ac_exeext if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 (eval $ac_link) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest$ac_exeext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e '/Import File Strings/,/^$/ { /^0/ { s/^0 *\(.*\)$/\1/; p; } }'` # Check for a 64-bit object if we didn't find anything. if test -z "$aix_libpath"; then aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e '/Import File Strings/,/^$/ { /^0/ { s/^0 *\(.*\)$/\1/; p; } }'`; fi else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 fi rm -f conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi hardcode_libdir_flag_spec_GCJ='${wl}-blibpath:$libdir:'"$aix_libpath" # Warning - without using the other run time loading flags, # -berok will link without error, but may produce a broken library. no_undefined_flag_GCJ=' ${wl}-bernotok' allow_undefined_flag_GCJ=' ${wl}-berok' # -bexpall does not export symbols beginning with underscore (_) always_export_symbols_GCJ=yes # Exported symbols can be pulled into shared objects from archives whole_archive_flag_spec_GCJ=' ' archive_cmds_need_lc_GCJ=yes # This is similar to how AIX traditionally builds it's shared libraries. archive_expsym_cmds_GCJ="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs $compiler_flags ${wl}-bE:$export_symbols ${wl}-bnoentry${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname' fi fi ;; amigaos*) archive_cmds_GCJ='$rm $output_objdir/a2ixlibrary.data~$echo "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$echo "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$echo "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$echo "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)' hardcode_libdir_flag_spec_GCJ='-L$libdir' hardcode_minus_L_GCJ=yes # see comment about different semantics on the GNU ld section ld_shlibs_GCJ=no ;; bsdi4*) export_dynamic_flag_spec_GCJ=-rdynamic ;; cygwin* | mingw* | pw32*) # When not using gcc, we currently assume that we are using # Microsoft Visual C++. # hardcode_libdir_flag_spec is actually meaningless, as there is # no search path for DLLs. hardcode_libdir_flag_spec_GCJ=' ' allow_undefined_flag_GCJ=unsupported # Tell ltmain to make .lib files, not .a files. libext=lib # Tell ltmain to make .dll files, not .so files. shrext_cmds=".dll" # FIXME: Setting linknames here is a bad hack. archive_cmds_GCJ='$CC -o $lib $libobjs $compiler_flags `echo "$deplibs" | $SED -e '\''s/ -lc$//'\''` -link -dll~linknames=' # The linker will automatically build a .lib file if we build a DLL. old_archive_From_new_cmds_GCJ='true' # FIXME: Should let the user specify the lib program. 
old_archive_cmds_GCJ='lib /OUT:$oldlib$oldobjs$old_deplibs' fix_srcfile_path='`cygpath -w "$srcfile"`' enable_shared_with_static_runtimes_GCJ=yes ;; darwin* | rhapsody*) if test "$GXX" = yes ; then archive_cmds_need_lc_GCJ=no case "$host_os" in rhapsody* | darwin1.[012]) allow_undefined_flag_GCJ='-undefined suppress' ;; *) # Darwin 1.3 on if test -z ${MACOSX_DEPLOYMENT_TARGET} ; then allow_undefined_flag_GCJ='-flat_namespace -undefined suppress' else case ${MACOSX_DEPLOYMENT_TARGET} in 10.[012]) allow_undefined_flag_GCJ='-flat_namespace -undefined suppress' ;; 10.*) allow_undefined_flag_GCJ='-undefined dynamic_lookup' ;; esac fi ;; esac lt_int_apple_cc_single_mod=no output_verbose_link_cmd='echo' if $CC -dumpspecs 2>&1 | grep 'single_module' >/dev/null ; then lt_int_apple_cc_single_mod=yes fi if test "X$lt_int_apple_cc_single_mod" = Xyes ; then archive_cmds_GCJ='$CC -dynamiclib -single_module $allow_undefined_flag -o $lib $libobjs $deplibs $compiler_flags -install_name $rpath/$soname $verstring' else archive_cmds_GCJ='$CC -r ${wl}-bind_at_load -keep_private_externs -nostdlib -o ${lib}-master.o $libobjs~$CC -dynamiclib $allow_undefined_flag -o $lib ${lib}-master.o $deplibs $compiler_flags -install_name $rpath/$soname $verstring' fi module_cmds_GCJ='$CC ${wl}-bind_at_load $allow_undefined_flag -o $lib -bundle $libobjs $deplibs$compiler_flags' # Don't fix this by using the ld -exported_symbols_list flag, it doesn't exist in older darwin ld's if test "X$lt_int_apple_cc_single_mod" = Xyes ; then archive_expsym_cmds_GCJ='sed -e "s,#.*,," -e "s,^[ ]*,," -e "s,^\(..*\),_&," < $export_symbols > $output_objdir/${libname}-symbols.expsym~$CC -dynamiclib -single_module $allow_undefined_flag -o $lib $libobjs $deplibs $compiler_flags -install_name $rpath/$soname $verstring~nmedit -s $output_objdir/${libname}-symbols.expsym ${lib}' else archive_expsym_cmds_GCJ='sed -e "s,#.*,," -e "s,^[ ]*,," -e "s,^\(..*\),_&," < $export_symbols > $output_objdir/${libname}-symbols.expsym~$CC -r ${wl}-bind_at_load -keep_private_externs -nostdlib -o ${lib}-master.o $libobjs~$CC -dynamiclib $allow_undefined_flag -o $lib ${lib}-master.o $deplibs $compiler_flags -install_name $rpath/$soname $verstring~nmedit -s $output_objdir/${libname}-symbols.expsym ${lib}' fi module_expsym_cmds_GCJ='sed -e "s,#.*,," -e "s,^[ ]*,," -e "s,^\(..*\),_&," < $export_symbols > $output_objdir/${libname}-symbols.expsym~$CC $allow_undefined_flag -o $lib -bundle $libobjs $deplibs$compiler_flags~nmedit -s $output_objdir/${libname}-symbols.expsym ${lib}' hardcode_direct_GCJ=no hardcode_automatic_GCJ=yes hardcode_shlibpath_var_GCJ=unsupported whole_archive_flag_spec_GCJ='-all_load $convenience' link_all_deplibs_GCJ=yes else ld_shlibs_GCJ=no fi ;; dgux*) archive_cmds_GCJ='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_libdir_flag_spec_GCJ='-L$libdir' hardcode_shlibpath_var_GCJ=no ;; freebsd1*) ld_shlibs_GCJ=no ;; # FreeBSD 2.2.[012] allows us to include c++rt0.o to get C++ constructor # support. Future versions do this automatically, but an explicit c++rt0.o # does not break anything, and helps significantly (at the cost of a little # extra space). freebsd2.2*) archive_cmds_GCJ='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags /usr/lib/c++rt0.o' hardcode_libdir_flag_spec_GCJ='-R$libdir' hardcode_direct_GCJ=yes hardcode_shlibpath_var_GCJ=no ;; # Unfortunately, older versions of FreeBSD 2 do not have this feature. 
freebsd2*) archive_cmds_GCJ='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' hardcode_direct_GCJ=yes hardcode_minus_L_GCJ=yes hardcode_shlibpath_var_GCJ=no ;; # FreeBSD 3 and greater uses gcc -shared to do shared libraries. freebsd* | kfreebsd*-gnu) archive_cmds_GCJ='$CC -shared -o $lib $libobjs $deplibs $compiler_flags' hardcode_libdir_flag_spec_GCJ='-R$libdir' hardcode_direct_GCJ=yes hardcode_shlibpath_var_GCJ=no ;; hpux9*) if test "$GCC" = yes; then archive_cmds_GCJ='$rm $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' else archive_cmds_GCJ='$rm $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' fi hardcode_libdir_flag_spec_GCJ='${wl}+b ${wl}$libdir' hardcode_libdir_separator_GCJ=: hardcode_direct_GCJ=yes # hardcode_minus_L: Not really in the search PATH, # but as the default location of the library. hardcode_minus_L_GCJ=yes export_dynamic_flag_spec_GCJ='${wl}-E' ;; hpux10* | hpux11*) if test "$GCC" = yes -a "$with_gnu_ld" = no; then case "$host_cpu" in hppa*64*|ia64*) archive_cmds_GCJ='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' ;; *) archive_cmds_GCJ='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' ;; esac else case "$host_cpu" in hppa*64*|ia64*) archive_cmds_GCJ='$LD -b +h $soname -o $lib $libobjs $deplibs $linker_flags' ;; *) archive_cmds_GCJ='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' ;; esac fi if test "$with_gnu_ld" = no; then case "$host_cpu" in hppa*64*) hardcode_libdir_flag_spec_GCJ='${wl}+b ${wl}$libdir' hardcode_libdir_flag_spec_ld_GCJ='+b $libdir' hardcode_libdir_separator_GCJ=: hardcode_direct_GCJ=no hardcode_shlibpath_var_GCJ=no ;; ia64*) hardcode_libdir_flag_spec_GCJ='-L$libdir' hardcode_direct_GCJ=no hardcode_shlibpath_var_GCJ=no # hardcode_minus_L: Not really in the search PATH, # but as the default location of the library. hardcode_minus_L_GCJ=yes ;; *) hardcode_libdir_flag_spec_GCJ='${wl}+b ${wl}$libdir' hardcode_libdir_separator_GCJ=: hardcode_direct_GCJ=yes export_dynamic_flag_spec_GCJ='${wl}-E' # hardcode_minus_L: Not really in the search PATH, # but as the default location of the library. 
hardcode_minus_L_GCJ=yes ;; esac fi ;; irix5* | irix6* | nonstopux*) if test "$GCC" = yes; then archive_cmds_GCJ='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && echo ${wl}-set_version ${wl}$verstring` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' else archive_cmds_GCJ='$LD -shared $libobjs $deplibs $linker_flags -soname $soname `test -n "$verstring" && echo -set_version $verstring` -update_registry ${output_objdir}/so_locations -o $lib' hardcode_libdir_flag_spec_ld_GCJ='-rpath $libdir' fi hardcode_libdir_flag_spec_GCJ='${wl}-rpath ${wl}$libdir' hardcode_libdir_separator_GCJ=: link_all_deplibs_GCJ=yes ;; netbsd*) if echo __ELF__ | $CC -E - | grep __ELF__ >/dev/null; then archive_cmds_GCJ='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' # a.out else archive_cmds_GCJ='$LD -shared -o $lib $libobjs $deplibs $linker_flags' # ELF fi hardcode_libdir_flag_spec_GCJ='-R$libdir' hardcode_direct_GCJ=yes hardcode_shlibpath_var_GCJ=no ;; newsos6) archive_cmds_GCJ='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_direct_GCJ=yes hardcode_libdir_flag_spec_GCJ='${wl}-rpath ${wl}$libdir' hardcode_libdir_separator_GCJ=: hardcode_shlibpath_var_GCJ=no ;; openbsd*) hardcode_direct_GCJ=yes hardcode_shlibpath_var_GCJ=no if test -z "`echo __ELF__ | $CC -E - | grep __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then archive_cmds_GCJ='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' hardcode_libdir_flag_spec_GCJ='${wl}-rpath,$libdir' export_dynamic_flag_spec_GCJ='${wl}-E' else case $host_os in openbsd[01].* | openbsd2.[0-7] | openbsd2.[0-7].*) archive_cmds_GCJ='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' hardcode_libdir_flag_spec_GCJ='-R$libdir' ;; *) archive_cmds_GCJ='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' hardcode_libdir_flag_spec_GCJ='${wl}-rpath,$libdir' ;; esac fi ;; os2*) hardcode_libdir_flag_spec_GCJ='-L$libdir' hardcode_minus_L_GCJ=yes allow_undefined_flag_GCJ=unsupported archive_cmds_GCJ='$echo "LIBRARY $libname INITINSTANCE" > $output_objdir/$libname.def~$echo "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~$echo DATA >> $output_objdir/$libname.def~$echo " SINGLE NONSHARED" >> $output_objdir/$libname.def~$echo EXPORTS >> $output_objdir/$libname.def~emxexp $libobjs >> $output_objdir/$libname.def~$CC -Zdll -Zcrtdll -o $lib $libobjs $deplibs $compiler_flags $output_objdir/$libname.def' old_archive_From_new_cmds_GCJ='emximp -o $output_objdir/$libname.a $output_objdir/$libname.def' ;; osf3*) if test "$GCC" = yes; then allow_undefined_flag_GCJ=' ${wl}-expect_unresolved ${wl}\*' archive_cmds_GCJ='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && echo ${wl}-set_version ${wl}$verstring` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' else allow_undefined_flag_GCJ=' -expect_unresolved \*' archive_cmds_GCJ='$LD -shared${allow_undefined_flag} $libobjs $deplibs $linker_flags -soname $soname `test -n "$verstring" && echo -set_version $verstring` -update_registry ${output_objdir}/so_locations -o $lib' fi hardcode_libdir_flag_spec_GCJ='${wl}-rpath ${wl}$libdir' hardcode_libdir_separator_GCJ=: ;; osf4* | osf5*) # as osf3* with the addition of -msym flag if test "$GCC" = yes; then allow_undefined_flag_GCJ=' ${wl}-expect_unresolved ${wl}\*' archive_cmds_GCJ='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname 
`test -n "$verstring" && echo ${wl}-set_version ${wl}$verstring` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' hardcode_libdir_flag_spec_GCJ='${wl}-rpath ${wl}$libdir' else allow_undefined_flag_GCJ=' -expect_unresolved \*' archive_cmds_GCJ='$LD -shared${allow_undefined_flag} $libobjs $deplibs $linker_flags -msym -soname $soname `test -n "$verstring" && echo -set_version $verstring` -update_registry ${output_objdir}/so_locations -o $lib' archive_expsym_cmds_GCJ='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done; echo "-hidden">> $lib.exp~ $LD -shared${allow_undefined_flag} -input $lib.exp $linker_flags $libobjs $deplibs -soname $soname `test -n "$verstring" && echo -set_version $verstring` -update_registry ${objdir}/so_locations -o $lib~$rm $lib.exp' # Both c and cxx compiler support -rpath directly hardcode_libdir_flag_spec_GCJ='-rpath $libdir' fi hardcode_libdir_separator_GCJ=: ;; sco3.2v5*) archive_cmds_GCJ='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_shlibpath_var_GCJ=no export_dynamic_flag_spec_GCJ='${wl}-Bexport' runpath_var=LD_RUN_PATH hardcode_runpath_var=yes ;; solaris*) no_undefined_flag_GCJ=' -z text' if test "$GCC" = yes; then archive_cmds_GCJ='$CC -shared ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' archive_expsym_cmds_GCJ='$echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~$echo "local: *; };" >> $lib.exp~ $CC -shared ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$rm $lib.exp' else archive_cmds_GCJ='$LD -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $linker_flags' archive_expsym_cmds_GCJ='$echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~$echo "local: *; };" >> $lib.exp~ $LD -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $linker_flags~$rm $lib.exp' fi hardcode_libdir_flag_spec_GCJ='-R$libdir' hardcode_shlibpath_var_GCJ=no case $host_os in solaris2.[0-5] | solaris2.[0-5].*) ;; *) # Supported since Solaris 2.6 (maybe 2.5.1?) whole_archive_flag_spec_GCJ='-z allextract$convenience -z defaultextract' ;; esac link_all_deplibs_GCJ=yes ;; sunos4*) if test "x$host_vendor" = xsequent; then # Use $CC to link under sequent, because it throws in some extra .o # files that make .init and .fini sections work. archive_cmds_GCJ='$CC -G ${wl}-h $soname -o $lib $libobjs $deplibs $compiler_flags' else archive_cmds_GCJ='$LD -assert pure-text -Bstatic -o $lib $libobjs $deplibs $linker_flags' fi hardcode_libdir_flag_spec_GCJ='-L$libdir' hardcode_direct_GCJ=yes hardcode_minus_L_GCJ=yes hardcode_shlibpath_var_GCJ=no ;; sysv4) case $host_vendor in sni) archive_cmds_GCJ='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_direct_GCJ=yes # is this really true??? ;; siemens) ## LD is ld it makes a PLAMLIB ## CC just makes a GrossModule. 
archive_cmds_GCJ='$LD -G -o $lib $libobjs $deplibs $linker_flags' reload_cmds_GCJ='$CC -r -o $output$reload_objs' hardcode_direct_GCJ=no ;; motorola) archive_cmds_GCJ='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_direct_GCJ=no #Motorola manual says yes, but my tests say they lie ;; esac runpath_var='LD_RUN_PATH' hardcode_shlibpath_var_GCJ=no ;; sysv4.3*) archive_cmds_GCJ='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_shlibpath_var_GCJ=no export_dynamic_flag_spec_GCJ='-Bexport' ;; sysv4*MP*) if test -d /usr/nec; then archive_cmds_GCJ='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_shlibpath_var_GCJ=no runpath_var=LD_RUN_PATH hardcode_runpath_var=yes ld_shlibs_GCJ=yes fi ;; sysv4.2uw2*) archive_cmds_GCJ='$LD -G -o $lib $libobjs $deplibs $linker_flags' hardcode_direct_GCJ=yes hardcode_minus_L_GCJ=no hardcode_shlibpath_var_GCJ=no hardcode_runpath_var=yes runpath_var=LD_RUN_PATH ;; sysv5OpenUNIX8* | sysv5UnixWare7* | sysv5uw[78]* | unixware7*) no_undefined_flag_GCJ='${wl}-z ${wl}text' if test "$GCC" = yes; then archive_cmds_GCJ='$CC -shared ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' else archive_cmds_GCJ='$CC -G ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' fi runpath_var='LD_RUN_PATH' hardcode_shlibpath_var_GCJ=no ;; sysv5*) no_undefined_flag_GCJ=' -z text' # $CC -shared without GNU ld will not create a library from C++ # object files and a static libstdc++, better avoid it by now archive_cmds_GCJ='$LD -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $linker_flags' archive_expsym_cmds_GCJ='$echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~$echo "local: *; };" >> $lib.exp~ $LD -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $linker_flags~$rm $lib.exp' hardcode_libdir_flag_spec_GCJ= hardcode_shlibpath_var_GCJ=no runpath_var='LD_RUN_PATH' ;; uts4*) archive_cmds_GCJ='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_libdir_flag_spec_GCJ='-L$libdir' hardcode_shlibpath_var_GCJ=no ;; *) ld_shlibs_GCJ=no ;; esac fi echo "$as_me:$LINENO: result: $ld_shlibs_GCJ" >&5 echo "${ECHO_T}$ld_shlibs_GCJ" >&6 test "$ld_shlibs_GCJ" = no && can_build_shared=no variables_saved_for_relink="PATH $shlibpath_var $runpath_var" if test "$GCC" = yes; then variables_saved_for_relink="$variables_saved_for_relink GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH" fi # # Do we need to explicitly link libc? # case "x$archive_cmds_need_lc_GCJ" in x|xyes) # Assume -lc should be added archive_cmds_need_lc_GCJ=yes if test "$enable_shared" = yes && test "$GCC" = yes; then case $archive_cmds_GCJ in *'~'*) # FIXME: we may have to deal with multi-command sequences. ;; '$CC '*) # Test whether the compiler implicitly links with -lc since on some # systems, -lgcc has to come before -lc. If gcc already passes -lc # to ld, don't add -lc before -lgcc. echo "$as_me:$LINENO: checking whether -lc should be explicitly linked in" >&5 echo $ECHO_N "checking whether -lc should be explicitly linked in... $ECHO_C" >&6 $rm conftest* printf "$lt_simple_compile_test_code" > conftest.$ac_ext if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 (eval $ac_compile) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } 2>conftest.err; then soname=conftest lib=conftest libobjs=conftest.$ac_objext deplibs= wl=$lt_prog_compiler_wl_GCJ compiler_flags=-v linker_flags=-v verstring= output_objdir=. 
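# --- editor's sketch (hypothetical file names; the real probe evals $archive_cmds_GCJ with compiler_flags=-v) ---
# The -lc check here boils down to: drive the compiler verbosely over a dummy
# object and see whether it already passes -lc to the linker.  Standalone:
printf 'int main(void) { return 0; }\n' > probe_lc.c
${CC:-cc} -c probe_lc.c 2>/dev/null
if ${CC:-cc} -v -o probe_lc probe_lc.o 2>&1 | grep ' -lc ' >/dev/null 2>&1; then
  probe_need_lc=no    # the driver adds -lc by itself
else
  probe_need_lc=yes   # libtool must append -lc explicitly
fi
rm -f probe_lc probe_lc.c probe_lc.o
# --- end editor's sketch ---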
libname=conftest lt_save_allow_undefined_flag=$allow_undefined_flag_GCJ allow_undefined_flag_GCJ= if { (eval echo "$as_me:$LINENO: \"$archive_cmds_GCJ 2\>\&1 \| grep \" -lc \" \>/dev/null 2\>\&1\"") >&5 (eval $archive_cmds_GCJ 2\>\&1 \| grep \" -lc \" \>/dev/null 2\>\&1) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } then archive_cmds_need_lc_GCJ=no else archive_cmds_need_lc_GCJ=yes fi allow_undefined_flag_GCJ=$lt_save_allow_undefined_flag else cat conftest.err 1>&5 fi $rm conftest* echo "$as_me:$LINENO: result: $archive_cmds_need_lc_GCJ" >&5 echo "${ECHO_T}$archive_cmds_need_lc_GCJ" >&6 ;; esac fi ;; esac echo "$as_me:$LINENO: checking dynamic linker characteristics" >&5 echo $ECHO_N "checking dynamic linker characteristics... $ECHO_C" >&6 library_names_spec= libname_spec='lib$name' soname_spec= shrext_cmds=".so" postinstall_cmds= postuninstall_cmds= finish_cmds= finish_eval= shlibpath_var= shlibpath_overrides_runpath=unknown version_type=none dynamic_linker="$host_os ld.so" sys_lib_dlsearch_path_spec="/lib /usr/lib" if test "$GCC" = yes; then sys_lib_search_path_spec=`$CC -print-search-dirs | grep "^libraries:" | $SED -e "s/^libraries://" -e "s,=/,/,g"` if echo "$sys_lib_search_path_spec" | grep ';' >/dev/null ; then # if the path contains ";" then we assume it to be the separator # otherwise default to the standard path separator (i.e. ":") - it is # assumed that no part of a normal pathname contains ";" but that should # okay in the real world where ";" in dirpaths is itself problematic. sys_lib_search_path_spec=`echo "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` else sys_lib_search_path_spec=`echo "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` fi else sys_lib_search_path_spec="/lib /usr/lib /usr/local/lib" fi need_lib_prefix=unknown hardcode_into_libs=no # when you set need_version to no, make sure it does not cause -set_version # flags to be left without arguments need_version=unknown case $host_os in aix3*) version_type=linux library_names_spec='${libname}${release}${shared_ext}$versuffix $libname.a' shlibpath_var=LIBPATH # AIX 3 has no versioning support, so we append a major version to the name. soname_spec='${libname}${release}${shared_ext}$major' ;; aix4* | aix5*) version_type=linux need_lib_prefix=no need_version=no hardcode_into_libs=yes if test "$host_cpu" = ia64; then # AIX 5 supports IA64 library_names_spec='${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext}$versuffix $libname${shared_ext}' shlibpath_var=LD_LIBRARY_PATH else # With GCC up to 2.95.x, collect2 would create an import file # for dependence libraries. The import file would start with # the line `#! .'. This would cause the generated library to # depend on `.', always an invalid library. This was fixed in # development snapshots of GCC prior to 3.0. case $host_os in aix4 | aix4.[01] | aix4.[01].*) if { echo '#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 97)' echo ' yes ' echo '#endif'; } | ${CC} -E - | grep yes > /dev/null; then : else can_build_shared=no fi ;; esac # AIX (on Power*) has no versioning support, so currently we can not hardcode correct # soname into executable. Probably we can add versioning support to # collect2, so additional links can be useful in future. if test "$aix_use_runtimelinking" = yes; then # If using run time linking (on AIX 4.2 or later) use lib.so # instead of lib.a to let people know that these are not # typical AIX shared libraries. 
library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' else # We preserve .a as extension for shared libraries through AIX4.2 # and later when we are not doing run time linking. library_names_spec='${libname}${release}.a $libname.a' soname_spec='${libname}${release}${shared_ext}$major' fi shlibpath_var=LIBPATH fi ;; amigaos*) library_names_spec='$libname.ixlibrary $libname.a' # Create ${libname}_ixlibrary.a entries in /sys/libs. finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`$echo "X$lib" | $Xsed -e '\''s%^.*/\([^/]*\)\.ixlibrary$%\1%'\''`; test $rm /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done' ;; beos*) library_names_spec='${libname}${shared_ext}' dynamic_linker="$host_os ld.so" shlibpath_var=LIBRARY_PATH ;; bsdi4*) version_type=linux need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' finish_cmds='PATH="\$PATH:/sbin" ldconfig $libdir' shlibpath_var=LD_LIBRARY_PATH sys_lib_search_path_spec="/shlib /usr/lib /usr/X11/lib /usr/contrib/lib /lib /usr/local/lib" sys_lib_dlsearch_path_spec="/shlib /usr/lib /usr/local/lib" # the default ld.so.conf also contains /usr/contrib/lib and # /usr/X11R6/lib (/usr/X11 is a link to /usr/X11R6), but let us allow # libtool to hard-code these into programs ;; cygwin* | mingw* | pw32*) version_type=windows shrext_cmds=".dll" need_version=no need_lib_prefix=no case $GCC,$host_os in yes,cygwin* | yes,mingw* | yes,pw32*) library_names_spec='$libname.dll.a' # DLL is installed to $(libdir)/../bin by postinstall_cmds postinstall_cmds='base_file=`basename \${file}`~ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i;echo \$dlname'\''`~ dldir=$destdir/`dirname \$dlpath`~ test -d \$dldir || mkdir -p \$dldir~ $install_prog $dir/$dlname \$dldir/$dlname' postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~ dlpath=$dir/\$dldll~ $rm \$dlpath' shlibpath_overrides_runpath=yes case $host_os in cygwin*) # Cygwin DLLs use 'cyg' prefix rather than 'lib' soname_spec='`echo ${libname} | sed -e 's/^lib/cyg/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' sys_lib_search_path_spec="/usr/lib /lib/w32api /lib /usr/local/lib" ;; mingw*) # MinGW DLLs use traditional 'lib' prefix soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' sys_lib_search_path_spec=`$CC -print-search-dirs | grep "^libraries:" | $SED -e "s/^libraries://" -e "s,=/,/,g"` if echo "$sys_lib_search_path_spec" | grep ';[c-zC-Z]:/' >/dev/null; then # It is most probably a Windows format PATH printed by # mingw gcc, but we are running on Cygwin. Gcc prints its search # path with ; separators, and with drive letters. We can handle the # drive letters (cygwin fileutils understands them), so leave them, # especially as we might pass files found there to a mingw objdump, # which wouldn't understand a cygwinified path. Ahh. 
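# (Illustrative note, not part of the generated script; the sample value below is hypothetical.)
# At this point a mingw-built gcc may have reported a Windows-style search path such as
#     c:/mingw/lib;c:/mingw/lib/gcc
# The grep ';[c-zC-Z]:/' test above matches such a value, and the sed command that follows
# only replaces the ';' separators with spaces, leaving the drive letters intact:
#     c:/mingw/lib c:/mingw/lib/gcc
# When the test does not match, the path is split on $PATH_SEPARATOR instead.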
sys_lib_search_path_spec=`echo "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` else sys_lib_search_path_spec=`echo "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` fi ;; pw32*) # pw32 DLLs use 'pw' prefix rather than 'lib' library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/./-/g'`${versuffix}${shared_ext}' ;; esac ;; *) library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib' ;; esac dynamic_linker='Win32 ld.exe' # FIXME: first we should search . and the directory the executable is in shlibpath_var=PATH ;; darwin* | rhapsody*) dynamic_linker="$host_os dyld" version_type=darwin need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${versuffix}$shared_ext ${libname}${release}${major}$shared_ext ${libname}$shared_ext' soname_spec='${libname}${release}${major}$shared_ext' shlibpath_overrides_runpath=yes shlibpath_var=DYLD_LIBRARY_PATH shrext_cmds='$(test .$module = .yes && echo .so || echo .dylib)' # Apple's gcc prints 'gcc -print-search-dirs' doesn't operate the same. if test "$GCC" = yes; then sys_lib_search_path_spec=`$CC -print-search-dirs | tr "\n" "$PATH_SEPARATOR" | sed -e 's/libraries:/@libraries:/' | tr "@" "\n" | grep "^libraries:" | sed -e "s/^libraries://" -e "s,=/,/,g" -e "s,$PATH_SEPARATOR, ,g" -e "s,.*,& /lib /usr/lib /usr/local/lib,g"` else sys_lib_search_path_spec='/lib /usr/lib /usr/local/lib' fi sys_lib_dlsearch_path_spec='/usr/local/lib /lib /usr/lib' ;; dgux*) version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname$shared_ext' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH ;; freebsd1*) dynamic_linker=no ;; kfreebsd*-gnu) version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes dynamic_linker='GNU ld.so' ;; freebsd*) objformat=`test -x /usr/bin/objformat && /usr/bin/objformat || echo aout` version_type=freebsd-$objformat case $version_type in freebsd-elf*) library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}' need_version=no need_lib_prefix=no ;; freebsd-*) library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix' need_version=yes ;; esac shlibpath_var=LD_LIBRARY_PATH case $host_os in freebsd2*) shlibpath_overrides_runpath=yes ;; freebsd3.01* | freebsdelf3.01*) shlibpath_overrides_runpath=yes hardcode_into_libs=yes ;; *) # from 3.2 on shlibpath_overrides_runpath=no hardcode_into_libs=yes ;; esac ;; gnu*) version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH hardcode_into_libs=yes ;; hpux9* | hpux10* | hpux11*) # Give a soname corresponding to the major version so that dld.sl refuses to # link against other versions. 
version_type=sunos need_lib_prefix=no need_version=no case "$host_cpu" in ia64*) shrext_cmds='.so' hardcode_into_libs=yes dynamic_linker="$host_os dld.so" shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' if test "X$HPUX_IA64_MODE" = X32; then sys_lib_search_path_spec="/usr/lib/hpux32 /usr/local/lib/hpux32 /usr/local/lib" else sys_lib_search_path_spec="/usr/lib/hpux64 /usr/local/lib/hpux64" fi sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec ;; hppa*64*) shrext_cmds='.sl' hardcode_into_libs=yes dynamic_linker="$host_os dld.sl" shlibpath_var=LD_LIBRARY_PATH # How should we handle SHLIB_PATH shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' sys_lib_search_path_spec="/usr/lib/pa20_64 /usr/ccs/lib/pa20_64" sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec ;; *) shrext_cmds='.sl' dynamic_linker="$host_os dld.sl" shlibpath_var=SHLIB_PATH shlibpath_overrides_runpath=no # +s is required to enable SHLIB_PATH library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' ;; esac # HP-UX runs *really* slowly unless shared libraries are mode 555. postinstall_cmds='chmod 555 $lib' ;; irix5* | irix6* | nonstopux*) case $host_os in nonstopux*) version_type=nonstopux ;; *) if test "$lt_cv_prog_gnu_ld" = yes; then version_type=linux else version_type=irix fi ;; esac need_lib_prefix=no need_version=no soname_spec='${libname}${release}${shared_ext}$major' library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext} $libname${shared_ext}' case $host_os in irix5* | nonstopux*) libsuff= shlibsuff= ;; *) case $LD in # libtool.m4 will add one of these switches to LD *-32|*"-32 "|*-melf32bsmip|*"-melf32bsmip ") libsuff= shlibsuff= libmagic=32-bit;; *-n32|*"-n32 "|*-melf32bmipn32|*"-melf32bmipn32 ") libsuff=32 shlibsuff=N32 libmagic=N32;; *-64|*"-64 "|*-melf64bmip|*"-melf64bmip ") libsuff=64 shlibsuff=64 libmagic=64-bit;; *) libsuff= shlibsuff= libmagic=never-match;; esac ;; esac shlibpath_var=LD_LIBRARY${shlibsuff}_PATH shlibpath_overrides_runpath=no sys_lib_search_path_spec="/usr/lib${libsuff} /lib${libsuff} /usr/local/lib${libsuff}" sys_lib_dlsearch_path_spec="/usr/lib${libsuff} /lib${libsuff}" hardcode_into_libs=yes ;; # No shared lib support for Linux oldld, aout, or coff. linux*oldld* | linux*aout* | linux*coff*) dynamic_linker=no ;; # This must be Linux ELF. linux*) version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no # This implies no fast_install, which is unacceptable. # Some rework will be needed to allow for fast_install # before this can be enabled. 
hardcode_into_libs=yes # find out which ABI we are using libsuff= case "$host_cpu" in x86_64*|s390x*|powerpc64*) echo '#line 16460 "configure"' > conftest.$ac_ext if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 (eval $ac_compile) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; then case `/usr/bin/file conftest.$ac_objext` in *64-bit*) libsuff=64 sys_lib_search_path_spec="/lib${libsuff} /usr/lib${libsuff} /usr/local/lib${libsuff}" ;; esac fi rm -rf conftest* ;; esac # Append ld.so.conf contents to the search path if test -f /etc/ld.so.conf; then lt_ld_extra=`$SED -e 's/:,\t/ /g;s/=^=*$//;s/=^= * / /g' /etc/ld.so.conf | tr '\n' ' '` sys_lib_dlsearch_path_spec="/lib${libsuff} /usr/lib${libsuff} $lt_ld_extra" fi # We used to test for /lib/ld.so.1 and disable shared libraries on # powerpc, because MkLinux only supported shared libraries with the # GNU dynamic linker. Since this was broken with cross compilers, # most powerpc-linux boxes support dynamic linking these days and # people can always --disable-shared, the test was removed, and we # assume the GNU/Linux dynamic linker is in use. dynamic_linker='GNU/Linux ld.so' ;; knetbsd*-gnu) version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes dynamic_linker='GNU ld.so' ;; netbsd*) version_type=sunos need_lib_prefix=no need_version=no if echo __ELF__ | $CC -E - | grep __ELF__ >/dev/null; then library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' dynamic_linker='NetBSD (a.out) ld.so' else library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' dynamic_linker='NetBSD ld.elf_so' fi shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes hardcode_into_libs=yes ;; newsos6) version_type=linux library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes ;; nto-qnx*) version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes ;; openbsd*) version_type=sunos need_lib_prefix=no need_version=yes library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' shlibpath_var=LD_LIBRARY_PATH if test -z "`echo __ELF__ | $CC -E - | grep __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then case $host_os in openbsd2.[89] | openbsd2.[89].*) shlibpath_overrides_runpath=no ;; *) shlibpath_overrides_runpath=yes ;; esac else shlibpath_overrides_runpath=yes fi ;; os2*) libname_spec='$name' shrext_cmds=".dll" need_lib_prefix=no library_names_spec='$libname${shared_ext} $libname.a' dynamic_linker='OS/2 ld.exe' shlibpath_var=LIBPATH ;; osf3* | osf4* | osf5*) version_type=osf need_lib_prefix=no need_version=no soname_spec='${libname}${release}${shared_ext}$major' 
library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' shlibpath_var=LD_LIBRARY_PATH sys_lib_search_path_spec="/usr/shlib /usr/ccs/lib /usr/lib/cmplrs/cc /usr/lib /usr/local/lib /var/shlib" sys_lib_dlsearch_path_spec="$sys_lib_search_path_spec" ;; sco3.2v5*) version_type=osf soname_spec='${libname}${release}${shared_ext}$major' library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' shlibpath_var=LD_LIBRARY_PATH ;; solaris*) version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes hardcode_into_libs=yes # ldd complains unless libraries are executable postinstall_cmds='chmod +x $lib' ;; sunos4*) version_type=sunos library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' finish_cmds='PATH="\$PATH:/usr/etc" ldconfig $libdir' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes if test "$with_gnu_ld" = yes; then need_lib_prefix=no fi need_version=yes ;; sysv4 | sysv4.2uw2* | sysv4.3* | sysv5*) version_type=linux library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH case $host_vendor in sni) shlibpath_overrides_runpath=no need_lib_prefix=no export_dynamic_flag_spec='${wl}-Blargedynsym' runpath_var=LD_RUN_PATH ;; siemens) need_lib_prefix=no ;; motorola) need_lib_prefix=no need_version=no shlibpath_overrides_runpath=no sys_lib_search_path_spec='/lib /usr/lib /usr/ccs/lib' ;; esac ;; sysv4*MP*) if test -d /usr/nec ;then version_type=linux library_names_spec='$libname${shared_ext}.$versuffix $libname${shared_ext}.$major $libname${shared_ext}' soname_spec='$libname${shared_ext}.$major' shlibpath_var=LD_LIBRARY_PATH fi ;; uts4*) version_type=linux library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH ;; *) dynamic_linker=no ;; esac echo "$as_me:$LINENO: result: $dynamic_linker" >&5 echo "${ECHO_T}$dynamic_linker" >&6 test "$dynamic_linker" = no && can_build_shared=no echo "$as_me:$LINENO: checking how to hardcode library paths into programs" >&5 echo $ECHO_N "checking how to hardcode library paths into programs... $ECHO_C" >&6 hardcode_action_GCJ= if test -n "$hardcode_libdir_flag_spec_GCJ" || \ test -n "$runpath_var GCJ" || \ test "X$hardcode_automatic_GCJ"="Xyes" ; then # We can hardcode non-existant directories. if test "$hardcode_direct_GCJ" != no && # If the only mechanism to avoid hardcoding is shlibpath_var, we # have to relink, otherwise we might link with an installed library # when we should be linking with a yet-to-be-installed one ## test "$_LT_AC_TAGVAR(hardcode_shlibpath_var, GCJ)" != no && test "$hardcode_minus_L_GCJ" != no; then # Linking always hardcodes the temporary library directory. hardcode_action_GCJ=relink else # We can link without hardcoding, and we can hardcode nonexisting dirs. hardcode_action_GCJ=immediate fi else # We cannot hardcode anything, or else we can only hardcode existing # directories. 
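# (Illustrative summary, not part of the generated script; it restates the decision made just above.)
# The resulting hardcode_action_GCJ value means roughly:
#   relink      - only -L / direct hardcoding is available, so the temporary build directory
#                 gets baked in and the binary must be relinked at install time;
#   immediate   - an rpath-style mechanism exists, so not-yet-existing install directories
#                 can be recorded at link time (or no hardcoding is needed at all);
#   unsupported - nothing can be hardcoded, or only directories that already exist.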
hardcode_action_GCJ=unsupported fi echo "$as_me:$LINENO: result: $hardcode_action_GCJ" >&5 echo "${ECHO_T}$hardcode_action_GCJ" >&6 if test "$hardcode_action_GCJ" = relink; then # Fast installation is not supported enable_fast_install=no elif test "$shlibpath_overrides_runpath" = yes || test "$enable_shared" = no; then # Fast installation is not necessary enable_fast_install=needless fi striplib= old_striplib= echo "$as_me:$LINENO: checking whether stripping libraries is possible" >&5 echo $ECHO_N "checking whether stripping libraries is possible... $ECHO_C" >&6 if test -n "$STRIP" && $STRIP -V 2>&1 | grep "GNU strip" >/dev/null; then test -z "$old_striplib" && old_striplib="$STRIP --strip-debug" test -z "$striplib" && striplib="$STRIP --strip-unneeded" echo "$as_me:$LINENO: result: yes" >&5 echo "${ECHO_T}yes" >&6 else # FIXME - insert some real tests, host_os isn't really good enough case $host_os in darwin*) if test -n "$STRIP" ; then striplib="$STRIP -x" echo "$as_me:$LINENO: result: yes" >&5 echo "${ECHO_T}yes" >&6 else echo "$as_me:$LINENO: result: no" >&5 echo "${ECHO_T}no" >&6 fi ;; *) echo "$as_me:$LINENO: result: no" >&5 echo "${ECHO_T}no" >&6 ;; esac fi if test "x$enable_dlopen" != xyes; then enable_dlopen=unknown enable_dlopen_self=unknown enable_dlopen_self_static=unknown else lt_cv_dlopen=no lt_cv_dlopen_libs= case $host_os in beos*) lt_cv_dlopen="load_add_on" lt_cv_dlopen_libs= lt_cv_dlopen_self=yes ;; mingw* | pw32*) lt_cv_dlopen="LoadLibrary" lt_cv_dlopen_libs= ;; cygwin*) lt_cv_dlopen="dlopen" lt_cv_dlopen_libs= ;; darwin*) # if libdl is installed we need to link against it echo "$as_me:$LINENO: checking for dlopen in -ldl" >&5 echo $ECHO_N "checking for dlopen in -ldl... $ECHO_C" >&6 if test "${ac_cv_lib_dl_dlopen+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-ldl $LIBS" cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ /* Override any gcc2 internal prototype to avoid an error. */ #ifdef __cplusplus extern "C" #endif /* We use char because int might match the return type of a gcc2 builtin and then its argument prototype would still apply. */ char dlopen (); int main () { dlopen (); ; return 0; } _ACEOF rm -f conftest.$ac_objext conftest$ac_exeext if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 (eval $ac_link) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest$ac_exeext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 (exit $ac_status); }; }; then ac_cv_lib_dl_dlopen=yes else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_cv_lib_dl_dlopen=no fi rm -f conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi echo "$as_me:$LINENO: result: $ac_cv_lib_dl_dlopen" >&5 echo "${ECHO_T}$ac_cv_lib_dl_dlopen" >&6 if test $ac_cv_lib_dl_dlopen = yes; then lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl" else lt_cv_dlopen="dyld" lt_cv_dlopen_libs= lt_cv_dlopen_self=yes fi ;; *) echo "$as_me:$LINENO: checking for shl_load" >&5 echo $ECHO_N "checking for shl_load... $ECHO_C" >&6 if test "${ac_cv_func_shl_load+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ /* Define shl_load to an innocuous variant, in case declares shl_load. For example, HP-UX 11i declares gettimeofday. */ #define shl_load innocuous_shl_load /* System header to define __stub macros and hopefully few prototypes, which can conflict with char shl_load (); below. Prefer to if __STDC__ is defined, since exists even on freestanding compilers. */ #ifdef __STDC__ # include #else # include #endif #undef shl_load /* Override any gcc2 internal prototype to avoid an error. */ #ifdef __cplusplus extern "C" { #endif /* We use char because int might match the return type of a gcc2 builtin and then its argument prototype would still apply. */ char shl_load (); /* The GNU C library defines this for functions which it implements to always fail with ENOSYS. Some functions are actually named something starting with __ and the normal name is an alias. */ #if defined (__stub_shl_load) || defined (__stub___shl_load) choke me #else char (*f) () = shl_load; #endif #ifdef __cplusplus } #endif int main () { return f != shl_load; ; return 0; } _ACEOF rm -f conftest.$ac_objext conftest$ac_exeext if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 (eval $ac_link) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest$ac_exeext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then ac_cv_func_shl_load=yes else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_cv_func_shl_load=no fi rm -f conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext fi echo "$as_me:$LINENO: result: $ac_cv_func_shl_load" >&5 echo "${ECHO_T}$ac_cv_func_shl_load" >&6 if test $ac_cv_func_shl_load = yes; then lt_cv_dlopen="shl_load" else echo "$as_me:$LINENO: checking for shl_load in -ldld" >&5 echo $ECHO_N "checking for shl_load in -ldld... $ECHO_C" >&6 if test "${ac_cv_lib_dld_shl_load+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-ldld $LIBS" cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ /* Override any gcc2 internal prototype to avoid an error. 
*/ #ifdef __cplusplus extern "C" #endif /* We use char because int might match the return type of a gcc2 builtin and then its argument prototype would still apply. */ char shl_load (); int main () { shl_load (); ; return 0; } _ACEOF rm -f conftest.$ac_objext conftest$ac_exeext if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 (eval $ac_link) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest$ac_exeext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then ac_cv_lib_dld_shl_load=yes else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_cv_lib_dld_shl_load=no fi rm -f conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi echo "$as_me:$LINENO: result: $ac_cv_lib_dld_shl_load" >&5 echo "${ECHO_T}$ac_cv_lib_dld_shl_load" >&6 if test $ac_cv_lib_dld_shl_load = yes; then lt_cv_dlopen="shl_load" lt_cv_dlopen_libs="-dld" else echo "$as_me:$LINENO: checking for dlopen" >&5 echo $ECHO_N "checking for dlopen... $ECHO_C" >&6 if test "${ac_cv_func_dlopen+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ /* Define dlopen to an innocuous variant, in case declares dlopen. For example, HP-UX 11i declares gettimeofday. */ #define dlopen innocuous_dlopen /* System header to define __stub macros and hopefully few prototypes, which can conflict with char dlopen (); below. Prefer to if __STDC__ is defined, since exists even on freestanding compilers. */ #ifdef __STDC__ # include #else # include #endif #undef dlopen /* Override any gcc2 internal prototype to avoid an error. */ #ifdef __cplusplus extern "C" { #endif /* We use char because int might match the return type of a gcc2 builtin and then its argument prototype would still apply. */ char dlopen (); /* The GNU C library defines this for functions which it implements to always fail with ENOSYS. Some functions are actually named something starting with __ and the normal name is an alias. */ #if defined (__stub_dlopen) || defined (__stub___dlopen) choke me #else char (*f) () = dlopen; #endif #ifdef __cplusplus } #endif int main () { return f != dlopen; ; return 0; } _ACEOF rm -f conftest.$ac_objext conftest$ac_exeext if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 (eval $ac_link) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest$ac_exeext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 (exit $ac_status); }; }; then ac_cv_func_dlopen=yes else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_cv_func_dlopen=no fi rm -f conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext fi echo "$as_me:$LINENO: result: $ac_cv_func_dlopen" >&5 echo "${ECHO_T}$ac_cv_func_dlopen" >&6 if test $ac_cv_func_dlopen = yes; then lt_cv_dlopen="dlopen" else echo "$as_me:$LINENO: checking for dlopen in -ldl" >&5 echo $ECHO_N "checking for dlopen in -ldl... $ECHO_C" >&6 if test "${ac_cv_lib_dl_dlopen+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-ldl $LIBS" cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ /* Override any gcc2 internal prototype to avoid an error. */ #ifdef __cplusplus extern "C" #endif /* We use char because int might match the return type of a gcc2 builtin and then its argument prototype would still apply. */ char dlopen (); int main () { dlopen (); ; return 0; } _ACEOF rm -f conftest.$ac_objext conftest$ac_exeext if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 (eval $ac_link) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest$ac_exeext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then ac_cv_lib_dl_dlopen=yes else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_cv_lib_dl_dlopen=no fi rm -f conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi echo "$as_me:$LINENO: result: $ac_cv_lib_dl_dlopen" >&5 echo "${ECHO_T}$ac_cv_lib_dl_dlopen" >&6 if test $ac_cv_lib_dl_dlopen = yes; then lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl" else echo "$as_me:$LINENO: checking for dlopen in -lsvld" >&5 echo $ECHO_N "checking for dlopen in -lsvld... $ECHO_C" >&6 if test "${ac_cv_lib_svld_dlopen+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lsvld $LIBS" cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ /* Override any gcc2 internal prototype to avoid an error. */ #ifdef __cplusplus extern "C" #endif /* We use char because int might match the return type of a gcc2 builtin and then its argument prototype would still apply. */ char dlopen (); int main () { dlopen (); ; return 0; } _ACEOF rm -f conftest.$ac_objext conftest$ac_exeext if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 (eval $ac_link) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest$ac_exeext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? 
echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then ac_cv_lib_svld_dlopen=yes else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_cv_lib_svld_dlopen=no fi rm -f conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi echo "$as_me:$LINENO: result: $ac_cv_lib_svld_dlopen" >&5 echo "${ECHO_T}$ac_cv_lib_svld_dlopen" >&6 if test $ac_cv_lib_svld_dlopen = yes; then lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-lsvld" else echo "$as_me:$LINENO: checking for dld_link in -ldld" >&5 echo $ECHO_N "checking for dld_link in -ldld... $ECHO_C" >&6 if test "${ac_cv_lib_dld_dld_link+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-ldld $LIBS" cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ /* Override any gcc2 internal prototype to avoid an error. */ #ifdef __cplusplus extern "C" #endif /* We use char because int might match the return type of a gcc2 builtin and then its argument prototype would still apply. */ char dld_link (); int main () { dld_link (); ; return 0; } _ACEOF rm -f conftest.$ac_objext conftest$ac_exeext if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 (eval $ac_link) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest$ac_exeext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then ac_cv_lib_dld_dld_link=yes else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_cv_lib_dld_dld_link=no fi rm -f conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi echo "$as_me:$LINENO: result: $ac_cv_lib_dld_dld_link" >&5 echo "${ECHO_T}$ac_cv_lib_dld_dld_link" >&6 if test $ac_cv_lib_dld_dld_link = yes; then lt_cv_dlopen="dld_link" lt_cv_dlopen_libs="-dld" fi fi fi fi fi fi ;; esac if test "x$lt_cv_dlopen" != xno; then enable_dlopen=yes else enable_dlopen=no fi case $lt_cv_dlopen in dlopen) save_CPPFLAGS="$CPPFLAGS" test "x$ac_cv_header_dlfcn_h" = xyes && CPPFLAGS="$CPPFLAGS -DHAVE_DLFCN_H" save_LDFLAGS="$LDFLAGS" eval LDFLAGS=\"\$LDFLAGS $export_dynamic_flag_spec\" save_LIBS="$LIBS" LIBS="$lt_cv_dlopen_libs $LIBS" echo "$as_me:$LINENO: checking whether a program can dlopen itself" >&5 echo $ECHO_N "checking whether a program can dlopen itself... $ECHO_C" >&6 if test "${lt_cv_dlopen_self+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else if test "$cross_compiling" = yes; then : lt_cv_dlopen_self=cross else lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2 lt_status=$lt_dlunknown cat > conftest.$ac_ext < #endif #include #ifdef RTLD_GLOBAL # define LT_DLGLOBAL RTLD_GLOBAL #else # ifdef DL_GLOBAL # define LT_DLGLOBAL DL_GLOBAL # else # define LT_DLGLOBAL 0 # endif #endif /* We may have to define LT_DLLAZY_OR_NOW in the command line if we find out it does not work in some platform. 
*/ #ifndef LT_DLLAZY_OR_NOW # ifdef RTLD_LAZY # define LT_DLLAZY_OR_NOW RTLD_LAZY # else # ifdef DL_LAZY # define LT_DLLAZY_OR_NOW DL_LAZY # else # ifdef RTLD_NOW # define LT_DLLAZY_OR_NOW RTLD_NOW # else # ifdef DL_NOW # define LT_DLLAZY_OR_NOW DL_NOW # else # define LT_DLLAZY_OR_NOW 0 # endif # endif # endif # endif #endif #ifdef __cplusplus extern "C" void exit (int); #endif void fnord() { int i=42;} int main () { void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); int status = $lt_dlunknown; if (self) { if (dlsym (self,"fnord")) status = $lt_dlno_uscore; else if (dlsym( self,"_fnord")) status = $lt_dlneed_uscore; /* dlclose (self); */ } exit (status); } EOF if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 (eval $ac_link) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && test -s conftest${ac_exeext} 2>/dev/null; then (./conftest; exit; ) 2>/dev/null lt_status=$? case x$lt_status in x$lt_dlno_uscore) lt_cv_dlopen_self=yes ;; x$lt_dlneed_uscore) lt_cv_dlopen_self=yes ;; x$lt_unknown|x*) lt_cv_dlopen_self=no ;; esac else : # compilation failed lt_cv_dlopen_self=no fi fi rm -fr conftest* fi echo "$as_me:$LINENO: result: $lt_cv_dlopen_self" >&5 echo "${ECHO_T}$lt_cv_dlopen_self" >&6 if test "x$lt_cv_dlopen_self" = xyes; then LDFLAGS="$LDFLAGS $link_static_flag" echo "$as_me:$LINENO: checking whether a statically linked program can dlopen itself" >&5 echo $ECHO_N "checking whether a statically linked program can dlopen itself... $ECHO_C" >&6 if test "${lt_cv_dlopen_self_static+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else if test "$cross_compiling" = yes; then : lt_cv_dlopen_self_static=cross else lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2 lt_status=$lt_dlunknown cat > conftest.$ac_ext < #endif #include #ifdef RTLD_GLOBAL # define LT_DLGLOBAL RTLD_GLOBAL #else # ifdef DL_GLOBAL # define LT_DLGLOBAL DL_GLOBAL # else # define LT_DLGLOBAL 0 # endif #endif /* We may have to define LT_DLLAZY_OR_NOW in the command line if we find out it does not work in some platform. */ #ifndef LT_DLLAZY_OR_NOW # ifdef RTLD_LAZY # define LT_DLLAZY_OR_NOW RTLD_LAZY # else # ifdef DL_LAZY # define LT_DLLAZY_OR_NOW DL_LAZY # else # ifdef RTLD_NOW # define LT_DLLAZY_OR_NOW RTLD_NOW # else # ifdef DL_NOW # define LT_DLLAZY_OR_NOW DL_NOW # else # define LT_DLLAZY_OR_NOW 0 # endif # endif # endif # endif #endif #ifdef __cplusplus extern "C" void exit (int); #endif void fnord() { int i=42;} int main () { void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); int status = $lt_dlunknown; if (self) { if (dlsym (self,"fnord")) status = $lt_dlno_uscore; else if (dlsym( self,"_fnord")) status = $lt_dlneed_uscore; /* dlclose (self); */ } exit (status); } EOF if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 (eval $ac_link) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && test -s conftest${ac_exeext} 2>/dev/null; then (./conftest; exit; ) 2>/dev/null lt_status=$? 
case x$lt_status in x$lt_dlno_uscore) lt_cv_dlopen_self_static=yes ;; x$lt_dlneed_uscore) lt_cv_dlopen_self_static=yes ;; x$lt_unknown|x*) lt_cv_dlopen_self_static=no ;; esac else : # compilation failed lt_cv_dlopen_self_static=no fi fi rm -fr conftest* fi echo "$as_me:$LINENO: result: $lt_cv_dlopen_self_static" >&5 echo "${ECHO_T}$lt_cv_dlopen_self_static" >&6 fi CPPFLAGS="$save_CPPFLAGS" LDFLAGS="$save_LDFLAGS" LIBS="$save_LIBS" ;; esac case $lt_cv_dlopen_self in yes|no) enable_dlopen_self=$lt_cv_dlopen_self ;; *) enable_dlopen_self=unknown ;; esac case $lt_cv_dlopen_self_static in yes|no) enable_dlopen_self_static=$lt_cv_dlopen_self_static ;; *) enable_dlopen_self_static=unknown ;; esac fi # The else clause should only fire when bootstrapping the # libtool distribution, otherwise you forgot to ship ltmain.sh # with your package, and you will get complaints that there are # no rules to generate ltmain.sh. if test -f "$ltmain"; then # See if we are running on zsh, and set the options which allow our commands through # without removal of \ escapes. if test -n "${ZSH_VERSION+set}" ; then setopt NO_GLOB_SUBST fi # Now quote all the things that may contain metacharacters while being # careful not to overquote the AC_SUBSTed values. We take copies of the # variables and quote the copies for generation of the libtool script. for var in echo old_CC old_CFLAGS AR AR_FLAGS EGREP RANLIB LN_S LTCC NM \ SED SHELL STRIP \ libname_spec library_names_spec soname_spec extract_expsyms_cmds \ old_striplib striplib file_magic_cmd finish_cmds finish_eval \ deplibs_check_method reload_flag reload_cmds need_locks \ lt_cv_sys_global_symbol_pipe lt_cv_sys_global_symbol_to_cdecl \ lt_cv_sys_global_symbol_to_c_name_address \ sys_lib_search_path_spec sys_lib_dlsearch_path_spec \ old_postinstall_cmds old_postuninstall_cmds \ compiler_GCJ \ CC_GCJ \ LD_GCJ \ lt_prog_compiler_wl_GCJ \ lt_prog_compiler_pic_GCJ \ lt_prog_compiler_static_GCJ \ lt_prog_compiler_no_builtin_flag_GCJ \ export_dynamic_flag_spec_GCJ \ thread_safe_flag_spec_GCJ \ whole_archive_flag_spec_GCJ \ enable_shared_with_static_runtimes_GCJ \ old_archive_cmds_GCJ \ old_archive_from_new_cmds_GCJ \ predep_objects_GCJ \ postdep_objects_GCJ \ predeps_GCJ \ postdeps_GCJ \ compiler_lib_search_path_GCJ \ archive_cmds_GCJ \ archive_expsym_cmds_GCJ \ postinstall_cmds_GCJ \ postuninstall_cmds_GCJ \ old_archive_from_expsyms_cmds_GCJ \ allow_undefined_flag_GCJ \ no_undefined_flag_GCJ \ export_symbols_cmds_GCJ \ hardcode_libdir_flag_spec_GCJ \ hardcode_libdir_flag_spec_ld_GCJ \ hardcode_libdir_separator_GCJ \ hardcode_automatic_GCJ \ module_cmds_GCJ \ module_expsym_cmds_GCJ \ lt_cv_prog_compiler_c_o_GCJ \ exclude_expsyms_GCJ \ include_expsyms_GCJ; do case $var in old_archive_cmds_GCJ | \ old_archive_from_new_cmds_GCJ | \ archive_cmds_GCJ | \ archive_expsym_cmds_GCJ | \ module_cmds_GCJ | \ module_expsym_cmds_GCJ | \ old_archive_from_expsyms_cmds_GCJ | \ export_symbols_cmds_GCJ | \ extract_expsyms_cmds | reload_cmds | finish_cmds | \ postinstall_cmds | postuninstall_cmds | \ old_postinstall_cmds | old_postuninstall_cmds | \ sys_lib_search_path_spec | sys_lib_dlsearch_path_spec) # Double-quote double-evaled strings. 
eval "lt_$var=\\\"\`\$echo \"X\$$var\" | \$Xsed -e \"\$double_quote_subst\" -e \"\$sed_quote_subst\" -e \"\$delay_variable_subst\"\`\\\"" ;; *) eval "lt_$var=\\\"\`\$echo \"X\$$var\" | \$Xsed -e \"\$sed_quote_subst\"\`\\\"" ;; esac done case $lt_echo in *'\$0 --fallback-echo"') lt_echo=`$echo "X$lt_echo" | $Xsed -e 's/\\\\\\\$0 --fallback-echo"$/$0 --fallback-echo"/'` ;; esac cfgfile="$ofile" cat <<__EOF__ >> "$cfgfile" # ### BEGIN LIBTOOL TAG CONFIG: $tagname # Libtool was configured on host `(hostname || uname -n) 2>/dev/null | sed 1q`: # Shell to use when invoking shell scripts. SHELL=$lt_SHELL # Whether or not to build shared libraries. build_libtool_libs=$enable_shared # Whether or not to build static libraries. build_old_libs=$enable_static # Whether or not to add -lc for building shared libraries. build_libtool_need_lc=$archive_cmds_need_lc_GCJ # Whether or not to disallow shared libs when runtime libs are static allow_libtool_libs_with_static_runtimes=$enable_shared_with_static_runtimes_GCJ # Whether or not to optimize for fast installation. fast_install=$enable_fast_install # The host system. host_alias=$host_alias host=$host # An echo program that does not interpret backslashes. echo=$lt_echo # The archiver. AR=$lt_AR AR_FLAGS=$lt_AR_FLAGS # A C compiler. LTCC=$lt_LTCC # A language-specific compiler. CC=$lt_compiler_GCJ # Is the compiler the GNU C compiler? with_gcc=$GCC_GCJ # An ERE matcher. EGREP=$lt_EGREP # The linker used to build libraries. LD=$lt_LD_GCJ # Whether we need hard or soft links. LN_S=$lt_LN_S # A BSD-compatible nm program. NM=$lt_NM # A symbol stripping program STRIP=$lt_STRIP # Used to examine libraries when file_magic_cmd begins "file" MAGIC_CMD=$MAGIC_CMD # Used on cygwin: DLL creation program. DLLTOOL="$DLLTOOL" # Used on cygwin: object dumper. OBJDUMP="$OBJDUMP" # Used on cygwin: assembler. AS="$AS" # The name of the directory that contains temporary libtool files. objdir=$objdir # How to create reloadable object files. reload_flag=$lt_reload_flag reload_cmds=$lt_reload_cmds # How to pass a linker flag through the compiler. wl=$lt_lt_prog_compiler_wl_GCJ # Object file suffix (normally "o"). objext="$ac_objext" # Old archive suffix (normally "a"). libext="$libext" # Shared library suffix (normally ".so"). shrext_cmds='$shrext_cmds' # Executable file suffix (normally ""). exeext="$exeext" # Additional compiler flags for building library objects. pic_flag=$lt_lt_prog_compiler_pic_GCJ pic_mode=$pic_mode # What is the maximum length of a command? max_cmd_len=$lt_cv_sys_max_cmd_len # Does compiler simultaneously support -c and -o options? compiler_c_o=$lt_lt_cv_prog_compiler_c_o_GCJ # Must we lock files when doing compilation ? need_locks=$lt_need_locks # Do we need the lib prefix for modules? need_lib_prefix=$need_lib_prefix # Do we need a version for libraries? need_version=$need_version # Whether dlopen is supported. dlopen_support=$enable_dlopen # Whether dlopen of programs is supported. dlopen_self=$enable_dlopen_self # Whether dlopen of statically linked programs is supported. dlopen_self_static=$enable_dlopen_self_static # Compiler flag to prevent dynamic linking. link_static_flag=$lt_lt_prog_compiler_static_GCJ # Compiler flag to turn off builtin functions. no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag_GCJ # Compiler flag to allow reflexive dlopens. export_dynamic_flag_spec=$lt_export_dynamic_flag_spec_GCJ # Compiler flag to generate shared objects directly from archives. 
whole_archive_flag_spec=$lt_whole_archive_flag_spec_GCJ # Compiler flag to generate thread-safe objects. thread_safe_flag_spec=$lt_thread_safe_flag_spec_GCJ # Library versioning type. version_type=$version_type # Format of library name prefix. libname_spec=$lt_libname_spec # List of archive names. First name is the real one, the rest are links. # The last name is the one that the linker finds with -lNAME. library_names_spec=$lt_library_names_spec # The coded name of the library, if different from the real name. soname_spec=$lt_soname_spec # Commands used to build and install an old-style archive. RANLIB=$lt_RANLIB old_archive_cmds=$lt_old_archive_cmds_GCJ old_postinstall_cmds=$lt_old_postinstall_cmds old_postuninstall_cmds=$lt_old_postuninstall_cmds # Create an old-style archive from a shared archive. old_archive_from_new_cmds=$lt_old_archive_from_new_cmds_GCJ # Create a temporary old-style archive to link instead of a shared archive. old_archive_from_expsyms_cmds=$lt_old_archive_from_expsyms_cmds_GCJ # Commands used to build and install a shared archive. archive_cmds=$lt_archive_cmds_GCJ archive_expsym_cmds=$lt_archive_expsym_cmds_GCJ postinstall_cmds=$lt_postinstall_cmds postuninstall_cmds=$lt_postuninstall_cmds # Commands used to build a loadable module (assumed same as above if empty) module_cmds=$lt_module_cmds_GCJ module_expsym_cmds=$lt_module_expsym_cmds_GCJ # Commands to strip libraries. old_striplib=$lt_old_striplib striplib=$lt_striplib # Dependencies to place before the objects being linked to create a # shared library. predep_objects=$lt_predep_objects_GCJ # Dependencies to place after the objects being linked to create a # shared library. postdep_objects=$lt_postdep_objects_GCJ # Dependencies to place before the objects being linked to create a # shared library. predeps=$lt_predeps_GCJ # Dependencies to place after the objects being linked to create a # shared library. postdeps=$lt_postdeps_GCJ # The library search path used internally by the compiler when linking # a shared library. compiler_lib_search_path=$lt_compiler_lib_search_path_GCJ # Method to check whether dependent libraries are shared objects. deplibs_check_method=$lt_deplibs_check_method # Command to use when deplibs_check_method == file_magic. file_magic_cmd=$lt_file_magic_cmd # Flag that allows shared libraries with undefined symbols to be built. allow_undefined_flag=$lt_allow_undefined_flag_GCJ # Flag that forces no undefined symbols. no_undefined_flag=$lt_no_undefined_flag_GCJ # Commands used to finish a libtool library installation in a directory. finish_cmds=$lt_finish_cmds # Same as above, but a single script fragment to be evaled but not shown. finish_eval=$lt_finish_eval # Take the output of nm and produce a listing of raw symbols and C names. global_symbol_pipe=$lt_lt_cv_sys_global_symbol_pipe # Transform the output of nm in a proper C declaration global_symbol_to_cdecl=$lt_lt_cv_sys_global_symbol_to_cdecl # Transform the output of nm in a C name address pair global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address # This is the shared library runtime path variable. runpath_var=$runpath_var # This is the shared library path variable. shlibpath_var=$shlibpath_var # Is shlibpath searched before the hard-coded library search path? shlibpath_overrides_runpath=$shlibpath_overrides_runpath # How to hardcode a shared library path into an executable. hardcode_action=$hardcode_action_GCJ # Whether we should hardcode library paths into libraries. 
hardcode_into_libs=$hardcode_into_libs # Flag to hardcode \$libdir into a binary during linking. # This must work even if \$libdir does not exist. hardcode_libdir_flag_spec=$lt_hardcode_libdir_flag_spec_GCJ # If ld is used when linking, flag to hardcode \$libdir into # a binary during linking. This must work even if \$libdir does # not exist. hardcode_libdir_flag_spec_ld=$lt_hardcode_libdir_flag_spec_ld_GCJ # Whether we need a single -rpath flag with a separated argument. hardcode_libdir_separator=$lt_hardcode_libdir_separator_GCJ # Set to yes if using DIR/libNAME${shared_ext} during linking hardcodes DIR into the # resulting binary. hardcode_direct=$hardcode_direct_GCJ # Set to yes if using the -LDIR flag during linking hardcodes DIR into the # resulting binary. hardcode_minus_L=$hardcode_minus_L_GCJ # Set to yes if using SHLIBPATH_VAR=DIR during linking hardcodes DIR into # the resulting binary. hardcode_shlibpath_var=$hardcode_shlibpath_var_GCJ # Set to yes if building a shared library automatically hardcodes DIR into the library # and all subsequent libraries and executables linked against it. hardcode_automatic=$hardcode_automatic_GCJ # Variables whose values should be saved in libtool wrapper scripts and # restored at relink time. variables_saved_for_relink="$variables_saved_for_relink" # Whether libtool must link a program against all its dependency libraries. link_all_deplibs=$link_all_deplibs_GCJ # Compile-time system search path for libraries sys_lib_search_path_spec=$lt_sys_lib_search_path_spec # Run-time system search path for libraries sys_lib_dlsearch_path_spec=$lt_sys_lib_dlsearch_path_spec # Fix the shell variable \$srcfile for the compiler. fix_srcfile_path="$fix_srcfile_path_GCJ" # Set to yes if exported symbols are required. always_export_symbols=$always_export_symbols_GCJ # The commands to list exported symbols. export_symbols_cmds=$lt_export_symbols_cmds_GCJ # The commands to extract the exported symbol list from a shared archive. extract_expsyms_cmds=$lt_extract_expsyms_cmds # Symbols that should not be listed in the preloaded symbols. exclude_expsyms=$lt_exclude_expsyms_GCJ # Symbols that must always be exported. include_expsyms=$lt_include_expsyms_GCJ # ### END LIBTOOL TAG CONFIG: $tagname __EOF__ else # If there is no Makefile yet, we rely on a make rule to execute # `config.status --recheck' to rerun these tests and create the # libtool script then. ltmain_in=`echo $ltmain | sed -e 's/\.sh$/.in/'` if test -f "$ltmain_in"; then test -f Makefile && make "$ltmain" fi fi ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu CC="$lt_save_CC" else tagname="" fi ;; RC) # Source file extension for RC test sources. ac_ext=rc # Object file extension for compiled RC test sources. objext=o objext_RC=$objext # Code to be used in simple compile tests lt_simple_compile_test_code='sample MENU { MENUITEM "&Soup", 100, CHECKED }\n' # Code to be used in simple link tests lt_simple_link_test_code="$lt_simple_compile_test_code" # ltmain only uses $CC for tagged configurations so make sure $CC is set. # If no C compiler was specified, use CC. LTCC=${LTCC-"$CC"} # Allow CC to be a program name with arguments. compiler=$CC # Allow CC to be a program name with arguments. 
lt_save_CC="$CC" CC=${RC-"windres"} compiler=$CC compiler_RC=$CC lt_cv_prog_compiler_c_o_RC=yes # The else clause should only fire when bootstrapping the # libtool distribution, otherwise you forgot to ship ltmain.sh # with your package, and you will get complaints that there are # no rules to generate ltmain.sh. if test -f "$ltmain"; then # See if we are running on zsh, and set the options which allow our commands through # without removal of \ escapes. if test -n "${ZSH_VERSION+set}" ; then setopt NO_GLOB_SUBST fi # Now quote all the things that may contain metacharacters while being # careful not to overquote the AC_SUBSTed values. We take copies of the # variables and quote the copies for generation of the libtool script. for var in echo old_CC old_CFLAGS AR AR_FLAGS EGREP RANLIB LN_S LTCC NM \ SED SHELL STRIP \ libname_spec library_names_spec soname_spec extract_expsyms_cmds \ old_striplib striplib file_magic_cmd finish_cmds finish_eval \ deplibs_check_method reload_flag reload_cmds need_locks \ lt_cv_sys_global_symbol_pipe lt_cv_sys_global_symbol_to_cdecl \ lt_cv_sys_global_symbol_to_c_name_address \ sys_lib_search_path_spec sys_lib_dlsearch_path_spec \ old_postinstall_cmds old_postuninstall_cmds \ compiler_RC \ CC_RC \ LD_RC \ lt_prog_compiler_wl_RC \ lt_prog_compiler_pic_RC \ lt_prog_compiler_static_RC \ lt_prog_compiler_no_builtin_flag_RC \ export_dynamic_flag_spec_RC \ thread_safe_flag_spec_RC \ whole_archive_flag_spec_RC \ enable_shared_with_static_runtimes_RC \ old_archive_cmds_RC \ old_archive_from_new_cmds_RC \ predep_objects_RC \ postdep_objects_RC \ predeps_RC \ postdeps_RC \ compiler_lib_search_path_RC \ archive_cmds_RC \ archive_expsym_cmds_RC \ postinstall_cmds_RC \ postuninstall_cmds_RC \ old_archive_from_expsyms_cmds_RC \ allow_undefined_flag_RC \ no_undefined_flag_RC \ export_symbols_cmds_RC \ hardcode_libdir_flag_spec_RC \ hardcode_libdir_flag_spec_ld_RC \ hardcode_libdir_separator_RC \ hardcode_automatic_RC \ module_cmds_RC \ module_expsym_cmds_RC \ lt_cv_prog_compiler_c_o_RC \ exclude_expsyms_RC \ include_expsyms_RC; do case $var in old_archive_cmds_RC | \ old_archive_from_new_cmds_RC | \ archive_cmds_RC | \ archive_expsym_cmds_RC | \ module_cmds_RC | \ module_expsym_cmds_RC | \ old_archive_from_expsyms_cmds_RC | \ export_symbols_cmds_RC | \ extract_expsyms_cmds | reload_cmds | finish_cmds | \ postinstall_cmds | postuninstall_cmds | \ old_postinstall_cmds | old_postuninstall_cmds | \ sys_lib_search_path_spec | sys_lib_dlsearch_path_spec) # Double-quote double-evaled strings. eval "lt_$var=\\\"\`\$echo \"X\$$var\" | \$Xsed -e \"\$double_quote_subst\" -e \"\$sed_quote_subst\" -e \"\$delay_variable_subst\"\`\\\"" ;; *) eval "lt_$var=\\\"\`\$echo \"X\$$var\" | \$Xsed -e \"\$sed_quote_subst\"\`\\\"" ;; esac done case $lt_echo in *'\$0 --fallback-echo"') lt_echo=`$echo "X$lt_echo" | $Xsed -e 's/\\\\\\\$0 --fallback-echo"$/$0 --fallback-echo"/'` ;; esac cfgfile="$ofile" cat <<__EOF__ >> "$cfgfile" # ### BEGIN LIBTOOL TAG CONFIG: $tagname # Libtool was configured on host `(hostname || uname -n) 2>/dev/null | sed 1q`: # Shell to use when invoking shell scripts. SHELL=$lt_SHELL # Whether or not to build shared libraries. build_libtool_libs=$enable_shared # Whether or not to build static libraries. build_old_libs=$enable_static # Whether or not to add -lc for building shared libraries. 
build_libtool_need_lc=$archive_cmds_need_lc_RC # Whether or not to disallow shared libs when runtime libs are static allow_libtool_libs_with_static_runtimes=$enable_shared_with_static_runtimes_RC # Whether or not to optimize for fast installation. fast_install=$enable_fast_install # The host system. host_alias=$host_alias host=$host # An echo program that does not interpret backslashes. echo=$lt_echo # The archiver. AR=$lt_AR AR_FLAGS=$lt_AR_FLAGS # A C compiler. LTCC=$lt_LTCC # A language-specific compiler. CC=$lt_compiler_RC # Is the compiler the GNU C compiler? with_gcc=$GCC_RC # An ERE matcher. EGREP=$lt_EGREP # The linker used to build libraries. LD=$lt_LD_RC # Whether we need hard or soft links. LN_S=$lt_LN_S # A BSD-compatible nm program. NM=$lt_NM # A symbol stripping program STRIP=$lt_STRIP # Used to examine libraries when file_magic_cmd begins "file" MAGIC_CMD=$MAGIC_CMD # Used on cygwin: DLL creation program. DLLTOOL="$DLLTOOL" # Used on cygwin: object dumper. OBJDUMP="$OBJDUMP" # Used on cygwin: assembler. AS="$AS" # The name of the directory that contains temporary libtool files. objdir=$objdir # How to create reloadable object files. reload_flag=$lt_reload_flag reload_cmds=$lt_reload_cmds # How to pass a linker flag through the compiler. wl=$lt_lt_prog_compiler_wl_RC # Object file suffix (normally "o"). objext="$ac_objext" # Old archive suffix (normally "a"). libext="$libext" # Shared library suffix (normally ".so"). shrext_cmds='$shrext_cmds' # Executable file suffix (normally ""). exeext="$exeext" # Additional compiler flags for building library objects. pic_flag=$lt_lt_prog_compiler_pic_RC pic_mode=$pic_mode # What is the maximum length of a command? max_cmd_len=$lt_cv_sys_max_cmd_len # Does compiler simultaneously support -c and -o options? compiler_c_o=$lt_lt_cv_prog_compiler_c_o_RC # Must we lock files when doing compilation ? need_locks=$lt_need_locks # Do we need the lib prefix for modules? need_lib_prefix=$need_lib_prefix # Do we need a version for libraries? need_version=$need_version # Whether dlopen is supported. dlopen_support=$enable_dlopen # Whether dlopen of programs is supported. dlopen_self=$enable_dlopen_self # Whether dlopen of statically linked programs is supported. dlopen_self_static=$enable_dlopen_self_static # Compiler flag to prevent dynamic linking. link_static_flag=$lt_lt_prog_compiler_static_RC # Compiler flag to turn off builtin functions. no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag_RC # Compiler flag to allow reflexive dlopens. export_dynamic_flag_spec=$lt_export_dynamic_flag_spec_RC # Compiler flag to generate shared objects directly from archives. whole_archive_flag_spec=$lt_whole_archive_flag_spec_RC # Compiler flag to generate thread-safe objects. thread_safe_flag_spec=$lt_thread_safe_flag_spec_RC # Library versioning type. version_type=$version_type # Format of library name prefix. libname_spec=$lt_libname_spec # List of archive names. First name is the real one, the rest are links. # The last name is the one that the linker finds with -lNAME. library_names_spec=$lt_library_names_spec # The coded name of the library, if different from the real name. soname_spec=$lt_soname_spec # Commands used to build and install an old-style archive. RANLIB=$lt_RANLIB old_archive_cmds=$lt_old_archive_cmds_RC old_postinstall_cmds=$lt_old_postinstall_cmds old_postuninstall_cmds=$lt_old_postuninstall_cmds # Create an old-style archive from a shared archive. 
old_archive_from_new_cmds=$lt_old_archive_from_new_cmds_RC # Create a temporary old-style archive to link instead of a shared archive. old_archive_from_expsyms_cmds=$lt_old_archive_from_expsyms_cmds_RC # Commands used to build and install a shared archive. archive_cmds=$lt_archive_cmds_RC archive_expsym_cmds=$lt_archive_expsym_cmds_RC postinstall_cmds=$lt_postinstall_cmds postuninstall_cmds=$lt_postuninstall_cmds # Commands used to build a loadable module (assumed same as above if empty) module_cmds=$lt_module_cmds_RC module_expsym_cmds=$lt_module_expsym_cmds_RC # Commands to strip libraries. old_striplib=$lt_old_striplib striplib=$lt_striplib # Dependencies to place before the objects being linked to create a # shared library. predep_objects=$lt_predep_objects_RC # Dependencies to place after the objects being linked to create a # shared library. postdep_objects=$lt_postdep_objects_RC # Dependencies to place before the objects being linked to create a # shared library. predeps=$lt_predeps_RC # Dependencies to place after the objects being linked to create a # shared library. postdeps=$lt_postdeps_RC # The library search path used internally by the compiler when linking # a shared library. compiler_lib_search_path=$lt_compiler_lib_search_path_RC # Method to check whether dependent libraries are shared objects. deplibs_check_method=$lt_deplibs_check_method # Command to use when deplibs_check_method == file_magic. file_magic_cmd=$lt_file_magic_cmd # Flag that allows shared libraries with undefined symbols to be built. allow_undefined_flag=$lt_allow_undefined_flag_RC # Flag that forces no undefined symbols. no_undefined_flag=$lt_no_undefined_flag_RC # Commands used to finish a libtool library installation in a directory. finish_cmds=$lt_finish_cmds # Same as above, but a single script fragment to be evaled but not shown. finish_eval=$lt_finish_eval # Take the output of nm and produce a listing of raw symbols and C names. global_symbol_pipe=$lt_lt_cv_sys_global_symbol_pipe # Transform the output of nm in a proper C declaration global_symbol_to_cdecl=$lt_lt_cv_sys_global_symbol_to_cdecl # Transform the output of nm in a C name address pair global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address # This is the shared library runtime path variable. runpath_var=$runpath_var # This is the shared library path variable. shlibpath_var=$shlibpath_var # Is shlibpath searched before the hard-coded library search path? shlibpath_overrides_runpath=$shlibpath_overrides_runpath # How to hardcode a shared library path into an executable. hardcode_action=$hardcode_action_RC # Whether we should hardcode library paths into libraries. hardcode_into_libs=$hardcode_into_libs # Flag to hardcode \$libdir into a binary during linking. # This must work even if \$libdir does not exist. hardcode_libdir_flag_spec=$lt_hardcode_libdir_flag_spec_RC # If ld is used when linking, flag to hardcode \$libdir into # a binary during linking. This must work even if \$libdir does # not exist. hardcode_libdir_flag_spec_ld=$lt_hardcode_libdir_flag_spec_ld_RC # Whether we need a single -rpath flag with a separated argument. hardcode_libdir_separator=$lt_hardcode_libdir_separator_RC # Set to yes if using DIR/libNAME${shared_ext} during linking hardcodes DIR into the # resulting binary. hardcode_direct=$hardcode_direct_RC # Set to yes if using the -LDIR flag during linking hardcodes DIR into the # resulting binary. 
hardcode_minus_L=$hardcode_minus_L_RC # Set to yes if using SHLIBPATH_VAR=DIR during linking hardcodes DIR into # the resulting binary. hardcode_shlibpath_var=$hardcode_shlibpath_var_RC # Set to yes if building a shared library automatically hardcodes DIR into the library # and all subsequent libraries and executables linked against it. hardcode_automatic=$hardcode_automatic_RC # Variables whose values should be saved in libtool wrapper scripts and # restored at relink time. variables_saved_for_relink="$variables_saved_for_relink" # Whether libtool must link a program against all its dependency libraries. link_all_deplibs=$link_all_deplibs_RC # Compile-time system search path for libraries sys_lib_search_path_spec=$lt_sys_lib_search_path_spec # Run-time system search path for libraries sys_lib_dlsearch_path_spec=$lt_sys_lib_dlsearch_path_spec # Fix the shell variable \$srcfile for the compiler. fix_srcfile_path="$fix_srcfile_path_RC" # Set to yes if exported symbols are required. always_export_symbols=$always_export_symbols_RC # The commands to list exported symbols. export_symbols_cmds=$lt_export_symbols_cmds_RC # The commands to extract the exported symbol list from a shared archive. extract_expsyms_cmds=$lt_extract_expsyms_cmds # Symbols that should not be listed in the preloaded symbols. exclude_expsyms=$lt_exclude_expsyms_RC # Symbols that must always be exported. include_expsyms=$lt_include_expsyms_RC # ### END LIBTOOL TAG CONFIG: $tagname __EOF__ else # If there is no Makefile yet, we rely on a make rule to execute # `config.status --recheck' to rerun these tests and create the # libtool script then. ltmain_in=`echo $ltmain | sed -e 's/\.sh$/.in/'` if test -f "$ltmain_in"; then test -f Makefile && make "$ltmain" fi fi ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu CC="$lt_save_CC" ;; *) { { echo "$as_me:$LINENO: error: Unsupported tag name: $tagname" >&5 echo "$as_me: error: Unsupported tag name: $tagname" >&2;} { (exit 1); exit 1; }; } ;; esac # Append the new tag name to the list of available tags. if test -n "$tagname" ; then available_tags="$available_tags $tagname" fi fi done IFS="$lt_save_ifs" # Now substitute the updated list of available tags. if eval "sed -e 's/^available_tags=.*\$/available_tags=\"$available_tags\"/' \"$ofile\" > \"${ofile}T\""; then mv "${ofile}T" "$ofile" chmod +x "$ofile" else rm -f "${ofile}T" { { echo "$as_me:$LINENO: error: unable to update list of available tagged configurations." >&5 echo "$as_me: error: unable to update list of available tagged configurations." >&2;} { (exit 1); exit 1; }; } fi fi # This can be used to rebuild libtool when needed LIBTOOL_DEPS="$ac_aux_dir/ltmain.sh" # Always use our own libtool. LIBTOOL='$(SHELL) $(top_builddir)/libtool' # Prevent multiple expansion # Find a good install program. We prefer a C program (faster), # so one script is as good as another. 
But avoid the broken or # incompatible versions: # SysV /etc/install, /usr/sbin/install # SunOS /usr/etc/install # IRIX /sbin/install # AIX /bin/install # AmigaOS /C/install, which installs bootblocks on floppy discs # AIX 4 /usr/bin/installbsd, which doesn't work without a -g flag # AFS /usr/afsws/bin/install, which mishandles nonexistent args # SVR4 /usr/ucb/install, which tries to use the nonexistent group "staff" # OS/2's system install, which has a completely different semantic # ./install, which can be erroneously created by make from ./install.sh. echo "$as_me:$LINENO: checking for a BSD-compatible install" >&5 echo $ECHO_N "checking for a BSD-compatible install... $ECHO_C" >&6 if test -z "$INSTALL"; then if test "${ac_cv_path_install+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. # Account for people who put trailing slashes in PATH elements. case $as_dir/ in ./ | .// | /cC/* | \ /etc/* | /usr/sbin/* | /usr/etc/* | /sbin/* | /usr/afsws/bin/* | \ ?:\\/os2\\/install\\/* | ?:\\/OS2\\/INSTALL\\/* | \ /usr/ucb/* ) ;; *) # OSF1 and SCO ODT 3.0 have their own names for install. # Don't use installbsd from OSF since it installs stuff as root # by default. for ac_prog in ginstall scoinst install; do for ac_exec_ext in '' $ac_executable_extensions; do if $as_executable_p "$as_dir/$ac_prog$ac_exec_ext"; then if test $ac_prog = install && grep dspmsg "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then # AIX install. It has an incompatible calling convention. : elif test $ac_prog = install && grep pwplus "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then # program-specific install script used by HP pwplus--don't use. : else ac_cv_path_install="$as_dir/$ac_prog$ac_exec_ext -c" break 3 fi fi done done ;; esac done fi if test "${ac_cv_path_install+set}" = set; then INSTALL=$ac_cv_path_install else # As a last resort, use the slow shell script. We don't cache a # path for INSTALL within a source directory, because that will # break other packages using the cache if that directory is # removed, or if the path is relative. INSTALL=$ac_install_sh fi fi echo "$as_me:$LINENO: result: $INSTALL" >&5 echo "${ECHO_T}$INSTALL" >&6 # Use test -z because SunOS4 sh mishandles braces in ${var-val}. # It thinks the first close brace ends the variable substitution. test -z "$INSTALL_PROGRAM" && INSTALL_PROGRAM='${INSTALL}' test -z "$INSTALL_SCRIPT" && INSTALL_SCRIPT='${INSTALL}' test -z "$INSTALL_DATA" && INSTALL_DATA='${INSTALL} -m 644' echo "$as_me:$LINENO: checking for size_t" >&5 echo $ECHO_N "checking for size_t... $ECHO_C" >&6 if test "${ac_cv_type_size_t+set}" = set; then echo $ECHO_N "(cached) $ECHO_C" >&6 else cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ $ac_includes_default int main () { if ((size_t *) 0) return 0; if (sizeof (size_t)) return 0; ; return 0; } _ACEOF rm -f conftest.$ac_objext if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 (eval $ac_compile) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest.$ac_objext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then ac_cv_type_size_t=yes else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_cv_type_size_t=no fi rm -f conftest.err conftest.$ac_objext conftest.$ac_ext fi echo "$as_me:$LINENO: result: $ac_cv_type_size_t" >&5 echo "${ECHO_T}$ac_cv_type_size_t" >&6 if test $ac_cv_type_size_t = yes; then : else cat >>confdefs.h <<_ACEOF #define size_t unsigned _ACEOF fi for ac_header in string.h strings.h unistd.h stdint.h do as_ac_Header=`echo "ac_cv_header_$ac_header" | $as_tr_sh` if eval "test \"\${$as_ac_Header+set}\" = set"; then echo "$as_me:$LINENO: checking for $ac_header" >&5 echo $ECHO_N "checking for $ac_header... $ECHO_C" >&6 if eval "test \"\${$as_ac_Header+set}\" = set"; then echo $ECHO_N "(cached) $ECHO_C" >&6 fi echo "$as_me:$LINENO: result: `eval echo '${'$as_ac_Header'}'`" >&5 echo "${ECHO_T}`eval echo '${'$as_ac_Header'}'`" >&6 else # Is the header compilable? echo "$as_me:$LINENO: checking $ac_header usability" >&5 echo $ECHO_N "checking $ac_header usability... $ECHO_C" >&6 cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ $ac_includes_default #include <$ac_header> _ACEOF rm -f conftest.$ac_objext if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 (eval $ac_compile) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; } && { ac_try='test -s conftest.$ac_objext' { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 (eval $ac_try) 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); }; }; then ac_header_compiler=yes else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_header_compiler=no fi rm -f conftest.err conftest.$ac_objext conftest.$ac_ext echo "$as_me:$LINENO: result: $ac_header_compiler" >&5 echo "${ECHO_T}$ac_header_compiler" >&6 # Is the header present? echo "$as_me:$LINENO: checking $ac_header presence" >&5 echo $ECHO_N "checking $ac_header presence... $ECHO_C" >&6 cat >conftest.$ac_ext <<_ACEOF /* confdefs.h. */ _ACEOF cat confdefs.h >>conftest.$ac_ext cat >>conftest.$ac_ext <<_ACEOF /* end confdefs.h. */ #include <$ac_header> _ACEOF if { (eval echo "$as_me:$LINENO: \"$ac_cpp conftest.$ac_ext\"") >&5 (eval $ac_cpp conftest.$ac_ext) 2>conftest.er1 ac_status=$? grep -v '^ *+' conftest.er1 >conftest.err rm -f conftest.er1 cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } >/dev/null; then if test -s conftest.err; then ac_cpp_err=$ac_c_preproc_warn_flag ac_cpp_err=$ac_cpp_err$ac_c_werror_flag else ac_cpp_err= fi else ac_cpp_err=yes fi if test -z "$ac_cpp_err"; then ac_header_preproc=yes else echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_header_preproc=no fi rm -f conftest.err conftest.$ac_ext echo "$as_me:$LINENO: result: $ac_header_preproc" >&5 echo "${ECHO_T}$ac_header_preproc" >&6 # So? What about this header? 
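# --- illustrative sketch, not part of the original generated script -----------
# The two probes above differ only in which tool sees the header: "usability"
# performs a full compile together with the default includes, while "presence"
# runs the preprocessor alone.  Outside configure the same idea looks roughly
# like this (hypothetical header and compiler invocation):
#
#   $ printf '#include <stdint.h>\nint main(void){return 0;}\n' > probe.c
#   $ cc -c probe.c -o probe.o     # "usability": the compiler accepts the header
#   $ cc -E probe.c >/dev/null     # "presence": the preprocessor can locate it
#
# The case statement below reconciles the two results when they disagree.
# ------------------------------------------------------------------------------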
case $ac_header_compiler:$ac_header_preproc:$ac_c_preproc_warn_flag in yes:no: ) { echo "$as_me:$LINENO: WARNING: $ac_header: accepted by the compiler, rejected by the preprocessor!" >&5 echo "$as_me: WARNING: $ac_header: accepted by the compiler, rejected by the preprocessor!" >&2;} { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the compiler's result" >&5 echo "$as_me: WARNING: $ac_header: proceeding with the compiler's result" >&2;} ac_header_preproc=yes ;; no:yes:* ) { echo "$as_me:$LINENO: WARNING: $ac_header: present but cannot be compiled" >&5 echo "$as_me: WARNING: $ac_header: present but cannot be compiled" >&2;} { echo "$as_me:$LINENO: WARNING: $ac_header: check for missing prerequisite headers?" >&5 echo "$as_me: WARNING: $ac_header: check for missing prerequisite headers?" >&2;} { echo "$as_me:$LINENO: WARNING: $ac_header: see the Autoconf documentation" >&5 echo "$as_me: WARNING: $ac_header: see the Autoconf documentation" >&2;} { echo "$as_me:$LINENO: WARNING: $ac_header: section \"Present But Cannot Be Compiled\"" >&5 echo "$as_me: WARNING: $ac_header: section \"Present But Cannot Be Compiled\"" >&2;} { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the preprocessor's result" >&5 echo "$as_me: WARNING: $ac_header: proceeding with the preprocessor's result" >&2;} { echo "$as_me:$LINENO: WARNING: $ac_header: in the future, the compiler will take precedence" >&5 echo "$as_me: WARNING: $ac_header: in the future, the compiler will take precedence" >&2;} ( cat <<\_ASBOX ## ------------------------------------------ ## ## Report this to the libsphinxclient lists. ## ## ------------------------------------------ ## _ASBOX ) | sed "s/^/$as_me: WARNING: /" >&2 ;; esac echo "$as_me:$LINENO: checking for $ac_header" >&5 echo $ECHO_N "checking for $ac_header... $ECHO_C" >&6 if eval "test \"\${$as_ac_Header+set}\" = set"; then echo $ECHO_N "(cached) $ECHO_C" >&6 else eval "$as_ac_Header=\$ac_header_preproc" fi echo "$as_me:$LINENO: result: `eval echo '${'$as_ac_Header'}'`" >&5 echo "${ECHO_T}`eval echo '${'$as_ac_Header'}'`" >&6 fi if test `eval echo '${'$as_ac_Header'}'` = yes; then cat >>confdefs.h <<_ACEOF #define `echo "HAVE_$ac_header" | $as_tr_cpp` 1 _ACEOF fi done DEFAULT_INSTALL_PREFIX="/usr/local" # Check whether --enable-debug or --disable-debug was given. if test "${enable_debug+set}" = set; then enableval="$enable_debug" if test x"$enableval" = xyes ; then debug="yes" else debug="no" fi fi; if test x"$debug" = xyes ; then cat >>confdefs.h <<\_ACEOF #define SPHINXCLIENT_DEBUG _ACEOF if test x"$GCC" = xyes; then CFLAGS=`echo "$CFLAGS" | /usr/bin/sed -e 's/-O[0-9s]*//g'` CFLAGS=`echo "$CFLAGS" | /usr/bin/sed -e 's/-g[0-2]\? //g'` CFLAGS="$CFLAGS -g3 -Wall -O0" fi INSTALL_STRIP_FLAG="" else INSTALL_STRIP_FLAG="-s" fi ac_config_files="$ac_config_files Makefile" cat >confcache <<\_ACEOF # This file is a shell script that caches the results of configure # tests run on this system so they can be shared between configure # scripts and configure runs, see configure's option --config-cache. # It is not useful on other systems. If it contains results you don't # want to keep, you may remove or edit it. # # config.status only pays attention to the cache file if you give it # the --recheck option to rerun configure. # # `ac_cv_env_foo' variables (set or unset) will be overridden when # loading this file, other *unset* `ac_cv_foo' will be assigned the # following values. 
_ACEOF # The following way of writing the cache mishandles newlines in values, # but we know of no workaround that is simple, portable, and efficient. # So, don't put newlines in cache variables' values. # Ultrix sh set writes to stderr and can't be redirected directly, # and sets the high bit in the cache file unless we assign to the vars. { (set) 2>&1 | case `(ac_space=' '; set | grep ac_space) 2>&1` in *ac_space=\ *) # `set' does not quote correctly, so add quotes (double-quote # substitution turns \\\\ into \\, and sed turns \\ into \). sed -n \ "s/'/'\\\\''/g; s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\\2'/p" ;; *) # `set' quotes correctly as required by POSIX, so do not add quotes. sed -n \ "s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1=\\2/p" ;; esac; } | sed ' t clear : clear s/^\([^=]*\)=\(.*[{}].*\)$/test "${\1+set}" = set || &/ t end /^ac_cv_env/!s/^\([^=]*\)=\(.*\)$/\1=${\1=\2}/ : end' >>confcache if diff $cache_file confcache >/dev/null 2>&1; then :; else if test -w $cache_file; then test "x$cache_file" != "x/dev/null" && echo "updating cache $cache_file" cat confcache >$cache_file else echo "not updating unwritable cache $cache_file" fi fi rm -f confcache test "x$prefix" = xNONE && prefix=$ac_default_prefix # Let make expand exec_prefix. test "x$exec_prefix" = xNONE && exec_prefix='${prefix}' # VPATH may cause trouble with some makes, so we remove $(srcdir), # ${srcdir} and @srcdir@ from VPATH if srcdir is ".", strip leading and # trailing colons and then remove the whole line if VPATH becomes empty # (actually we leave an empty line to preserve line numbers). if test "x$srcdir" = x.; then ac_vpsub='/^[ ]*VPATH[ ]*=/{ s/:*\$(srcdir):*/:/; s/:*\${srcdir}:*/:/; s/:*@srcdir@:*/:/; s/^\([^=]*=[ ]*\):*/\1/; s/:*$//; s/^[^=]*=[ ]*$//; }' fi DEFS=-DHAVE_CONFIG_H ac_libobjs= ac_ltlibobjs= for ac_i in : $LIBOBJS; do test "x$ac_i" = x: && continue # 1. Remove the extension, and $U if already installed. ac_i=`echo "$ac_i" | sed 's/\$U\././;s/\.o$//;s/\.obj$//'` # 2. Add them. ac_libobjs="$ac_libobjs $ac_i\$U.$ac_objext" ac_ltlibobjs="$ac_ltlibobjs $ac_i"'$U.lo' done LIBOBJS=$ac_libobjs LTLIBOBJS=$ac_ltlibobjs if test -z "${MAINTAINER_MODE_TRUE}" && test -z "${MAINTAINER_MODE_FALSE}"; then { { echo "$as_me:$LINENO: error: conditional \"MAINTAINER_MODE\" was never defined. Usually this means the macro was only invoked conditionally." >&5 echo "$as_me: error: conditional \"MAINTAINER_MODE\" was never defined. Usually this means the macro was only invoked conditionally." >&2;} { (exit 1); exit 1; }; } fi if test -z "${AMDEP_TRUE}" && test -z "${AMDEP_FALSE}"; then { { echo "$as_me:$LINENO: error: conditional \"AMDEP\" was never defined. Usually this means the macro was only invoked conditionally." >&5 echo "$as_me: error: conditional \"AMDEP\" was never defined. Usually this means the macro was only invoked conditionally." >&2;} { (exit 1); exit 1; }; } fi if test -z "${am__fastdepCC_TRUE}" && test -z "${am__fastdepCC_FALSE}"; then { { echo "$as_me:$LINENO: error: conditional \"am__fastdepCC\" was never defined. Usually this means the macro was only invoked conditionally." >&5 echo "$as_me: error: conditional \"am__fastdepCC\" was never defined. Usually this means the macro was only invoked conditionally." 
>&2;} { (exit 1); exit 1; }; } fi : ${CONFIG_STATUS=./config.status} ac_clean_files_save=$ac_clean_files ac_clean_files="$ac_clean_files $CONFIG_STATUS" { echo "$as_me:$LINENO: creating $CONFIG_STATUS" >&5 echo "$as_me: creating $CONFIG_STATUS" >&6;} cat >$CONFIG_STATUS <<_ACEOF #! $SHELL # Generated by $as_me. # Run this file to recreate the current configuration. # Compiler output produced by configure, useful for debugging # configure, is in config.log if it exists. debug=false ac_cs_recheck=false ac_cs_silent=false SHELL=\${CONFIG_SHELL-$SHELL} _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF ## --------------------- ## ## M4sh Initialization. ## ## --------------------- ## # Be Bourne compatible if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then emulate sh NULLCMD=: # Zsh 3.x and 4.x performs word splitting on ${1+"$@"}, which # is contrary to our usage. Disable this feature. alias -g '${1+"$@"}'='"$@"' elif test -n "${BASH_VERSION+set}" && (set -o posix) >/dev/null 2>&1; then set -o posix fi DUALCASE=1; export DUALCASE # for MKS sh # Support unset when possible. if ( (MAIL=60; unset MAIL) || exit) >/dev/null 2>&1; then as_unset=unset else as_unset=false fi # Work around bugs in pre-3.0 UWIN ksh. $as_unset ENV MAIL MAILPATH PS1='$ ' PS2='> ' PS4='+ ' # NLS nuisances. for as_var in \ LANG LANGUAGE LC_ADDRESS LC_ALL LC_COLLATE LC_CTYPE LC_IDENTIFICATION \ LC_MEASUREMENT LC_MESSAGES LC_MONETARY LC_NAME LC_NUMERIC LC_PAPER \ LC_TELEPHONE LC_TIME do if (set +x; test -z "`(eval $as_var=C; export $as_var) 2>&1`"); then eval $as_var=C; export $as_var else $as_unset $as_var fi done # Required to use basename. if expr a : '\(a\)' >/dev/null 2>&1; then as_expr=expr else as_expr=false fi if (basename /) >/dev/null 2>&1 && test "X`basename / 2>&1`" = "X/"; then as_basename=basename else as_basename=false fi # Name of the executable. as_me=`$as_basename "$0" || $as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ X"$0" : 'X\(//\)$' \| \ X"$0" : 'X\(/\)$' \| \ . : '\(.\)' 2>/dev/null || echo X/"$0" | sed '/^.*\/\([^/][^/]*\)\/*$/{ s//\1/; q; } /^X\/\(\/\/\)$/{ s//\1/; q; } /^X\/\(\/\).*/{ s//\1/; q; } s/.*/./; q'` # PATH needs CR, and LINENO needs CR and PATH. # Avoid depending upon Character Ranges. as_cr_letters='abcdefghijklmnopqrstuvwxyz' as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' as_cr_Letters=$as_cr_letters$as_cr_LETTERS as_cr_digits='0123456789' as_cr_alnum=$as_cr_Letters$as_cr_digits # The user is always right. if test "${PATH_SEPARATOR+set}" != set; then echo "#! /bin/sh" >conf$$.sh echo "exit 0" >>conf$$.sh chmod +x conf$$.sh if (PATH="/nonexistent;."; conf$$.sh) >/dev/null 2>&1; then PATH_SEPARATOR=';' else PATH_SEPARATOR=: fi rm -f conf$$.sh fi as_lineno_1=$LINENO as_lineno_2=$LINENO as_lineno_3=`(expr $as_lineno_1 + 1) 2>/dev/null` test "x$as_lineno_1" != "x$as_lineno_2" && test "x$as_lineno_3" = "x$as_lineno_2" || { # Find who we are. Look in the path if we contain no path at all # relative or not. case $0 in *[\\/]* ) as_myself=$0 ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break done ;; esac # We did not find ourselves, most probably we were run as `sh COMMAND' # in which case we are not to be found in the path. if test "x$as_myself" = x; then as_myself=$0 fi if test ! 
-f "$as_myself"; then { { echo "$as_me:$LINENO: error: cannot find myself; rerun with an absolute path" >&5 echo "$as_me: error: cannot find myself; rerun with an absolute path" >&2;} { (exit 1); exit 1; }; } fi case $CONFIG_SHELL in '') as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in /bin$PATH_SEPARATOR/usr/bin$PATH_SEPARATOR$PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for as_base in sh bash ksh sh5; do case $as_dir in /*) if ("$as_dir/$as_base" -c ' as_lineno_1=$LINENO as_lineno_2=$LINENO as_lineno_3=`(expr $as_lineno_1 + 1) 2>/dev/null` test "x$as_lineno_1" != "x$as_lineno_2" && test "x$as_lineno_3" = "x$as_lineno_2" ') 2>/dev/null; then $as_unset BASH_ENV || test "${BASH_ENV+set}" != set || { BASH_ENV=; export BASH_ENV; } $as_unset ENV || test "${ENV+set}" != set || { ENV=; export ENV; } CONFIG_SHELL=$as_dir/$as_base export CONFIG_SHELL exec "$CONFIG_SHELL" "$0" ${1+"$@"} fi;; esac done done ;; esac # Create $as_me.lineno as a copy of $as_myself, but with $LINENO # uniformly replaced by the line number. The first 'sed' inserts a # line-number line before each line; the second 'sed' does the real # work. The second script uses 'N' to pair each line-number line # with the numbered line, and appends trailing '-' during # substitution so that $LINENO is not a special case at line end. # (Raja R Harinath suggested sed '=', and Paul Eggert wrote the # second 'sed' script. Blame Lee E. McMahon for sed's syntax. :-) sed '=' <$as_myself | sed ' N s,$,-, : loop s,^\(['$as_cr_digits']*\)\(.*\)[$]LINENO\([^'$as_cr_alnum'_]\),\1\2\1\3, t loop s,-$,, s,^['$as_cr_digits']*\n,, ' >$as_me.lineno && chmod +x $as_me.lineno || { { echo "$as_me:$LINENO: error: cannot create $as_me.lineno; rerun with a POSIX shell" >&5 echo "$as_me: error: cannot create $as_me.lineno; rerun with a POSIX shell" >&2;} { (exit 1); exit 1; }; } # Don't try to exec as it changes $[0], causing all sort of problems # (the dirname of $[0] is not the place where we might find the # original and so on. Autoconf is especially sensible to this). . ./$as_me.lineno # Exit status is that of the last command. exit } case `echo "testing\c"; echo 1,2,3`,`echo -n testing; echo 1,2,3` in *c*,-n*) ECHO_N= ECHO_C=' ' ECHO_T=' ' ;; *c*,* ) ECHO_N=-n ECHO_C= ECHO_T= ;; *) ECHO_N= ECHO_C='\c' ECHO_T= ;; esac if expr a : '\(a\)' >/dev/null 2>&1; then as_expr=expr else as_expr=false fi rm -f conf$$ conf$$.exe conf$$.file echo >conf$$.file if ln -s conf$$.file conf$$ 2>/dev/null; then # We could just check for DJGPP; but this test a) works b) is more generic # and c) will remain valid once DJGPP supports symlinks (DJGPP 2.04). if test -f conf$$.exe; then # Don't use ln at all; we don't have any links as_ln_s='cp -p' else as_ln_s='ln -s' fi elif ln conf$$.file conf$$ 2>/dev/null; then as_ln_s=ln else as_ln_s='cp -p' fi rm -f conf$$ conf$$.exe conf$$.file if mkdir -p . 2>/dev/null; then as_mkdir_p=: else test -d ./-p && rmdir ./-p as_mkdir_p=false fi as_executable_p="test -f" # Sed expression to map a string onto a valid CPP name. as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" # Sed expression to map a string onto a valid variable name. as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" # IFS # We need space, tab and new line, in precisely that order. as_nl=' ' IFS=" $as_nl" # CDPATH. $as_unset CDPATH exec 6>&1 # Open the log real soon, to keep \$[0] and so on meaningful, and to # report actual input values of CONFIG_FILES etc. instead of their # values after options handling. Logging --version etc. 
is OK. exec 5>>config.log { echo sed 'h;s/./-/g;s/^.../## /;s/...$/ ##/;p;x;p;x' <<_ASBOX ## Running $as_me. ## _ASBOX } >&5 cat >&5 <<_CSEOF This file was extended by libsphinxclient $as_me 0.0.1, which was generated by GNU Autoconf 2.59. Invocation command line was CONFIG_FILES = $CONFIG_FILES CONFIG_HEADERS = $CONFIG_HEADERS CONFIG_LINKS = $CONFIG_LINKS CONFIG_COMMANDS = $CONFIG_COMMANDS $ $0 $@ _CSEOF echo "on `(hostname || uname -n) 2>/dev/null | sed 1q`" >&5 echo >&5 _ACEOF # Files that config.status was made for. if test -n "$ac_config_files"; then echo "config_files=\"$ac_config_files\"" >>$CONFIG_STATUS fi if test -n "$ac_config_headers"; then echo "config_headers=\"$ac_config_headers\"" >>$CONFIG_STATUS fi if test -n "$ac_config_links"; then echo "config_links=\"$ac_config_links\"" >>$CONFIG_STATUS fi if test -n "$ac_config_commands"; then echo "config_commands=\"$ac_config_commands\"" >>$CONFIG_STATUS fi cat >>$CONFIG_STATUS <<\_ACEOF ac_cs_usage="\ \`$as_me' instantiates files from templates according to the current configuration. Usage: $0 [OPTIONS] [FILE]... -h, --help print this help, then exit -V, --version print version number, then exit -q, --quiet do not print progress messages -d, --debug don't remove temporary files --recheck update $as_me by reconfiguring in the same conditions --file=FILE[:TEMPLATE] instantiate the configuration file FILE --header=FILE[:TEMPLATE] instantiate the configuration header FILE Configuration files: $config_files Configuration headers: $config_headers Configuration commands: $config_commands Report bugs to ." _ACEOF cat >>$CONFIG_STATUS <<_ACEOF ac_cs_version="\\ libsphinxclient config.status 0.0.1 configured by $0, generated by GNU Autoconf 2.59, with options \\"`echo "$ac_configure_args" | sed 's/[\\""\`\$]/\\\\&/g'`\\" Copyright (C) 2003 Free Software Foundation, Inc. This config.status script is free software; the Free Software Foundation gives unlimited permission to copy, distribute and modify it." srcdir=$srcdir INSTALL="$INSTALL" _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF # If no file are specified by the user, then we need to provide default # value. By we need to know if files were specified by the user. ac_need_defaults=: while test $# != 0 do case $1 in --*=*) ac_option=`expr "x$1" : 'x\([^=]*\)='` ac_optarg=`expr "x$1" : 'x[^=]*=\(.*\)'` ac_shift=: ;; -*) ac_option=$1 ac_optarg=$2 ac_shift=shift ;; *) # This is not an option, so the user has probably given explicit # arguments. ac_option=$1 ac_need_defaults=false;; esac case $ac_option in # Handling of the options. _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF -recheck | --recheck | --rechec | --reche | --rech | --rec | --re | --r) ac_cs_recheck=: ;; --version | --vers* | -V ) echo "$ac_cs_version"; exit 0 ;; --he | --h) # Conflict between --help and --header { { echo "$as_me:$LINENO: error: ambiguous option: $1 Try \`$0 --help' for more information." >&5 echo "$as_me: error: ambiguous option: $1 Try \`$0 --help' for more information." >&2;} { (exit 1); exit 1; }; };; --help | --hel | -h ) echo "$ac_cs_usage"; exit 0 ;; --debug | --d* | -d ) debug=: ;; --file | --fil | --fi | --f ) $ac_shift CONFIG_FILES="$CONFIG_FILES $ac_optarg" ac_need_defaults=false;; --header | --heade | --head | --hea ) $ac_shift CONFIG_HEADERS="$CONFIG_HEADERS $ac_optarg" ac_need_defaults=false;; -q | -quiet | --quiet | --quie | --qui | --qu | --q \ | -silent | --silent | --silen | --sile | --sil | --si | --s) ac_cs_silent=: ;; # This is an error. 
-*) { { echo "$as_me:$LINENO: error: unrecognized option: $1 Try \`$0 --help' for more information." >&5 echo "$as_me: error: unrecognized option: $1 Try \`$0 --help' for more information." >&2;} { (exit 1); exit 1; }; } ;; *) ac_config_targets="$ac_config_targets $1" ;; esac shift done ac_configure_extra_args= if $ac_cs_silent; then exec 6>/dev/null ac_configure_extra_args="$ac_configure_extra_args --silent" fi _ACEOF cat >>$CONFIG_STATUS <<_ACEOF if \$ac_cs_recheck; then echo "running $SHELL $0 " $ac_configure_args \$ac_configure_extra_args " --no-create --no-recursion" >&6 exec $SHELL $0 $ac_configure_args \$ac_configure_extra_args --no-create --no-recursion fi _ACEOF cat >>$CONFIG_STATUS <<_ACEOF # # INIT-COMMANDS section. # AMDEP_TRUE="$AMDEP_TRUE" ac_aux_dir="$ac_aux_dir" _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF for ac_config_target in $ac_config_targets do case "$ac_config_target" in # Handling of arguments. "Makefile" ) CONFIG_FILES="$CONFIG_FILES Makefile" ;; "depfiles" ) CONFIG_COMMANDS="$CONFIG_COMMANDS depfiles" ;; "sphinxclient_config.h" ) CONFIG_HEADERS="$CONFIG_HEADERS sphinxclient_config.h" ;; *) { { echo "$as_me:$LINENO: error: invalid argument: $ac_config_target" >&5 echo "$as_me: error: invalid argument: $ac_config_target" >&2;} { (exit 1); exit 1; }; };; esac done # If the user did not use the arguments to specify the items to instantiate, # then the envvar interface is used. Set only those that are not. # We use the long form for the default assignment because of an extremely # bizarre bug on SunOS 4.1.3. if $ac_need_defaults; then test "${CONFIG_FILES+set}" = set || CONFIG_FILES=$config_files test "${CONFIG_HEADERS+set}" = set || CONFIG_HEADERS=$config_headers test "${CONFIG_COMMANDS+set}" = set || CONFIG_COMMANDS=$config_commands fi # Have a temporary directory for convenience. Make it in the build tree # simply because there is no reason to put it here, and in addition, # creating and moving files from /tmp can sometimes cause problems. # Create a temporary directory, and hook for its removal unless debugging. $debug || { trap 'exit_status=$?; rm -rf $tmp && exit $exit_status' 0 trap '{ (exit 1); exit 1; }' 1 2 13 15 } # Create a (secure) tmp directory for tmp files. { tmp=`(umask 077 && mktemp -d -q "./confstatXXXXXX") 2>/dev/null` && test -n "$tmp" && test -d "$tmp" } || { tmp=./confstat$$-$RANDOM (umask 077 && mkdir $tmp) } || { echo "$me: cannot create a temporary directory in ." >&2 { (exit 1); exit 1; } } _ACEOF cat >>$CONFIG_STATUS <<_ACEOF # # CONFIG_FILES section. # # No need to generate the scripts if there are no CONFIG_FILES. # This happens for instance when ./config.status config.h if test -n "\$CONFIG_FILES"; then # Protect against being on the right side of a sed subst in config.status. 
sed 's/,@/@@/; s/@,/@@/; s/,;t t\$/@;t t/; /@;t t\$/s/[\\\\&,]/\\\\&/g; s/@@/,@/; s/@@/@,/; s/@;t t\$/,;t t/' >\$tmp/subs.sed <<\\CEOF s,@SHELL@,$SHELL,;t t s,@PATH_SEPARATOR@,$PATH_SEPARATOR,;t t s,@PACKAGE_NAME@,$PACKAGE_NAME,;t t s,@PACKAGE_TARNAME@,$PACKAGE_TARNAME,;t t s,@PACKAGE_VERSION@,$PACKAGE_VERSION,;t t s,@PACKAGE_STRING@,$PACKAGE_STRING,;t t s,@PACKAGE_BUGREPORT@,$PACKAGE_BUGREPORT,;t t s,@exec_prefix@,$exec_prefix,;t t s,@prefix@,$prefix,;t t s,@program_transform_name@,$program_transform_name,;t t s,@bindir@,$bindir,;t t s,@sbindir@,$sbindir,;t t s,@libexecdir@,$libexecdir,;t t s,@datadir@,$datadir,;t t s,@sysconfdir@,$sysconfdir,;t t s,@sharedstatedir@,$sharedstatedir,;t t s,@localstatedir@,$localstatedir,;t t s,@libdir@,$libdir,;t t s,@includedir@,$includedir,;t t s,@oldincludedir@,$oldincludedir,;t t s,@infodir@,$infodir,;t t s,@mandir@,$mandir,;t t s,@build_alias@,$build_alias,;t t s,@host_alias@,$host_alias,;t t s,@target_alias@,$target_alias,;t t s,@DEFS@,$DEFS,;t t s,@ECHO_C@,$ECHO_C,;t t s,@ECHO_N@,$ECHO_N,;t t s,@ECHO_T@,$ECHO_T,;t t s,@LIBS@,$LIBS,;t t s,@INSTALL_PROGRAM@,$INSTALL_PROGRAM,;t t s,@INSTALL_SCRIPT@,$INSTALL_SCRIPT,;t t s,@INSTALL_DATA@,$INSTALL_DATA,;t t s,@CYGPATH_W@,$CYGPATH_W,;t t s,@PACKAGE@,$PACKAGE,;t t s,@VERSION@,$VERSION,;t t s,@ACLOCAL@,$ACLOCAL,;t t s,@AUTOCONF@,$AUTOCONF,;t t s,@AUTOMAKE@,$AUTOMAKE,;t t s,@AUTOHEADER@,$AUTOHEADER,;t t s,@MAKEINFO@,$MAKEINFO,;t t s,@install_sh@,$install_sh,;t t s,@STRIP@,$STRIP,;t t s,@ac_ct_STRIP@,$ac_ct_STRIP,;t t s,@INSTALL_STRIP_PROGRAM@,$INSTALL_STRIP_PROGRAM,;t t s,@mkdir_p@,$mkdir_p,;t t s,@AWK@,$AWK,;t t s,@SET_MAKE@,$SET_MAKE,;t t s,@am__leading_dot@,$am__leading_dot,;t t s,@AMTAR@,$AMTAR,;t t s,@am__tar@,$am__tar,;t t s,@am__untar@,$am__untar,;t t s,@MAINTAINER_MODE_TRUE@,$MAINTAINER_MODE_TRUE,;t t s,@MAINTAINER_MODE_FALSE@,$MAINTAINER_MODE_FALSE,;t t s,@MAINT@,$MAINT,;t t s,@CC@,$CC,;t t s,@CFLAGS@,$CFLAGS,;t t s,@LDFLAGS@,$LDFLAGS,;t t s,@CPPFLAGS@,$CPPFLAGS,;t t s,@ac_ct_CC@,$ac_ct_CC,;t t s,@EXEEXT@,$EXEEXT,;t t s,@OBJEXT@,$OBJEXT,;t t s,@DEPDIR@,$DEPDIR,;t t s,@am__include@,$am__include,;t t s,@am__quote@,$am__quote,;t t s,@AMDEP_TRUE@,$AMDEP_TRUE,;t t s,@AMDEP_FALSE@,$AMDEP_FALSE,;t t s,@AMDEPBACKSLASH@,$AMDEPBACKSLASH,;t t s,@CCDEPMODE@,$CCDEPMODE,;t t s,@am__fastdepCC_TRUE@,$am__fastdepCC_TRUE,;t t s,@am__fastdepCC_FALSE@,$am__fastdepCC_FALSE,;t t s,@build@,$build,;t t s,@build_cpu@,$build_cpu,;t t s,@build_vendor@,$build_vendor,;t t s,@build_os@,$build_os,;t t s,@host@,$host,;t t s,@host_cpu@,$host_cpu,;t t s,@host_vendor@,$host_vendor,;t t s,@host_os@,$host_os,;t t s,@EGREP@,$EGREP,;t t s,@LN_S@,$LN_S,;t t s,@ECHO@,$ECHO,;t t s,@AR@,$AR,;t t s,@ac_ct_AR@,$ac_ct_AR,;t t s,@RANLIB@,$RANLIB,;t t s,@ac_ct_RANLIB@,$ac_ct_RANLIB,;t t s,@CPP@,$CPP,;t t s,@CXXCPP@,$CXXCPP,;t t s,@LIBTOOL@,$LIBTOOL,;t t s,@INSTALL_STRIP_FLAG@,$INSTALL_STRIP_FLAG,;t t s,@LIBOBJS@,$LIBOBJS,;t t s,@LTLIBOBJS@,$LTLIBOBJS,;t t CEOF _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF # Split the substitutions into bite-sized pieces for seds with # small command number limits, like on Digital OSF/1 and HP-UX. ac_max_sed_lines=48 ac_sed_frag=1 # Number of current file. ac_beg=1 # First line for current file. ac_end=$ac_max_sed_lines # Line after last line for current file. ac_more_lines=: ac_sed_cmds= while $ac_more_lines; do if test $ac_beg -gt 1; then sed "1,${ac_beg}d; ${ac_end}q" $tmp/subs.sed >$tmp/subs.frag else sed "${ac_end}q" $tmp/subs.sed >$tmp/subs.frag fi if test ! 
-s $tmp/subs.frag; then ac_more_lines=false else # The purpose of the label and of the branching condition is to # speed up the sed processing (if there are no `@' at all, there # is no need to browse any of the substitutions). # These are the two extra sed commands mentioned above. (echo ':t /@[a-zA-Z_][a-zA-Z_0-9]*@/!b' && cat $tmp/subs.frag) >$tmp/subs-$ac_sed_frag.sed if test -z "$ac_sed_cmds"; then ac_sed_cmds="sed -f $tmp/subs-$ac_sed_frag.sed" else ac_sed_cmds="$ac_sed_cmds | sed -f $tmp/subs-$ac_sed_frag.sed" fi ac_sed_frag=`expr $ac_sed_frag + 1` ac_beg=$ac_end ac_end=`expr $ac_end + $ac_max_sed_lines` fi done if test -z "$ac_sed_cmds"; then ac_sed_cmds=cat fi fi # test -n "$CONFIG_FILES" _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF for ac_file in : $CONFIG_FILES; do test "x$ac_file" = x: && continue # Support "outfile[:infile[:infile...]]", defaulting infile="outfile.in". case $ac_file in - | *:- | *:-:* ) # input from stdin cat >$tmp/stdin ac_file_in=`echo "$ac_file" | sed 's,[^:]*:,,'` ac_file=`echo "$ac_file" | sed 's,:.*,,'` ;; *:* ) ac_file_in=`echo "$ac_file" | sed 's,[^:]*:,,'` ac_file=`echo "$ac_file" | sed 's,:.*,,'` ;; * ) ac_file_in=$ac_file.in ;; esac # Compute @srcdir@, @top_srcdir@, and @INSTALL@ for subdirectories. ac_dir=`(dirname "$ac_file") 2>/dev/null || $as_expr X"$ac_file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$ac_file" : 'X\(//\)[^/]' \| \ X"$ac_file" : 'X\(//\)$' \| \ X"$ac_file" : 'X\(/\)' \| \ . : '\(.\)' 2>/dev/null || echo X"$ac_file" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/; q; } /^X\(\/\/\)[^/].*/{ s//\1/; q; } /^X\(\/\/\)$/{ s//\1/; q; } /^X\(\/\).*/{ s//\1/; q; } s/.*/./; q'` { if $as_mkdir_p; then mkdir -p "$ac_dir" else as_dir="$ac_dir" as_dirs= while test ! -d "$as_dir"; do as_dirs="$as_dir $as_dirs" as_dir=`(dirname "$as_dir") 2>/dev/null || $as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$as_dir" : 'X\(//\)[^/]' \| \ X"$as_dir" : 'X\(//\)$' \| \ X"$as_dir" : 'X\(/\)' \| \ . : '\(.\)' 2>/dev/null || echo X"$as_dir" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/; q; } /^X\(\/\/\)[^/].*/{ s//\1/; q; } /^X\(\/\/\)$/{ s//\1/; q; } /^X\(\/\).*/{ s//\1/; q; } s/.*/./; q'` done test ! -n "$as_dirs" || mkdir $as_dirs fi || { { echo "$as_me:$LINENO: error: cannot create directory \"$ac_dir\"" >&5 echo "$as_me: error: cannot create directory \"$ac_dir\"" >&2;} { (exit 1); exit 1; }; }; } ac_builddir=. if test "$ac_dir" != .; then ac_dir_suffix=/`echo "$ac_dir" | sed 's,^\.[\\/],,'` # A "../" for each directory in $ac_dir_suffix. ac_top_builddir=`echo "$ac_dir_suffix" | sed 's,/[^\\/]*,../,g'` else ac_dir_suffix= ac_top_builddir= fi case $srcdir in .) # No --srcdir option. We are building in place. ac_srcdir=. if test -z "$ac_top_builddir"; then ac_top_srcdir=. else ac_top_srcdir=`echo $ac_top_builddir | sed 's,/$,,'` fi ;; [\\/]* | ?:[\\/]* ) # Absolute path. ac_srcdir=$srcdir$ac_dir_suffix; ac_top_srcdir=$srcdir ;; *) # Relative path. ac_srcdir=$ac_top_builddir$srcdir$ac_dir_suffix ac_top_srcdir=$ac_top_builddir$srcdir ;; esac # Do not use `cd foo && pwd` to compute absolute paths, because # the directories may not exist. case `pwd` in .) ac_abs_builddir="$ac_dir";; *) case "$ac_dir" in .) ac_abs_builddir=`pwd`;; [\\/]* | ?:[\\/]* ) ac_abs_builddir="$ac_dir";; *) ac_abs_builddir=`pwd`/"$ac_dir";; esac;; esac case $ac_abs_builddir in .) ac_abs_top_builddir=${ac_top_builddir}.;; *) case ${ac_top_builddir}. in .) 
ac_abs_top_builddir=$ac_abs_builddir;; [\\/]* | ?:[\\/]* ) ac_abs_top_builddir=${ac_top_builddir}.;; *) ac_abs_top_builddir=$ac_abs_builddir/${ac_top_builddir}.;; esac;; esac case $ac_abs_builddir in .) ac_abs_srcdir=$ac_srcdir;; *) case $ac_srcdir in .) ac_abs_srcdir=$ac_abs_builddir;; [\\/]* | ?:[\\/]* ) ac_abs_srcdir=$ac_srcdir;; *) ac_abs_srcdir=$ac_abs_builddir/$ac_srcdir;; esac;; esac case $ac_abs_builddir in .) ac_abs_top_srcdir=$ac_top_srcdir;; *) case $ac_top_srcdir in .) ac_abs_top_srcdir=$ac_abs_builddir;; [\\/]* | ?:[\\/]* ) ac_abs_top_srcdir=$ac_top_srcdir;; *) ac_abs_top_srcdir=$ac_abs_builddir/$ac_top_srcdir;; esac;; esac case $INSTALL in [\\/$]* | ?:[\\/]* ) ac_INSTALL=$INSTALL ;; *) ac_INSTALL=$ac_top_builddir$INSTALL ;; esac if test x"$ac_file" != x-; then { echo "$as_me:$LINENO: creating $ac_file" >&5 echo "$as_me: creating $ac_file" >&6;} rm -f "$ac_file" fi # Let's still pretend it is `configure' which instantiates (i.e., don't # use $as_me), people would be surprised to read: # /* config.h. Generated by config.status. */ if test x"$ac_file" = x-; then configure_input= else configure_input="$ac_file. " fi configure_input=$configure_input"Generated from `echo $ac_file_in | sed 's,.*/,,'` by configure." # First look for the input files in the build tree, otherwise in the # src tree. ac_file_inputs=`IFS=: for f in $ac_file_in; do case $f in -) echo $tmp/stdin ;; [\\/$]*) # Absolute (can't be DOS-style, as IFS=:) test -f "$f" || { { echo "$as_me:$LINENO: error: cannot find input file: $f" >&5 echo "$as_me: error: cannot find input file: $f" >&2;} { (exit 1); exit 1; }; } echo "$f";; *) # Relative if test -f "$f"; then # Build tree echo "$f" elif test -f "$srcdir/$f"; then # Source tree echo "$srcdir/$f" else # /dev/null tree { { echo "$as_me:$LINENO: error: cannot find input file: $f" >&5 echo "$as_me: error: cannot find input file: $f" >&2;} { (exit 1); exit 1; }; } fi;; esac done` || { (exit 1); exit 1; } _ACEOF cat >>$CONFIG_STATUS <<_ACEOF sed "$ac_vpsub $extrasub _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF :t /@[a-zA-Z_][a-zA-Z_0-9]*@/!b s,@configure_input@,$configure_input,;t t s,@srcdir@,$ac_srcdir,;t t s,@abs_srcdir@,$ac_abs_srcdir,;t t s,@top_srcdir@,$ac_top_srcdir,;t t s,@abs_top_srcdir@,$ac_abs_top_srcdir,;t t s,@builddir@,$ac_builddir,;t t s,@abs_builddir@,$ac_abs_builddir,;t t s,@top_builddir@,$ac_top_builddir,;t t s,@abs_top_builddir@,$ac_abs_top_builddir,;t t s,@INSTALL@,$ac_INSTALL,;t t " $ac_file_inputs | (eval "$ac_sed_cmds") >$tmp/out rm -f $tmp/stdin if test x"$ac_file" != x-; then mv $tmp/out $ac_file else cat $tmp/out rm -f $tmp/out fi done _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF # # CONFIG_HEADER section. # # These sed commands are passed to sed as "A NAME B NAME C VALUE D", where # NAME is the cpp macro being defined and VALUE is the value it is being given. # # ac_d sets the value in "#define NAME VALUE" lines. ac_dA='s,^\([ ]*\)#\([ ]*define[ ][ ]*\)' ac_dB='[ ].*$,\1#\2' ac_dC=' ' ac_dD=',;t' # ac_u turns "#undef NAME" without trailing blanks into "#define NAME VALUE". ac_uA='s,^\([ ]*\)#\([ ]*\)undef\([ ][ ]*\)' ac_uB='$,\1#\2define\3' ac_uC=' ' ac_uD=',;t' for ac_file in : $CONFIG_HEADERS; do test "x$ac_file" = x: && continue # Support "outfile[:infile[:infile...]]", defaulting infile="outfile.in". 
case $ac_file in - | *:- | *:-:* ) # input from stdin cat >$tmp/stdin ac_file_in=`echo "$ac_file" | sed 's,[^:]*:,,'` ac_file=`echo "$ac_file" | sed 's,:.*,,'` ;; *:* ) ac_file_in=`echo "$ac_file" | sed 's,[^:]*:,,'` ac_file=`echo "$ac_file" | sed 's,:.*,,'` ;; * ) ac_file_in=$ac_file.in ;; esac test x"$ac_file" != x- && { echo "$as_me:$LINENO: creating $ac_file" >&5 echo "$as_me: creating $ac_file" >&6;} # First look for the input files in the build tree, otherwise in the # src tree. ac_file_inputs=`IFS=: for f in $ac_file_in; do case $f in -) echo $tmp/stdin ;; [\\/$]*) # Absolute (can't be DOS-style, as IFS=:) test -f "$f" || { { echo "$as_me:$LINENO: error: cannot find input file: $f" >&5 echo "$as_me: error: cannot find input file: $f" >&2;} { (exit 1); exit 1; }; } # Do quote $f, to prevent DOS paths from being IFS'd. echo "$f";; *) # Relative if test -f "$f"; then # Build tree echo "$f" elif test -f "$srcdir/$f"; then # Source tree echo "$srcdir/$f" else # /dev/null tree { { echo "$as_me:$LINENO: error: cannot find input file: $f" >&5 echo "$as_me: error: cannot find input file: $f" >&2;} { (exit 1); exit 1; }; } fi;; esac done` || { (exit 1); exit 1; } # Remove the trailing spaces. sed 's/[ ]*$//' $ac_file_inputs >$tmp/in _ACEOF # Transform confdefs.h into two sed scripts, `conftest.defines' and # `conftest.undefs', that substitutes the proper values into # config.h.in to produce config.h. The first handles `#define' # templates, and the second `#undef' templates. # And first: Protect against being on the right side of a sed subst in # config.status. Protect against being in an unquoted here document # in config.status. rm -f conftest.defines conftest.undefs # Using a here document instead of a string reduces the quoting nightmare. # Putting comments in sed scripts is not portable. # # `end' is used to avoid that the second main sed command (meant for # 0-ary CPP macros) applies to n-ary macro definitions. # See the Autoconf documentation for `clear'. cat >confdef2sed.sed <<\_ACEOF s/[\\&,]/\\&/g s,[\\$`],\\&,g t clear : clear s,^[ ]*#[ ]*define[ ][ ]*\([^ (][^ (]*\)\(([^)]*)\)[ ]*\(.*\)$,${ac_dA}\1${ac_dB}\1\2${ac_dC}\3${ac_dD},gp t end s,^[ ]*#[ ]*define[ ][ ]*\([^ ][^ ]*\)[ ]*\(.*\)$,${ac_dA}\1${ac_dB}\1${ac_dC}\2${ac_dD},gp : end _ACEOF # If some macros were called several times there might be several times # the same #defines, which is useless. Nevertheless, we may not want to # sort them, since we want the *last* AC-DEFINE to be honored. uniq confdefs.h | sed -n -f confdef2sed.sed >conftest.defines sed 's/ac_d/ac_u/g' conftest.defines >conftest.undefs rm -f confdef2sed.sed # This sed command replaces #undef with comments. This is necessary, for # example, in the case of _POSIX_SOURCE, which is predefined and required # on some systems where configure will not decide to define it. cat >>conftest.undefs <<\_ACEOF s,^[ ]*#[ ]*undef[ ][ ]*[a-zA-Z_][a-zA-Z_0-9]*,/* & */, _ACEOF # Break up conftest.defines because some shells have a limit on the size # of here documents, and old seds have small limits too (100 cmds). echo ' # Handle all the #define templates only if necessary.' >>$CONFIG_STATUS echo ' if grep "^[ ]*#[ ]*define" $tmp/in >/dev/null; then' >>$CONFIG_STATUS echo ' # If there are no defines, we may have an empty if/fi' >>$CONFIG_STATUS echo ' :' >>$CONFIG_STATUS rm -f conftest.tail while grep . conftest.defines >/dev/null do # Write a limited-size here document to $tmp/defines.sed. 
echo ' cat >$tmp/defines.sed <<CEOF' >>$CONFIG_STATUS # Speed up: don't consider the non `#define' lines. echo '/^[ ]*#[ ]*define/!b' >>$CONFIG_STATUS # Work around the forget-to-reset-the-flag bug. echo 't clr' >>$CONFIG_STATUS echo ': clr' >>$CONFIG_STATUS sed ${ac_max_here_lines}q conftest.defines >>$CONFIG_STATUS echo 'CEOF sed -f $tmp/defines.sed $tmp/in >$tmp/out rm -f $tmp/in mv $tmp/out $tmp/in ' >>$CONFIG_STATUS sed 1,${ac_max_here_lines}d conftest.defines >conftest.tail rm -f conftest.defines mv conftest.tail conftest.defines done rm -f conftest.defines echo ' fi # grep' >>$CONFIG_STATUS echo >>$CONFIG_STATUS # Break up conftest.undefs because some shells have a limit on the size # of here documents, and old seds have small limits too (100 cmds). echo ' # Handle all the #undef templates' >>$CONFIG_STATUS rm -f conftest.tail while grep . conftest.undefs >/dev/null do # Write a limited-size here document to $tmp/undefs.sed. echo ' cat >$tmp/undefs.sed <<CEOF' >>$CONFIG_STATUS # Speed up: don't consider the non `#undef' echo '/^[ ]*#[ ]*undef/!b' >>$CONFIG_STATUS # Work around the forget-to-reset-the-flag bug. echo 't clr' >>$CONFIG_STATUS echo ': clr' >>$CONFIG_STATUS sed ${ac_max_here_lines}q conftest.undefs >>$CONFIG_STATUS echo 'CEOF sed -f $tmp/undefs.sed $tmp/in >$tmp/out rm -f $tmp/in mv $tmp/out $tmp/in ' >>$CONFIG_STATUS sed 1,${ac_max_here_lines}d conftest.undefs >conftest.tail rm -f conftest.undefs mv conftest.tail conftest.undefs done rm -f conftest.undefs cat >>$CONFIG_STATUS <<\_ACEOF # Let's still pretend it is `configure' which instantiates (i.e., don't # use $as_me), people would be surprised to read: # /* config.h. Generated by config.status. */ if test x"$ac_file" = x-; then echo "/* Generated by configure. */" >$tmp/config.h else echo "/* $ac_file. Generated by configure. */" >$tmp/config.h fi cat $tmp/in >>$tmp/config.h rm -f $tmp/in if test x"$ac_file" != x-; then if diff $ac_file $tmp/config.h >/dev/null 2>&1; then { echo "$as_me:$LINENO: $ac_file is unchanged" >&5 echo "$as_me: $ac_file is unchanged" >&6;} else ac_dir=`(dirname "$ac_file") 2>/dev/null || $as_expr X"$ac_file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$ac_file" : 'X\(//\)[^/]' \| \ X"$ac_file" : 'X\(//\)$' \| \ X"$ac_file" : 'X\(/\)' \| \ . : '\(.\)' 2>/dev/null || echo X"$ac_file" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/; q; } /^X\(\/\/\)[^/].*/{ s//\1/; q; } /^X\(\/\/\)$/{ s//\1/; q; } /^X\(\/\).*/{ s//\1/; q; } s/.*/./; q'` { if $as_mkdir_p; then mkdir -p "$ac_dir" else as_dir="$ac_dir" as_dirs= while test ! -d "$as_dir"; do as_dirs="$as_dir $as_dirs" as_dir=`(dirname "$as_dir") 2>/dev/null || $as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$as_dir" : 'X\(//\)[^/]' \| \ X"$as_dir" : 'X\(//\)$' \| \ X"$as_dir" : 'X\(/\)' \| \ . : '\(.\)' 2>/dev/null || echo X"$as_dir" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/; q; } /^X\(\/\/\)[^/].*/{ s//\1/; q; } /^X\(\/\/\)$/{ s//\1/; q; } /^X\(\/\).*/{ s//\1/; q; } s/.*/./; q'` done test ! -n "$as_dirs" || mkdir $as_dirs fi || { { echo "$as_me:$LINENO: error: cannot create directory \"$ac_dir\"" >&5 echo "$as_me: error: cannot create directory \"$ac_dir\"" >&2;} { (exit 1); exit 1; }; }; } rm -f $ac_file mv $tmp/config.h $ac_file fi else cat $tmp/config.h rm -f $tmp/config.h fi # Compute $ac_file's index in $config_headers.
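# --- illustrative sketch, not part of the original generated script -----------
# The conftest.defines/conftest.undefs machinery above turns every
# "#define NAME VALUE" collected in confdefs.h into a sed rule that rewrites
# the matching "#undef NAME" line of the header template, and comments out
# whatever #undef lines remain.  With a hypothetical HAVE_FOO macro the flow is:
#
#   confdefs.h:              #define HAVE_FOO 1
#   config header template:  #undef HAVE_FOO
#   generated header:        #define HAVE_FOO 1
#   still-undefined macro:   /* #undef HAVE_BAR */
# ------------------------------------------------------------------------------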
_am_stamp_count=1 for _am_header in $config_headers :; do case $_am_header in $ac_file | $ac_file:* ) break ;; * ) _am_stamp_count=`expr $_am_stamp_count + 1` ;; esac done echo "timestamp for $ac_file" >`(dirname $ac_file) 2>/dev/null || $as_expr X$ac_file : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X$ac_file : 'X\(//\)[^/]' \| \ X$ac_file : 'X\(//\)$' \| \ X$ac_file : 'X\(/\)' \| \ . : '\(.\)' 2>/dev/null || echo X$ac_file | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/; q; } /^X\(\/\/\)[^/].*/{ s//\1/; q; } /^X\(\/\/\)$/{ s//\1/; q; } /^X\(\/\).*/{ s//\1/; q; } s/.*/./; q'`/stamp-h$_am_stamp_count done _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF # # CONFIG_COMMANDS section. # for ac_file in : $CONFIG_COMMANDS; do test "x$ac_file" = x: && continue ac_dest=`echo "$ac_file" | sed 's,:.*,,'` ac_source=`echo "$ac_file" | sed 's,[^:]*:,,'` ac_dir=`(dirname "$ac_dest") 2>/dev/null || $as_expr X"$ac_dest" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$ac_dest" : 'X\(//\)[^/]' \| \ X"$ac_dest" : 'X\(//\)$' \| \ X"$ac_dest" : 'X\(/\)' \| \ . : '\(.\)' 2>/dev/null || echo X"$ac_dest" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/; q; } /^X\(\/\/\)[^/].*/{ s//\1/; q; } /^X\(\/\/\)$/{ s//\1/; q; } /^X\(\/\).*/{ s//\1/; q; } s/.*/./; q'` { if $as_mkdir_p; then mkdir -p "$ac_dir" else as_dir="$ac_dir" as_dirs= while test ! -d "$as_dir"; do as_dirs="$as_dir $as_dirs" as_dir=`(dirname "$as_dir") 2>/dev/null || $as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$as_dir" : 'X\(//\)[^/]' \| \ X"$as_dir" : 'X\(//\)$' \| \ X"$as_dir" : 'X\(/\)' \| \ . : '\(.\)' 2>/dev/null || echo X"$as_dir" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/; q; } /^X\(\/\/\)[^/].*/{ s//\1/; q; } /^X\(\/\/\)$/{ s//\1/; q; } /^X\(\/\).*/{ s//\1/; q; } s/.*/./; q'` done test ! -n "$as_dirs" || mkdir $as_dirs fi || { { echo "$as_me:$LINENO: error: cannot create directory \"$ac_dir\"" >&5 echo "$as_me: error: cannot create directory \"$ac_dir\"" >&2;} { (exit 1); exit 1; }; }; } ac_builddir=. if test "$ac_dir" != .; then ac_dir_suffix=/`echo "$ac_dir" | sed 's,^\.[\\/],,'` # A "../" for each directory in $ac_dir_suffix. ac_top_builddir=`echo "$ac_dir_suffix" | sed 's,/[^\\/]*,../,g'` else ac_dir_suffix= ac_top_builddir= fi case $srcdir in .) # No --srcdir option. We are building in place. ac_srcdir=. if test -z "$ac_top_builddir"; then ac_top_srcdir=. else ac_top_srcdir=`echo $ac_top_builddir | sed 's,/$,,'` fi ;; [\\/]* | ?:[\\/]* ) # Absolute path. ac_srcdir=$srcdir$ac_dir_suffix; ac_top_srcdir=$srcdir ;; *) # Relative path. ac_srcdir=$ac_top_builddir$srcdir$ac_dir_suffix ac_top_srcdir=$ac_top_builddir$srcdir ;; esac # Do not use `cd foo && pwd` to compute absolute paths, because # the directories may not exist. case `pwd` in .) ac_abs_builddir="$ac_dir";; *) case "$ac_dir" in .) ac_abs_builddir=`pwd`;; [\\/]* | ?:[\\/]* ) ac_abs_builddir="$ac_dir";; *) ac_abs_builddir=`pwd`/"$ac_dir";; esac;; esac case $ac_abs_builddir in .) ac_abs_top_builddir=${ac_top_builddir}.;; *) case ${ac_top_builddir}. in .) ac_abs_top_builddir=$ac_abs_builddir;; [\\/]* | ?:[\\/]* ) ac_abs_top_builddir=${ac_top_builddir}.;; *) ac_abs_top_builddir=$ac_abs_builddir/${ac_top_builddir}.;; esac;; esac case $ac_abs_builddir in .) ac_abs_srcdir=$ac_srcdir;; *) case $ac_srcdir in .) ac_abs_srcdir=$ac_abs_builddir;; [\\/]* | ?:[\\/]* ) ac_abs_srcdir=$ac_srcdir;; *) ac_abs_srcdir=$ac_abs_builddir/$ac_srcdir;; esac;; esac case $ac_abs_builddir in .) ac_abs_top_srcdir=$ac_top_srcdir;; *) case $ac_top_srcdir in .) 
ac_abs_top_srcdir=$ac_abs_builddir;; [\\/]* | ?:[\\/]* ) ac_abs_top_srcdir=$ac_top_srcdir;; *) ac_abs_top_srcdir=$ac_abs_builddir/$ac_top_srcdir;; esac;; esac { echo "$as_me:$LINENO: executing $ac_dest commands" >&5 echo "$as_me: executing $ac_dest commands" >&6;} case $ac_dest in depfiles ) test x"$AMDEP_TRUE" != x"" || for mf in $CONFIG_FILES; do # Strip MF so we end up with the name of the file. mf=`echo "$mf" | sed -e 's/:.*$//'` # Check whether this is an Automake generated Makefile or not. # We used to match only the files named `Makefile.in', but # some people rename them; so instead we look at the file content. # Grep'ing the first line is not enough: some people post-process # each Makefile.in and add a new line on top of each file to say so. # So let's grep whole file. if grep '^#.*generated by automake' $mf > /dev/null 2>&1; then dirpart=`(dirname "$mf") 2>/dev/null || $as_expr X"$mf" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$mf" : 'X\(//\)[^/]' \| \ X"$mf" : 'X\(//\)$' \| \ X"$mf" : 'X\(/\)' \| \ . : '\(.\)' 2>/dev/null || echo X"$mf" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/; q; } /^X\(\/\/\)[^/].*/{ s//\1/; q; } /^X\(\/\/\)$/{ s//\1/; q; } /^X\(\/\).*/{ s//\1/; q; } s/.*/./; q'` else continue fi # Extract the definition of DEPDIR, am__include, and am__quote # from the Makefile without running `make'. DEPDIR=`sed -n 's/^DEPDIR = //p' < "$mf"` test -z "$DEPDIR" && continue am__include=`sed -n 's/^am__include = //p' < "$mf"` test -z "am__include" && continue am__quote=`sed -n 's/^am__quote = //p' < "$mf"` # When using ansi2knr, U may be empty or an underscore; expand it U=`sed -n 's/^U = //p' < "$mf"` # Find all dependency output files, they are included files with # $(DEPDIR) in their names. We invoke sed twice because it is the # simplest approach to changing $(DEPDIR) to its actual value in the # expansion. for file in `sed -n " s/^$am__include $am__quote\(.*(DEPDIR).*\)$am__quote"'$/\1/p' <"$mf" | \ sed -e 's/\$(DEPDIR)/'"$DEPDIR"'/g' -e 's/\$U/'"$U"'/g'`; do # Make sure the directory exists. test -f "$dirpart/$file" && continue fdir=`(dirname "$file") 2>/dev/null || $as_expr X"$file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$file" : 'X\(//\)[^/]' \| \ X"$file" : 'X\(//\)$' \| \ X"$file" : 'X\(/\)' \| \ . : '\(.\)' 2>/dev/null || echo X"$file" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/; q; } /^X\(\/\/\)[^/].*/{ s//\1/; q; } /^X\(\/\/\)$/{ s//\1/; q; } /^X\(\/\).*/{ s//\1/; q; } s/.*/./; q'` { if $as_mkdir_p; then mkdir -p $dirpart/$fdir else as_dir=$dirpart/$fdir as_dirs= while test ! -d "$as_dir"; do as_dirs="$as_dir $as_dirs" as_dir=`(dirname "$as_dir") 2>/dev/null || $as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$as_dir" : 'X\(//\)[^/]' \| \ X"$as_dir" : 'X\(//\)$' \| \ X"$as_dir" : 'X\(/\)' \| \ . : '\(.\)' 2>/dev/null || echo X"$as_dir" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/; q; } /^X\(\/\/\)[^/].*/{ s//\1/; q; } /^X\(\/\/\)$/{ s//\1/; q; } /^X\(\/\).*/{ s//\1/; q; } s/.*/./; q'` done test ! -n "$as_dirs" || mkdir $as_dirs fi || { { echo "$as_me:$LINENO: error: cannot create directory $dirpart/$fdir" >&5 echo "$as_me: error: cannot create directory $dirpart/$fdir" >&2;} { (exit 1); exit 1; }; }; } # echo "creating $dirpart/$file" echo '# dummy' > "$dirpart/$file" done done ;; esac done _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF { (exit 0); exit 0; } _ACEOF chmod +x $CONFIG_STATUS ac_clean_files=$ac_clean_files_save # configure is writing to config.log, and then calls config.status. # config.status does its own redirection, appending to config.log. 
# Unfortunately, on DOS this fails, as config.log is still kept open # by configure, so config.status won't be able to write to it; its # output is simply discarded. So we exec the FD to /dev/null, # effectively closing config.log, so it can be properly (re)opened and # appended to by config.status. When coming back to configure, we # need to make the FD available again. if test "$no_create" != yes; then ac_cs_success=: ac_config_status_args= test "$silent" = yes && ac_config_status_args="$ac_config_status_args --quiet" exec 5>/dev/null $SHELL $CONFIG_STATUS $ac_config_status_args || ac_cs_success=false exec 5>>config.log # Use ||, not &&, to avoid exiting from the if with $? = 1, which # would make configure fail if this is the last instruction. $ac_cs_success || { (exit 1); exit 1; } fi sphinx-2.0.4-release/api/libsphinxclient/test.vcproj0000644000176700017710000000725411034764367022142 0ustar deogardeogar sphinx-2.0.4-release/api/libsphinxclient/build.mk0000644000176700017710000000222511037123041021334 0ustar deogardeogar SUPPRESS_WARNINGS = 2>&1 | (egrep -v '(AC_TRY_RUN called without default to allow cross compiling|AC_PROG_CXXCPP was called before AC_PROG_CXX|defined in acinclude.m4 but never used|AC_PROG_LEX invoked multiple times|AC_DECL_YYTEXT is expanded from...|the top level)'||true) AUTOCONF ?= 'autoconf' ACLOCAL ?= 'aclocal' AUTOHEADER ?= 'autoheader' AUTOMAKE ?= 'automake' AUTOUPDATE ?= 'autoupdate' LIBTOOLIZE ?= 'libtoolize' config_h_in = sphinxclient_config.h.in targets = $(config_h_in) configure makefiles all: $(targets) aclocal.m4: $(ACLOCAL) $(config_h_in): configure @echo rebuilding $@ @rm -f $@ $(AUTOHEADER) $(SUPPRESS_WARNINGS) configure: aclocal.m4 configure.in @echo rebuilding $@ $(LIBTOOLIZE) --copy $(AUTOCONF) $(SUPPRESS_WARNINGS) makefiles: configure Makefile.am @echo rebuilding Makefile.in files $(AUTOMAKE) --add-missing --copy cvsclean: @rm -rf *.lo *.la *.o *.a .libs Makefile Makefile.in stamp-h1 test sphinxclient_config.h* rm -rf aclocal.m4 autom4te.cache install.sh libtool Makefile Makefile.in 'configure.in~' missing config.h* configure rm -f config.guess config.log config.status config.sub cscope.out install-sh ltmain.sh sphinx-2.0.4-release/api/libsphinxclient/sphinxclient.c0000644000176700017710000017130211723657702022605 0ustar deogardeogar// // $Id: sphinxclient.c 3132 2012-03-01 11:38:42Z klirichek $ // // // Copyright (c) 2001-2012, Andrew Aksyonoff // Copyright (c) 2008-2012, Sphinx Technologies Inc // All rights reserved // // This program is free software; you can redistribute it and/or modify // it under the terms of the GNU Library General Public License. 
You should // have received a copy of the LGPL license along with this program; if you // did not, you can find it at http://www.gnu.org/ // #ifdef _WIN32 #if _MSC_VER>=1400 // VS 2005 and above #define _CRT_SECURE_NO_DEPRECATE 1 #define _CRT_NONSTDC_NO_DEPRECATE 1 #else // VS 2003 and below #define vsnprintf _vsnprintf #endif #endif #include #include #include #include #ifndef _WIN32 #include "sphinxclient_config.h" #endif #include "sphinxclient.h" #if _WIN32 // Win-specific headers, calls, libraries #include #include #pragma comment(linker, "/defaultlib:wsock32.lib") #pragma message("Automatically linking with wsock32.lib") #define EWOULDBLOCK WSAEWOULDBLOCK #define EINTR WSAEINTR #else // UNIX-specific headers and calls #include #include #include #include #include #include #include #include #include #include #endif ////////////////////////////////////////////////////////////////////////// #define MAX_REQS 32 #define CONNECT_TIMEOUT_MSEC 1000 #define MAX_PACKET_LEN (8*1024*1024) enum { SEARCHD_COMMAND_SEARCH = 0, SEARCHD_COMMAND_EXCERPT = 1, SEARCHD_COMMAND_UPDATE = 2, SEARCHD_COMMAND_KEYWORDS = 3, SEARCHD_COMMAND_PERSIST = 4, SEARCHD_COMMAND_STATUS = 5 }; enum { VER_COMMAND_EXCERPT = 0x103, VER_COMMAND_UPDATE = 0x102, VER_COMMAND_KEYWORDS = 0x100, VER_COMMAND_STATUS = 0x100 }; ////////////////////////////////////////////////////////////////////////// struct st_filter { const char * attr; int filter_type; int num_values; const sphinx_int64_t * values; sphinx_int64_t umin; sphinx_int64_t umax; float fmin; float fmax; int exclude; }; union un_attr_value { sphinx_int64_t int_value; float float_value; unsigned int * mva_value; const char * string; }; struct st_override { const char * attr; const sphinx_uint64_t * docids; int num_values; const unsigned int * uint_values; }; struct st_sphinx_client { unsigned short ver_search; ///< compatibility mode sphinx_bool copy_args; ///< whether to create a copy of each passed argument void * head_alloc; ///< head of client-owned allocations list const char * error; ///< last error const char * warning; ///< last warning char local_error_buf[256]; ///< buffer to store 'local' error messages (eg. 
connect() error) const char * host; int port; float timeout; int offset; int limit; int mode; int num_weights; const int * weights; int sort; const char * sortby; sphinx_uint64_t minid; sphinx_uint64_t maxid; const char * group_by; int group_func; const char * group_sort; const char * group_distinct; int max_matches; int cutoff; int retry_count; int retry_delay; const char * geoanchor_attr_lat; const char * geoanchor_attr_long; float geoanchor_lat; float geoanchor_long; int num_filters; int max_filters; struct st_filter * filters; int num_index_weights; const char ** index_weights_names; const int * index_weights_values; int ranker; const char * rankexpr; int max_query_time; int num_field_weights; const char ** field_weights_names; const int * field_weights_values; int num_overrides; int max_overrides; struct st_override * overrides; const char * select_list; int num_reqs; int req_lens [ MAX_REQS ]; char * reqs [ MAX_REQS ]; int response_len; char * response_buf; ///< where the buffer begins (might also contain heading warning) char * response_start; ///< where the data to parse starts int num_results; sphinx_result results [ MAX_REQS ]; int sock; ///< open socket for pconns; -1 if none sphinx_bool persist; }; ////////////////////////////////////////////////////////////////////////// static void * chain ( sphinx_client * client, const void * ptr, size_t len ); static const char * strchain ( sphinx_client * client, const char * s ); static void unchain ( sphinx_client * client, const void * ptr ); static void unchain_all ( sphinx_client * client ); sphinx_client * sphinx_create ( sphinx_bool copy_args ) { sphinx_client * client; int i; // allocate client = malloc ( sizeof(sphinx_client) ); if ( !client ) return NULL; // initialize defaults and return client->ver_search = 0x119; // 0x113 for 0.9.8, 0x116 for 0.9.9rc2 client->copy_args = copy_args; client->head_alloc = NULL; client->error = NULL; client->warning = NULL; client->local_error_buf[0] = '\0'; client->host = strchain ( client, "localhost" ); client->port = 9312; client->timeout = 0.0f; client->offset = 0; client->limit = 20; client->mode = SPH_MATCH_ALL; client->num_weights = 0; client->weights = NULL; client->sort = SPH_SORT_RELEVANCE; client->sortby = NULL; client->minid = 0; client->maxid = 0; client->group_by = NULL; client->group_func = SPH_GROUPBY_ATTR; client->group_sort = strchain ( client, "@groupby desc" ); client->group_distinct = NULL; client->max_matches = 1000; client->cutoff = 0; client->retry_count = 0; client->retry_delay = 0; client->geoanchor_attr_lat = NULL; client->geoanchor_attr_long = NULL; client->geoanchor_lat = 0.0f; client->geoanchor_long = 0.0f; client->num_filters = 0; client->max_filters = 0; client->filters = NULL; client->num_index_weights = 0; client->index_weights_names = NULL; client->index_weights_values = NULL; client->ranker = SPH_RANK_DEFAULT; client->rankexpr = NULL; client->max_query_time = 0; client->num_field_weights = 0; client->field_weights_names = NULL; client->field_weights_values = NULL; client->num_overrides = 0; client->max_overrides = 0; client->overrides = NULL; client->select_list = NULL; client->num_reqs = 0; client->response_len = 0; client->response_buf = NULL; client->num_results = 0; for ( i=0; iresults[i].values_pool = NULL; client->results[i].words = NULL; client->results[i].fields = NULL; client->results[i].attr_names = NULL; client->results[i].attr_types = NULL; } client->sock = -1; client->persist = SPH_FALSE; return client; } static void sphinx_free_results ( 
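
// ---------------------------------------------------------------------------
// Usage sketch: sphinx_create() above already defaults to "localhost":9312,
// offset 0, limit 20, max_matches 1000, SPH_MATCH_ALL and SPH_SORT_RELEVANCE,
// so a caller only overrides what differs. The example_* functions in this and
// the later sketches are illustrative names rather than library API, and they
// assume the headers included here.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "sphinxclient.h"

static sphinx_client * example_make_client ()
{
	// SPH_TRUE asks the client to keep private copies of every string passed
	// to it (see the chain()/strchain() helpers), so callers may reuse buffers
	sphinx_client * client = sphinx_create ( SPH_TRUE );
	if ( !client )
		return NULL;

	sphinx_set_server ( client, "localhost", 9312 ); // same as the built-in default
	sphinx_set_limits ( client, 0, 20, 1000, 0 ); // offset, limit, max_matches, cutoff
	return client;
}

// when done, sphinx_destroy() releases the chained copies, the response buffer,
// and any open socket
// ---------------------------------------------------------------------------
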
sphinx_client * client ) { int i; for ( i=0; inum_results; i++ ) { free ( client->results[i].values_pool ); free ( client->results[i].words ); free ( client->results[i].fields ); free ( client->results[i].attr_names ); free ( client->results[i].attr_types ); client->results[i].values_pool = NULL; client->results[i].words = NULL; client->results[i].fields = NULL; client->results[i].attr_names = NULL; client->results[i].attr_types = NULL; } client->num_results = 0; } void sock_close ( int sock ); #define safe_free(_ptr) \ if ( _ptr ) \ { \ free ( _ptr ); \ _ptr = NULL; \ } void sphinx_cleanup ( sphinx_client * client ) { int i; if ( !client ) return; for ( i=0; inum_reqs; i++ ) safe_free ( client->reqs[i] ); client->num_reqs = 0; sphinx_free_results ( client ); client->num_results = 0; client->num_results = 0; safe_free ( client->response_buf ); } void sphinx_destroy ( sphinx_client * client ) { int i; if ( !client ) return; for ( i=0; inum_reqs; i++ ) safe_free ( client->reqs[i] ); sphinx_free_results ( client ); unchain_all ( client ); safe_free ( client->filters ); safe_free ( client->response_buf ); if ( client->sock>=0 ) sock_close ( client->sock ); free ( client ); } const char * sphinx_error ( sphinx_client * client ) { return client->error ? client->error : ""; } const char * sphinx_warning ( sphinx_client * client ) { return client->warning ? client->warning : ""; } static void set_error ( sphinx_client * client, const char * template, ... ) { va_list ap; if ( !client ) return; va_start ( ap, template ); vsnprintf ( client->local_error_buf, sizeof(client->local_error_buf), template, ap ); va_end ( ap ); client->error = client->local_error_buf; client->warning = NULL; } ////////////////////////////////////////////////////////////////////////// struct st_memblock { struct st_memblock * prev; struct st_memblock * next; }; static void * chain ( sphinx_client * client, const void * ptr, size_t len ) { struct st_memblock * entry; if ( !client->copy_args || !ptr ) return (void*) ptr; entry = malloc ( sizeof(struct st_memblock) + len ); if ( !entry ) { set_error ( client, "malloc() failed (bytes=%d)", sizeof(struct st_memblock) + len ); return NULL; } entry->prev = NULL; entry->next = client->head_alloc; if ( entry->next ) entry->next->prev = entry; client->head_alloc = entry; entry++; memcpy ( entry, ptr, len ); return entry; } static const char * strchain ( sphinx_client * client, const char * s ) { return s ? 
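
// ---------------------------------------------------------------------------
// Error-reporting sketch: the setters below return SPH_FALSE (and the query
// calls return NULL) on failure, with the message stored by set_error() above;
// sphinx_error()/sphinx_warning() return "" rather than NULL when nothing is
// set. example_check() is an illustrative wrapper, not library API.

static sphinx_bool example_check ( sphinx_client * client, sphinx_bool ok )
{
	if ( !ok )
		fprintf ( stderr, "sphinx error: %s\n", sphinx_error ( client ) );
	else if ( sphinx_warning ( client )[0] )
		fprintf ( stderr, "sphinx warning: %s\n", sphinx_warning ( client ) );
	return ok;
}
// ---------------------------------------------------------------------------
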
chain ( client, s, 1+strlen(s) ) : NULL; } static void unchain ( sphinx_client * client, const void * ptr ) { struct st_memblock * entry; if ( !client->copy_args || !ptr ) return; entry = (struct st_memblock*) ptr; entry--; if ( entry->prev ) entry->prev->next = entry->next; else client->head_alloc = entry->next; if ( entry->next ) entry->next->prev = entry->prev; free ( entry ); } static void unchain_all ( sphinx_client * client ) { struct st_memblock *to_free, *cur; if ( !client || !client->copy_args ) return; cur = client->head_alloc; while ( cur ) { to_free = cur; cur = cur->next; free ( to_free ); } client->head_alloc = NULL; } ////////////////////////////////////////////////////////////////////////// sphinx_bool sphinx_set_server ( sphinx_client * client, const char * host, int port ) { if ( !client || !host || !host[0] ) { set_error ( client, "invalid arguments (host must not be empty)" ); return SPH_FALSE; } unchain ( client, client->host ); client->host = strchain ( client, host ); client->port = port; return SPH_TRUE; } sphinx_bool sphinx_set_connect_timeout ( sphinx_client * client, float seconds ) { if ( !client ) return SPH_FALSE; client->timeout = seconds; return SPH_TRUE; } sphinx_bool sphinx_set_limits ( sphinx_client * client, int offset, int limit, int max_matches, int cutoff ) { if ( !client || offset<0 || limit<=0 || max_matches<0 || cutoff<0 ) { if ( offset<0 ) set_error ( client, "invalid arguments (offset must be >= 0)" ); else if ( limit<=0 ) set_error ( client, "invalid arguments (limit must be > 0)" ); else if ( max_matches<0 ) set_error ( client, "invalid arguments (max_matches must be >= 0)" ); else if ( cutoff<0 ) set_error ( client, "invalid arguments (cutoff must be >= 0)" ); else set_error ( client, "invalid arguments" ); return SPH_FALSE; } client->offset = offset; client->limit = limit; if ( max_matches>=0 ) client->max_matches = max_matches; if ( cutoff>=0 ) client->cutoff = cutoff; return SPH_TRUE; } sphinx_bool sphinx_set_max_query_time ( sphinx_client * client, int max_query_time ) { if ( !client || max_query_time<=0 ) { set_error ( client, "invalid arguments (max_query_time must be > 0)" ); return SPH_FALSE; } client->max_query_time = max_query_time; return SPH_TRUE; } sphinx_bool sphinx_set_match_mode ( sphinx_client * client, int mode ) { if ( !client || modeSPH_MATCH_EXTENDED2 ) // FIXME? { set_error ( client, "invalid arguments (matching mode %d out of bounds)", mode ); return SPH_FALSE; } client->mode = mode; return SPH_TRUE; } sphinx_bool sphinx_set_ranking_mode ( sphinx_client * client, int ranker, const char * rankexpr ) { if ( !client || ranker=SPH_RANK_TOTAL ) // FIXME? 
{ set_error ( client, "invalid arguments (ranking mode %d out of bounds)", ranker ); return SPH_FALSE; } client->ranker = ranker; client->rankexpr = strchain ( client, rankexpr ); return SPH_TRUE; } sphinx_bool sphinx_set_sort_mode ( sphinx_client * client, int mode, const char * sortby ) { if ( !client || modeSPH_SORT_EXPR || ( mode!=SPH_SORT_RELEVANCE && ( !sortby || !sortby[0] ) ) ) { if ( modeSPH_SORT_EXPR ) { set_error ( client, "invalid arguments (sorting mode %d out of bounds)", mode ); } else if ( mode!=SPH_SORT_RELEVANCE && ( !sortby || !sortby[0] ) ) { set_error ( client, "invalid arguments (sortby clause must not be empty)", mode ); } else { set_error ( client, "invalid arguments", mode ); } return SPH_FALSE; } client->sort = mode; unchain ( client, client->sortby ); client->sortby = strchain ( client, sortby ); return SPH_TRUE; } sphinx_bool sphinx_set_field_weights ( sphinx_client * client, int num_weights, const char ** field_names, const int * field_weights ) { int i; if ( !client || num_weights<=0 || !field_names || !field_weights ) { if ( num_weights<=0 ) set_error ( client, "invalid arguments (num_weights must be > 0)" ); else if ( !field_names ) set_error ( client, "invalid arguments (field_names must not be NULL)" ); else if ( !field_names ) set_error ( client, "invalid arguments (field_weights must not be NULL)" ); else set_error ( client, "invalid arguments" ); return SPH_FALSE; } if ( client->copy_args ) { for ( i=0; inum_field_weights; i++ ) unchain ( client, client->field_weights_names[i] ); unchain ( client, client->field_weights_names ); unchain ( client, client->field_weights_values ); field_names = chain ( client, field_names, num_weights*sizeof(const char*) ); for ( i=0; inum_field_weights = num_weights; client->field_weights_names = field_names; client->field_weights_values = field_weights; return SPH_TRUE; } sphinx_bool sphinx_set_index_weights ( sphinx_client * client, int num_weights, const char ** index_names, const int * index_weights ) { int i; if ( !client || num_weights<=0 || !index_names || !index_weights ) { if ( num_weights<=0 ) set_error ( client, "invalid arguments (num_weights must be > 0)" ); else if ( !index_names ) set_error ( client, "invalid arguments (index_names must not be NULL)" ); else if ( !index_names ) set_error ( client, "invalid arguments (index_weights must not be NULL)" ); else set_error ( client, "invalid arguments" ); return SPH_FALSE; } if ( client->copy_args ) { for ( i=0; inum_index_weights; i++ ) unchain ( client, client->index_weights_names[i] ); unchain ( client, client->index_weights_names ); unchain ( client, client->index_weights_values ); index_names = chain ( client, index_names, num_weights*sizeof(const char*) ); for ( i=0; inum_index_weights = num_weights; client->index_weights_names = index_names; client->index_weights_values = index_weights; return SPH_TRUE; } sphinx_bool sphinx_set_id_range ( sphinx_client * client, sphinx_uint64_t minid, sphinx_uint64_t maxid ) { if ( !client || minid>maxid ) { set_error ( client, "invalid arguments (minid must be <= maxid)" ); return SPH_FALSE; } client->minid = minid; client->maxid = maxid; return SPH_TRUE; } static struct st_filter * sphinx_add_filter_entry ( sphinx_client * client ) { int len; if ( client->num_filters>=client->max_filters ) { len = ( client->max_filters<=0 ) ? 
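
// ---------------------------------------------------------------------------
// Query-shaping sketch for the setters above; "title" and "content" are
// placeholder field names. Note the argument checks: every sort mode except
// SPH_SORT_RELEVANCE requires a non-empty sortby clause, and the weight arrays
// must be non-NULL with a positive count.

static void example_shape_query ( sphinx_client * client )
{
	const char * fields[2] = { "title", "content" }; // placeholders
	int weights[2] = { 10, 1 };

	sphinx_set_match_mode ( client, SPH_MATCH_ALL );
	sphinx_set_sort_mode ( client, SPH_SORT_RELEVANCE, NULL );
	sphinx_set_field_weights ( client, 2, fields, weights );
	sphinx_set_id_range ( client, 0, 0 ); // same 0,0 as the sphinx_create() defaults
}
// ---------------------------------------------------------------------------
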
client->num_filters + 8 : 2*client->max_filters; len *= sizeof(struct st_filter); client->filters = realloc ( client->filters, len ); if ( !client->filters ) { set_error ( client, "realloc() failed (bytes=%d)", len ); return NULL; } } return client->filters + client->num_filters++; } sphinx_bool sphinx_add_filter ( sphinx_client * client, const char * attr, int num_values, const sphinx_int64_t * values, sphinx_bool exclude ) { struct st_filter * filter; if ( !client || !attr || num_values<=0 || !values ) { if ( !attr ) set_error ( client, "invalid arguments (attr must not be empty)" ); else if ( num_values<=0 ) set_error ( client, "invalid arguments (num_values must be > 0)" ); else if ( !values ) set_error ( client, "invalid arguments (values must not be NULL)" ); else set_error ( client, "invalid arguments" ); return SPH_FALSE; } filter = sphinx_add_filter_entry ( client ); if ( !filter ) return SPH_FALSE; filter->attr = strchain ( client, attr ); filter->filter_type = SPH_FILTER_VALUES; filter->num_values = num_values; filter->values = chain ( client, values, num_values*sizeof(sphinx_int64_t) ); filter->exclude = exclude; return SPH_TRUE; } sphinx_bool sphinx_add_filter_range ( sphinx_client * client, const char * attr, sphinx_int64_t umin, sphinx_int64_t umax, sphinx_bool exclude ) { struct st_filter * filter; if ( !client || !attr || umin>umax ) { if ( !attr ) set_error ( client, "invalid arguments (attr must not be empty)" ); else if ( umin>umax ) set_error ( client, "invalid arguments (umin must be <= umax)" ); else set_error ( client, "invalid arguments" ); return SPH_FALSE; } filter = sphinx_add_filter_entry ( client ); if ( !filter ) return SPH_FALSE; filter->attr = strchain ( client, attr ); filter->filter_type = SPH_FILTER_RANGE; filter->umin = umin; filter->umax = umax; filter->exclude = exclude; return SPH_TRUE; } sphinx_bool sphinx_add_filter_float_range ( sphinx_client * client, const char * attr, float fmin, float fmax, sphinx_bool exclude ) { struct st_filter * filter; if ( !client || !attr || fmin>fmax ) { if ( !attr ) set_error ( client, "invalid arguments (attr must not be empty)" ); else if ( fmin>fmax ) set_error ( client, "invalid arguments (fmin must be <= fmax)" ); else set_error ( client, "invalid arguments" ); return SPH_FALSE; } filter = sphinx_add_filter_entry ( client ); if ( !filter ) return SPH_FALSE; filter->attr = strchain ( client, attr ); filter->filter_type = SPH_FILTER_FLOATRANGE; filter->fmin = fmin; filter->fmax = fmax; filter->exclude = exclude; return SPH_TRUE; } sphinx_bool sphinx_set_geoanchor ( sphinx_client * client, const char * attr_latitude, const char * attr_longitude, float latitude, float longitude ) { if ( !client || !attr_latitude || !attr_latitude[0] || !attr_longitude || !attr_longitude[0] ) { if ( !attr_latitude || !attr_latitude[0] ) set_error ( client, "invalid arguments (attr_latitude must not be empty)" ); else if ( !attr_longitude || !attr_longitude[0] ) set_error ( client, "invalid arguments (attr_longitude must not be empty)" ); else set_error ( client, "invalid arguments" ); return SPH_FALSE; } unchain ( client, client->geoanchor_attr_lat ); unchain ( client, client->geoanchor_attr_long ); client->geoanchor_attr_lat = strchain ( client, attr_latitude ); client->geoanchor_attr_long = strchain ( client, attr_longitude ); client->geoanchor_lat = latitude; client->geoanchor_long = longitude; return SPH_TRUE; } sphinx_bool sphinx_set_groupby ( sphinx_client * client, const char * attr, int groupby_func, const char * group_sort ) 
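
// ---------------------------------------------------------------------------
// Filter sketch for the three filter flavours above; "group_id", "published",
// "rating", "latitude" and "longitude" are placeholder attribute names. Every
// call appends one entry to client->filters via sphinx_add_filter_entry().

static void example_add_filters ( sphinx_client * client )
{
	sphinx_int64_t groups[3] = { 1, 5, 7 };

	// keep documents whose group_id attribute is one of the listed values
	sphinx_add_filter ( client, "group_id", 3, groups, SPH_FALSE );

	// drop documents whose published attribute falls in the range (exclude=SPH_TRUE)
	sphinx_add_filter_range ( client, "published", 946684800, 978307200, SPH_TRUE );

	// float range; the call rejects fmin>fmax
	sphinx_add_filter_float_range ( client, "rating", 3.5f, 5.0f, SPH_FALSE );

	// geosearch anchor: latitude/longitude attribute names plus the anchor point
	sphinx_set_geoanchor ( client, "latitude", "longitude", 0.93f, -2.11f );
}
// ---------------------------------------------------------------------------
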
{ if ( !client || !attr || groupby_funcSPH_GROUPBY_ATTRPAIR ) { if ( !attr ) { set_error ( client, "invalid arguments (attr must not be empty)" ); } else if ( groupby_funcSPH_GROUPBY_ATTRPAIR ) { set_error ( client, "invalid arguments (groupby_func %d out of bounds)", groupby_func ); } else { set_error ( client, "invalid arguments" ); } return SPH_FALSE; } unchain ( client, client->group_by ); unchain ( client, client->group_sort ); client->group_by = strchain ( client, attr ); client->group_func = groupby_func; client->group_sort = strchain ( client, group_sort ? group_sort : "@groupby desc" ); return SPH_TRUE; } sphinx_bool sphinx_set_groupby_distinct ( sphinx_client * client, const char * attr ) { if ( !client || !attr ) { if ( !attr ) set_error ( client, "invalid arguments (attr must not be empty)" ); else set_error ( client, "invalid arguments" ); return SPH_FALSE; } unchain ( client, client->group_distinct ); client->group_distinct = strchain ( client, attr ); return SPH_TRUE; } sphinx_bool sphinx_set_retries ( sphinx_client * client, int count, int delay ) { if ( !client || count<0 || count>1000 || delay<0 || delay>100000 ) { if ( count<0 || count>1000 ) set_error ( client, "invalid arguments (count value %d out of bounds)", count ); else if ( delay<0 || delay>100000 ) set_error ( client, "invalid arguments (delay value %d out of bounds)", delay ); else set_error ( client, "invalid arguments" ); return SPH_FALSE; } client->retry_count = count; client->retry_delay = delay; return SPH_TRUE; } sphinx_bool sphinx_add_override ( sphinx_client * client, const char * attr, const sphinx_uint64_t * docids, int num_values, const unsigned int * values ) { struct st_override * p; if ( !client ) return SPH_FALSE; if ( client->ver_search<0x115 ) { set_error ( client, "sphinx_add_override not supported by chosen protocol version" ); return SPH_FALSE; } if ( client->num_overrides>=client->max_overrides ) { client->max_overrides = ( client->max_overrides<=0 ) ? 
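
// ---------------------------------------------------------------------------
// Grouping and retry sketch; "group_id" and "price" are placeholder attribute
// names. Passing NULL for group_sort falls back to "@groupby desc" exactly as
// sphinx_set_groupby() above does, and sphinx_set_retries() bounds its inputs
// to 0..1000 retries and a 0..100000 delay.

static void example_grouping ( sphinx_client * client )
{
	sphinx_set_groupby ( client, "group_id", SPH_GROUPBY_ATTR, NULL );
	sphinx_set_groupby_distinct ( client, "price" );
	sphinx_set_retries ( client, 2, 500 );
}
// ---------------------------------------------------------------------------
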
8 : 2*client->max_overrides; client->overrides = realloc ( client->overrides, client->max_overrides *sizeof(struct st_override) ); } p = client->overrides + client->num_overrides; client->num_overrides++; p->attr = strchain ( client, attr ); p->docids = chain ( client, docids, sizeof(sphinx_uint64_t)*num_values ); p->num_values = num_values; p->uint_values = chain ( client, values, sizeof(unsigned int)*num_values ); return SPH_TRUE; } sphinx_bool sphinx_set_select ( sphinx_client * client, const char * select_list ) { if ( !client ) return SPH_FALSE; if ( client->ver_search<0x116 ) { set_error ( client, "sphinx_set_select not supported by chosen protocol version" ); return SPH_FALSE; } unchain ( client, client->select_list ); client->select_list = strchain ( client, select_list ); return SPH_TRUE; } void sphinx_reset_filters ( sphinx_client * client ) { int i; if ( !client ) return; if ( client->filters ) { if ( client->copy_args ) for ( i=0; inum_filters; i++ ) { unchain ( client, client->filters[i].attr ); if ( client->filters[i].filter_type==SPH_FILTER_VALUES ) unchain ( client, client->filters[i].values ); } free ( client->filters ); client->filters = NULL; } client->num_filters = client->max_filters = 0; } void sphinx_reset_groupby ( sphinx_client * client ) { if ( !client ) return; unchain ( client, client->group_by ); unchain ( client, client->group_sort ); client->group_by = NULL; client->group_func = SPH_GROUPBY_ATTR; client->group_sort = strchain ( client, "@groupby desc" ); client->group_distinct = NULL; } ////////////////////////////////////////////////////////////////////////// static int sphinx_dismiss_requests ( sphinx_client * client ) { int nreqs = client->num_reqs, i; for ( i=0; inum_reqs; i++ ) free ( client->reqs[i] ); client->num_reqs = 0; return nreqs; } sphinx_result * sphinx_query ( sphinx_client * client, const char * query, const char * index_list, const char * comment ) { sphinx_result * res; if ( !client ) return NULL; if ( client->num_reqs!=0 ) { set_error ( client, "sphinx_query() must not be called after sphinx_add_query()" ); return NULL; } if ( sphinx_add_query ( client, query, index_list, comment )!=0 ) return NULL; res = sphinx_run_queries ( client ); // just a shortcut for client->results[0] sphinx_dismiss_requests ( client ); // sphinx_query() is fire and forget; dismiss request in all cases if ( !res ) return NULL; client->error = res->error; client->warning = res->warning; return ( res->status==SEARCHD_ERROR ) ? NULL : res; } static size_t safestrlen ( const char * s ) { return s ? strlen(s) : 0; } static int calc_req_len ( sphinx_client * client, const char * query, const char * index_list, const char * comment ) { int i, filter_val_size; size_t res; res = 96 + 2*(int)sizeof(sphinx_uint64_t) + 4*client->num_weights + safestrlen ( client->sortby ) + safestrlen ( query ) + safestrlen ( index_list ) + safestrlen ( client->group_by ) + safestrlen ( client->group_sort ) + safestrlen ( client->group_distinct ) + safestrlen ( comment ) + safestrlen ( client->rankexpr ); filter_val_size = ( client->ver_search>=0x114 ) ? 
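
// ---------------------------------------------------------------------------
// Query-execution sketch: sphinx_query() above is a one-shot wrapper that adds
// a single request, runs it and returns &client->results[0] (and it refuses to
// run after sphinx_add_query()); for batches, queue several requests with
// sphinx_add_query() and issue them together with sphinx_run_queries(). The
// index name "test1" and the query strings are placeholders.

static void example_run_batch ( sphinx_client * client )
{
	int i, n;
	sphinx_result * results;

	sphinx_add_query ( client, "hello", "test1", NULL );
	sphinx_add_query ( client, "world", "test1", NULL );

	results = sphinx_run_queries ( client ); // returns client->results, or NULL on hard failure
	if ( !results )
	{
		fprintf ( stderr, "query failed: %s\n", sphinx_error ( client ) );
		return;
	}

	n = sphinx_get_num_results ( client );
	for ( i=0; i<n; i++ )
	{
		if ( results[i].status==SEARCHD_ERROR )
			printf ( "query %d failed: %s\n", i, results[i].error );
		else
			printf ( "query %d: total_found=%d\n", i, results[i].total_found );
	}
}
// ---------------------------------------------------------------------------
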
8 : 4; for ( i=0; inum_filters; i++ ) { const struct st_filter * filter = &client->filters[i]; res += 12 + safestrlen ( filter->attr ); // string attr-name; int type; int exclude-flag switch ( filter->filter_type ) { case SPH_FILTER_VALUES: res += 4 + filter_val_size*filter->num_values; break; // int values-count; uint32/int64[] values case SPH_FILTER_RANGE: res += 2*filter_val_size; break; // uint32/int64 min-val, max-val case SPH_FILTER_FLOATRANGE: res += 8; break; // float min-val,max-val } } if ( client->geoanchor_attr_lat && client->geoanchor_attr_long ) res += 16 + safestrlen ( client->geoanchor_attr_lat ) + safestrlen ( client->geoanchor_attr_long ); // string lat-attr, long-attr; float lat, long for ( i=0; inum_index_weights; i++ ) res += 8 + safestrlen ( client->index_weights_names[i] ); // string index-name; int index-weight for ( i=0; inum_field_weights; i++ ) res += 8 + safestrlen ( client->field_weights_names[i] ); // string field-name; int field-weight if ( client->ver_search>=0x115 ) { res += 4; // int overrides-count for ( i=0; inum_overrides; i++ ) { res += 8 + safestrlen ( client->overrides[i].attr ); // string attr, int attr-type res += 4 + 12*client->overrides[i].num_values; // int values-count, { uint64 docid, uint32 value }[] override } } if ( client->ver_search>=0x116 ) res += 4 + safestrlen ( client->select_list ); // string select_list return (int)res; } static void send_bytes ( char ** pp, const char * bytes, int len ) { char * ptr; int i; ptr = *pp; if ( ptr ) for ( i=0; i> 24 ) & 0xff; b[1] = ( value >> 16 ) & 0xff; b[2] = ( value >> 8 ) & 0xff; b[3] = ( value & 0xFF ); *pp += 4; } static void send_word ( char ** pp, unsigned short value ) { unsigned char * b = (unsigned char*) *pp; b[0] = ( value >> 8 ); b[1] = ( value & 0xFF ); *pp += 2; } static void send_str ( char ** pp, const char * s ) { int len; len = s ? 
(int)strlen(s) : 0; send_int ( pp, len ); send_bytes ( pp, s, len ); } static void send_qword ( char ** pp, sphinx_uint64_t value ) { send_int ( pp, (int)( value >> 32 ) ); send_int ( pp, (int)( value & ((sphinx_uint64_t)0xffffffffL) ) ); } static void send_float ( char ** pp, float value ) { union { float f; int i; } u; u.f = value; send_int ( pp, u.i ); } int sphinx_add_query ( sphinx_client * client, const char * query, const char * index_list, const char * comment ) { int i, j, req_len; char * req; if ( client->num_reqs<0 || client->num_reqs>=MAX_REQS ) { set_error ( client, "num_reqs=%d out of bounds (too many queries?)", client->num_reqs ); return -1; } req_len = calc_req_len ( client, query, index_list, comment ); req = malloc ( req_len ); if ( !req ) { set_error ( client, "malloc() failed (bytes=%d)", req_len ); return -1; } client->reqs[client->num_reqs] = req; client->req_lens[client->num_reqs] = req_len; client->num_reqs++; send_int ( &req, client->offset ); send_int ( &req, client->limit ); send_int ( &req, client->mode ); send_int ( &req, client->ranker ); if ( client->ranker==SPH_RANK_EXPR ) send_str ( &req, client->rankexpr ); send_int ( &req, client->sort ); send_str ( &req, client->sortby ); send_str ( &req, query ); send_int ( &req, client->num_weights ); for ( i=0; inum_weights; i++ ) send_int ( &req, client->weights[i] ); send_str ( &req, index_list ); send_int ( &req, 1 ); // id range bits send_qword ( &req, client->minid ); send_qword ( &req, client->maxid ); send_int ( &req, client->num_filters ); for ( i=0; inum_filters; i++ ) { send_str ( &req, client->filters[i].attr ); send_int ( &req, client->filters[i].filter_type ); switch ( client->filters[i].filter_type ) { case SPH_FILTER_VALUES: send_int ( &req, client->filters[i].num_values ); if ( client->ver_search>=0x114 ) { for ( j=0; jfilters[i].num_values; j++ ) send_qword ( &req, client->filters[i].values[j] ); } else { for ( j=0; jfilters[i].num_values; j++ ) send_int ( &req, (unsigned int)client->filters[i].values[j] ); } break; case SPH_FILTER_RANGE: if ( client->ver_search>=0x114 ) { send_qword ( &req, client->filters[i].umin ); send_qword ( &req, client->filters[i].umax ); } else { send_int ( &req, (unsigned int)client->filters[i].umin ); send_int ( &req, (unsigned int)client->filters[i].umax ); } break; case SPH_FILTER_FLOATRANGE: send_float ( &req, client->filters[i].fmin ); send_float ( &req, client->filters[i].fmax ); break; } send_int ( &req, client->filters[i].exclude ); } send_int ( &req, client->group_func ); send_str ( &req, client->group_by ); send_int ( &req, client->max_matches ); send_str ( &req, client->group_sort ); send_int ( &req, client->cutoff ); send_int ( &req, client->retry_count ); send_int ( &req, client->retry_delay ); send_str ( &req, client->group_distinct ); if ( client->geoanchor_attr_lat && client->geoanchor_attr_long ) { send_int ( &req, 1 ); send_str ( &req, client->geoanchor_attr_lat ); send_str ( &req, client->geoanchor_attr_long ); send_float ( &req, client->geoanchor_lat ); send_float ( &req, client->geoanchor_long ); } else { send_int ( &req, 0 ); } send_int ( &req, client->num_index_weights ); for ( i=0; inum_index_weights; i++ ) { send_str ( &req, client->index_weights_names[i] ); send_int ( &req, client->index_weights_values[i] ); } send_int ( &req, client->max_query_time ); send_int ( &req, client->num_field_weights ); for ( i=0; inum_field_weights; i++ ) { send_str ( &req, client->field_weights_names[i] ); send_int ( &req, client->field_weights_values[i] ); } send_str 
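
// ---------------------------------------------------------------------------
// Byte-order sketch: the send_*() helpers above emit every scalar in network
// (big-endian) order -- send_word() two bytes MSB first, send_int() four,
// send_qword() the high then the low 32 bits, send_str() an int length followed
// by the raw bytes with no terminator, and send_float() the float's bit pattern
// via send_int(). The stand-alone function below only mirrors that layout for a
// single string into a caller-supplied buffer; it is an illustration, not a
// helper the library provides.

static int example_pack_str ( unsigned char * out, const char * s )
{
	int i, len = s ? (int)strlen(s) : 0;

	out[0] = ( len >> 24 ) & 0xff; // length, most significant byte first
	out[1] = ( len >> 16 ) & 0xff;
	out[2] = ( len >> 8 ) & 0xff;
	out[3] = len & 0xff;
	for ( i=0; i<len; i++ ) // raw bytes, no trailing zero
		out[4+i] = (unsigned char) s[i];
	return 4+len; // bytes written
}
// ---------------------------------------------------------------------------
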
( &req, comment ); if ( client->ver_search>=0x115 ) { send_int ( &req, client->num_overrides ); for ( i=0; inum_overrides; i++ ) { send_str ( &req, client->overrides[i].attr ); send_int ( &req, SPH_ATTR_INTEGER ); send_int ( &req, client->overrides[i].num_values ); for ( j=0; joverrides[i].num_values; j++ ) { send_qword ( &req, client->overrides[i].docids[j] ); send_int ( &req, client->overrides[i].uint_values[j] ); } } } if ( client->ver_search>=0x116 ) send_str ( &req, client->select_list ); if ( !req ) { set_error ( client, "internal error, failed to build request" ); free ( client->reqs [ --client->num_reqs ] ); return -1; } return client->num_reqs-1; } static const char * sock_error () { #if _WIN32 static char sBuf [ 256 ]; int iErr; iErr = WSAGetLastError (); _snprintf ( sBuf, sizeof(sBuf), "WSA error %d", iErr ); return sBuf; #else return strerror ( errno ); #endif } static int sock_errno () { #ifdef _WIN32 return WSAGetLastError (); #else return errno; #endif } static int sock_set_nonblocking ( int sock ) { #if _WIN32 u_long uMode = 1; return ioctlsocket ( sock, FIONBIO, &uMode ); #else return fcntl ( sock, F_SETFL, O_NONBLOCK ); #endif } static int sock_set_blocking ( int sock ) { #if _WIN32 u_long uMode = 0; return ioctlsocket ( sock, FIONBIO, &uMode ); #else return fcntl ( sock, F_SETFL, 0 ); #endif } void sock_close ( int sock ) { if ( sock<0 ) return; #if _WIN32 closesocket ( sock ); #else close ( sock ); #endif } // wrap FD_SET to prevent warnings on Windows #if _WIN32 #pragma warning(disable:4127) // conditional expr is const #pragma warning(disable:4389) // signed/unsigned mismatch void SPH_FD_SET ( int fd, fd_set * fdset ) { FD_SET ( fd, fdset ); } #pragma warning(default:4127) // conditional expr is const #pragma warning(default:4389) // signed/unsigned mismatch #else // !USE_WINDOWS #define SPH_FD_SET FD_SET #endif static sphinx_bool net_write ( int fd, const char * bytes, int len, sphinx_client * client ) { int res; #if defined(_WIN32) || defined(SO_NOSIGPIPE) || !defined(MSG_NOSIGNAL) res = send ( fd, bytes, len, 0 ); #else res = send ( fd, bytes, len, MSG_NOSIGNAL ); #endif if ( res<0 ) { set_error ( client, "send() error: %s", sock_error() ); return SPH_FALSE; } if ( res!=len ) { set_error ( client, "send() error: incomplete write (len=%d, sent=%d)", len, res ); return SPH_FALSE; } return SPH_TRUE; } static sphinx_bool net_read ( int fd, char * buf, int len, sphinx_client * client ) { int res, err; for ( ;; ) { res = recv ( fd, buf, len, 0 ); if ( res<0 ) { err = sock_errno(); if ( err==EINTR || err==EWOULDBLOCK ) // FIXME! 
remove non-blocking mode here; add timeout continue; set_error ( client, "recv(): read error (error=%s)", sock_error() ); return SPH_FALSE; } len -= res; buf += res; if ( len==0 ) return SPH_TRUE; if ( res==0 ) { set_error ( client, "recv(): incomplete read (len=%d, recv=%d)", len, res ); return SPH_FALSE; } } } static int net_create_inet_sock ( sphinx_client * client ) { struct hostent * hp; struct sockaddr_in sa; int sock, res, err, optval; hp = gethostbyname ( client->host ); if ( !hp ) { set_error ( client, "host name lookup failed (host=%s, error=%s)", client->host, sock_error() ); return -1; } memset ( &sa, 0, sizeof(sa) ); memcpy ( &sa.sin_addr, hp->h_addr_list[0], hp->h_length ); sa.sin_family = hp->h_addrtype; sa.sin_port = htons ( (unsigned short)client->port ); sock = (int) socket ( hp->h_addrtype, SOCK_STREAM, 0 ); if ( sock<0 ) { set_error ( client, "socket() failed: %s", sock_error() ); return -1; } if ( sock_set_nonblocking ( sock )<0 ) { set_error ( client, "sock_set_nonblocking() failed: %s", sock_error() ); return -1; } optval = 1; #if defined(SO_NOSIGPIPE) if ( setsockopt ( sock, SOL_SOCKET, SO_NOSIGPIPE, (void *)&optval, (socklen_t)sizeof(optval) ) < 0 ) { set_error ( client, "setsockopt() failed: %s", sock_error() ); return -1; } #endif res = connect ( sock, (struct sockaddr*)&sa, sizeof(sa) ); if ( res==0 ) return sock; err = sock_errno(); #ifdef EINPROGRESS if ( err!=EWOULDBLOCK && err!=EINPROGRESS ) #else if ( err!=EWOULDBLOCK ) #endif { set_error ( client, "connect() failed: %s", sock_error() ); return -1; } return sock; } #ifndef _WIN32 static int net_create_unix_sock ( sphinx_client * client ) { struct hostent * hp; struct sockaddr_un uaddr; int sock, res, err, optval, len; len = strlen ( client->host ); if ( len + 1 > sizeof( uaddr.sun_path ) ) set_error ( client, "UNIX socket path is too long (len=%d)", len ); memset ( &uaddr, 0, sizeof(uaddr) ); uaddr.sun_family = AF_UNIX; memcpy ( uaddr.sun_path, client->host, len + 1 ); sock = socket ( AF_UNIX, SOCK_STREAM, 0 ); if ( sock<0 ) { set_error ( client, "UNIX socket() failed: %s", sock_error() ); return -1; } if ( sock_set_nonblocking ( sock )<0 ) { set_error ( client, "sock_set_nonblocking() failed: %s", sock_error() ); return -1; } optval = 1; #if defined(SO_NOSIGPIPE) if ( setsockopt ( sock, SOL_SOCKET, SO_NOSIGPIPE, (void *)&optval, (socklen_t)sizeof(optval) ) < 0 ) { set_error ( client, "setsockopt() failed: %s", sock_error() ); return -1; } #endif res = connect ( sock, (struct sockaddr *)&uaddr, sizeof(uaddr) ); if ( res==0 ) return sock; err = sock_errno(); #ifdef EINPROGRESS if ( err!=EWOULDBLOCK && err!=EINPROGRESS ) #else if ( err!=EWOULDBLOCK ) #endif { set_error ( client, "connect() failed: %s", sock_error() ); return -1; } return sock; } #endif static int net_connect_get ( sphinx_client * client ) { struct timeval timeout; fd_set fds_write; int sock, to_wait, res, my_proto; if ( client->sock>=0 ) return client->sock; sock = -1; if ( client->host[0]!='/' ) { sock = net_create_inet_sock ( client ); } else { #ifdef _WIN32 set_error ( client, "UNIX sockets are not supported on Windows" ); return -1; #else sock = net_create_unix_sock ( client ); #endif } if ( sock<0 ) return -1; to_wait = (int)( 1000*client->timeout ); if ( to_wait<=0 ) to_wait = CONNECT_TIMEOUT_MSEC; { timeout.tv_sec = to_wait / 1000; // full seconds timeout.tv_usec = ( to_wait % 1000 ) * 1000; // remainder is msec, so *1000 for usec FD_ZERO ( &fds_write ); SPH_FD_SET ( sock, &fds_write ); res = select ( 1+sock, NULL, &fds_write, NULL, 
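
// ---------------------------------------------------------------------------
// Connection sketch: net_connect_get() above picks the transport from the host
// string -- a leading '/' selects a local UNIX-domain socket (rejected on
// Windows), anything else goes through gethostbyname()/connect() -- and waits
// on the non-blocking connect() for the sphinx_set_connect_timeout() value,
// falling back to CONNECT_TIMEOUT_MSEC (1000 msec) when none was set. The
// socket path below is a placeholder.

static void example_connection_setup ( sphinx_client * client )
{
	// TCP, explicit host and port
	sphinx_set_server ( client, "127.0.0.1", 9312 );
	sphinx_set_connect_timeout ( client, 0.5f ); // seconds

	// or a local UNIX socket; the port argument is not used for this form
	sphinx_set_server ( client, "/var/run/searchd.sock", 0 );
}
// ---------------------------------------------------------------------------
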
&timeout ); if ( res>=0 && FD_ISSET ( sock, &fds_write ) ) { sock_set_blocking ( sock ); // now send major client protocol version my_proto = htonl ( 1 ); if ( !net_write ( sock, (char*)&my_proto, sizeof(my_proto), client ) ) { sock_close ( sock ); set_error ( client, "failed to send client protocol version" ); return -1; } // check daemon version if ( !net_read ( sock, (char*)&my_proto, sizeof(my_proto), client ) ) { sock_close ( sock ); return -1; } my_proto = ntohl ( my_proto ); if ( my_proto<1 ) { sock_close ( sock ); set_error ( client, "expected searchd protocol version 1+, got version %d", my_proto ); return -1; } return sock; } /*!COMMIT handle EINTR here*/ sock_close ( sock ); set_error ( client, "connect() timed out" ); return -1; } } static sphinx_bool net_sock_eof ( int sock ) { struct timeval tv; fd_set fds_read, fds_except; int res; char buf; // wrong arg, consider dead if ( sock<0 ) return SPH_TRUE; // select() on a socket and watch for exceptions FD_ZERO ( &fds_read ); FD_ZERO ( &fds_except ); SPH_FD_SET ( sock, &fds_read ); SPH_FD_SET ( sock, &fds_except ); tv.tv_sec = 0; tv.tv_usec = 0; res = select ( 1+sock, &fds_read, NULL, &fds_except, &tv ); // select() failed, assume something is wrong if ( res<0 ) return SPH_TRUE; // got any events to read? (either normal via fds_read, or OOB via fds_except set) if ( FD_ISSET ( sock, &fds_read ) || FD_ISSET ( sock, &fds_except ) ) if ( recv ( sock, &buf, sizeof(buf), MSG_PEEK )<=0 ) if ( sock_errno()!=EWOULDBLOCK ) return SPH_TRUE; // it seems alive return SPH_FALSE; } static int net_connect_ex ( sphinx_client * client ) { if ( client->sock>=0 ) { // in case of a persistent connection, check for eof // then attempt to reestablish lost pconn once if ( !net_sock_eof ( client->sock ) ) return client->sock; sock_close ( client->sock ); client->sock = -1; } if ( !client->persist ) return net_connect_get ( client ); sphinx_open ( client ); return client->sock; } static unsigned short unpack_short ( char ** cur ) { unsigned short v; memcpy ( &v, *cur, sizeof(unsigned short) ); (*cur) += sizeof(unsigned short); return ntohs ( v ); } static unsigned int unpack_int ( char ** cur ) { unsigned int v; memcpy ( &v, *cur, sizeof(unsigned int) ); (*cur) += sizeof(unsigned int); return ntohl ( v ); } static char * unpack_str ( char ** cur ) { // we play a trick // we move the string in-place to free space for trailing zero but avoid malloc unsigned int len; len = unpack_int ( cur ); memmove ( (*cur)-1, (*cur), len ); (*cur) += len; (*cur)[-1] = '\0'; return (*cur)-len-1; } static sphinx_uint64_t unpack_qword ( char ** cur ) { sphinx_uint64_t hi, lo; hi = unpack_int ( cur ); lo = unpack_int ( cur ); return ( hi<<32 ) + lo; } static float unpack_float ( char ** cur ) { union { unsigned int n; float f; } u; u.n = unpack_int ( cur ); return u.f; } static void net_get_response ( int fd, sphinx_client * client ) { int len; char header_buf[32], *cur, *response; unsigned short status, ver; // dismiss previous response if ( client->response_buf ) { free ( client->response_buf ); client->response_len = 0; client->response_buf = NULL; } // read and parse the header if ( !net_read ( fd, header_buf, 8, client ) ) { sock_close ( fd ); if ( client->sock>0 ) client->sock = -1; return; } cur = header_buf; status = unpack_short ( &cur ); ver = unpack_short ( &cur ); len = unpack_int ( &cur ); // sanity check the length, alloc the buffer if ( len<0 || len>MAX_PACKET_LEN ) { sock_close ( fd ); if ( client->sock>0 ) client->sock = -1; set_error ( client, "response 
length out of bounds (len=%d)", len ); return; } response = malloc ( len ); if ( !response ) { sock_close ( fd ); if ( client->sock>0 ) client->sock = -1; set_error ( client, "malloc() failed (bytes=%d)", len ); return; } // read the response if ( !net_read ( fd, response, len, client ) ) { sock_close ( fd ); if ( client->sock>0 ) client->sock = -1; free ( response ); return; } // check status cur = response; switch ( status ) { case SEARCHD_OK: case SEARCHD_WARNING: client->error = NULL; // so far so good if ( status==SEARCHD_WARNING ) client->warning = unpack_str ( &cur ); else client->warning = NULL; client->response_len = len; client->response_buf = response; client->response_start = cur; break; case SEARCHD_ERROR: case SEARCHD_RETRY: // copy error message, so that we can immediately free the response set_error ( client, "%s", unpack_str ( &cur ) ); free ( response ); break; default: set_error ( client, "unknown status code (status=%d)", status ); free ( response ); break; } // close one-time socket on success if ( client->sock<0 ) sock_close ( fd ); } sphinx_bool sphinx_open ( sphinx_client * client ) { char buf[16], *pbuf; if ( client->sock>=0 ) { set_error ( client, "already connected" ); return SPH_FALSE; } client->sock = net_connect_get ( client ); if ( client->sock<0 ) return SPH_FALSE; pbuf = buf; send_word ( &pbuf, SEARCHD_COMMAND_PERSIST ); send_word ( &pbuf, 0 ); // dummy version send_int ( &pbuf, 4 ); // dummy body len send_int ( &pbuf, 1 ); // dummy body if ( !net_write ( client->sock, buf, (int)(pbuf-buf), client ) ) { sock_close ( client->sock ); client->sock = -1; return SPH_FALSE; } client->persist = SPH_TRUE; return SPH_TRUE; } sphinx_bool sphinx_close ( sphinx_client * client ) { if ( client->sock<0 ) { set_error ( client, "not connected" ); return SPH_FALSE; } sock_close ( client->sock ); client->sock = -1; client->persist = SPH_FALSE; return SPH_TRUE; } static void * sphinx_malloc ( int len, sphinx_client * client ) { void * res; if ( len<0 || len>MAX_PACKET_LEN ) { set_error ( client, "malloc() length out of bounds, possibly broken response packet (len=%d)", len ); return NULL; } res = malloc ( len ); if ( !res ) set_error ( client, "malloc() failed (bytes=%d)", len ); return res; } sphinx_result * sphinx_run_queries ( sphinx_client * client ) { int i, j, k, l, fd, len, nreqs, id64; char req_header[32], *req, *p, *pmax; sphinx_result * res; union un_attr_value * pval; if ( !client ) return NULL; if ( client->num_reqs<=0 || client->num_reqs>MAX_REQS ) { set_error ( client, "num_reqs=%d out of bounds (too many queries?)", client->num_reqs ); return NULL; } fd = net_connect_ex ( client ); if ( fd<0 ) return NULL; // free previous results sphinx_free_results ( client ); // send query, get response len = 8; for ( i=0; inum_reqs; i++ ) len += client->req_lens[i]; req = req_header; send_word ( &req, SEARCHD_COMMAND_SEARCH ); send_word ( &req, client->ver_search ); send_int ( &req, len ); send_int ( &req, 0 ); // its a client send_int ( &req, client->num_reqs ); if ( !net_write ( fd, req_header, (int)(req-req_header), client ) ) return NULL; for ( i=0; inum_reqs; i++ ) if ( !net_write ( fd, client->reqs[i], client->req_lens[i], client ) ) return NULL; net_get_response ( fd, client ); if ( !client->response_buf ) return NULL; // dismiss request data, memorize count nreqs = sphinx_dismiss_requests ( client ); // parse response p = client->response_start; pmax = client->response_start + client->response_len; // max position for checks, to protect against broken responses for 
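
// ---------------------------------------------------------------------------
// Persistent-connection sketch: sphinx_open() above opens one socket, sends
// SEARCHD_COMMAND_PERSIST and keeps the descriptor in client->sock, so later
// requests reuse it (net_connect_ex() probes the socket and re-opens it once
// if the daemon dropped it); sphinx_close() ends the session. "test1" and the
// query strings are placeholders.

static void example_persistent_session ( sphinx_client * client )
{
	if ( !sphinx_open ( client ) )
	{
		fprintf ( stderr, "open failed: %s\n", sphinx_error ( client ) );
		return;
	}

	sphinx_query ( client, "first query", "test1", NULL ); // both queries share one socket
	sphinx_query ( client, "second query", "test1", NULL );

	sphinx_close ( client );
}
// ---------------------------------------------------------------------------
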
( i=0; iresults[i]; client->num_results++; res->error = NULL; res->warning = NULL; res->status = unpack_int ( &p ); if ( res->status!=SEARCHD_OK ) { if ( res->status==SEARCHD_WARNING ) { res->warning = unpack_str ( &p ); } else { res->error = unpack_str ( &p ); continue; } } // fields res->num_fields = unpack_int ( &p ); res->fields = sphinx_malloc ( res->num_fields*sizeof(const char*), client ); if ( !res->fields ) return NULL; for ( j=0; jnum_fields; j++ ) res->fields[j] = unpack_str ( &p ); // attrs res->num_attrs = unpack_int ( &p ); res->attr_names = sphinx_malloc ( res->num_attrs*sizeof(const char*), client ); if ( !res->attr_names ) return NULL; res->attr_types = sphinx_malloc ( res->num_attrs*sizeof(int), client ); if ( !res->attr_types ) return NULL; for ( j=0; jnum_attrs; j++ ) { res->attr_names[j] = unpack_str ( &p ); res->attr_types[j] = unpack_int ( &p ); } // match count, id bits flag res->num_matches = unpack_int ( &p ); id64 = unpack_int ( &p ); res->values_pool = sphinx_malloc ( (2+res->num_attrs) * res->num_matches * sizeof(union un_attr_value), client ); if ( !res->values_pool ) return NULL; pval = res->values_pool; // matches for ( j=0; jnum_matches; j++ ) { // id if ( id64 ) pval->int_value = unpack_qword ( &p ); else pval->int_value = unpack_int ( &p ); pval++; // weight pval->int_value = unpack_int ( &p ); pval++; // attrs for ( k=0; knum_attrs; k++ ) { switch ( res->attr_types[k] ) { case SPH_ATTR_MULTI64: case SPH_ATTR_MULTI: /*!COMMIT this is totally unsafe on some arches (eg. SPARC)*/ pval->mva_value = (unsigned int *) p; len = unpack_int ( &p ); for ( l=0; l<=len; l++ ) // including first one that is len pval->mva_value[l] = ntohl ( pval->mva_value[l] ); if ( res->attr_types[k]==SPH_ATTR_MULTI64 ) { pval->mva_value[0] = pval->mva_value[0]/2; } p += len*sizeof(unsigned int); break; case SPH_ATTR_FLOAT: pval->float_value = unpack_float ( &p ); break; case SPH_ATTR_BIGINT: pval->int_value = unpack_qword ( &p ); break; case SPH_ATTR_STRING: pval->string = unpack_str ( &p ); break; default: pval->int_value = unpack_int ( &p ); break; } pval++; } } // totals res->total = unpack_int ( &p ); res->total_found = unpack_int ( &p ); res->time_msec = unpack_int ( &p ); res->num_words = unpack_int ( &p ); if ( res->words ) free ( res->words ); res->words = sphinx_malloc ( res->num_words*sizeof(struct st_sphinx_wordinfo), client ); if ( !res->words ) return NULL; // words for ( j=0; jnum_words; j++ ) { res->words[j].word = unpack_str ( &p ); res->words[j].docs = unpack_int ( &p ); res->words[j].hits = unpack_int ( &p ); } // sanity check // FIXME! add it to each unpack? if ( p>pmax ) { set_error ( client, "unpack error (req=%d, reqs=%d)", i, nreqs ); return NULL; } } return client->results; } ////////////////////////////////////////////////////////////////////////// int sphinx_get_num_results ( sphinx_client * client ) { return client ? client->num_results : -1; } sphinx_uint64_t sphinx_get_id ( sphinx_result * result, int match ) { return sphinx_get_int ( result, match, -2 ); } int sphinx_get_weight ( sphinx_result * result, int match ) { return (int)sphinx_get_int ( result, match, -1 ); } sphinx_int64_t sphinx_get_int ( sphinx_result * result, int match, int attr ) { // FIXME! add safety and type checks union un_attr_value * pval; pval = result->values_pool; return pval [ (2+result->num_attrs)*match+2+attr ].int_value; } float sphinx_get_float ( sphinx_result * result, int match, int attr ) { // FIXME! 
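
// ---------------------------------------------------------------------------
// Result-access sketch: the parser above lays each match out in values_pool as
// (2+num_attrs) slots -- document id, weight, then one slot per attribute in
// attr_names[] order -- which is why sphinx_get_id() and sphinx_get_weight()
// are just sphinx_get_int() with attr -2 and -1. The type handling below is
// trimmed to a few cases; MVA attributes are covered by the next sketch.

static void example_print_matches ( sphinx_result * res )
{
	int i, j;
	for ( i=0; i<res->num_matches; i++ )
	{
		printf ( "doc=%llu weight=%d", (unsigned long long) sphinx_get_id ( res, i ), sphinx_get_weight ( res, i ) );
		for ( j=0; j<res->num_attrs; j++ )
		{
			switch ( res->attr_types[j] )
			{
				case SPH_ATTR_FLOAT: printf ( " %s=%f", res->attr_names[j], sphinx_get_float ( res, i, j ) ); break;
				case SPH_ATTR_STRING: printf ( " %s=%s", res->attr_names[j], sphinx_get_string ( res, i, j ) ); break;
				case SPH_ATTR_MULTI:
				case SPH_ATTR_MULTI64: printf ( " %s=(mva)", res->attr_names[j] ); break; // see the MVA sketch below
				default: printf ( " %s=%lld", res->attr_names[j], (long long) sphinx_get_int ( res, i, j ) ); break;
			}
		}
		printf ( "\n" );
	}
	printf ( "total=%d total_found=%d time=%dms\n", res->total, res->total_found, res->time_msec );
}
// ---------------------------------------------------------------------------
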
add safety and type checks union un_attr_value * pval; pval = result->values_pool; return pval [ (2+result->num_attrs)*match+2+attr ].float_value; } unsigned int * sphinx_get_mva ( sphinx_result * result, int match, int attr ) { // FIXME! add safety and type checks union un_attr_value * pval; pval = result->values_pool; return pval [ (2+result->num_attrs)*match+2+attr ].mva_value; } sphinx_uint64_t sphinx_get_mva64_value ( unsigned int * mva, int i ) { sphinx_uint64_t uVal; uVal = ( ( ( (sphinx_uint64_t)( mva[i*2] ) )<<32 ) | (sphinx_uint64_t)( mva[i*2+1] ) ); return uVal; } const char * sphinx_get_string ( sphinx_result * result, int match, int attr ) { // FIXME! add safety and type checks union un_attr_value * pval; pval = result->values_pool; return pval [ (2+result->num_attrs)*match+2+attr ].string; } ////////////////////////////////////////////////////////////////////////// static sphinx_bool net_simple_query ( sphinx_client * client, char * buf, int req_len ) { int fd; fd = net_connect_ex ( client ); if ( fd<0 ) { free ( buf ); return SPH_FALSE; } if ( !net_write ( fd, buf, 8+req_len, client ) ) { free ( buf ); return SPH_FALSE; } free ( buf ); net_get_response ( fd, client ); if ( !client->response_buf ) return SPH_FALSE; return SPH_TRUE; } void sphinx_init_excerpt_options ( sphinx_excerpt_options * opts ) { if ( !opts ) return; opts->before_match = ""; opts->after_match = ""; opts->chunk_separator = " ... "; opts->html_strip_mode = "index"; opts->passage_boundary = "none"; opts->limit = 256; opts->limit_passages = 0; opts->limit_words = 0; opts->around = 5; opts->start_passage_id = 1; opts->exact_phrase = SPH_FALSE; opts->single_passage = SPH_FALSE; opts->use_boundaries = SPH_FALSE; opts->weight_order = SPH_FALSE; opts->query_mode = SPH_FALSE; opts->force_all_words = SPH_FALSE; opts->load_files = SPH_FALSE; opts->allow_empty = SPH_FALSE; opts->emit_zones = SPH_FALSE; } char ** sphinx_build_excerpts ( sphinx_client * client, int num_docs, const char ** docs, const char * index, const char * words, sphinx_excerpt_options * opts ) { sphinx_excerpt_options def_opt; int i, req_len, flags; char *buf, *req, *p, *pmax, **result; if ( !client || !docs || !index || !words || num_docs<=0 ) { if ( !docs ) set_error ( client, "invalid arguments (docs must not be empty)" ); else if ( !index ) set_error ( client, "invalid arguments (index must not be empty)" ); else if ( !words ) set_error ( client, "invalid arguments (words must not be empty)" ); else if ( num_docs<=0 ) set_error ( client, "invalid arguments (num_docs must be positive)" ); return NULL; } // fixup options if ( !opts ) { sphinx_init_excerpt_options ( &def_opt ); opts = &def_opt; } // alloc buffer req_len = (int)( 60 + strlen(index) + strlen(words) + safestrlen(opts->before_match) + safestrlen(opts->after_match) + safestrlen(opts->chunk_separator) + safestrlen(opts->html_strip_mode) + safestrlen(opts->passage_boundary) ); for ( i=0; iexact_phrase ) flags |= 2; if ( opts->single_passage ) flags |= 4; if ( opts->use_boundaries ) flags |= 8; if ( opts->weight_order ) flags |= 16; if ( opts->query_mode ) flags |= 32; if ( opts->force_all_words ) flags |= 64; if ( opts->load_files ) flags |= 128; if ( opts->allow_empty ) flags |= 256; if ( opts->emit_zones ) flags |= 512; send_int ( &req, 0 ); send_int ( &req, flags ); send_str ( &req, index ); send_str ( &req, words ); send_str ( &req, opts->before_match ); send_str ( &req, opts->after_match ); send_str ( &req, opts->chunk_separator ); send_int ( &req, opts->limit ); send_int ( &req, 
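
// ---------------------------------------------------------------------------
// MVA sketch: sphinx_get_mva() above returns the raw byte-swapped block; the
// parser stores the value count in its first 32-bit cell (already halved for
// SPH_ATTR_MULTI64) with the values following, and sphinx_get_mva64_value()
// glues two adjacent cells into one 64-bit value, high word first. The mva+1,
// zero-based indexing below is this sketch's reading of that layout rather
// than a contract spelled out here.

static void example_print_mva ( sphinx_result * res, int match, int attr )
{
	unsigned int * mva = sphinx_get_mva ( res, match, attr );
	unsigned int i, count = mva ? mva[0] : 0;

	for ( i=0; i<count; i++ )
	{
		if ( res->attr_types[attr]==SPH_ATTR_MULTI64 )
			printf ( " %llu", (unsigned long long) sphinx_get_mva64_value ( mva+1, (int)i ) );
		else
			printf ( " %u", mva[1+i] );
	}
	printf ( "\n" );
}
// ---------------------------------------------------------------------------
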
opts->around ); send_int ( &req, opts->limit_passages ); // v1.2 send_int ( &req, opts->limit_words ); send_int ( &req, opts->start_passage_id ); send_str ( &req, opts->html_strip_mode ); send_str ( &req, opts->passage_boundary ); send_int ( &req, num_docs ); for ( i=0; iresponse_start; pmax = client->response_start + client->response_len; // max position for checks, to protect against broken responses result = malloc ( (1+num_docs)*sizeof(char*) ); if ( !result ) { set_error ( client, "malloc() failed (bytes=%d)", (1+num_docs)*sizeof(char*) ); return NULL; } for ( i=0; i<=num_docs; i++ ) result[i] = NULL; for ( i=0; ipmax ) { for ( i=0; iresponse_len<4 ) { set_error ( client, "incomplete reply" ); return -1; } p = client->response_start; return unpack_int ( &p ); } int sphinx_update_attributes_mva ( sphinx_client * client, const char * index, const char * attr, sphinx_uint64_t docid, int num_values, const unsigned int * values ) { int i, req_len; char *buf, *req, *p; // check args if ( !client || !index || !attr || num_values<=0 || !values ) { if ( !index ) set_error ( client, "invalid arguments (index must not be empty)" ); else if ( !attr ) set_error ( client, "invalid arguments (attr must not empty)" ); else if ( num_values<=0 ) set_error ( client, "invalid arguments (num_values must be positive)" ); else if ( !values ) set_error ( client, "invalid arguments (values must not be empty)" ); } // alloc buffer req_len = (int)( 38 + safestrlen(index) + safestrlen(attr) + num_values*4 ); buf = malloc ( 12+req_len ); // request body length plus 12 header bytes if ( !buf ) { set_error ( client, "malloc() failed (bytes=%d)", req_len ); return -1; } // build request req = buf; send_word ( &req, SEARCHD_COMMAND_UPDATE ); send_word ( &req, VER_COMMAND_UPDATE ); send_int ( &req, req_len ); send_str ( &req, index ); send_int ( &req, 1 ); send_str ( &req, attr ); send_int ( &req, 1 ); // SPH_ATTR_MULTI flag send_int ( &req, 1 ); send_qword ( &req, docid ); send_int ( &req, num_values ); for ( i=0; iresponse_len<4 ) { set_error ( client, "incomplete reply" ); return -1; } p = client->response_start; return unpack_int ( &p ); } ////////////////////////////////////////////////////////////////////////// sphinx_keyword_info * sphinx_build_keywords ( sphinx_client * client, const char * query, const char * index, sphinx_bool hits, int * out_num_keywords ) { int i, req_len, nwords, len; char *buf, *req, *p, *pmax; sphinx_keyword_info *res; // check args if ( !client || !query || !index ) { if ( !query ) set_error ( client, "invalid arguments (query must not be empty)" ); else if ( !index ) set_error ( client, "invalid arguments (index must not be empty)" ); else if ( !out_num_keywords ) set_error ( client, "invalid arguments (out_num_keywords must not be null)" ); return NULL; } // alloc buffer req_len = (int)( safestrlen(query) + safestrlen(index) + 12 ); buf = malloc ( 12+req_len ); // request body length plus 12 header bytes if ( !buf ) { set_error ( client, "malloc() failed (bytes=%d)", req_len ); return NULL; } // build request req = buf; send_word ( &req, SEARCHD_COMMAND_KEYWORDS ); send_word ( &req, VER_COMMAND_KEYWORDS ); send_int ( &req, req_len ); send_str ( &req, query ); send_str ( &req, index ); send_int ( &req, hits ); // send query, get response if ( !net_simple_query ( client, buf, req_len ) ) return NULL; // parse response p = client->response_start; pmax = client->response_start + client->response_len; // max position for checks, to protect against broken responses nwords = unpack_int ( &p 
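
// ---------------------------------------------------------------------------
// Excerpt sketch: sphinx_init_excerpt_options() above fills the defaults
// (" ... " chunk separator, limit 256, around 5, start_passage_id 1), and
// sphinx_build_excerpts() returns one highlighted string per input document.
// The index, documents and keywords are placeholders, and freeing each snippet
// plus the array is this sketch's assumption, mirroring the per-entry
// allocation in the parsing code above.

static void example_excerpts ( sphinx_client * client )
{
	const char * docs[2] = { "this is my test text to be highlighted", "another test document" }; // placeholders
	sphinx_excerpt_options opts;
	char ** snippets;
	int i;

	sphinx_init_excerpt_options ( &opts );
	opts.limit = 128;

	snippets = sphinx_build_excerpts ( client, 2, docs, "test1", "test text", &opts );
	if ( !snippets )
	{
		fprintf ( stderr, "excerpts failed: %s\n", sphinx_error ( client ) );
		return;
	}

	for ( i=0; i<2; i++ )
	{
		printf ( "n=%d, res=%s\n", i, snippets[i] );
		free ( snippets[i] );
	}
	free ( snippets );
}
// ---------------------------------------------------------------------------
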
); *out_num_keywords = nwords; len = nwords*sizeof(sphinx_keyword_info); res = (sphinx_keyword_info*) malloc ( len ); if ( !res ) { set_error ( client, "malloc() failed (bytes=%d)", len ); return NULL; } memset ( res, 0, len ); for ( i=0; iresponse_start; pmax = client->response_start + client->response_len; // max position for checks, to protect against broken responses *num_rows = unpack_int ( &p ); *num_cols = unpack_int ( &p ); n = (*num_rows)*(*num_cols); res = (char**) malloc ( n*sizeof(char*) ); for ( i=0; i, 1996. # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2, or (at your option) # any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA # 02111-1307, USA. # As a special exception to the GNU General Public License, if you # distribute this file as part of a program that contains a # configuration script generated by Autoconf, you may include it under # the same distribution terms that you use for the rest of that program. if test $# -eq 0; then echo 1>&2 "Try \`$0 --help' for more information" exit 1 fi run=: # In the cases where this matters, `missing' is being run in the # srcdir already. if test -f configure.ac; then configure_ac=configure.ac else configure_ac=configure.in fi msg="missing on your system" case "$1" in --run) # Try to run requested program, and just exit if it succeeds. run= shift "$@" && exit 0 # Exit code 63 means version mismatch. This often happens # when the user try to use an ancient version of a tool on # a file that requires a minimum version. In this case we # we should proceed has if the program had been absent, or # if --run hadn't been passed. if test $? = 63; then run=: msg="probably too old" fi ;; -h|--h|--he|--hel|--help) echo "\ $0 [OPTION]... PROGRAM [ARGUMENT]... Handle \`PROGRAM [ARGUMENT]...' for when PROGRAM is missing, or return an error status if there is no known handling for PROGRAM. Options: -h, --help display this help and exit -v, --version output version information and exit --run try to run the given command, and emulate it if it fails Supported PROGRAM values: aclocal touch file \`aclocal.m4' autoconf touch file \`configure' autoheader touch file \`config.h.in' automake touch all \`Makefile.in' files bison create \`y.tab.[ch]', if possible, from existing .[ch] flex create \`lex.yy.c', if possible, from existing .c help2man touch the output file lex create \`lex.yy.c', if possible, from existing .c makeinfo touch the output file tar try tar, gnutar, gtar, then tar without non-portable flags yacc create \`y.tab.[ch]', if possible, from existing .[ch] Send bug reports to ." exit 0 ;; -v|--v|--ve|--ver|--vers|--versi|--versio|--version) echo "missing $scriptversion (GNU Automake)" exit 0 ;; -*) echo 1>&2 "$0: Unknown \`$1' option" echo 1>&2 "Try \`$0 --help' for more information" exit 1 ;; esac # Now exit if we have it, but it failed. Also exit now if we # don't have it and --version was passed (most likely to detect # the program). case "$1" in lex|yacc) # Not GNU programs, they don't have --version. 
;; tar) if test -n "$run"; then echo 1>&2 "ERROR: \`tar' requires --run" exit 1 elif test "x$2" = "x--version" || test "x$2" = "x--help"; then exit 1 fi ;; *) if test -z "$run" && ($1 --version) > /dev/null 2>&1; then # We have it, but it failed. exit 1 elif test "x$2" = "x--version" || test "x$2" = "x--help"; then # Could not run --version or --help. This is probably someone # running `$TOOL --version' or `$TOOL --help' to check whether # $TOOL exists and not knowing $TOOL uses missing. exit 1 fi ;; esac # If it does not exist, or fails to run (possibly an outdated version), # try to emulate it. case "$1" in aclocal*) echo 1>&2 "\ WARNING: \`$1' is $msg. You should only need it if you modified \`acinclude.m4' or \`${configure_ac}'. You might want to install the \`Automake' and \`Perl' packages. Grab them from any GNU archive site." touch aclocal.m4 ;; autoconf) echo 1>&2 "\ WARNING: \`$1' is $msg. You should only need it if you modified \`${configure_ac}'. You might want to install the \`Autoconf' and \`GNU m4' packages. Grab them from any GNU archive site." touch configure ;; autoheader) echo 1>&2 "\ WARNING: \`$1' is $msg. You should only need it if you modified \`acconfig.h' or \`${configure_ac}'. You might want to install the \`Autoconf' and \`GNU m4' packages. Grab them from any GNU archive site." files=`sed -n 's/^[ ]*A[CM]_CONFIG_HEADER(\([^)]*\)).*/\1/p' ${configure_ac}` test -z "$files" && files="config.h" touch_files= for f in $files; do case "$f" in *:*) touch_files="$touch_files "`echo "$f" | sed -e 's/^[^:]*://' -e 's/:.*//'`;; *) touch_files="$touch_files $f.in";; esac done touch $touch_files ;; automake*) echo 1>&2 "\ WARNING: \`$1' is $msg. You should only need it if you modified \`Makefile.am', \`acinclude.m4' or \`${configure_ac}'. You might want to install the \`Automake' and \`Perl' packages. Grab them from any GNU archive site." find . -type f -name Makefile.am -print | sed 's/\.am$/.in/' | while read f; do touch "$f"; done ;; autom4te) echo 1>&2 "\ WARNING: \`$1' is needed, but is $msg. You might have modified some files without having the proper tools for further handling them. You can get \`$1' as part of \`Autoconf' from any GNU archive site." file=`echo "$*" | sed -n 's/.*--output[ =]*\([^ ]*\).*/\1/p'` test -z "$file" && file=`echo "$*" | sed -n 's/.*-o[ ]*\([^ ]*\).*/\1/p'` if test -f "$file"; then touch $file else test -z "$file" || exec >$file echo "#! /bin/sh" echo "# Created by GNU Automake missing as a replacement of" echo "# $ $@" echo "exit 0" chmod +x $file exit 1 fi ;; bison|yacc) echo 1>&2 "\ WARNING: \`$1' $msg. You should only need it if you modified a \`.y' file. You may need the \`Bison' package in order for those modifications to take effect. You can get \`Bison' from any GNU archive site." rm -f y.tab.c y.tab.h if [ $# -ne 1 ]; then eval LASTARG="\${$#}" case "$LASTARG" in *.y) SRCFILE=`echo "$LASTARG" | sed 's/y$/c/'` if [ -f "$SRCFILE" ]; then cp "$SRCFILE" y.tab.c fi SRCFILE=`echo "$LASTARG" | sed 's/y$/h/'` if [ -f "$SRCFILE" ]; then cp "$SRCFILE" y.tab.h fi ;; esac fi if [ ! -f y.tab.h ]; then echo >y.tab.h fi if [ ! -f y.tab.c ]; then echo 'main() { return 0; }' >y.tab.c fi ;; lex|flex) echo 1>&2 "\ WARNING: \`$1' is $msg. You should only need it if you modified a \`.l' file. You may need the \`Flex' package in order for those modifications to take effect. You can get \`Flex' from any GNU archive site." 
rm -f lex.yy.c if [ $# -ne 1 ]; then eval LASTARG="\${$#}" case "$LASTARG" in *.l) SRCFILE=`echo "$LASTARG" | sed 's/l$/c/'` if [ -f "$SRCFILE" ]; then cp "$SRCFILE" lex.yy.c fi ;; esac fi if [ ! -f lex.yy.c ]; then echo 'main() { return 0; }' >lex.yy.c fi ;; help2man) echo 1>&2 "\ WARNING: \`$1' is $msg. You should only need it if you modified a dependency of a manual page. You may need the \`Help2man' package in order for those modifications to take effect. You can get \`Help2man' from any GNU archive site." file=`echo "$*" | sed -n 's/.*-o \([^ ]*\).*/\1/p'` if test -z "$file"; then file=`echo "$*" | sed -n 's/.*--output=\([^ ]*\).*/\1/p'` fi if [ -f "$file" ]; then touch $file else test -z "$file" || exec >$file echo ".ab help2man is required to generate this page" exit 1 fi ;; makeinfo) echo 1>&2 "\ WARNING: \`$1' is $msg. You should only need it if you modified a \`.texi' or \`.texinfo' file, or any other file indirectly affecting the aspect of the manual. The spurious call might also be the consequence of using a buggy \`make' (AIX, DU, IRIX). You might want to install the \`Texinfo' package or the \`GNU make' package. Grab either from any GNU archive site." file=`echo "$*" | sed -n 's/.*-o \([^ ]*\).*/\1/p'` if test -z "$file"; then file=`echo "$*" | sed 's/.* \([^ ]*\) *$/\1/'` file=`sed -n '/^@setfilename/ { s/.* \([^ ]*\) *$/\1/; p; q; }' $file` fi touch $file ;; tar) shift # We have already tried tar in the generic part. # Look for gnutar/gtar before invocation to avoid ugly error # messages. if (gnutar --version > /dev/null 2>&1); then gnutar "$@" && exit 0 fi if (gtar --version > /dev/null 2>&1); then gtar "$@" && exit 0 fi firstarg="$1" if shift; then case "$firstarg" in *o*) firstarg=`echo "$firstarg" | sed s/o//` tar "$firstarg" "$@" && exit 0 ;; esac case "$firstarg" in *h*) firstarg=`echo "$firstarg" | sed s/h//` tar "$firstarg" "$@" && exit 0 ;; esac fi echo 1>&2 "\ WARNING: I can't seem to be able to run \`tar' with the given arguments. You may want to install GNU tar or Free paxutils, or check the command line arguments." exit 1 ;; *) echo 1>&2 "\ WARNING: \`$1' is needed, and is $msg. You might have modified some files without having the proper tools for further handling them. Check the \`README' file, it often tells you about the needed prerequisites for installing this package. You may also peek at any GNU archive site, in case some other package would contain this missing \`$1' program." exit 1 ;; esac exit 0 # Local variables: # eval: (add-hook 'write-file-hooks 'time-stamp) # time-stamp-start: "scriptversion=" # time-stamp-format: "%:y-%02m-%02d.%02H" # time-stamp-end: "$" # End: sphinx-2.0.4-release/api/libsphinxclient/README0000644000176700017710000000265611034764367020617 0ustar deogardeogarPure C searchd client API library Sphinx search engine, http://sphinxsearch.com/ API notes ---------- 1. API can either copy the contents of passed pointer arguments, or rely on the application that the pointer will not become invalid. This is controlled on per-client basis; see 'copy_args' argument to the sphinx_create() call. When 'copy_args' is true, API will create and manage a copy of every string and array passed to it. This causes additional malloc() pressure, but makes calling code easier to write. When 'copy_args' is false, API expects that pointers passed to sphinx_set_xxx() calls will still be valid at the time when sphinx_query() or sphinx_add_query() are called. 
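For comparison, here is a minimal sketch of the 'copy_args' set to true case. (This example is not part of the original notes; it assumes the sphinx_create(), sphinx_add_filter_range(), sphinx_query() and sphinx_destroy() entry points declared in sphinxclient.h.) Because the API keeps its own copies of all passed strings, a local buffer may safely change or go out of scope before the query is actually run:

    #include "sphinxclient.h"

    sphinx_client * client;
    sphinx_result * result;
    char buffer[256];

    client = sphinx_create ( SPH_TRUE ); // copy_args is true; API copies every string passed to it

    strncpy ( buffer, "test", sizeof(buffer) );
    sphinx_add_filter_range ( client, buffer, 10, 20, SPH_FALSE ); // API stores its own copy of "test"

    // buffer may be modified or discarded from here on
    result = sphinx_query ( client, "my keywords", "index", NULL );

    sphinx_destroy ( client );

The trade-off is exactly the one described above: extra malloc() pressure in exchange for not having to track argument lifetimes in the calling code.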
Rule of thumb: when 'copy_args' is false, do not free query arguments until you have the search result. Example code for that case: VALID CODE: char * my_filter_name; my_filter_name = malloc ( 256 ); strncpy ( my_filter_name, "test", 256 ); sphinx_add_filter_range ( client, my_filter_name, 10, 20, false ); result = sphinx_query ( client ); free ( my_filter_name ); my_filter_name = NULL; INVALID CODE: void setup_my_filter ( sphinx_client * client ) { char buffer[256]; strncpy ( buffer, "test", sizeof(buffer) ); // INVALID! by the time when sphinx_query() is called, // buffer will be out of scope sphinx_add_filter_range ( client, buffer, 10, 20, false ); } setup_my_filter ( client ); result = sphinx_query ( client ); --eof-- sphinx-2.0.4-release/api/libsphinxclient/smoke_test.sh0000755000176700017710000000177211723113351022433 0ustar deogardeogar#!/bin/sh FAILLOG="/tmp/faillog1" DIFF='smoke_diff.txt' RES='smoke_test.txt' REF='smoke_ref.txt' SHELL='/bin/sh' LINE='-----------------------------\n' die() { cat $FAILLOG echo $LINE [ ! "z$2" = "z" ] && { eval $2; echo "$LINE"; } echo "C API:$1" [ -e "$FAILLOG" ] && rm $FAILLOG cmd "../../src/searchd -c smoke_test.conf --stop" exit 1 } cmd () { echo "Executing: $1\n">$FAILLOG eval $1 1>>$FAILLOG 2>&1 || die "$2" "$3" } cmd "$SHELL ./configure --with-debug" "configure failed" cmd "make clean" "make clean failed" cmd "make" "make failed" cmd "../../src/indexer -c smoke_test.conf --all" "indexing failed" cmd "../../src/searchd -c smoke_test.conf --test" "searchd start failed" cmd "sleep 1s" cmd "./test --smoke --port 10312>$RES" "test --smoke --port 10312 failed" cmd "../../src/searchd -c smoke_test.conf --stop" "searchd stop failed" cmd "make clean" " " cmd "diff --unified=3 $REF $RES >$DIFF" 'diff failed' "cat $DIFF" rm $RES rm $DIFF rm $FAILLOG echo "all ok" exit 0 sphinx-2.0.4-release/api/libsphinxclient/sphinxclient_config.h.in0000644000176700017710000000272711102461317024532 0ustar deogardeogar/* sphinxclient_config.h.in. Generated from configure.in by autoheader. */ /* Define to 1 if you have the header file. */ #undef HAVE_DLFCN_H /* Define to 1 if you have the header file. */ #undef HAVE_INTTYPES_H /* Define to 1 if you have the header file. */ #undef HAVE_MEMORY_H /* Define to 1 if you have the header file. */ #undef HAVE_STDINT_H /* Define to 1 if you have the header file. */ #undef HAVE_STDLIB_H /* Define to 1 if you have the header file. */ #undef HAVE_STRINGS_H /* Define to 1 if you have the header file. */ #undef HAVE_STRING_H /* Define to 1 if you have the header file. */ #undef HAVE_SYS_STAT_H /* Define to 1 if you have the header file. */ #undef HAVE_SYS_TYPES_H /* Define to 1 if you have the header file. */ #undef HAVE_UNISTD_H /* Define to the address where bug reports for this package should be sent. */ #undef PACKAGE_BUGREPORT /* Define to the full name of this package. */ #undef PACKAGE_NAME /* Define to the full name and version of this package. */ #undef PACKAGE_STRING /* Define to the one symbol short name of this package. */ #undef PACKAGE_TARNAME /* Define to the version of this package. */ #undef PACKAGE_VERSION /* debug build */ #undef SPHINXCLIENT_DEBUG /* Define to 1 if you have the ANSI C header files. */ #undef STDC_HEADERS /* Define to `unsigned' if does not define. 
*/ #undef size_t sphinx-2.0.4-release/sphinx.spec0000644000176700017710000000150710765500461016137 0ustar deogardeogarSummary: Sphinx full-text search engine Name: sphinx Version: 0.9.9 Release: r1207 License: GPL Group: Development BuildRoot: /tmp/sphinx-%{version}-svn-%{release} Source: http://sphinxsearch.com/downloads/sphinx-%{version}-svn-%{release}.tar.gz URL: http://sphinxsearch.com/ BuildRequires: mysql-devel Requires: mysql %description Sphinx is a free, open-source full-text search engine, designed with indexing database content in mind. %prep %setup -n sphinx-%{version}-svn-%{release} %build CPPFLAGS="-D_FILE_OFFSET_BITS=64" export CPPFLAGS %configure make %install mkdir -p $RPM_BUILD_ROOT make DESTDIR=$RPM_BUILD_ROOT install mkdir -p $RPM_BUILD_ROOT/%{_libdir}/sphinx cp -R -p api $RPM_BUILD_ROOT/%{_libdir}/sphinx %files /usr/bin/* %{_libdir}/sphinx %config /etc/* %doc INSTALL doc/sphinx.* %clean rm -rf $RPM_BUILD_ROOT sphinx-2.0.4-release/test/0000755000176700017710000000000011724063141014721 5ustar deogardeogarsphinx-2.0.4-release/test/test_121/0000755000176700017710000000000011724063141016263 5ustar deogardeogarsphinx-2.0.4-release/test/test_121/test.xml0000644000176700017710000000220311621517072017764 0ustar deogardeogar multiply stopwords source indexer { mem_limit = 16M } searchd { } source srctest { type = mysql sql_query = SELECT * FROM test_table } index test { source = srctest path = /test charset_type = utf-8 stopwords = stopwords.txt stopwords_121.txt stopword_step = 0 } the very add adds "look and add books" "look add books" "look books" CREATE TABLE `test_table` ( `document_id` int(11) NOT NULL default '0', `body` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 1, 'the add' ), ( 2, 'add' ), ( 3, 'add is very plus' ), ( 4, 'the adds is very good' ), ( 5, 'look books together' ), ( 6, 'look add books together' ) sphinx-2.0.4-release/test/test_121/model.bin0000644000176700017710000000552611621517072020070 0ustar 
deogardeogara:1:{i:0;a:7:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:3:"the";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"very";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"very";}i:2;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:3:"add";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"adds";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"adds";}i:4;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:5;a:2:{s:6:"weight";s:4:"2607";s:5:"attrs";a:0:{}}i:6;a:2:{s:6:"weight";s:4:"2607";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:4:"look";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:5:"books";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:20:""look and add books"";}i:5;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:5;a:2:{s:6:"weight";s:4:"2607";s:5:"attrs";a:0:{}}i:6;a:2:{s:6:"weight";s:4:"2607";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:4:"look";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:5:"books";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:16:""look add books"";}i:6;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:5;a:2:{s:6:"weight";s:4:"2607";s:5:"attrs";a:0:{}}i:6;a:2:{s:6:"weight";s:4:"2607";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:4:"look";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:5:"books";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:12:""look books"";}}}sphinx-2.0.4-release/test/test_052/0000755000176700017710000000000011724063141016266 5ustar deogardeogarsphinx-2.0.4-release/test/test_052/test.xml0000644000176700017710000000660111611541474017777 0ustar deogardeogar before operator indexer { mem_limit = 16M } searchd { } source test { type = mysql sql_query = SELECT * FROM test_table where document_id in (1,2,3,4,5 ) } index test { source = test path = /test min_word_len = 1 } source test1 { type = mysql sql_query = SELECT * FROM test_table where document_id=6 } index test1 { source = test1 path = /test1 } source test2 { type = mysql sql_query = SELECT * FROM test_table where 
document_id in ( 10, 11, 12, 13, 14, 15 ) } index test2 { source = test2 path = /test2 } CREATE TABLE test_table ( document_id INT NOT NULL, title VARCHAR(255) NOT NULL, text VARCHAR(4096) NOT NULL ); DROP TABLE IF EXISTS test_table; INSERT INTO test_table ( document_id, title, text ) VALUES ( 1, 'aaa bbb', 'ccc ddd' ), ( 2, 'xxx', 'ccc ddd eee fff ggg' ), ( 3, 'yyy', 'one one one two three' ), ( 4, 'zzz', 'one two three one three one two four one two three four' ), ( 5, '', 'a b c d e f g' ), ( 6, '', 'h1 h2 h3 h4 h5' ), ( 10, '', 'hi' ), ( 11, '', 'hi' ), ( 12, '', 'zi' ), ( 13, '', CONCAT(REPEAT('vi mi ', 530), ' vi mi hi' ) ), ( 14, '', 'vi mi' ), ( 15, '', 'lo' ); sphinx-2.0.4-release/test/test_052/model.bin0000644000176700017710000003542711616617000020072 0ustar deogardeogara:1:{i:0;a:28:{i:0;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:3:"aaa";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"ccc";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:12:" aaa << ccc ";}i:1;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.002";s:5:"words";a:3:{s:3:"aaa";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"bbb";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"ccc";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:19:" aaa << bbb << ccc ";}i:2;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.002";s:5:"words";a:3:{s:3:"aaa";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"ccc";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:3:"ddd";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:19:" aaa << ccc << ddd ";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:1;a:2:{s:6:"weight";s:4:"2543";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"2543";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:3:"ccc";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:3:"ddd";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:12:" ccc << ddd ";}i:4;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:4:"2529";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:3:{s:3:"ccc";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:3:"eee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"fff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:19:" ccc << eee << fff 
";}i:5;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:4:"2529";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:3:{s:3:"ccc";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:3:"ddd";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:3:"ggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:19:" ccc << ddd << ggg ";}i:6;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.003";s:5:"words";a:3:{s:3:"ccc";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:3:"ddd";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:3:"xxx";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:19:" ccc << ddd << xxx ";}i:7;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.002";s:5:"words";a:3:{s:3:"eee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"ddd";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:3:"ggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:19:" eee << ddd << ggg ";}i:8;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:4;a:2:{s:6:"weight";s:4:"3549";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"2546";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.003";s:5:"words";a:3:{s:3:"one";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"7";}s:3:"two";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"4";}s:5:"three";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"4";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:21:" one << two << three ";}i:9;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:4;a:2:{s:6:"weight";s:4:"2574";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"1569";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:3:"one";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"4";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:14:" one << three ";}i:10;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:4;a:2:{s:6:"weight";s:4:"2574";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"2569";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.003";s:5:"words";a:2:{s:3:"one";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"4";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:21:" one << one << three 
";}i:11;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:3;a:2:{s:6:"weight";s:4:"3569";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:4:"1574";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.003";s:5:"words";a:2:{s:3:"one";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"4";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:28:" one << one << one << three ";}i:12;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:4:"1574";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.004";s:5:"words";a:2:{s:3:"one";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"4";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:35:" one << one << one << one << three ";}i:13;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:4:"4537";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"7";}s:3:"two";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"4";}s:5:"three";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"4";}s:4:"four";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:29:" one << two << three << four ";}i:14;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.003";s:5:"words";a:4:{s:1:"a";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:1:"b";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:1:"c";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:1:"d";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:24:" "a b c" << b << c << d ";}i:15;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.003";s:5:"words";a:5:{s:1:"a";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:1:"b";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:1:"c";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:1:"d";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:1:"e";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:24:" "a b c" << c << d << e ";}i:16;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"3602";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:6:{s:1:"a";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:1:"b";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:1:"c";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:1:"e";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:1:"f";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:1:"g";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:24:" "a b c" << e << f << g 
";}i:17;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"4540";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:5:{s:1:"a";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:1:"b";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:1:"c";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:1:"d";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:1:"e";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:19:" a << "b c d" << e ";}i:18;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.004";s:5:"words";a:6:{s:1:"a";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:1:"b";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:1:"c";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:1:"d";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:1:"e";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:1:"f";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:22:" "a b c d" << "d e f" ";}i:19;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"4616";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.004";s:5:"words";a:7:{s:1:"a";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:1:"b";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:1:"c";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:1:"d";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:1:"e";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:1:"f";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:1:"g";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:22:" "a b c d" << "e f g" ";}i:20;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:2;a:2:{s:6:"weight";s:4:"1594";s:5:"attrs";a:0:{}}i:1;a:2:{s:6:"weight";s:4:"1521";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.002";s:5:"words";a:4:{s:3:"ccc";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:3:"ddd";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:3:"eee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"ggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:34:" (ccc | "ddd eee") << (ddd | ggg) ";}i:21;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"2543";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:3:"ccc";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:3:"ddd";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:13:" ccc << ddd$ 
";}i:22;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:4:"2546";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:3:{s:3:"one";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"7";}s:3:"two";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"4";}s:5:"three";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"4";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:23:" ^one << two << three$ ";}i:23;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:4:"5546";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.005";s:5:"words";a:3:{s:3:"one";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"7";}s:3:"two";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"4";}s:5:"three";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"4";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:36:" ^one << "one one" << two << three$ ";}i:24;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1568";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:3:{s:3:"zzz";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"aaa";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"bbb";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:20:" "zzz aaa"/1 << bbb ";}i:25;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:3:"zzz";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"aaa";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"ddd";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:20:" "zzz aaa"/1 << ddd ";}i:26;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"3500";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.004";s:5:"words";a:5:{s:2:"h1";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:2:"h2";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:2:"h3";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:2:"h4";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:2:"h5";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:72:" ("h1 h2" NEAR/5 ( h3 | h4 | h5) ) << ("h1 h2" NEAR/5 ( h3 | h4 | h5) ) ";}i:27;a:3:{s:8:"sphinxql";s:87:" select * from test2 where match ( ' hi | ( vi << mi )' ) and id in ( 10, 11, 12, 15 ) ";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:2:{s:2:"id";s:2:"10";s:6:"weight";s:4:"1511";}i:1;a:2:{s:2:"id";s:2:"11";s:6:"weight";s:4:"1511";}}}}}sphinx-2.0.4-release/test/test_187/0000755000176700017710000000000011724063141016277 5ustar deogardeogarsphinx-2.0.4-release/test/test_187/test.xml0000644000176700017710000000517111721250154020003 0ustar deogardeogar ATTACH INDEX indexer { mem_limit = 16M } searchd { workers = threads } source disk { type = mysql sql_query = select * from test_table sql_attr_uint = gid } index disk { source = disk path = /disk charset_table = a..z, A..Z->a..z } index rt { type = rt path = data/rt 
rt_attr_uint = gid1 rt_attr_uint = gid2 rt_field = title rt_field = content charset_table = a..z, A..Z->a..z, _ } source src_plain { type = mysql sql_query = select id, title, 'dummy for' as str1, '55, 15, 20' as mva1, gid from test_table sql_attr_string = str1 sql_attr_multi = uint mva1 from field sql_attr_uint = gid } index plain { source = src_plain path = /plain charset_type = utf-8 } index rt_arena { type = rt path = data/rt_arena rt_field = title rt_attr_string = str1 rt_attr_multi = mva1 rt_attr_uint = gid charset_type = utf-8 } CREATE TABLE test_table ( id INTEGER PRIMARY KEY NOT NULL, gid INTEGER NOT NULL, title VARCHAR(255) NOT NULL ); DROP TABLE IF EXISTS test_table; INSERT INTO test_table VALUES ( 1, 11, 'Fare thee well' ), ( 2, 11, 'And if for ever' ), ( 3, 11, 'Still for ever fare thee well' ), ( 4, 11, 'under_score' ) SELECT * FROM rt SELECT * FROM disk SELECT * FROM disk WHERE MATCH('thee') SELECT * FROM disk WHERE MATCH('under') ATTACH INDEX disk TO RTINDEX rt SELECT * FROM rt SELECT * FROM disk SELECT * FROM rt WHERE MATCH('thee') DESC rt INSERT INTO rt ( id, gid, title ) VALUES ( 10, 22, 'I dub thee unforgiven' ) SELECT * FROM rt WHERE MATCH('thee') INSERT INTO rt ( id, gid, title ) VALUES ( 11, 22, 'under_score_again' ) SELECT * FROM rt WHERE MATCH('under') SELECT * FROM rt_arena ATTACH INDEX plain TO RTINDEX rt_arena SELECT * FROM rt_arena where mva1=15 order by id asc SELECT * FROM rt_arena where mva1=55 order by id desc SELECT * FROM rt_arena sphinx-2.0.4-release/test/test_187/model.bin0000644000176700017710000001120411721250154020066 0ustar deogardeogara:1:{i:0;a:18:{i:0;a:2:{s:8:"sphinxql";s:16:"SELECT * FROM rt";s:10:"total_rows";i:0;}i:1;a:3:{s:8:"sphinxql";s:18:"SELECT * FROM disk";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"gid";s:2:"11";}i:1;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:3:"gid";s:2:"11";}i:2;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"gid";s:2:"11";}i:3;a:3:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:3:"gid";s:2:"11";}}}i:2;a:3:{s:8:"sphinxql";s:38:"SELECT * FROM disk WHERE MATCH('thee')";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1557";s:3:"gid";s:2:"11";}i:1;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1557";s:3:"gid";s:2:"11";}}}i:3;a:3:{s:8:"sphinxql";s:39:"SELECT * FROM disk WHERE MATCH('under')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"4";s:6:"weight";s:4:"1695";s:3:"gid";s:2:"11";}}}i:4;a:2:{s:8:"sphinxql";s:31:"ATTACH INDEX disk TO RTINDEX rt";s:14:"total_affected";i:0;}i:5;a:3:{s:8:"sphinxql";s:16:"SELECT * FROM rt";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"gid";s:2:"11";}i:1;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:3:"gid";s:2:"11";}i:2;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"gid";s:2:"11";}i:3;a:3:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:3:"gid";s:2:"11";}}}i:6;a:3:{s:8:"sphinxql";s:18:"SELECT * FROM disk";s:5:"error";s:34:"no enabled local indexes to search";s:5:"errno";i:1064;}i:7;a:3:{s:8:"sphinxql";s:36:"SELECT * FROM rt WHERE MATCH('thee')";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1557";s:3:"gid";s:2:"11";}i:1;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1557";s:3:"gid";s:2:"11";}}}i:8;a:3:{s:8:"sphinxql";s:7:"DESC 
rt";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:2:{s:5:"Field";s:2:"id";s:4:"Type";s:7:"integer";}i:1;a:2:{s:5:"Field";s:5:"title";s:4:"Type";s:5:"field";}i:2;a:2:{s:5:"Field";s:3:"gid";s:4:"Type";s:4:"uint";}}}i:9;a:2:{s:8:"sphinxql";s:76:"INSERT INTO rt ( id, gid, title ) VALUES ( 10, 22, 'I dub thee unforgiven' )";s:14:"total_affected";i:1;}i:10;a:3:{s:8:"sphinxql";s:36:"SELECT * FROM rt WHERE MATCH('thee')";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:3:{s:2:"id";s:2:"10";s:6:"weight";s:4:"1704";s:3:"gid";s:2:"22";}i:1;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1557";s:3:"gid";s:2:"11";}i:2;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1557";s:3:"gid";s:2:"11";}}}i:11;a:2:{s:8:"sphinxql";s:72:"INSERT INTO rt ( id, gid, title ) VALUES ( 11, 22, 'under_score_again' )";s:14:"total_affected";i:1;}i:12;a:3:{s:8:"sphinxql";s:37:"SELECT * FROM rt WHERE MATCH('under')";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:3:{s:2:"id";s:2:"11";s:6:"weight";s:4:"1709";s:3:"gid";s:2:"22";}i:1;a:3:{s:2:"id";s:1:"4";s:6:"weight";s:4:"1695";s:3:"gid";s:2:"11";}}}i:13;a:2:{s:8:"sphinxql";s:22:"SELECT * FROM rt_arena";s:10:"total_rows";i:0;}i:14;a:2:{s:8:"sphinxql";s:38:"ATTACH INDEX plain TO RTINDEX rt_arena";s:14:"total_affected";i:0;}i:15;a:3:{s:8:"sphinxql";s:52:"SELECT * FROM rt_arena where mva1=15 order by id asc";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:5:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"gid";s:2:"11";s:4:"str1";s:9:"dummy for";s:4:"mva1";s:8:"15,20,55";}i:1;a:5:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:3:"gid";s:2:"11";s:4:"str1";s:9:"dummy for";s:4:"mva1";s:8:"15,20,55";}i:2;a:5:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"gid";s:2:"11";s:4:"str1";s:9:"dummy for";s:4:"mva1";s:8:"15,20,55";}i:3;a:5:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:3:"gid";s:2:"11";s:4:"str1";s:9:"dummy for";s:4:"mva1";s:8:"15,20,55";}}}i:16;a:3:{s:8:"sphinxql";s:53:"SELECT * FROM rt_arena where mva1=55 order by id desc";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:5:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:3:"gid";s:2:"11";s:4:"str1";s:9:"dummy for";s:4:"mva1";s:8:"15,20,55";}i:1;a:5:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"gid";s:2:"11";s:4:"str1";s:9:"dummy for";s:4:"mva1";s:8:"15,20,55";}i:2;a:5:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:3:"gid";s:2:"11";s:4:"str1";s:9:"dummy for";s:4:"mva1";s:8:"15,20,55";}i:3;a:5:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"gid";s:2:"11";s:4:"str1";s:9:"dummy for";s:4:"mva1";s:8:"15,20,55";}}}i:17;a:3:{s:8:"sphinxql";s:22:"SELECT * FROM rt_arena";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:5:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"gid";s:2:"11";s:4:"str1";s:9:"dummy for";s:4:"mva1";s:8:"15,20,55";}i:1;a:5:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:3:"gid";s:2:"11";s:4:"str1";s:9:"dummy for";s:4:"mva1";s:8:"15,20,55";}i:2;a:5:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"gid";s:2:"11";s:4:"str1";s:9:"dummy for";s:4:"mva1";s:8:"15,20,55";}i:3;a:5:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:3:"gid";s:2:"11";s:4:"str1";s:9:"dummy for";s:4:"mva1";s:8:"15,20,55";}}}}}sphinx-2.0.4-release/test/test_092/0000755000176700017710000000000011724063141016272 5ustar deogardeogarsphinx-2.0.4-release/test/test_092/test.xml0000644000176700017710000000440511421075337020002 0ustar deogardeogar RT: re-insert deleted row, keyword weights vs different insert scenarios indexer { mem_limit = 16M } searchd { workers = threads binlog_path = } index test92 { type = rt path = data/test92 rt_attr_uint = idd rt_field = content } index test102 { type = rt path = data/test102 rt_attr_uint = idd rt_field = content } index 
test104 { type = rt path = data/test104 rt_attr_uint = idd rt_field = content } index test105 { type = rt path = data/test105 rt_attr_uint = idd rt_field = content } insert into test92 (id,content) values (1,'content'),(2,'you') delete from test92 where id=1 select * from test92 insert into test92 (id,content) values (1,'newcontent') select * from test92 where match('you') select * from test92 where match('content') select * from test92 where match('newcontent') insert into test102 (id,content) values (1,'content'),(2,'newcontent') delete from test102 where id=1 insert into test102 (id,content) values (1,'bla') select * from test102 where match('bla') select * from test102 where match('newcontent') insert into test102 (id,content) values (3,'content2') select * from test102 where match('bla') select * from test102 where match('content2') select * from test102 where match('newcontent') insert into test104 (id,content) values (1,'word1'),(2,'word2') insert into test104 (id,content) values (3,'word3') select * from test104 where match('word1|word3') insert into test105 (id,content) values (1,'content'),(2,'newcontent') replace into test105 (id,content) values (1,'contend') select * from test105 where match('contend|newcontent') select * from test105 where match('content') sphinx-2.0.4-release/test/test_092/model.bin0000644000176700017710000000655311455516446020111 0ustar deogardeogara:1:{i:0;a:23:{i:0;a:2:{s:8:"sphinxql";s:62:"insert into test92 (id,content) values (1,'content'),(2,'you')";s:14:"total_affected";i:2;}i:1;a:2:{s:8:"sphinxql";s:29:"delete from test92 where id=1";s:14:"total_affected";i:0;}i:2;a:3:{s:8:"sphinxql";s:20:"select * from test92";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:3:"idd";s:1:"0";}}}i:3;a:2:{s:8:"sphinxql";s:55:"insert into test92 (id,content) values (1,'newcontent')";s:14:"total_affected";i:1;}i:4;a:3:{s:8:"sphinxql";s:39:"select * from test92 where match('you')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1643";s:3:"idd";s:1:"0";}}}i:5;a:2:{s:8:"sphinxql";s:43:"select * from test92 where match('content')";s:10:"total_rows";i:0;}i:6;a:3:{s:8:"sphinxql";s:46:"select * from test92 where match('newcontent')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1643";s:3:"idd";s:1:"0";}}}i:7;a:2:{s:8:"sphinxql";s:70:"insert into test102 (id,content) values (1,'content'),(2,'newcontent')";s:14:"total_affected";i:2;}i:8;a:2:{s:8:"sphinxql";s:30:"delete from test102 where id=1";s:14:"total_affected";i:0;}i:9;a:2:{s:8:"sphinxql";s:49:"insert into test102 (id,content) values (1,'bla')";s:14:"total_affected";i:1;}i:10;a:3:{s:8:"sphinxql";s:40:"select * from test102 where match('bla')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1643";s:3:"idd";s:1:"0";}}}i:11;a:3:{s:8:"sphinxql";s:47:"select * from test102 where match('newcontent')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1643";s:3:"idd";s:1:"0";}}}i:12;a:2:{s:8:"sphinxql";s:54:"insert into test102 (id,content) values (3,'content2')";s:14:"total_affected";i:1;}i:13;a:3:{s:8:"sphinxql";s:40:"select * from test102 where match('bla')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1680";s:3:"idd";s:1:"0";}}}i:14;a:3:{s:8:"sphinxql";s:45:"select * from test102 where 
match('content2')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1680";s:3:"idd";s:1:"0";}}}i:15;a:3:{s:8:"sphinxql";s:47:"select * from test102 where match('newcontent')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1680";s:3:"idd";s:1:"0";}}}i:16;a:2:{s:8:"sphinxql";s:63:"insert into test104 (id,content) values (1,'word1'),(2,'word2')";s:14:"total_affected";i:2;}i:17;a:2:{s:8:"sphinxql";s:51:"insert into test104 (id,content) values (3,'word3')";s:14:"total_affected";i:1;}i:18;a:3:{s:8:"sphinxql";s:48:"select * from test104 where match('word1|word3')";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1590";s:3:"idd";s:1:"0";}i:1;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1590";s:3:"idd";s:1:"0";}}}i:19;a:2:{s:8:"sphinxql";s:70:"insert into test105 (id,content) values (1,'content'),(2,'newcontent')";s:14:"total_affected";i:2;}i:20;a:2:{s:8:"sphinxql";s:54:"replace into test105 (id,content) values (1,'contend')";s:14:"total_affected";i:1;}i:21;a:3:{s:8:"sphinxql";s:55:"select * from test105 where match('contend|newcontent')";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1571";s:3:"idd";s:1:"0";}i:1;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1571";s:3:"idd";s:1:"0";}}}i:22;a:2:{s:8:"sphinxql";s:44:"select * from test105 where match('content')";s:10:"total_rows";i:0;}}}sphinx-2.0.4-release/test/test_154/0000755000176700017710000000000011724063141016271 5ustar deogardeogarsphinx-2.0.4-release/test/test_154/stopwords.txt0000644000176700017710000000002111456041426021073 0ustar deogardeogara dog cat eatssphinx-2.0.4-release/test/test_154/test.xml0000644000176700017710000000433511456041426020003 0ustar deogardeogar no hit documents in RT and plain index indexer { mem_limit = 16M } searchd { workers = threads } source src { type = mysql sql_query = SELECT id, idd1, body FROM test_table sql_attr_uint = idd1 } index plain { source = src docinfo = extern charset_type = utf-8 path = /plain stopwords = test_154/stopwords.txt } index rt { type = rt docinfo = extern charset_type = utf-8 path = /rt stopwords = test_154/stopwords.txt rt_attr_uint = idd1 rt_field = body rt_mem_limit = 8M } index both { type = distributed local = plain local = rt } CREATE TABLE `test_table` ( `id` int(11) NOT NULL default '0', `idd1` int(11) NOT NULL default '0', `body` varchar(1024) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 1, 11, 'a dog' ), ( 2, 11, 'a cat' ), ( 3, 11, 'a bird' ), ( 4, 12, 'cat eats bird' ), ( 5, 13, 'dog eats cat' ), ( 6, 14, 'bird' ) insert into rt (id, idd1, body) values ( 11, 14, 'bird' ) insert into rt (id, idd1, body) values ( 12, 13, 'dog eats cat' ) insert into rt (id, idd1, body) values ( 13, 12, 'cat eats bird' ) insert into rt (id, idd1, body) values ( 14, 11, 'a bird' ) insert into rt (id, idd1, body) values ( 15, 11, 'a cat' ) insert into rt (id, idd1, body) values ( 16, 11, 'a dog' ) select * from plain select * from rt select * from both select * from both where match ('a') select * from both where match ('bird') select * from both where idd1=11 select * from both where idd1!=11 select * from both where match ('bird') and idd1!=11 select * from both where idd1!=10 sphinx-2.0.4-release/test/test_154/model.bin0000644000176700017710000001303311456041426020067 0ustar deogardeogara:1:{i:0;a:15:{i:0;a:2:{s:8:"sphinxql";s:57:"insert into rt (id, idd1, body) values ( 11, 14, 'bird' 
)";s:14:"total_affected";i:1;}i:1;a:2:{s:8:"sphinxql";s:65:"insert into rt (id, idd1, body) values ( 12, 13, 'dog eats cat' )";s:14:"total_affected";i:1;}i:2;a:2:{s:8:"sphinxql";s:66:"insert into rt (id, idd1, body) values ( 13, 12, 'cat eats bird' )";s:14:"total_affected";i:1;}i:3;a:2:{s:8:"sphinxql";s:59:"insert into rt (id, idd1, body) values ( 14, 11, 'a bird' )";s:14:"total_affected";i:1;}i:4;a:2:{s:8:"sphinxql";s:58:"insert into rt (id, idd1, body) values ( 15, 11, 'a cat' )";s:14:"total_affected";i:1;}i:5;a:2:{s:8:"sphinxql";s:58:"insert into rt (id, idd1, body) values ( 16, 11, 'a dog' )";s:14:"total_affected";i:1;}i:6;a:3:{s:8:"sphinxql";s:19:"select * from plain";s:10:"total_rows";i:6;s:4:"rows";a:6:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"11";}i:1;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"11";}i:2;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"11";}i:3;a:3:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"12";}i:4;a:3:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"13";}i:5;a:3:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"14";}}}i:7;a:3:{s:8:"sphinxql";s:16:"select * from rt";s:10:"total_rows";i:6;s:4:"rows";a:6:{i:0;a:3:{s:2:"id";s:2:"11";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"14";}i:1;a:3:{s:2:"id";s:2:"12";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"13";}i:2;a:3:{s:2:"id";s:2:"13";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"12";}i:3;a:3:{s:2:"id";s:2:"14";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"11";}i:4;a:3:{s:2:"id";s:2:"15";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"11";}i:5;a:3:{s:2:"id";s:2:"16";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"11";}}}i:8;a:3:{s:8:"sphinxql";s:18:"select * from both";s:10:"total_rows";i:12;s:4:"rows";a:12:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"11";}i:1;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"11";}i:2;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"11";}i:3;a:3:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"12";}i:4;a:3:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"13";}i:5;a:3:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"14";}i:6;a:3:{s:2:"id";s:2:"11";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"14";}i:7;a:3:{s:2:"id";s:2:"12";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"13";}i:8;a:3:{s:2:"id";s:2:"13";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"12";}i:9;a:3:{s:2:"id";s:2:"14";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"11";}i:10;a:3:{s:2:"id";s:2:"15";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"11";}i:11;a:3:{s:2:"id";s:2:"16";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"11";}}}i:9;a:2:{s:8:"sphinxql";s:36:"select * from both where match ('a')";s:10:"total_rows";i:0;}i:10;a:3:{s:8:"sphinxql";s:39:"select * from both where match ('bird')";s:10:"total_rows";i:6;s:4:"rows";a:6:{i:0;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1533";s:4:"idd1";s:2:"11";}i:1;a:3:{s:2:"id";s:1:"4";s:6:"weight";s:4:"1533";s:4:"idd1";s:2:"12";}i:2;a:3:{s:2:"id";s:1:"6";s:6:"weight";s:4:"1533";s:4:"idd1";s:2:"14";}i:3;a:3:{s:2:"id";s:2:"11";s:6:"weight";s:4:"1533";s:4:"idd1";s:2:"14";}i:4;a:3:{s:2:"id";s:2:"13";s:6:"weight";s:4:"1533";s:4:"idd1";s:2:"12";}i:5;a:3:{s:2:"id";s:2:"14";s:6:"weight";s:4:"1533";s:4:"idd1";s:2:"11";}}}i:11;a:3:{s:8:"sphinxql";s:32:"select * from both where 
idd1=11";s:10:"total_rows";i:6;s:4:"rows";a:6:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"11";}i:1;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"11";}i:2;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"11";}i:3;a:3:{s:2:"id";s:2:"14";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"11";}i:4;a:3:{s:2:"id";s:2:"15";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"11";}i:5;a:3:{s:2:"id";s:2:"16";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"11";}}}i:12;a:3:{s:8:"sphinxql";s:33:"select * from both where idd1!=11";s:10:"total_rows";i:6;s:4:"rows";a:6:{i:0;a:3:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"12";}i:1;a:3:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"13";}i:2;a:3:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"14";}i:3;a:3:{s:2:"id";s:2:"11";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"14";}i:4;a:3:{s:2:"id";s:2:"12";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"13";}i:5;a:3:{s:2:"id";s:2:"13";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"12";}}}i:13;a:3:{s:8:"sphinxql";s:52:"select * from both where match ('bird') and idd1!=11";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:3:{s:2:"id";s:1:"4";s:6:"weight";s:4:"1533";s:4:"idd1";s:2:"12";}i:1;a:3:{s:2:"id";s:1:"6";s:6:"weight";s:4:"1533";s:4:"idd1";s:2:"14";}i:2;a:3:{s:2:"id";s:2:"11";s:6:"weight";s:4:"1533";s:4:"idd1";s:2:"14";}i:3;a:3:{s:2:"id";s:2:"13";s:6:"weight";s:4:"1533";s:4:"idd1";s:2:"12";}}}i:14;a:3:{s:8:"sphinxql";s:33:"select * from both where idd1!=10";s:10:"total_rows";i:12;s:4:"rows";a:12:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"11";}i:1;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"11";}i:2;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"11";}i:3;a:3:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"12";}i:4;a:3:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"13";}i:5;a:3:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"14";}i:6;a:3:{s:2:"id";s:2:"11";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"14";}i:7;a:3:{s:2:"id";s:2:"12";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"13";}i:8;a:3:{s:2:"id";s:2:"13";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"12";}i:9;a:3:{s:2:"id";s:2:"14";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"11";}i:10;a:3:{s:2:"id";s:2:"15";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"11";}i:11;a:3:{s:2:"id";s:2:"16";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"11";}}}}}sphinx-2.0.4-release/test/test_058/0000755000176700017710000000000011724063141016274 5ustar deogardeogarsphinx-2.0.4-release/test/test_058/test.xml0000644000176700017710000000456011707510237020006 0ustar deogardeogar rotation searchd { seamless_rotate = 0 workers = none workers = threads binlog_path = } source sql { type = mysql sql_query = select id, text from test_table where mode = (select mode from test_table where id = 1000); } index index { source = sql path = /index } source locs { type = mysql sql_query = select id, text from test_table; } index loc1 { source = locs path = /loc1 } index loc2 { source = locs path = /loc2 } index loc3 { source = locs path = /loc3 } drop table if exists test_table create table test_table ( id int not null, text varchar(255) not null, mode int ); insert into test_table values ( 1, 'first', 1 ), ( 2, 'second', 1 ), ( 3, 'third', 1 ), ( 4, 'fourth', 1 ), ( 5, 'fifth', 1 ), ( 1, 'one', 2 ), ( 2, 'two', 2 ), ( 3, 'three', 2 ), ( 1000, '', 1 ); Query ( $words, "index" ); if ( $result ) { unset ( $result["time"] ); return $result; } else return $client->GetLastError(); '); $mquery = create_function('$client',' $client->AddQuery ( "second", "loc1" ); $client->AddQuery ( "fifth", 
"loc2" ); $client->AddQuery ( "third", "loc3" ); $res = $client->RunQueries (); if ( !$res ) return $client->GetLastError(); $result = array (); for ( $i=0; $i sphinx-2.0.4-release/test/test_058/model.bin0000644000176700017710000002116211707510237020074 0ustar deogardeogara:2:{i:0;a:1:{i:0;a:9:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:1:{s:5:"first";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:1;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:1:{s:3:"one";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}}i:2;a:3:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:1:{s:6:"second";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:1:{s:5:"fifth";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:2;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:1:{s:5:"third";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}}i:3;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:1:{s:5:"first";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}}i:4;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:1:{s:3:"one";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}}i:5;a:3:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:1:{s:6:"second";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:1:{s:5:"fifth";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:2;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:1:{s:5:"third";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}}i:6;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:1:{s:5:"first";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}}i:7;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}
s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:1:{s:3:"one";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:8;a:3:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:1:{s:6:"second";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:1:{s:5:"fifth";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:2;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:1:{s:5:"third";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}}}}i:1;a:1:{i:0;a:9:{i:0;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:1:{s:5:"first";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:1:{s:3:"one";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:2;a:3:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:1:{s:6:"second";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:1:{s:5:"fifth";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:2;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:1:{s:5:"third";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}}i:3;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:1:{s:5:"first";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}}i:4;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:1:{s:3:"one";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}}i:5;a:3:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:1:{s:6:"second";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"mat
ches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:1:{s:5:"fifth";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:2;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:1:{s:5:"third";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}}i:6;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:1:{s:5:"first";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}}i:7;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:1:{s:3:"one";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:8;a:3:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:1:{s:6:"second";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:1:{s:5:"fifth";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:2;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:1:{s:5:"third";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}}}}}sphinx-2.0.4-release/test/clean.cmd0000644000176700017710000000062511430744557016506 0ustar deogardeogar@echo off if exist "ubertest.php" ( for /D %%i in (test_*) do ( if exist "%%i\test.xml" ( del /q "%%i\report.txt" 2>nul rmdir /s /q "%%i\Conf" 2>nul ) ) del /q "data\*.sp*" 2>nul del /q "data\*.mvp" 2>nul del /q "data\*.meta" "data\*.lock" "data\*.kill" "data\*.ram" "data\binlog.*" 2>nul del /q "*.log" 2>nul del /q "error*.txt" 2>nul del /q "config*.conf" 2>nul ) sphinx-2.0.4-release/test/test_039/0000755000176700017710000000000011724063141016273 5ustar deogardeogarsphinx-2.0.4-release/test/test_039/test.xml0000644000176700017710000000467111637101651020006 0ustar deogardeogar distributed updates indexer { mem_limit = 16M } searchd { max_filter_values = 500000 max_packet_size = 64M dist_threads = 4 read_timeout = 1 } source src1 { type = mysql sql_query = SELECT id, group_id, title FROM test_table sql_attr_uint = group_id } source src2 : src1 { sql_query = SELECT 10+id, group_id, title FROM test_table } index block1 { source = src1 path = /block1 docinfo = extern charset_type = utf-8 min_word_len = 1 } index block2 { source = src2 path = /block2 docinfo = extern charset_type = utf-8 min_word_len = 1 } index dist_agent_2 { source = src1 path = /dist_agent_2 docinfo = extern charset_type = utf-8 } index dist { type = distributed local = block1 agent = :block2 agent_connect_timeout = 1000 agent_query_timeout = 3000 } index dist2 { type = distributed agent = :dist_agent_2 agent_connect_timeout = 1000 agent_query_timeout = 3000 } 2 Query ( "", "dist" 
); if ( !$res ) { $results[] = $client->GetLastError(); return; } unset ( $res["time"] ); $results[] = $res; $res = $client->UpdateAttributes ( "dist", array("group_id"), array(11=>array(123)) ); if ( $res===false ) { $results[] = $client->GetLastError(); return; } $results[] = $res; $res = $client->Query ( "", "dist" ); if ( !$res ) { $results[] = $client->GetLastError(); return; } unset ( $res["time"] ); $results[] = $res; // here is going regression master fails to send 4k attributes to agent $gid = array(); for ( $i=0; $i<50000; $i++ ) $gid[] = $i; $client->SetFilter ( 'group_id', $gid ); $res = $client->Query ( "", "dist2" ); if ( !$res ) { $results[] = $client->GetLastError(); return; } unset ( $res["time"] ); $results[] = $res; ]]> CREATE TABLE test_table ( id INTEGER PRIMARY KEY NOT NULL AUTO_INCREMENT, group_id INTEGER NOT NULL, title VARCHAR(255) NOT NULL ); DROP TABLE IF EXISTS test_table; INSERT INTO test_table ( id, group_id, title ) VALUES ( 1, 1, 'test one' ), ( 2, 1, 'test two' ), ( 3, 2, 'test three' ), ( 4, 2, 'test four' ) sphinx-2.0.4-release/test/test_039/model.bin0000644000176700017710000000406611637101651020075 0ustar deogardeogara:1:{i:0;a:1:{i:0;a:4:{i:0;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:8:"group_id";i:1;}s:7:"matches";a:8:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:8:"group_id";s:1:"1";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:8:"group_id";s:1:"1";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:8:"group_id";s:1:"2";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:8:"group_id";s:1:"2";}}i:11;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:8:"group_id";s:1:"1";}}i:12;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:8:"group_id";s:1:"1";}}i:13;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:8:"group_id";s:1:"2";}}i:14;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:8:"group_id";s:1:"2";}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";}i:1;i:1;i:2;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:8:"group_id";i:1;}s:7:"matches";a:8:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:8:"group_id";s:1:"1";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:8:"group_id";s:1:"1";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:8:"group_id";s:1:"2";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:8:"group_id";s:1:"2";}}i:11;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:8:"group_id";s:3:"123";}}i:12;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:8:"group_id";s:1:"1";}}i:13;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:8:"group_id";s:1:"2";}}i:14;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:8:"group_id";s:1:"2";}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";}i:3;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:8:"group_id";i:1;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:8:"group_id";s:1:"1";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:8:"group_id";s:1:"1";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:8:"group_id";s:1:"2";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:8:"group_id";s:1:"2";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";}}}}sphinx-2.0.4-release/test/test_078/0000755000176700017710000000000011724063141016276 5ustar deogardeogarsphinx-2.0.4-release/test/test_078/test.xml0000644000176700017710000000226711235564071020013 0ustar deogardeogar merge vs hit mixing indexer { mem_limit = 16M } searchd { } 
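
For reference, the test_039 body above drives the bundled PHP client (api/sphinxapi.php) through a match-all query on the distributed index "dist", an attribute update, and then a re-query with a very large value-list filter (the regression being that the master failed to forward the multi-kilobyte filter to its agent). Below is a minimal standalone sketch of that same round trip under stated assumptions: the searchd host/port are placeholders, while the index names "dist"/"dist2" and the attribute "group_id" are taken from the test config above; error handling is reduced to GetLastError(), mirroring the test.

<?php
// Editorial sketch, not part of the release: replays the API calls used by test_039.
require_once ( "sphinxapi.php" );

$client = new SphinxClient ();
$client->SetServer ( "localhost", 9312 );	// assumed searchd address/port

// 1. match-all query against the distributed index
$res = $client->Query ( "", "dist" );
if ( !$res )
	die ( "query failed: " . $client->GetLastError() . "\n" );

// 2. update one document's attribute (doc id 11, group_id -> 123), as the test does
$updated = $client->UpdateAttributes ( "dist", array ( "group_id" ), array ( 11 => array ( 123 ) ) );
if ( $updated===false )
	die ( "update failed: " . $client->GetLastError() . "\n" );

// 3. large value-list filter; this is the payload the master must forward to its agent
$gid = array ();
for ( $i=0; $i<50000; $i++ )
	$gid[] = $i;
$client->SetFilter ( "group_id", $gid );
$res = $client->Query ( "", "dist2" );
if ( !$res )
	die ( "filtered query failed: " . $client->GetLastError() . "\n" );

print_r ( $res["matches"] );
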
source srcmain { type = mysql sql_query = SELECT * FROM test1 } source srcdelta { type = mysql sql_query = SELECT * FROM test2 } index main { source = srcmain path = /main charset_type = utf-8 } index delta { source = srcdelta path = /delta charset_type = utf-8 } --merge main delta aaa aaa ccc yyy ddd CREATE TABLE test1 ( id INTEGER NOT NULL, body VARCHAR(255) NOT NULL ); CREATE TABLE test2 ( id INTEGER NOT NULL, body VARCHAR(255) NOT NULL ); DROP TABLE IF EXISTS test1; DROP TABLE IF EXISTS test2; INSERT INTO test1 VALUES ( 1, 'xxx aaa yyy' ); INSERT INTO test2 VALUES ( 1, 'aaa bbb ccc aaa ddd eee' ); sphinx-2.0.4-release/test/test_078/model.bin0000644000176700017710000000243111235564071020075 0ustar deogardeogara:1:{i:0;a:3:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:1:{s:3:"aaa";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:3:"aaa";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:2:{s:3:"aaa";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"3";}s:3:"ccc";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"aaa ccc";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:2:{s:3:"yyy";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"ddd";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"yyy ddd";}}}sphinx-2.0.4-release/test/test_023/0000755000176700017710000000000011724063141016264 5ustar deogardeogarsphinx-2.0.4-release/test/test_023/test.xml0000644000176700017710000000472611004433405017771 0ustar deogardeogar min_word_len vs queries (part 1) indexer { mem_limit = 16M } searchd { } source srctest { type = mysql sql_query = SELECT * FROM test_table } index test { source = srctest path = /test charset_type = utf-8 enable_star = 1 min_word_len = 3 min_word_len = 4 min_word_len = 5 min_prefix_len = 3 min_prefix_len = 4 min_prefix_len = 5 } a bb ccc dddd eeeee ffffff ggggggg b* cc* ddd* eeee* fffff* gggggg* a bb ccc dddd eeeee ffffff ggggggg b* cc* ddd* eeee* fffff* gggggg* hello world hello me world hello two world hello four world hello me* world hello two* world hello four* world CREATE TABLE `test_table` ( `document_id` int(11) NOT NULL default '0', `body` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 1, 'a' ), ( 2, 'bb' ), ( 3, 'ccc' ), ( 4, 'dddd' ), ( 5, 'eeeee' ), ( 6, 'ffffff' ), ( 7, 'ggggggg' ), ( 8, 'hello world' ), ( 9, 'hello a world' ), ( 10, 'hello aa world' ), ( 11, 'hello aaa world' ), ( 12, 'hello aaaa world' ), ( 13, 'hello aaaaa world' ), ( 14, 'hello me world' ), ( 15, 'hello two world' ), ( 16, 'hello four world' ) sphinx-2.0.4-release/test/test_023/model.bin0000644000176700017710000032764611004433405020073 0ustar 
deogardeogara:9:{i:0;a:33:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:2;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"ccc";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"ccc";}i:3;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dddd";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"dddd";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:6;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:7;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:8;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. 
word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:9;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"ddd*";}i:10;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeee*";}i:11;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:12;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:13;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:14;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:15;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:3:"ccc";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"ccc";}i:16;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"dddd";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"dddd";}i:17;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:18;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:19;a:11:{
s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:20;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:21;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:22;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"ddd*";}i:23;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeee*";}i:24;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:25;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:26;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:8;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:11:"hello 
world";}i:27;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:14:"hello me world";}i:28;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:15;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:3:"two";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello two world";}i:29;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:16;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:4:"four";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello four world";}i:30;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. 
word: 'me*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:3:"me*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello me* world";}i:31;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:15;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:4:"two*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello two* world";}i:32;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:16;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"four*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:17:"hello four* world";}}i:1;a:33:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:2;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:3:"ccc";}i:3;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dddd";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"dddd";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:6;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:7;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. 
word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:8;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:9;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"ddd*";}i:10;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeee*";}i:11;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:12;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:13;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:14;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:15;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:3:"ccc";}i:16;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"dddd";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"dddd";}i:17;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:18;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5
:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:19;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:20;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:21;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:22;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"ddd*";}i:23;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeee*";}i:24;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:25;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:26;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:8;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:11:"hello 
world";}i:27;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:14:"hello me world";}i:28;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello two world";}i:29;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:16;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:4:"four";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello four world";}i:30;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. 
word: 'me*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:3:"me*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello me* world";}i:31;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:4:"two*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello two* world";}i:32;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:16;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"four*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:17:"hello four* world";}}i:2;a:33:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:2;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:3:"ccc";}i:3;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:4:"dddd";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:6;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:7;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. 
word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:8;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:9;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"ddd*";}i:10;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeee*";}i:11;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:12;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:13;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:14;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:15;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:3:"ccc";}i:16;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:4:"dddd";}i:17;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:18;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:19;a:11:{s:5:"error";s:0:
"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:20;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:21;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:22;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"ddd*";}i:23;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeee*";}i:24;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:25;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:26;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:8;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:11:"hello 
world";}i:27;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:14:"hello me world";}i:28;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello two world";}i:29;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello four world";}i:30;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. 
word: 'me*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:3:"me*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello me* world";}i:31;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:4:"two*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello two* world";}i:32;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"four*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:17:"hello four* world";}}i:3;a:33:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:2;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"ccc";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"ccc";}i:3;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dddd";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"dddd";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:6;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:7;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. 
word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:8;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:9;a:10:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. word: 'ddd*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"ddd*";}i:10;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeee*";}i:11;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:12;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:13;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:14;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:15;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:3:"ccc";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"ccc";}i:16;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"dddd";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"dddd";}i:17;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:18;a:11:{s:5:"error";s:0:"";s:7:"
warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:19;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:20;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:21;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:22;a:10:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. word: 'ddd*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"ddd*";}i:23;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeee*";}i:24;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:25;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:26;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:8;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:11:"hello 
world";}i:27;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:14:"hello me world";}i:28;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:15;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:3:"two";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello two world";}i:29;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:16;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:4:"four";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello four world";}i:30;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'me*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:3:"me*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello me* world";}i:31;a:11:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. 
word: 'two*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:15;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:4:"two*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello two* world";}i:32;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:16;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"four*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:17:"hello four* world";}}i:4;a:33:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:2;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:3:"ccc";}i:3;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dddd";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"dddd";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:6;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:7;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:8;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. 
word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:9;a:10:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. word: 'ddd*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"ddd*";}i:10;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeee*";}i:11;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:12;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:13;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:14;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:15;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:3:"ccc";}i:16;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"dddd";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"dddd";}i:17;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:18;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:19;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";
s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:20;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:21;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:22;a:10:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. word: 'ddd*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"ddd*";}i:23;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeee*";}i:24;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:25;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:26;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:8;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:11:"hello world";}i:27;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:14:"hello me 
world";}i:28;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello two world";}i:29;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:16;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:4:"four";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello four world";}i:30;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'me*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:3:"me*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello me* world";}i:31;a:10:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. 
word: 'two*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:4:"two*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello two* world";}i:32;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:16;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"four*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:17:"hello four* world";}}i:5;a:33:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:2;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:3:"ccc";}i:3;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:4:"dddd";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:6;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:7;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:8;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. 
word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:9;a:10:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. word: 'ddd*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"ddd*";}i:10;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeee*";}i:11;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:12;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:13;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:14;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:15;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:3:"ccc";}i:16;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:4:"dddd";}i:17;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:18;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:19;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs
";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:20;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:21;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:22;a:10:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. word: 'ddd*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"ddd*";}i:23;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeee*";}i:24;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:25;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:26;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:8;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:11:"hello world";}i:27;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:14:"hello me 
world";}i:28;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello two world";}i:29;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello four world";}i:30;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'me*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:3:"me*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello me* world";}i:31;a:10:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. 
word: 'two*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:4:"two*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello two* world";}i:32;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"four*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:17:"hello four* world";}}i:6;a:33:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:2;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"ccc";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"ccc";}i:3;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dddd";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"dddd";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:6;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:7;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:8;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. 
word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:9;a:10:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. word: 'ddd*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"ddd*";}i:10;a:10:{s:5:"error";s:0:"";s:7:"warning";s:64:"Query word length is less than min prefix length. word: 'eeee*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"eeee*";}i:11;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:12;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:13;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:14;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:15;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:3:"ccc";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"ccc";}i:16;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"dddd";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"dddd";}i:17;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:18;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:19;a:11:{s:5:"erro
r";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:20;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:21;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:22;a:10:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. word: 'ddd*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"ddd*";}i:23;a:10:{s:5:"error";s:0:"";s:7:"warning";s:64:"Query word length is less than min prefix length. word: 'eeee*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"eeee*";}i:24;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:25;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:26;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:8;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:11:"hello 
world";}i:27;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:14:"hello me world";}i:28;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:15;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:3:"two";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello two world";}i:29;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:16;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:4:"four";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello four world";}i:30;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'me*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:3:"me*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello me* world";}i:31;a:11:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. word: 'two*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:15;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:4:"two*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello two* world";}i:32;a:11:{s:5:"error";s:0:"";s:7:"warning";s:64:"Query word length is less than min prefix length. 
word: 'four*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:16;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"four*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:17:"hello four* world";}}i:7;a:33:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:2;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:3:"ccc";}i:3;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dddd";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"dddd";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:6;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:7;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:8;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:9;a:10:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. 
word: 'ddd*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"ddd*";}i:10;a:10:{s:5:"error";s:0:"";s:7:"warning";s:64:"Query word length is less than min prefix length. word: 'eeee*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"eeee*";}i:11;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:12;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:13;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:14;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:15;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:3:"ccc";}i:16;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"dddd";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"dddd";}i:17;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:18;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:19;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:20;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. 
word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:21;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:22;a:10:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. word: 'ddd*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"ddd*";}i:23;a:10:{s:5:"error";s:0:"";s:7:"warning";s:64:"Query word length is less than min prefix length. word: 'eeee*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"eeee*";}i:24;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:25;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:26;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:8;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:11:"hello world";}i:27;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:14:"hello me 
world";}i:28;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello two world";}i:29;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:16;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:4:"four";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello four world";}i:30;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'me*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:3:"me*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello me* world";}i:31;a:10:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. word: 'two*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:4:"two*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello two* world";}i:32;a:11:{s:5:"error";s:0:"";s:7:"warning";s:64:"Query word length is less than min prefix length. 
word: 'four*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:16;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"four*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:17:"hello four* world";}}i:8;a:33:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:2;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:3:"ccc";}i:3;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:4:"dddd";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:6;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:7;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:8;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:9;a:10:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. word: 'ddd*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"ddd*";}i:10;a:10:{s:5:"error";s:0:"";s:7:"warning";s:64:"Query word length is less than min prefix length. 
word: 'eeee*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"eeee*";}i:11;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:12;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:13;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:14;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:15;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:3:"ccc";}i:16;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:4:"dddd";}i:17;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:18;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:19;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:20;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:21;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. 
word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:22;a:10:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. word: 'ddd*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"ddd*";}i:23;a:10:{s:5:"error";s:0:"";s:7:"warning";s:64:"Query word length is less than min prefix length. word: 'eeee*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"eeee*";}i:24;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:25;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:26;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:8;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:11:"hello world";}i:27;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:14:"hello me 
world";}i:28;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello two world";}i:29;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello four world";}i:30;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'me*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:3:"me*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello me* world";}i:31;a:10:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. word: 'two*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:4:"two*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello two* world";}i:32;a:10:{s:5:"error";s:0:"";s:7:"warning";s:64:"Query word length is less than min prefix length. 
word: 'four*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"four*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:17:"hello four* world";}}}sphinx-2.0.4-release/test/test_126/0000755000176700017710000000000011724063141016270 5ustar deogardeogarsphinx-2.0.4-release/test/test_126/test.xml0000644000176700017710000000201511405471550017772 0ustar deogardeogar expand keywords indexer { mem_limit = 16M } searchd { } source srctest { type = mysql sql_query = SELECT * FROM test_table } index test { source = srctest path = /test charset_type = utf-8 morphology = stem_en index_exact_words = 1 expand_keywords = 1 } dog run =dog =run dog run =dog =run dog* run CREATE TABLE `test_table` ( `document_id` int(11) NOT NULL default '0', `body` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 1, 'dog runs' ), ( 2, 'dogs run' ), ( 3, 'dogs running' ), ( 4, 'dog run' ) sphinx-2.0.4-release/test/test_126/model.bin0000644000176700017710000000614011405471550020066 0ustar deogardeogara:1:{i:0;a:5:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:4:{i:4;a:2:{s:6:"weight";s:4:"4430";s:5:"attrs";a:0:{}}i:1;a:2:{s:6:"weight";s:4:"3416";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"3416";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"2402";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.007";s:5:"words";a:4:{s:3:"dog";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:3:"run";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:4:"=dog";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:4:"=run";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"dog run";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:4:"2557";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.005";s:5:"words";a:2:{s:4:"=dog";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:4:"=run";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:9:"=dog =run";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.008";s:5:"words";a:4:{s:3:"dog";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:3:"run";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:4:"=dog";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:4:"=run";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"dog 
run";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.005";s:5:"words";a:4:{s:4:"=dog";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:4:"=run";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:5:"==dog";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"==run";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:9:"=dog =run";}i:4;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.008";s:5:"words";a:4:{s:3:"dog";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:3:"run";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:4:"=dog";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:4:"=run";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:8:"dog* run";}}}sphinx-2.0.4-release/test/test_149/0000755000176700017710000000000011724063141016275 5ustar deogardeogarsphinx-2.0.4-release/test/test_149/test.xml0000644000176700017710000000341611654505324020010 0ustar deogardeogar MySQL IN vs filter order indexer { mem_limit = 16M } searchd { workers = threads } source test { type = mysql sql_query = SELECT * FROM test_table sql_attr_uint = idd } index test { source = test path = /test docinfo = extern } CREATE TABLE test_table ( id INT NOT NULL, body VARCHAR(2048) NOT NULL, idd INT NOT NULL ); drop table if exists test_table; insert into test_table values ( 1, 'test', 10 ); insert into test_table values ( 2, 'test', 20 ); insert into test_table values ( 3, 'test', 30 ); insert into test_table values ( 4, 'test', 40 ); insert into test_table values ( 5, 'test', 50 ); insert into test_table values ( 6, 'test', 60 ); insert into test_table values ( 7, 'test', 70 ); select * from test select * from test where @id IN (1,3,6,7) select * from test where @id IN (1,1000,3,6,7) select * from test where @id IN (1000,1,3,6,7) select * from test where @id IN (1,3,6,1000,7) select * from test where idd IN (875,321,60,1010,20,457,20,311,20,750,70,10) select * from test where idd NOT IN (60,1010,20,457,20,311,20,750,70,10) set global @attr_filter=(10,20,30,70) select * from test where idd IN @attr_filter select * from test where idd NOT IN @attr_filter sphinx-2.0.4-release/test/test_149/model.bin0000644000176700017710000000713511654505324020103 0ustar deogardeogara:1:{i:0;a:10:{i:0;a:3:{s:8:"sphinxql";s:18:"select * from test";s:10:"total_rows";i:7;s:4:"rows";a:7:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"idd";s:2:"10";}i:1;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:3:"idd";s:2:"20";}i:2;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"idd";s:2:"30";}i:3;a:3:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:3:"idd";s:2:"40";}i:4;a:3:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:3:"idd";s:2:"50";}i:5;a:3:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:3:"idd";s:2:"60";}i:6;a:3:{s:2:"id";s:1:"7";s:6:"weight";s:1:"1";s:3:"idd";s:2:"70";}}}i:1;a:3:{s:8:"sphinxql";s:41:"select * from test where @id IN 
(1,3,6,7)";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"idd";s:2:"10";}i:1;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"idd";s:2:"30";}i:2;a:3:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:3:"idd";s:2:"60";}i:3;a:3:{s:2:"id";s:1:"7";s:6:"weight";s:1:"1";s:3:"idd";s:2:"70";}}}i:2;a:3:{s:8:"sphinxql";s:46:"select * from test where @id IN (1,1000,3,6,7)";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"idd";s:2:"10";}i:1;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"idd";s:2:"30";}i:2;a:3:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:3:"idd";s:2:"60";}i:3;a:3:{s:2:"id";s:1:"7";s:6:"weight";s:1:"1";s:3:"idd";s:2:"70";}}}i:3;a:3:{s:8:"sphinxql";s:46:"select * from test where @id IN (1000,1,3,6,7)";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"idd";s:2:"10";}i:1;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"idd";s:2:"30";}i:2;a:3:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:3:"idd";s:2:"60";}i:3;a:3:{s:2:"id";s:1:"7";s:6:"weight";s:1:"1";s:3:"idd";s:2:"70";}}}i:4;a:3:{s:8:"sphinxql";s:46:"select * from test where @id IN (1,3,6,1000,7)";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"idd";s:2:"10";}i:1;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"idd";s:2:"30";}i:2;a:3:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:3:"idd";s:2:"60";}i:3;a:3:{s:2:"id";s:1:"7";s:6:"weight";s:1:"1";s:3:"idd";s:2:"70";}}}i:5;a:3:{s:8:"sphinxql";s:76:"select * from test where idd IN (875,321,60,1010,20,457,20,311,20,750,70,10)";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"idd";s:2:"10";}i:1;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:3:"idd";s:2:"20";}i:2;a:3:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:3:"idd";s:2:"60";}i:3;a:3:{s:2:"id";s:1:"7";s:6:"weight";s:1:"1";s:3:"idd";s:2:"70";}}}i:6;a:3:{s:8:"sphinxql";s:72:"select * from test where idd NOT IN (60,1010,20,457,20,311,20,750,70,10)";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"idd";s:2:"30";}i:1;a:3:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:3:"idd";s:2:"40";}i:2;a:3:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:3:"idd";s:2:"50";}}}i:7;a:2:{s:8:"sphinxql";s:37:"set global @attr_filter=(10,20,30,70)";s:14:"total_affected";i:0;}i:8;a:3:{s:8:"sphinxql";s:44:"select * from test where idd IN @attr_filter";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"idd";s:2:"10";}i:1;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:3:"idd";s:2:"20";}i:2;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"idd";s:2:"30";}i:3;a:3:{s:2:"id";s:1:"7";s:6:"weight";s:1:"1";s:3:"idd";s:2:"70";}}}i:9;a:3:{s:8:"sphinxql";s:48:"select * from test where idd NOT IN @attr_filter";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:3:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:3:"idd";s:2:"40";}i:1;a:3:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:3:"idd";s:2:"50";}i:2;a:3:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:3:"idd";s:2:"60";}}}}}sphinx-2.0.4-release/test/test_082/0000755000176700017710000000000011724063141016271 5ustar deogardeogarsphinx-2.0.4-release/test/test_082/words_delta.txt0000644000176700017710000000001711243776120021343 0ustar deogardeogarwoodman the of sphinx-2.0.4-release/test/test_082/words_main.txt0000644000176700017710000000001711243776120021176 0ustar deogardeogartin woodman a sphinx-2.0.4-release/test/test_082/test.xml0000644000176700017710000000343011323414741017773 0ustar deogardeogar hitless merge indexer { 
mem_limit = 16M } searchd { } source main { type = mysql sql_query = select * from sph_test where id in ( 1, 2 ); } source delta { type = mysql sql_query = select * from sph_test where id in ( 3 ); } index main { source = main path = /main hitless_words = /words_main.txt } index delta { source = delta path = /delta hitless_words = /words_delta.txt } --merge main delta create table sph_test ( id int not null, text varchar(255) not null ); drop table if exists sph_test; insert into sph_test values ( 1, 'The Tin Woodman gave a sigh of satisfaction and lowered his axe, which he leaned against the tree.' ), ( 2, 'The Tin Woodman appeared to think deeply for a moment.' ), ( 3, 'The Tin Woodman had asked Dorothy to put the oil-can in her basket.' ); tin woodman lowered "tin woodman" "and lowered" "tin woodman"~2 "lowered his axe" "and lowered his axe" "tin woodman"/1 ^the ^tin basket basket$ sphinx-2.0.4-release/test/test_082/model.bin0000644000176700017710000001304311243776120020070 0ustar deogardeogara:1:{i:0;a:12:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:3:"tin";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}s:7:"woodman";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:11:"tin woodman";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1680";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"lowered";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"lowered";}i:2;a:11:{s:5:"error";s:0:"";s:7:"warning";s:68:"can't create phrase node, hitlists unavailable (hitlists=0, nodes=2)";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:13:""tin woodman"";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"2680";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:3:"and";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:7:"lowered";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:13:""and lowered"";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:68:"can't create phrase node, hitlists unavailable (hitlists=0, nodes=2)";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:15:""tin 
woodman"~2";}i:5;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"3680";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:3:{s:7:"lowered";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"his";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"axe";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:17:""lowered his axe"";}i:6;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"4680";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:4:{s:3:"and";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:7:"lowered";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"his";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"axe";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:21:""and lowered his axe"";}i:7;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:3:"tin";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}s:7:"woodman";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:15:""tin woodman"/1";}i:8;a:13:{s:5:"error";s:0:"";s:7:"warning";s:43:"hitlist unavailable, position limit ignored";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:2;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:0:{}}i:1;a:2:{s:6:"weight";s:4:"1252";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"1252";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:3:"the";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"5";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"^the";}i:9;a:13:{s:5:"error";s:0:"";s:7:"warning";s:43:"hitlist unavailable, position limit 
ignored";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:3:"tin";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"^tin";}i:10;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:4:"1680";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"basket";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:6:"basket";}i:11;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:4:"1680";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"basket";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"basket$";}}}sphinx-2.0.4-release/test/test_128/0000755000176700017710000000000011724063141016272 5ustar deogardeogarsphinx-2.0.4-release/test/test_128/test.xml0000644000176700017710000000220411406443065017775 0ustar deogardeogar indexing ranged stored procedures indexer { mem_limit = 16M } searchd { } source test { type = mysql sql_query_range = SELECT 1, 3 sql_range_step = 1 sql_query = CALL test_proc($start+$end-$end-1) sql_attr_uint = gid } index test { source = test path = /test } create procedure test_proc (in base int) begin select * from test_table where id=2*base or id=2*base+1; end create table test_table ( id int not null, gid int not null, title varchar(255) not null ); drop procedure if exists test_proc; drop table if exists test_table; insert into test_table values ( 1, 100, 'hello one' ); insert into test_table values ( 2, 200, 'hello two' ); insert into test_table values ( 3, 300, 'hello three' ); insert into test_table values ( 4, 400, 'hello four' ); hello sphinx-2.0.4-release/test/test_128/model.bin0000644000176700017710000000123111406443065020065 0ustar deogardeogara:1:{i:0;a:1:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:4:"1304";s:5:"attrs";a:1:{s:3:"gid";s:3:"100";}}i:2;a:2:{s:6:"weight";s:4:"1304";s:5:"attrs";a:1:{s:3:"gid";s:3:"200";}}i:3;a:2:{s:6:"weight";s:4:"1304";s:5:"attrs";a:1:{s:3:"gid";s:3:"300";}}i:4;a:2:{s:6:"weight";s:4:"1304";s:5:"attrs";a:1:{s:3:"gid";s:3:"400";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.012";s:5:"words";a:1:{s:5:"hello";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"hello";}}}sphinx-2.0.4-release/test/wordforms.txt0000644000176700017710000000017411500145433017503 0ustar deogardeogarfolded > wrapped shortform > SomeWhatMoreLongFormThatWouldOverflowShortFormBuffer antediluvian clumsy shandrydan > fordt sphinx-2.0.4-release/test/test_015/0000755000176700017710000000000011724063141016265 5ustar deogardeogarsphinx-2.0.4-release/test/test_015/stops.txt0000644000176700017710000000000311616617165020202 0ustar deogardeogar2 
sphinx-2.0.4-release/test/test_015/test.xml0000644000176700017710000000301611616617000017765 0ustar deogardeogar phrase matching vs duplicate keywords indexer { mem_limit = 16M } searchd { } source srctest { type = mysql sql_query = SELECT document_id, body FROM test_table } index test { source = srctest path = /test charset_type = utf-8 charset_table = 0..9, A..Z->a..z, _, a..z ngram_chars = U+410..U+42F->U+430..U+44F, U+430..U+44F ngram_len = 1 stopwords = /stops.txt } bar baz bar foo bar baz bar i did it zee lord of zee rings "bar baz bar" "foo bar baz bar" "i did it" "zee lord of zee rings" Braun 370\-2 3702 370 2 Braun Series 3 370 Shaver CREATE TABLE `test_table` ( `document_id` int(11) NOT NULL default '0', `body` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 111, 'lets test foo bar baz bar stuff' ), ( 222, 'bar baz foo' ), ( 333, 'foo baz bar' ), ( 444, 'i i did it it' ), ( 555, 'zee lord of zee rings' ), ( 666, 'Braun 370-2 3702 370 2 Braun Series 3 - 370 Shaver') sphinx-2.0.4-release/test/test_015/model.bin0000644000176700017710000001135311616617000020061 0ustar deogardeogara:1:{i:0;a:9:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:111;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:3:"baz";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}s:3:"bar";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"4";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:11:"bar baz bar";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:111;a:2:{s:6:"weight";s:1:"4";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:3:{s:3:"foo";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}s:3:"baz";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}s:3:"bar";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"4";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:15:"foo bar baz bar";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:444;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:1:"i";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"2";}s:3:"did";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:2:"it";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:8:"i did it";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:555;a:2:{s:6:"weight";s:1:"5";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:4:{s:4:"lord";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:2:"of";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"zee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"2";}s:5:"rings";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:21:"zee lord of zee 
rings";}i:4;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:111;a:2:{s:6:"weight";s:4:"3539";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:3:"baz";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}s:3:"bar";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"4";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:13:""bar baz bar"";}i:5;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:111;a:2:{s:6:"weight";s:4:"4537";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:3:{s:3:"foo";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}s:3:"baz";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}s:3:"bar";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"4";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:17:""foo bar baz bar"";}i:6;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:444;a:2:{s:6:"weight";s:4:"3761";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:1:"i";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"2";}s:3:"did";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:2:"it";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:10:""i did it"";}i:7;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:555;a:2:{s:6:"weight";s:4:"5728";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:4:{s:4:"lord";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:2:"of";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"zee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"2";}s:5:"rings";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:23:""zee lord of zee rings"";}i:8;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:666;a:2:{s:6:"weight";s:4:"9742";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.005";s:5:"words";a:6:{i:3702;a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"braun";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"2";}s:6:"series";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}i:3;a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}i:370;a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"3";}s:6:"shaver";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:49:"Braun 370\-2 3702 370 2 Braun Series 3 370 Shaver";}}}sphinx-2.0.4-release/test/test_046/0000755000176700017710000000000011724063141016271 5ustar deogardeogarsphinx-2.0.4-release/test/test_046/test.xml0000644000176700017710000000542711605620330017777 0ustar deogardeogar mva filtering indexer { mem_limit = 16M } searchd { } source test { type = mysql sql_query = SELECT id, text, mva FROM test_table; sql_attr_multi = uint mva from field sql_attr_multi = bigint mva from field } index test { source = test path = /test } CREATE TABLE test_table ( id INTEGER NOT NULL, text VARCHAR(255) NOT NULL DEFAULT 'text', mva VARCHAR(255) NOT NULL ) DROP TABLE IF EXISTS test_table INSERT INTO test_table (id, mva) VALUES ( 10, '' ), ( 1, '100' ), ( 2, '100, 200' ), ( 4, '6, 12, 17, 20') ResetFilters(); $client->SetFilter ( 
'mva', $mva ); $result = $client->Query ( '' ); if ( !$result ) break; $len = array_key_exists ( 'matches', $result ) ? count ( $result['matches'] ) : 0; $pass = $len == $filter[$last] ? 1 : 0; if ( $len == 1) $pass = $pass and array_key_exists ( $ID, $result['matches'] ); $results [$key] [ join(', ', $mva ) ] = $pass ? 'OK' : 'FAILED'; } // interval $results [] = 'INTERVAL'; $results [] = array(); $filters = array( array( 1, 4, false), array( 1, 5, false), array( 1, 6, true), array( 1, 7, true), array( 1, 12, true), array( 1, 15, true), array( 6, 9, true), array( 7, 9, false), array( 8, 10, false), array( 9, 12, true), array( 9, 15, true), array( 9, 21, true), array( 16, 21, true), array( 17, 21, true), array( 18, 19, false), array( 18, 25, true), array( 19, 25, true), array( 20, 25, true), array( 21, 25, false), ); $key = count($results) - 1; foreach ( $filters as &$filter ) { $client->ResetFilters(); $client->SetFilterRange ( 'mva', $filter[0], $filter[1] ); $result = $client->Query ( '' ); if ( !$result ) break; $len = array_key_exists ( 'matches', $result ) ? count ( $result['matches'] ) : 0; $pass = $len == ( $filter[2] ? 1 : 0 ); if ( $len == 1) $pass = $pass and array_key_exists ( $ID, $result['matches'] ); $results [$key] [$filter[0] . ' - ' . $filter[1]] = $pass ? 'OK' : 'FAILED'; } ]]> sphinx-2.0.4-release/test/test_046/model.bin0000644000176700017710000000261011605620330020057 0ustar deogardeogara:2:{i:0;a:1:{i:0;a:4:{i:0;s:6:"VALUES";i:1;a:12:{i:0;s:2:"OK";i:7;s:2:"OK";i:15;s:2:"OK";i:21;s:2:"OK";s:8:"3, 8, 16";s:2:"OK";s:9:"7, 18, 21";s:2:"OK";i:6;s:2:"OK";i:12;s:2:"OK";i:20;s:2:"OK";s:5:"5, 12";s:2:"OK";s:5:"7, 17";s:2:"OK";s:6:"15, 20";s:2:"OK";}i:2;s:8:"INTERVAL";i:3;a:19:{s:5:"1 - 4";s:2:"OK";s:5:"1 - 5";s:2:"OK";s:5:"1 - 6";s:2:"OK";s:5:"1 - 7";s:2:"OK";s:6:"1 - 12";s:2:"OK";s:6:"1 - 15";s:2:"OK";s:5:"6 - 9";s:2:"OK";s:5:"7 - 9";s:2:"OK";s:6:"8 - 10";s:2:"OK";s:6:"9 - 12";s:2:"OK";s:6:"9 - 15";s:2:"OK";s:6:"9 - 21";s:2:"OK";s:7:"16 - 21";s:2:"OK";s:7:"17 - 21";s:2:"OK";s:7:"18 - 19";s:2:"OK";s:7:"18 - 25";s:2:"OK";s:7:"19 - 25";s:2:"OK";s:7:"20 - 25";s:2:"OK";s:7:"21 - 25";s:2:"OK";}}}i:1;a:1:{i:0;a:4:{i:0;s:6:"VALUES";i:1;a:12:{i:0;s:2:"OK";i:7;s:2:"OK";i:15;s:2:"OK";i:21;s:2:"OK";s:8:"3, 8, 16";s:2:"OK";s:9:"7, 18, 21";s:2:"OK";i:6;s:2:"OK";i:12;s:2:"OK";i:20;s:2:"OK";s:5:"5, 12";s:2:"OK";s:5:"7, 17";s:2:"OK";s:6:"15, 20";s:2:"OK";}i:2;s:8:"INTERVAL";i:3;a:19:{s:5:"1 - 4";s:2:"OK";s:5:"1 - 5";s:2:"OK";s:5:"1 - 6";s:2:"OK";s:5:"1 - 7";s:2:"OK";s:6:"1 - 12";s:2:"OK";s:6:"1 - 15";s:2:"OK";s:5:"6 - 9";s:2:"OK";s:5:"7 - 9";s:2:"OK";s:6:"8 - 10";s:2:"OK";s:6:"9 - 12";s:2:"OK";s:6:"9 - 15";s:2:"OK";s:6:"9 - 21";s:2:"OK";s:7:"16 - 21";s:2:"OK";s:7:"17 - 21";s:2:"OK";s:7:"18 - 19";s:2:"OK";s:7:"18 - 25";s:2:"OK";s:7:"19 - 25";s:2:"OK";s:7:"20 - 25";s:2:"OK";s:7:"21 - 25";s:2:"OK";}}}}sphinx-2.0.4-release/test/test_053/0000755000176700017710000000000011724063141016267 5ustar deogardeogarsphinx-2.0.4-release/test/test_053/test.xml0000644000176700017710000000262411503513132017767 0ustar deogardeogar legacy matching modes emulation indexer { mem_limit = 16M } searchd { } source srctest1 { type = mysql sql_query = SELECT document_id, group_id, group_id2, title, content FROM test_table sql_attr_uint = group_id sql_attr_uint = group_id2 } source srctest2 : srctest1 { sql_query = SELECT 10+document_id, group_id, group_id2, title, content FROM test_table } index test1 { source = srctest1 path = /test1 charset_type = utf-8 } index test2 { source = srctest2 path = /test2 charset_type = 
utf-8 } test" CREATE TABLE test_table ( document_id INTEGER PRIMARY KEY NOT NULL AUTO_INCREMENT, group_id INTEGER NOT NULL, group_id2 INTEGER NOT NULL, title VARCHAR(255) NOT NULL, content VARCHAR(255) NOT NULL ) DROP TABLE IF EXISTS test_table INSERT INTO test_table ( document_id, group_id, group_id2, title, content ) VALUES ( 1, 1, 5, 'test one', 'this is my test document number one. also checking search within phrases.' ), ( 2, 1, 6, 'test two', 'this is my test document number two' ), ( 3, 2, 7, 'another doc', 'this is another group' ), ( 4, 2, 8, 'doc number four', 'this is to test groups' ) sphinx-2.0.4-release/test/test_053/model.bin0000644000176700017710000000174711136061603020070 0ustar deogardeogara:1:{i:0;a:1:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:7:"content";}s:5:"attrs";a:2:{s:8:"group_id";i:1;s:9:"group_id2";i:1;}s:7:"matches";a:6:{i:1;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:2:{s:8:"group_id";s:1:"1";s:9:"group_id2";s:1:"5";}}i:2;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:2:{s:8:"group_id";s:1:"1";s:9:"group_id2";s:1:"6";}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:2:{s:8:"group_id";s:1:"1";s:9:"group_id2";s:1:"5";}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:2:{s:8:"group_id";s:1:"1";s:9:"group_id2";s:1:"6";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:8:"group_id";s:1:"2";s:9:"group_id2";s:1:"8";}}i:14;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:8:"group_id";s:1:"2";s:9:"group_id2";s:1:"8";}}}s:5:"total";s:1:"6";s:11:"total_found";s:1:"6";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:4:"test";a:2:{s:4:"docs";s:1:"6";s:4:"hits";s:2:"10";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"test"";}}}sphinx-2.0.4-release/test/test_064/0000755000176700017710000000000011724063141016271 5ustar deogardeogarsphinx-2.0.4-release/test/test_064/test.xml0000644000176700017710000000117111323636205017774 0ustar deogardeogar merge vs empty index indexer { mem_limit = 16M } searchd { } source empty { type = mysql sql_query = select * from (select 0, 'text') t where 0; } source delta { type = mysql sql_query = select 1, 'text'; } index empty { source = empty path = /empty } index delta { source = delta path = /delta } select 1; --merge empty delta text sphinx-2.0.4-release/test/test_064/model.bin0000644000176700017710000000056311204757035020074 0ustar deogardeogara:1:{i:0;a:1:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"text";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"text";}}}sphinx-2.0.4-release/test/test_071/0000755000176700017710000000000011724063141016267 5ustar deogardeogarsphinx-2.0.4-release/test/test_071/test.xml0000644000176700017710000000272011244023256017771 0ustar deogardeogar subtree caching (part 2) indexer { mem_limit = 16M } searchd { subtree_docs_cache = 0 subtree_docs_cache = 40 subtree_docs_cache = 10M subtree_hits_cache = 0 subtree_hits_cache = 40 subtree_hits_cache = 10M } source srctest { type = mysql sql_query = SELECT * FROM test_table } index test_idx { source = srctest path = /test charset_type = utf-8 } CREATE TABLE `test_table` ( `id` int(11) NOT NULL default '0', `body` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 1, 'one two three one' ), ( 2, 'one two three one two' ), ( 3, 'one 
two three one two three' ), ( 4, 'one two three four six' ), ( 5, 'one two three four five seven' ) SetMatchMode (SPH_MATCH_EXTENDED2); $client->AddQuery ('(one and two) and three'); $client->AddQuery ('one and (two and four)'); $results = $client->RunQueries (); for ( $i=0; $i<=1; $i++ ) if ( is_array($results) && is_array($results[$i]) ) unset ( $results[$i]["time"] ); ]]> sphinx-2.0.4-release/test/test_071/model.bin0000644000176700017710000001636511227074350020076 0ustar deogardeogara:9:{i:0;a:1:{i:0;a:2:{i:0;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"and";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}}}i:1;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"and";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:4:"four";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}}}}i:1;a:1:{i:0;a:2:{i:0;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"and";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}}}i:1;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"and";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:4:"four";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}}}}i:2;a:1:{i:0;a:2:{i:0;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"and";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}}}i:1;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"and";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:4:"four";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}}}}i:3;a:1:{i:0;a:2:{i:0;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"and";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}}}i:1;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"
words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"and";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:4:"four";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}}}}i:4;a:1:{i:0;a:2:{i:0;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"and";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}}}i:1;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"and";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:4:"four";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}}}}i:5;a:1:{i:0;a:2:{i:0;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"and";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}}}i:1;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"and";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:4:"four";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}}}}i:6;a:1:{i:0;a:2:{i:0;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"and";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}}}i:1;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"and";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:4:"four";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}}}}i:7;a:1:{i:0;a:2:{i:0;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"and";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}}}i:1;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"and";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:4:"four";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}}}}i:8;a:1:{i:0;a:2:{i:0;a:8:{s:5
:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"and";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}}}i:1;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"and";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:4:"four";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}}}}}sphinx-2.0.4-release/test/test_020/0000755000176700017710000000000011724063141016261 5ustar deogardeogarsphinx-2.0.4-release/test/test_020/test.xml0000644000176700017710000001536411636171160017776 0ustar deogardeogar groupby indexer { mem_limit = 16M } searchd { } source srctest { type = mysql sql_query_pre = set time_zone='+0:00' sql_query = SELECT document_id, tag, UNIX_TIMESTAMP(time) as time, mva, body FROM test_table sql_attr_uint = TAg sql_attr_timestamp = time sql_attr_multi = uint mva from field; sql_attr_multi = bigint mva from field; } index test { source = srctest path = /test min_word_len = 1 min_prefix_len = 1 enable_star = 1 charset_type = utf-8 } source srctest2 : srctest { sql_query = SELECT document_id+10, tag+20 AS tag, UNIX_TIMESTAMP(time) as time, mva, body FROM test_table } index test2 : test { source = srctest2 path = /test2 } source srcmini1 : srctest { sql_query = SELECT document_id, tag, UNIX_TIMESTAMP(time) as time, mva, body FROM test_table WHERE document_id IN (1,7) } index mini1 : test { source = srcmini1 path = /mini1 } source srcmini2 : srctest { sql_query = SELECT document_id, tag, UNIX_TIMESTAMP(time) as time, mva, body FROM test_table WHERE document_id IN (1) } index mini2 : test { source = srcmini2 path = /mini2 } source src_mva64 { type = mysql sql_query = SELECT document_id, 11 as tag, mva1, 'dummy' FROM table64_0 sql_attr_uint = tag sql_attr_multi = bigint mva1 from field sql_attr_multi = bigint mva2 from query; SELECT id, mva2 FROM table64_1 } index mva64 { source = src_mva64 path = /mva64 } source src_mva_mixed { type = mysql sql_query = SELECT document_id, 11 as tag, mva, 'dummy' FROM test_table sql_attr_uint = tag sql_attr_multi = bigint mva2 from query; SELECT id, mva2 FROM table64_1 sql_attr_multi = uint mva from field } index mva_mixed { source = src_mva_mixed path = /mva_mixed } test* test* test* test* test* test* test* test* SELECT * FROM mva64 SELECT * FROM mva64 ORDER BY mva1 desc SELECT * FROM mva64 ORDER BY mva2 asc SELECT * FROM mva_mixed SELECT * FROM mva_mixed GROUP BY mva ORDER BY mva2 desc SELECT * FROM mva_mixed ORDER BY mva2 desc CREATE TABLE `test_table` ( `document_id` int(11) NOT NULL default '0', `tag` int(11) NOT NULL default '0', `time` datetime, `mva` varchar(255) NOT NULL default '', `body` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 1, 1, '2007-12-01 12:00:00', '1,2,3', 'test1' ), ( 2, 2, '2006-05-02 12:00:00', '3,4,5', 'test2' ), ( 3, 2, '2005-06-03 12:00:00', '4,5,6', 'test3' ), ( 4, 3, '2005-07-04 12:00:00', '1,2,3', 'test4' ), ( 5, 3, '2003-09-05 12:00:00', '3,5', 'test5' ), ( 6, 3, '2003-09-06 12:00:00', '3,5', 'test6' ), ( 7, 4, '2007-12-01 12:00:00', '4,5', 'test7' ), ( 8, 4, 
'2006-05-02 12:00:00', '4,5,6', 'test8' ), ( 9, 4, '2005-06-03 12:00:00', '4', 'test9' ), ( 10, 4,'2005-07-04 12:00:00', '3,4,5', 'test10' ) CREATE TABLE `table64_0` ( `document_id` int(11) NOT NULL default '0', `mva1` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `table64_0` INSERT INTO `table64_0` VALUES ( 1, '10000000112 10000000124 10000000110 10000000111 10000000113' ), ( 2, '10000000242 10000000224 10000000210 10000000221 10000000213' ), ( 3, '10000000332 10000000314 10000000310 10000000391 10000000393' ), ( 4, '10000000442 10000000424 10000000410 10000000461 10000000413' ) CREATE TABLE `table64_1` ( `id` int(11) NOT NULL default '0', `mva2` bigint NOT NULL default 0 ) DROP TABLE IF EXISTS `table64_1` INSERT INTO `table64_1` VALUES ( 1, 50000000112), (1, 50000000124), (1, 50000000110), (1, 50000000111), (1, 50000000113), ( 2, 50000000242), (2, 50000000224), (2, 50000000210), (2, 50000000221), (2, 50000000213), ( 3, 50000000332), (3, 50000000314), (3, 50000000310), (3, 50000000391), (3, 50000000393), ( 4, 50000000442), (4, 50000000424), (4, 50000000410), (4, 50000000461), (4, 50000000413) sphinx-2.0.4-release/test/test_020/model.bin0000644000176700017710000022216611636171160020067 0ustar deogardeogara:2:{i:0;a:28:{i:0;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:5:{s:3:"tag";i:1;s:4:"time";i:2;s:3:"mva";i:1073741825;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:4:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"tag";s:1:"4";s:4:"time";s:10:"1196510400";s:3:"mva";a:2:{i:0;s:1:"4";i:1;s:1:"5";}s:8:"@groupby";s:1:"4";s:6:"@count";s:1:"4";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"tag";s:1:"3";s:4:"time";s:10:"1120478400";s:3:"mva";a:3:{i:0;s:1:"1";i:1;s:1:"2";i:2;s:1:"3";}s:8:"@groupby";s:1:"3";s:6:"@count";s:1:"3";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"tag";s:1:"2";s:4:"time";s:10:"1146571200";s:3:"mva";a:3:{i:0;s:1:"3";i:1;s:1:"4";i:2;s:1:"5";}s:8:"@groupby";s:1:"2";s:6:"@count";s:1:"2";}}i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"tag";s:1:"1";s:4:"time";s:10:"1196510400";s:3:"mva";a:3:{i:0;s:1:"1";i:1;s:1:"2";i:2;s:1:"3";}s:8:"@groupby";s:1:"1";s:6:"@count";s:1:"1";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:1;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:5:{s:3:"tag";i:1;s:4:"time";i:2;s:3:"mva";i:1073741825;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"tag";s:1:"1";s:4:"time";s:10:"1196510400";s:3:"mva";a:3:{i:0;s:1:"1";i:1;s:1:"2";i:2;s:1:"3";}s:8:"@groupby";s:1:"1";s:6:"@count";s:1:"1";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"tag";s:1:"2";s:4:"time";s:10:"1146571200";s:3:"mva";a:3:{i:0;s:1:"3";i:1;s:1:"4";i:2;s:1:"5";}s:8:"@groupby";s:1:"2";s:6:"@count";s:1:"2";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"tag";s:1:"3";s:4:"time";s:10:"1120478400";s:3:"mva";a:3:{i:0;s:1:"1";i:1;s:1:"2";i:2;s:1:"3";}s:8:"@groupby";s:1:"3";s:6:"@count";s:1:"3";}}i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"tag";s:1:"4";s:4:"time";s:10:"1196510400";s:3:"mva";a:2:{i:0;s:1:"4";i:1;s:1:"5";}s:8:"@groupby";s:1:"4";s:6:"@count";s:1:"4";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:2;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:
{i:0;s:4:"body";}s:5:"attrs";a:5:{s:3:"tag";i:1;s:4:"time";i:2;s:3:"mva";i:1073741825;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:4:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"tag";s:1:"4";s:4:"time";s:10:"1196510400";s:3:"mva";a:2:{i:0;s:1:"4";i:1;s:1:"5";}s:8:"@groupby";s:1:"4";s:6:"@count";s:1:"4";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"tag";s:1:"3";s:4:"time";s:10:"1120478400";s:3:"mva";a:3:{i:0;s:1:"1";i:1;s:1:"2";i:2;s:1:"3";}s:8:"@groupby";s:1:"3";s:6:"@count";s:1:"3";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"tag";s:1:"2";s:4:"time";s:10:"1146571200";s:3:"mva";a:3:{i:0;s:1:"3";i:1;s:1:"4";i:2;s:1:"5";}s:8:"@groupby";s:1:"2";s:6:"@count";s:1:"2";}}i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"tag";s:1:"1";s:4:"time";s:10:"1196510400";s:3:"mva";a:3:{i:0;s:1:"1";i:1;s:1:"2";i:2;s:1:"3";}s:8:"@groupby";s:1:"1";s:6:"@count";s:1:"1";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:3;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:5:{s:3:"tag";i:1;s:4:"time";i:2;s:3:"mva";i:1073741825;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"tag";s:1:"1";s:4:"time";s:10:"1196510400";s:3:"mva";a:3:{i:0;s:1:"1";i:1;s:1:"2";i:2;s:1:"3";}s:8:"@groupby";s:1:"1";s:6:"@count";s:1:"1";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"tag";s:1:"2";s:4:"time";s:10:"1146571200";s:3:"mva";a:3:{i:0;s:1:"3";i:1;s:1:"4";i:2;s:1:"5";}s:8:"@groupby";s:1:"2";s:6:"@count";s:1:"2";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"tag";s:1:"3";s:4:"time";s:10:"1120478400";s:3:"mva";a:3:{i:0;s:1:"1";i:1;s:1:"2";i:2;s:1:"3";}s:8:"@groupby";s:1:"3";s:6:"@count";s:1:"3";}}i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"tag";s:1:"4";s:4:"time";s:10:"1196510400";s:3:"mva";a:2:{i:0;s:1:"4";i:1;s:1:"5";}s:8:"@groupby";s:1:"4";s:6:"@count";s:1:"4";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.001";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:4;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:5:{s:3:"tag";i:1;s:4:"time";i:2;s:3:"mva";i:1073741825;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:4:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"tag";s:1:"4";s:4:"time";s:10:"1196510400";s:3:"mva";a:2:{i:0;s:1:"4";i:1;s:1:"5";}s:8:"@groupby";s:1:"4";s:6:"@count";s:1:"4";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"tag";s:1:"3";s:4:"time";s:10:"1120478400";s:3:"mva";a:3:{i:0;s:1:"1";i:1;s:1:"2";i:2;s:1:"3";}s:8:"@groupby";s:1:"3";s:6:"@count";s:1:"3";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"tag";s:1:"2";s:4:"time";s:10:"1146571200";s:3:"mva";a:3:{i:0;s:1:"3";i:1;s:1:"4";i:2;s:1:"5";}s:8:"@groupby";s:1:"2";s:6:"@count";s:1:"2";}}i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"tag";s:1:"1";s:4:"time";s:10:"1196510400";s:3:"mva";a:3:{i:0;s:1:"1";i:1;s:1:"2";i:2;s:1:"3";}s:8:"@groupby";s:1:"1";s:6:"@count";s:1:"1";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.001";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:5;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:5:{s:3:"tag";i:1;s:4:"time";i:2;s:3:"mva";i:1073741825;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:6:{i:1;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"1";s:4:"time";s
:10:"1196510400";s:3:"mva";a:3:{i:0;s:1:"1";i:1;s:1:"2";i:2;s:1:"3";}s:8:"@groupby";s:8:"20071201";s:6:"@count";s:1:"2";}}i:2;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"2";s:4:"time";s:10:"1146571200";s:3:"mva";a:3:{i:0;s:1:"3";i:1;s:1:"4";i:2;s:1:"5";}s:8:"@groupby";s:8:"20060502";s:6:"@count";s:1:"2";}}i:4;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"3";s:4:"time";s:10:"1120478400";s:3:"mva";a:3:{i:0;s:1:"1";i:1;s:1:"2";i:2;s:1:"3";}s:8:"@groupby";s:8:"20050704";s:6:"@count";s:1:"2";}}i:3;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"2";s:4:"time";s:10:"1117800000";s:3:"mva";a:3:{i:0;s:1:"4";i:1;s:1:"5";i:2;s:1:"6";}s:8:"@groupby";s:8:"20050603";s:6:"@count";s:1:"2";}}i:6;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"3";s:4:"time";s:10:"1062849600";s:3:"mva";a:2:{i:0;s:1:"3";i:1;s:1:"5";}s:8:"@groupby";s:8:"20030906";s:6:"@count";s:1:"1";}}i:5;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"3";s:4:"time";s:10:"1062763200";s:3:"mva";a:2:{i:0;s:1:"3";i:1;s:1:"5";}s:8:"@groupby";s:8:"20030905";s:6:"@count";s:1:"1";}}}s:5:"total";s:1:"6";s:11:"total_found";s:1:"6";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:5:"test*";a:2:{s:4:"docs";s:2:"10";s:4:"hits";s:2:"10";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"test*";}i:6;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:5:{s:3:"tag";i:1;s:4:"time";i:2;s:3:"mva";i:1073741825;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"1";s:4:"time";s:10:"1196510400";s:3:"mva";a:3:{i:0;s:1:"1";i:1;s:1:"2";i:2;s:1:"3";}s:8:"@groupby";s:7:"2007329";s:6:"@count";s:1:"2";}}i:2;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"2";s:4:"time";s:10:"1146571200";s:3:"mva";a:3:{i:0;s:1:"3";i:1;s:1:"4";i:2;s:1:"5";}s:8:"@groupby";s:7:"2006120";s:6:"@count";s:1:"2";}}i:4;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"3";s:4:"time";s:10:"1120478400";s:3:"mva";a:3:{i:0;s:1:"1";i:1;s:1:"2";i:2;s:1:"3";}s:8:"@groupby";s:7:"2005184";s:6:"@count";s:1:"2";}}i:3;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"2";s:4:"time";s:10:"1117800000";s:3:"mva";a:3:{i:0;s:1:"4";i:1;s:1:"5";i:2;s:1:"6";}s:8:"@groupby";s:7:"2005149";s:6:"@count";s:1:"2";}}i:5;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"3";s:4:"time";s:10:"1062763200";s:3:"mva";a:2:{i:0;s:1:"3";i:1;s:1:"5";}s:8:"@groupby";s:7:"2003243";s:6:"@count";s:1:"2";}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:5:"test*";a:2:{s:4:"docs";s:2:"10";s:4:"hits";s:2:"10";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"test*";}i:7;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:5:{s:3:"tag";i:1;s:4:"time";i:2;s:3:"mva";i:1073741825;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"1";s:4:"time";s:10:"1196510400";s:3:"mva";a:3:{i:0;s:1:"1";i:1;s:1:"2";i:2;s:1:"3";}s:8:"@groupby";s:6:"200712";s:6:"@count";s:1:"2";}}i:2;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"2";s:4:"time";s:10:"1146571200";s:3:"mva";a:3:{i:0;s:1:"3";i:1;s:1:"4";i:2;s:1:"5";}s:8:"@groupby";s:6:"200605";s:6:"@count";s:1:"2";}}i:4;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"3";s:4:"time";s:10:"1120478400";s:3:"mva";a:3:{i:0;s:1:"1";i:1;s:1:"2";i:2;s:1:"3";}s:8:"@groupby";s:6:"200507";s:6:"
@count";s:1:"2";}}i:3;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"2";s:4:"time";s:10:"1117800000";s:3:"mva";a:3:{i:0;s:1:"4";i:1;s:1:"5";i:2;s:1:"6";}s:8:"@groupby";s:6:"200506";s:6:"@count";s:1:"2";}}i:5;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"3";s:4:"time";s:10:"1062763200";s:3:"mva";a:2:{i:0;s:1:"3";i:1;s:1:"5";}s:8:"@groupby";s:6:"200309";s:6:"@count";s:1:"2";}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:5:"test*";a:2:{s:4:"docs";s:2:"10";s:4:"hits";s:2:"10";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"test*";}i:8;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:5:{s:3:"tag";i:1;s:4:"time";i:2;s:3:"mva";i:1073741825;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"1";s:4:"time";s:10:"1196510400";s:3:"mva";a:3:{i:0;s:1:"1";i:1;s:1:"2";i:2;s:1:"3";}s:8:"@groupby";s:4:"2007";s:6:"@count";s:1:"2";}}i:2;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"2";s:4:"time";s:10:"1146571200";s:3:"mva";a:3:{i:0;s:1:"3";i:1;s:1:"4";i:2;s:1:"5";}s:8:"@groupby";s:4:"2006";s:6:"@count";s:1:"2";}}i:3;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"2";s:4:"time";s:10:"1117800000";s:3:"mva";a:3:{i:0;s:1:"4";i:1;s:1:"5";i:2;s:1:"6";}s:8:"@groupby";s:4:"2005";s:6:"@count";s:1:"4";}}i:5;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"3";s:4:"time";s:10:"1062763200";s:3:"mva";a:2:{i:0;s:1:"3";i:1;s:1:"5";}s:8:"@groupby";s:4:"2003";s:6:"@count";s:1:"2";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:5:"test*";a:2:{s:4:"docs";s:2:"10";s:4:"hits";s:2:"10";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"test*";}i:9;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:5:{s:3:"tag";i:1;s:4:"time";i:2;s:3:"mva";i:1073741825;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:6:{i:0;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"2";s:4:"time";s:10:"1117800000";s:3:"mva";a:3:{i:0;s:1:"4";i:1;s:1:"5";i:2;s:1:"6";}s:8:"@groupby";s:1:"6";s:6:"@count";s:1:"2";}}i:1;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"2";s:4:"time";s:10:"1146571200";s:3:"mva";a:3:{i:0;s:1:"3";i:1;s:1:"4";i:2;s:1:"5";}s:8:"@groupby";s:1:"5";s:6:"@count";s:1:"7";}}i:2;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"2";s:4:"time";s:10:"1146571200";s:3:"mva";a:3:{i:0;s:1:"3";i:1;s:1:"4";i:2;s:1:"5";}s:8:"@groupby";s:1:"4";s:6:"@count";s:1:"6";}}i:3;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"1";s:4:"time";s:10:"1196510400";s:3:"mva";a:3:{i:0;s:1:"1";i:1;s:1:"2";i:2;s:1:"3";}s:8:"@groupby";s:1:"3";s:6:"@count";s:1:"6";}}i:4;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"1";s:4:"time";s:10:"1196510400";s:3:"mva";a:3:{i:0;s:1:"1";i:1;s:1:"2";i:2;s:1:"3";}s:8:"@groupby";s:1:"2";s:6:"@count";s:1:"2";}}i:5;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"1";s:4:"time";s:10:"1196510400";s:3:"mva";a:3:{i:0;s:1:"1";i:1;s:1:"2";i:2;s:1:"3";}s:8:"@groupby";s:1:"1";s:6:"@count";s:1:"2";}}}s:5:"total";s:1:"6";s:11:"total_found";s:1:"6";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:5:"test*";a:2:{s:4:"docs";s:2:"10";s:4:"hits";s:2:"10";}}s:8:"resarray";i:1;s:8:"roundoff";i:0;s:5:"query";s:5:"test*";}i:10;a:13:{s:5:
"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:5:{s:3:"tag";i:1;s:4:"time";i:2;s:3:"mva";i:1073741825;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:6:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"1";s:4:"time";s:10:"1196510400";s:3:"mva";a:3:{i:0;s:1:"1";i:1;s:1:"2";i:2;s:1:"3";}s:8:"@groupby";s:1:"1";s:6:"@count";s:1:"2";}}i:1;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"1";s:4:"time";s:10:"1196510400";s:3:"mva";a:3:{i:0;s:1:"1";i:1;s:1:"2";i:2;s:1:"3";}s:8:"@groupby";s:1:"2";s:6:"@count";s:1:"2";}}i:2;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"1";s:4:"time";s:10:"1196510400";s:3:"mva";a:3:{i:0;s:1:"1";i:1;s:1:"2";i:2;s:1:"3";}s:8:"@groupby";s:1:"3";s:6:"@count";s:1:"6";}}i:3;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"2";s:4:"time";s:10:"1146571200";s:3:"mva";a:3:{i:0;s:1:"3";i:1;s:1:"4";i:2;s:1:"5";}s:8:"@groupby";s:1:"4";s:6:"@count";s:1:"6";}}i:4;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"2";s:4:"time";s:10:"1146571200";s:3:"mva";a:3:{i:0;s:1:"3";i:1;s:1:"4";i:2;s:1:"5";}s:8:"@groupby";s:1:"5";s:6:"@count";s:1:"7";}}i:5;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"2";s:4:"time";s:10:"1117800000";s:3:"mva";a:3:{i:0;s:1:"4";i:1;s:1:"5";i:2;s:1:"6";}s:8:"@groupby";s:1:"6";s:6:"@count";s:1:"2";}}}s:5:"total";s:1:"6";s:11:"total_found";s:1:"6";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:5:"test*";a:2:{s:4:"docs";s:2:"10";s:4:"hits";s:2:"10";}}s:8:"resarray";i:1;s:8:"roundoff";i:0;s:5:"query";s:5:"test*";}i:11;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:6:{s:3:"tag";i:1;s:4:"time";i:2;s:3:"mva";i:1073741825;s:8:"@groupby";i:1;s:6:"@count";i:1;s:9:"@distinct";i:1;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:6:{s:3:"tag";s:1:"1";s:4:"time";s:10:"1196510400";s:3:"mva";a:3:{i:0;s:1:"1";i:1;s:1:"2";i:2;s:1:"3";}s:8:"@groupby";s:4:"2007";s:6:"@count";s:1:"2";s:9:"@distinct";s:1:"2";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:6:{s:3:"tag";s:1:"2";s:4:"time";s:10:"1146571200";s:3:"mva";a:3:{i:0;s:1:"3";i:1;s:1:"4";i:2;s:1:"5";}s:8:"@groupby";s:4:"2006";s:6:"@count";s:1:"2";s:9:"@distinct";s:1:"2";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:6:{s:3:"tag";s:1:"2";s:4:"time";s:10:"1117800000";s:3:"mva";a:3:{i:0;s:1:"4";i:1;s:1:"5";i:2;s:1:"6";}s:8:"@groupby";s:4:"2005";s:6:"@count";s:1:"4";s:9:"@distinct";s:1:"3";}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:6:{s:3:"tag";s:1:"3";s:4:"time";s:10:"1062763200";s:3:"mva";a:2:{i:0;s:1:"3";i:1;s:1:"5";}s:8:"@groupby";s:4:"2003";s:6:"@count";s:1:"2";s:9:"@distinct";s:1:"1";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.001";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:12;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:6:{s:3:"tag";i:1;s:4:"time";i:2;s:3:"mva";i:1073741825;s:8:"@groupby";i:1;s:6:"@count";i:1;s:9:"@distinct";i:1;}s:7:"matches";a:4:{i:11;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:6:{s:3:"tag";s:2:"21";s:4:"time";s:10:"1196510400";s:3:"mva";a:3:{i:0;s:1:"1";i:1;s:1:"2";i:2;s:1:"3";}s:8:"@groupby";s:4:"2007";s:6:"@count";s:1:"2";s:9:"@distinct";s:1:"2";}}i:12;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:6:{s:3:"tag";s:2:"22";s:4:"time";s:10:"1146571200";s:3:"mva";a:3:{i:0;s:1:"3";i:1;s:1:"4";i:2;s:1:"5";}s:8:"
@groupby";s:4:"2006";s:6:"@count";s:1:"2";s:9:"@distinct";s:1:"2";}}i:13;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:6:{s:3:"tag";s:2:"22";s:4:"time";s:10:"1117800000";s:3:"mva";a:3:{i:0;s:1:"4";i:1;s:1:"5";i:2;s:1:"6";}s:8:"@groupby";s:4:"2005";s:6:"@count";s:1:"4";s:9:"@distinct";s:1:"3";}}i:15;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:6:{s:3:"tag";s:2:"23";s:4:"time";s:10:"1062763200";s:3:"mva";a:2:{i:0;s:1:"3";i:1;s:1:"5";}s:8:"@groupby";s:4:"2003";s:6:"@count";s:1:"2";s:9:"@distinct";s:1:"1";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.001";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:13;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:6:{s:3:"tag";i:1;s:4:"time";i:2;s:3:"mva";i:1073741825;s:8:"@groupby";i:1;s:6:"@count";i:1;s:9:"@distinct";i:1;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:6:{s:3:"tag";s:1:"1";s:4:"time";s:10:"1196510400";s:3:"mva";a:3:{i:0;s:1:"1";i:1;s:1:"2";i:2;s:1:"3";}s:8:"@groupby";s:4:"2007";s:6:"@count";s:1:"4";s:9:"@distinct";s:1:"4";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:6:{s:3:"tag";s:1:"2";s:4:"time";s:10:"1146571200";s:3:"mva";a:3:{i:0;s:1:"3";i:1;s:1:"4";i:2;s:1:"5";}s:8:"@groupby";s:4:"2006";s:6:"@count";s:1:"4";s:9:"@distinct";s:1:"4";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:6:{s:3:"tag";s:1:"2";s:4:"time";s:10:"1117800000";s:3:"mva";a:3:{i:0;s:1:"4";i:1;s:1:"5";i:2;s:1:"6";}s:8:"@groupby";s:4:"2005";s:6:"@count";s:1:"8";s:9:"@distinct";s:1:"6";}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:6:{s:3:"tag";s:1:"3";s:4:"time";s:10:"1062763200";s:3:"mva";a:2:{i:0;s:1:"3";i:1;s:1:"5";}s:8:"@groupby";s:4:"2003";s:6:"@count";s:1:"4";s:9:"@distinct";s:1:"2";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.002";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:14;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:6:{s:3:"tag";i:1;s:4:"time";i:2;s:3:"mva";i:1073741825;s:8:"@groupby";i:1;s:6:"@count";i:1;s:9:"@distinct";i:1;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:6:{s:3:"tag";s:1:"1";s:4:"time";s:10:"1196510400";s:3:"mva";a:3:{i:0;s:1:"1";i:1;s:1:"2";i:2;s:1:"3";}s:8:"@groupby";s:4:"2007";s:6:"@count";s:1:"3";s:9:"@distinct";s:1:"3";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:15;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:5:{s:3:"tag";i:1;s:4:"time";i:2;s:3:"mva";i:1073741825;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"1";s:4:"time";s:10:"1196510400";s:3:"mva";a:3:{i:0;s:1:"1";i:1;s:1:"2";i:2;s:1:"3";}s:8:"@groupby";s:1:"1";s:6:"@count";s:1:"1";}}i:3;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"2";s:4:"time";s:10:"1117800000";s:3:"mva";a:3:{i:0;s:1:"4";i:1;s:1:"5";i:2;s:1:"6";}s:8:"@groupby";s:1:"2";s:6:"@count";s:1:"2";}}i:5;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"3";s:4:"time";s:10:"1062763200";s:3:"mva";a:2:{i:0;s:1:"3";i:1;s:1:"5";}s:8:"@groupby";s:1:"3";s:6:"@count";s:1:"3";}}i:9;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"4";s:4:"time";s:10:"1117800000";s:3:"mva";a:1:{i:0;s:1:"4";}s:8:"@groupby";s:1:"4";s:6:"@count";s:1:"4";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:5:"test*";a:2:{s:4:"docs";s:2:"1
0";s:4:"hits";s:2:"10";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"test*";}i:16;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:6:{s:3:"tag";i:1;s:4:"time";i:2;s:3:"mva";i:1073741825;s:1:"q";i:6;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:6:{s:3:"tag";s:1:"1";s:4:"time";s:10:"1196510400";s:3:"mva";a:3:{i:0;s:1:"1";i:1;s:1:"2";i:2;s:1:"3";}s:1:"q";i:10;s:8:"@groupby";s:1:"1";s:6:"@count";s:1:"1";}}i:3;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:6:{s:3:"tag";s:1:"2";s:4:"time";s:10:"1117800000";s:3:"mva";a:3:{i:0;s:1:"4";i:1;s:1:"5";i:2;s:1:"6";}s:1:"q";i:30;s:8:"@groupby";s:1:"2";s:6:"@count";s:1:"2";}}i:6;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:6:{s:3:"tag";s:1:"3";s:4:"time";s:10:"1062849600";s:3:"mva";a:2:{i:0;s:1:"3";i:1;s:1:"5";}s:1:"q";i:60;s:8:"@groupby";s:1:"3";s:6:"@count";s:1:"3";}}i:10;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:6:{s:3:"tag";s:1:"4";s:4:"time";s:10:"1120478400";s:3:"mva";a:3:{i:0;s:1:"3";i:1;s:1:"4";i:2;s:1:"5";}s:1:"q";i:100;s:8:"@groupby";s:1:"4";s:6:"@count";s:1:"4";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:5:"test*";a:2:{s:4:"docs";s:2:"10";s:4:"hits";s:2:"10";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"test*";}i:17;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"dummy";}s:5:"attrs";a:3:{s:3:"tag";i:1;s:4:"mva1";i:1073741826;s:4:"mva2";i:1073741826;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:4:"mva1";a:5:{i:0;s:11:"10000000110";i:1;s:11:"10000000111";i:2;s:11:"10000000112";i:3;s:11:"10000000113";i:4;s:11:"10000000124";}s:4:"mva2";a:5:{i:0;s:11:"50000000110";i:1;s:11:"50000000111";i:2;s:11:"50000000112";i:3;s:11:"50000000113";i:4;s:11:"50000000124";}}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:4:"mva1";a:5:{i:0;s:11:"10000000210";i:1;s:11:"10000000213";i:2;s:11:"10000000221";i:3;s:11:"10000000224";i:4;s:11:"10000000242";}s:4:"mva2";a:5:{i:0;s:11:"50000000210";i:1;s:11:"50000000213";i:2;s:11:"50000000221";i:3;s:11:"50000000224";i:4;s:11:"50000000242";}}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:4:"mva1";a:5:{i:0;s:11:"10000000310";i:1;s:11:"10000000314";i:2;s:11:"10000000332";i:3;s:11:"10000000391";i:4;s:11:"10000000393";}s:4:"mva2";a:5:{i:0;s:11:"50000000310";i:1;s:11:"50000000314";i:2;s:11:"50000000332";i:3;s:11:"50000000391";i:4;s:11:"50000000393";}}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:4:"mva1";a:5:{i:0;s:11:"10000000410";i:1;s:11:"10000000413";i:2;s:11:"10000000424";i:3;s:11:"10000000442";i:4;s:11:"10000000461";}s:4:"mva2";a:5:{i:0;s:11:"50000000410";i:1;s:11:"50000000413";i:2;s:11:"50000000424";i:3;s:11:"50000000442";i:4;s:11:"50000000461";}}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:18;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"dummy";}s:5:"attrs";a:3:{s:3:"tag";i:1;s:4:"mva1";i:1073741826;s:4:"mva2";i:1073741826;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:4:"mva1";a:5:{i:0;s:11:"10000000110";i:1;s:11:"10000000111";i:2;s:11:"10000000112";i:3;s:11:"10000000113";i:4;s:11:"10000000124";}s:4:"mva2";a:5:{i:0;s:11:"50000000110";i:1;s:11:"50000000111";i:2;s:11:"50000000112";i:3;s:11:"50000000113";i:4;s:11:"50000000124";}}}i:2
;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:4:"mva1";a:5:{i:0;s:11:"10000000210";i:1;s:11:"10000000213";i:2;s:11:"10000000221";i:3;s:11:"10000000224";i:4;s:11:"10000000242";}s:4:"mva2";a:5:{i:0;s:11:"50000000210";i:1;s:11:"50000000213";i:2;s:11:"50000000221";i:3;s:11:"50000000224";i:4;s:11:"50000000242";}}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:4:"mva1";a:5:{i:0;s:11:"10000000310";i:1;s:11:"10000000314";i:2;s:11:"10000000332";i:3;s:11:"10000000391";i:4;s:11:"10000000393";}s:4:"mva2";a:5:{i:0;s:11:"50000000310";i:1;s:11:"50000000314";i:2;s:11:"50000000332";i:3;s:11:"50000000391";i:4;s:11:"50000000393";}}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:4:"mva1";a:5:{i:0;s:11:"10000000410";i:1;s:11:"10000000413";i:2;s:11:"10000000424";i:3;s:11:"10000000442";i:4;s:11:"10000000461";}s:4:"mva2";a:5:{i:0;s:11:"50000000410";i:1;s:11:"50000000413";i:2;s:11:"50000000424";i:3;s:11:"50000000442";i:4;s:11:"50000000461";}}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:19;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"dummy";}s:5:"attrs";a:3:{s:3:"tag";i:1;s:4:"mva1";i:1073741826;s:4:"mva2";i:1073741826;}s:7:"matches";a:4:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:4:"mva1";a:5:{i:0;s:11:"10000000410";i:1;s:11:"10000000413";i:2;s:11:"10000000424";i:3;s:11:"10000000442";i:4;s:11:"10000000461";}s:4:"mva2";a:5:{i:0;s:11:"50000000410";i:1;s:11:"50000000413";i:2;s:11:"50000000424";i:3;s:11:"50000000442";i:4;s:11:"50000000461";}}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:4:"mva1";a:5:{i:0;s:11:"10000000310";i:1;s:11:"10000000314";i:2;s:11:"10000000332";i:3;s:11:"10000000391";i:4;s:11:"10000000393";}s:4:"mva2";a:5:{i:0;s:11:"50000000310";i:1;s:11:"50000000314";i:2;s:11:"50000000332";i:3;s:11:"50000000391";i:4;s:11:"50000000393";}}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:4:"mva1";a:5:{i:0;s:11:"10000000210";i:1;s:11:"10000000213";i:2;s:11:"10000000221";i:3;s:11:"10000000224";i:4;s:11:"10000000242";}s:4:"mva2";a:5:{i:0;s:11:"50000000210";i:1;s:11:"50000000213";i:2;s:11:"50000000221";i:3;s:11:"50000000224";i:4;s:11:"50000000242";}}}i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:4:"mva1";a:5:{i:0;s:11:"10000000110";i:1;s:11:"10000000111";i:2;s:11:"10000000112";i:3;s:11:"10000000113";i:4;s:11:"10000000124";}s:4:"mva2";a:5:{i:0;s:11:"50000000110";i:1;s:11:"50000000111";i:2;s:11:"50000000112";i:3;s:11:"50000000113";i:4;s:11:"50000000124";}}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:20;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"dummy";}s:5:"attrs";a:3:{s:3:"tag";i:1;s:3:"mva";i:1073741825;s:4:"mva2";i:1073741826;}s:7:"matches";a:10:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:3:"mva";a:3:{i:0;s:1:"1";i:1;s:1:"2";i:2;s:1:"3";}s:4:"mva2";a:5:{i:0;s:11:"50000000110";i:1;s:11:"50000000111";i:2;s:11:"50000000112";i:3;s:11:"50000000113";i:4;s:11:"50000000124";}}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:3:"mva";a:3:{i:0;s:1:"3";i:1;s:1:"4";i:2;s:1:"5";}s:4:"mva2";a:5:{i:0;s:11:"50000000210";i:1;s:11:"50000000213";i:2;s:11:"50000000221";i:3;s:11:"50000000224";i:4;s:11:"50000000242";}}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:
3:"mva";a:3:{i:0;s:1:"4";i:1;s:1:"5";i:2;s:1:"6";}s:4:"mva2";a:5:{i:0;s:11:"50000000310";i:1;s:11:"50000000314";i:2;s:11:"50000000332";i:3;s:11:"50000000391";i:4;s:11:"50000000393";}}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:3:"mva";a:3:{i:0;s:1:"1";i:1;s:1:"2";i:2;s:1:"3";}s:4:"mva2";a:5:{i:0;s:11:"50000000410";i:1;s:11:"50000000413";i:2;s:11:"50000000424";i:3;s:11:"50000000442";i:4;s:11:"50000000461";}}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:3:"mva";a:2:{i:0;s:1:"3";i:1;s:1:"5";}s:4:"mva2";a:0:{}}}i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:3:"mva";a:2:{i:0;s:1:"3";i:1;s:1:"5";}s:4:"mva2";a:0:{}}}i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:3:"mva";a:2:{i:0;s:1:"4";i:1;s:1:"5";}s:4:"mva2";a:0:{}}}i:8;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:3:"mva";a:3:{i:0;s:1:"4";i:1;s:1:"5";i:2;s:1:"6";}s:4:"mva2";a:0:{}}}i:9;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:3:"mva";a:1:{i:0;s:1:"4";}s:4:"mva2";a:0:{}}}i:10;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:3:"mva";a:3:{i:0;s:1:"3";i:1;s:1:"4";i:2;s:1:"5";}s:4:"mva2";a:0:{}}}}s:5:"total";s:2:"10";s:11:"total_found";s:2:"10";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:21;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"dummy";}s:5:"attrs";a:3:{s:3:"tag";i:1;s:3:"mva";i:1073741825;s:4:"mva2";i:1073741826;}s:7:"matches";a:10:{i:10;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:3:"mva";a:3:{i:0;s:1:"3";i:1;s:1:"4";i:2;s:1:"5";}s:4:"mva2";a:0:{}}}i:9;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:3:"mva";a:1:{i:0;s:1:"4";}s:4:"mva2";a:0:{}}}i:8;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:3:"mva";a:3:{i:0;s:1:"4";i:1;s:1:"5";i:2;s:1:"6";}s:4:"mva2";a:0:{}}}i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:3:"mva";a:2:{i:0;s:1:"4";i:1;s:1:"5";}s:4:"mva2";a:0:{}}}i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:3:"mva";a:2:{i:0;s:1:"3";i:1;s:1:"5";}s:4:"mva2";a:0:{}}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:3:"mva";a:2:{i:0;s:1:"3";i:1;s:1:"5";}s:4:"mva2";a:0:{}}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:3:"mva";a:3:{i:0;s:1:"1";i:1;s:1:"2";i:2;s:1:"3";}s:4:"mva2";a:5:{i:0;s:11:"50000000410";i:1;s:11:"50000000413";i:2;s:11:"50000000424";i:3;s:11:"50000000442";i:4;s:11:"50000000461";}}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:3:"mva";a:3:{i:0;s:1:"4";i:1;s:1:"5";i:2;s:1:"6";}s:4:"mva2";a:5:{i:0;s:11:"50000000310";i:1;s:11:"50000000314";i:2;s:11:"50000000332";i:3;s:11:"50000000391";i:4;s:11:"50000000393";}}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:3:"mva";a:3:{i:0;s:1:"3";i:1;s:1:"4";i:2;s:1:"5";}s:4:"mva2";a:5:{i:0;s:11:"50000000210";i:1;s:11:"50000000213";i:2;s:11:"50000000221";i:3;s:11:"50000000224";i:4;s:11:"50000000242";}}}i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:3:"mva";a:3:{i:0;s:1:"1";i:1;s:1:"2";i:2;s:1:"3";}s:4:"mva2";a:5:{i:0;s:11:"50000000110";i:1;s:11:"50000000111";i:2;s:11:"50000000112";i:3;s:11:"50000000113";i:4;s:11:"50000000124";}}}}s:5:"total";s:2:"10";s:11:"total_found";s:2:"10";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:22;a:3:{s:8:"sphinxql";s:19:"SELECT * FROM 
mva64";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:5:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:4:"mva1";s:59:"10000000110,10000000111,10000000112,10000000113,10000000124";s:4:"mva2";s:59:"50000000110,50000000111,50000000112,50000000113,50000000124";}i:1;a:5:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:4:"mva1";s:59:"10000000210,10000000213,10000000221,10000000224,10000000242";s:4:"mva2";s:59:"50000000210,50000000213,50000000221,50000000224,50000000242";}i:2;a:5:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:4:"mva1";s:59:"10000000310,10000000314,10000000332,10000000391,10000000393";s:4:"mva2";s:59:"50000000310,50000000314,50000000332,50000000391,50000000393";}i:3;a:5:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:4:"mva1";s:59:"10000000410,10000000413,10000000424,10000000442,10000000461";s:4:"mva2";s:59:"50000000410,50000000413,50000000424,50000000442,50000000461";}}}i:23;a:3:{s:8:"sphinxql";s:38:"SELECT * FROM mva64 ORDER BY mva1 desc";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:5:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:4:"mva1";s:59:"10000000410,10000000413,10000000424,10000000442,10000000461";s:4:"mva2";s:59:"50000000410,50000000413,50000000424,50000000442,50000000461";}i:1;a:5:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:4:"mva1";s:59:"10000000310,10000000314,10000000332,10000000391,10000000393";s:4:"mva2";s:59:"50000000310,50000000314,50000000332,50000000391,50000000393";}i:2;a:5:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:4:"mva1";s:59:"10000000210,10000000213,10000000221,10000000224,10000000242";s:4:"mva2";s:59:"50000000210,50000000213,50000000221,50000000224,50000000242";}i:3;a:5:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:4:"mva1";s:59:"10000000110,10000000111,10000000112,10000000113,10000000124";s:4:"mva2";s:59:"50000000110,50000000111,50000000112,50000000113,50000000124";}}}i:24;a:3:{s:8:"sphinxql";s:37:"SELECT * FROM mva64 ORDER BY mva2 asc";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:5:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:4:"mva1";s:59:"10000000110,10000000111,10000000112,10000000113,10000000124";s:4:"mva2";s:59:"50000000110,50000000111,50000000112,50000000113,50000000124";}i:1;a:5:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:4:"mva1";s:59:"10000000210,10000000213,10000000221,10000000224,10000000242";s:4:"mva2";s:59:"50000000210,50000000213,50000000221,50000000224,50000000242";}i:2;a:5:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:4:"mva1";s:59:"10000000310,10000000314,10000000332,10000000391,10000000393";s:4:"mva2";s:59:"50000000310,50000000314,50000000332,50000000391,50000000393";}i:3;a:5:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:4:"mva1";s:59:"10000000410,10000000413,10000000424,10000000442,10000000461";s:4:"mva2";s:59:"50000000410,50000000413,50000000424,50000000442,50000000461";}}}i:25;a:3:{s:8:"sphinxql";s:23:"SELECT * FROM 
mva_mixed";s:10:"total_rows";i:10;s:4:"rows";a:10:{i:0;a:5:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:5:"1,2,3";s:4:"mva2";s:59:"50000000110,50000000111,50000000112,50000000113,50000000124";}i:1;a:5:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:5:"3,4,5";s:4:"mva2";s:59:"50000000210,50000000213,50000000221,50000000224,50000000242";}i:2;a:5:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:5:"4,5,6";s:4:"mva2";s:59:"50000000310,50000000314,50000000332,50000000391,50000000393";}i:3;a:5:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:5:"1,2,3";s:4:"mva2";s:59:"50000000410,50000000413,50000000424,50000000442,50000000461";}i:4;a:5:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:3:"3,5";s:4:"mva2";s:0:"";}i:5;a:5:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:3:"3,5";s:4:"mva2";s:0:"";}i:6;a:5:{s:2:"id";s:1:"7";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:3:"4,5";s:4:"mva2";s:0:"";}i:7;a:5:{s:2:"id";s:1:"8";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:5:"4,5,6";s:4:"mva2";s:0:"";}i:8;a:5:{s:2:"id";s:1:"9";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:1:"4";s:4:"mva2";s:0:"";}i:9;a:5:{s:2:"id";s:2:"10";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:5:"3,4,5";s:4:"mva2";s:0:"";}}}i:26;a:3:{s:8:"sphinxql";s:55:"SELECT * FROM mva_mixed GROUP BY mva ORDER BY mva2 desc";s:10:"total_rows";i:6;s:4:"rows";a:6:{i:0;a:7:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:5:"4,5,6";s:4:"mva2";s:59:"50000000310,50000000314,50000000332,50000000391,50000000393";s:8:"@groupby";s:1:"6";s:6:"@count";s:1:"2";}i:1;a:7:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:5:"3,4,5";s:4:"mva2";s:59:"50000000210,50000000213,50000000221,50000000224,50000000242";s:8:"@groupby";s:1:"5";s:6:"@count";s:1:"7";}i:2;a:7:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:5:"3,4,5";s:4:"mva2";s:59:"50000000210,50000000213,50000000221,50000000224,50000000242";s:8:"@groupby";s:1:"4";s:6:"@count";s:1:"6";}i:3;a:7:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:5:"1,2,3";s:4:"mva2";s:59:"50000000110,50000000111,50000000112,50000000113,50000000124";s:8:"@groupby";s:1:"3";s:6:"@count";s:1:"6";}i:4;a:7:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:5:"1,2,3";s:4:"mva2";s:59:"50000000110,50000000111,50000000112,50000000113,50000000124";s:8:"@groupby";s:1:"2";s:6:"@count";s:1:"2";}i:5;a:7:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:5:"1,2,3";s:4:"mva2";s:59:"50000000110,50000000111,50000000112,50000000113,50000000124";s:8:"@groupby";s:1:"1";s:6:"@count";s:1:"2";}}}i:27;a:3:{s:8:"sphinxql";s:42:"SELECT * FROM mva_mixed ORDER BY mva2 
desc";s:10:"total_rows";i:10;s:4:"rows";a:10:{i:0;a:5:{s:2:"id";s:2:"10";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:5:"3,4,5";s:4:"mva2";s:0:"";}i:1;a:5:{s:2:"id";s:1:"9";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:1:"4";s:4:"mva2";s:0:"";}i:2;a:5:{s:2:"id";s:1:"8";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:5:"4,5,6";s:4:"mva2";s:0:"";}i:3;a:5:{s:2:"id";s:1:"7";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:3:"4,5";s:4:"mva2";s:0:"";}i:4;a:5:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:3:"3,5";s:4:"mva2";s:0:"";}i:5;a:5:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:3:"3,5";s:4:"mva2";s:0:"";}i:6;a:5:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:5:"1,2,3";s:4:"mva2";s:59:"50000000410,50000000413,50000000424,50000000442,50000000461";}i:7;a:5:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:5:"4,5,6";s:4:"mva2";s:59:"50000000310,50000000314,50000000332,50000000391,50000000393";}i:8;a:5:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:5:"3,4,5";s:4:"mva2";s:59:"50000000210,50000000213,50000000221,50000000224,50000000242";}i:9;a:5:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:5:"1,2,3";s:4:"mva2";s:59:"50000000110,50000000111,50000000112,50000000113,50000000124";}}}}i:1;a:28:{i:0;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:5:{s:3:"tag";i:1;s:4:"time";i:2;s:3:"mva";i:1073741826;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:4:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"tag";s:1:"4";s:4:"time";s:10:"1196510400";s:3:"mva";a:2:{i:0;i:4;i:1;i:5;}s:8:"@groupby";s:1:"4";s:6:"@count";s:1:"4";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"tag";s:1:"3";s:4:"time";s:10:"1120478400";s:3:"mva";a:3:{i:0;i:1;i:1;i:2;i:2;i:3;}s:8:"@groupby";s:1:"3";s:6:"@count";s:1:"3";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"tag";s:1:"2";s:4:"time";s:10:"1146571200";s:3:"mva";a:3:{i:0;i:3;i:1;i:4;i:2;i:5;}s:8:"@groupby";s:1:"2";s:6:"@count";s:1:"2";}}i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"tag";s:1:"1";s:4:"time";s:10:"1196510400";s:3:"mva";a:3:{i:0;i:1;i:1;i:2;i:2;i:3;}s:8:"@groupby";s:1:"1";s:6:"@count";s:1:"1";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:1;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:5:{s:3:"tag";i:1;s:4:"time";i:2;s:3:"mva";i:1073741826;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"tag";s:1:"1";s:4:"time";s:10:"1196510400";s:3:"mva";a:3:{i:0;i:1;i:1;i:2;i:2;i:3;}s:8:"@groupby";s:1:"1";s:6:"@count";s:1:"1";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"tag";s:1:"2";s:4:"time";s:10:"1146571200";s:3:"mva";a:3:{i:0;i:3;i:1;i:4;i:2;i:5;}s:8:"@groupby";s:1:"2";s:6:"@count";s:1:"2";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"tag";s:1:"3";s:4:"time";s:10:"1120478400";s:3:"mva";a:3:{i:0;i:1;i:1;i:2;i:2;i:3;}s:8:"@groupby";s:1:"3";s:6:"@count";s:1:"3";}}i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"tag";s:1:"4";s:4:"time";s:10:"1196510400";s:3:"mva";a:2:{i:0;i:4;i:1;i:5;}s:8:"@groupby";s:1:"4";s:6:"@count";s:1:"4";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:2;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status
";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:5:{s:3:"tag";i:1;s:4:"time";i:2;s:3:"mva";i:1073741826;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:4:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"tag";s:1:"4";s:4:"time";s:10:"1196510400";s:3:"mva";a:2:{i:0;i:4;i:1;i:5;}s:8:"@groupby";s:1:"4";s:6:"@count";s:1:"4";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"tag";s:1:"3";s:4:"time";s:10:"1120478400";s:3:"mva";a:3:{i:0;i:1;i:1;i:2;i:2;i:3;}s:8:"@groupby";s:1:"3";s:6:"@count";s:1:"3";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"tag";s:1:"2";s:4:"time";s:10:"1146571200";s:3:"mva";a:3:{i:0;i:3;i:1;i:4;i:2;i:5;}s:8:"@groupby";s:1:"2";s:6:"@count";s:1:"2";}}i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"tag";s:1:"1";s:4:"time";s:10:"1196510400";s:3:"mva";a:3:{i:0;i:1;i:1;i:2;i:2;i:3;}s:8:"@groupby";s:1:"1";s:6:"@count";s:1:"1";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:3;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:5:{s:3:"tag";i:1;s:4:"time";i:2;s:3:"mva";i:1073741826;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"tag";s:1:"1";s:4:"time";s:10:"1196510400";s:3:"mva";a:3:{i:0;i:1;i:1;i:2;i:2;i:3;}s:8:"@groupby";s:1:"1";s:6:"@count";s:1:"1";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"tag";s:1:"2";s:4:"time";s:10:"1146571200";s:3:"mva";a:3:{i:0;i:3;i:1;i:4;i:2;i:5;}s:8:"@groupby";s:1:"2";s:6:"@count";s:1:"2";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"tag";s:1:"3";s:4:"time";s:10:"1120478400";s:3:"mva";a:3:{i:0;i:1;i:1;i:2;i:2;i:3;}s:8:"@groupby";s:1:"3";s:6:"@count";s:1:"3";}}i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"tag";s:1:"4";s:4:"time";s:10:"1196510400";s:3:"mva";a:2:{i:0;i:4;i:1;i:5;}s:8:"@groupby";s:1:"4";s:6:"@count";s:1:"4";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:4;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:5:{s:3:"tag";i:1;s:4:"time";i:2;s:3:"mva";i:1073741826;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:4:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"tag";s:1:"4";s:4:"time";s:10:"1196510400";s:3:"mva";a:2:{i:0;i:4;i:1;i:5;}s:8:"@groupby";s:1:"4";s:6:"@count";s:1:"4";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"tag";s:1:"3";s:4:"time";s:10:"1120478400";s:3:"mva";a:3:{i:0;i:1;i:1;i:2;i:2;i:3;}s:8:"@groupby";s:1:"3";s:6:"@count";s:1:"3";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"tag";s:1:"2";s:4:"time";s:10:"1146571200";s:3:"mva";a:3:{i:0;i:3;i:1;i:4;i:2;i:5;}s:8:"@groupby";s:1:"2";s:6:"@count";s:1:"2";}}i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"tag";s:1:"1";s:4:"time";s:10:"1196510400";s:3:"mva";a:3:{i:0;i:1;i:1;i:2;i:2;i:3;}s:8:"@groupby";s:1:"1";s:6:"@count";s:1:"1";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:5;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:5:{s:3:"tag";i:1;s:4:"time";i:2;s:3:"mva";i:1073741826;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:6:{i:1;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"1";s:4:"time";s:10:"1196510400";s:3:"mva";a:3:{i:0;i:1;i:1;i:2;i:2;i:3;}s:8:"@groupby";s:8:"20071201";s:6:"@count";s:1:"2";}
}i:2;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"2";s:4:"time";s:10:"1146571200";s:3:"mva";a:3:{i:0;i:3;i:1;i:4;i:2;i:5;}s:8:"@groupby";s:8:"20060502";s:6:"@count";s:1:"2";}}i:4;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"3";s:4:"time";s:10:"1120478400";s:3:"mva";a:3:{i:0;i:1;i:1;i:2;i:2;i:3;}s:8:"@groupby";s:8:"20050704";s:6:"@count";s:1:"2";}}i:3;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"2";s:4:"time";s:10:"1117800000";s:3:"mva";a:3:{i:0;i:4;i:1;i:5;i:2;i:6;}s:8:"@groupby";s:8:"20050603";s:6:"@count";s:1:"2";}}i:6;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"3";s:4:"time";s:10:"1062849600";s:3:"mva";a:2:{i:0;i:3;i:1;i:5;}s:8:"@groupby";s:8:"20030906";s:6:"@count";s:1:"1";}}i:5;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"3";s:4:"time";s:10:"1062763200";s:3:"mva";a:2:{i:0;i:3;i:1;i:5;}s:8:"@groupby";s:8:"20030905";s:6:"@count";s:1:"1";}}}s:5:"total";s:1:"6";s:11:"total_found";s:1:"6";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:5:"test*";a:2:{s:4:"docs";s:2:"10";s:4:"hits";s:2:"10";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"test*";}i:6;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:5:{s:3:"tag";i:1;s:4:"time";i:2;s:3:"mva";i:1073741826;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"1";s:4:"time";s:10:"1196510400";s:3:"mva";a:3:{i:0;i:1;i:1;i:2;i:2;i:3;}s:8:"@groupby";s:7:"2007329";s:6:"@count";s:1:"2";}}i:2;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"2";s:4:"time";s:10:"1146571200";s:3:"mva";a:3:{i:0;i:3;i:1;i:4;i:2;i:5;}s:8:"@groupby";s:7:"2006120";s:6:"@count";s:1:"2";}}i:4;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"3";s:4:"time";s:10:"1120478400";s:3:"mva";a:3:{i:0;i:1;i:1;i:2;i:2;i:3;}s:8:"@groupby";s:7:"2005184";s:6:"@count";s:1:"2";}}i:3;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"2";s:4:"time";s:10:"1117800000";s:3:"mva";a:3:{i:0;i:4;i:1;i:5;i:2;i:6;}s:8:"@groupby";s:7:"2005149";s:6:"@count";s:1:"2";}}i:5;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"3";s:4:"time";s:10:"1062763200";s:3:"mva";a:2:{i:0;i:3;i:1;i:5;}s:8:"@groupby";s:7:"2003243";s:6:"@count";s:1:"2";}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:5:"test*";a:2:{s:4:"docs";s:2:"10";s:4:"hits";s:2:"10";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"test*";}i:7;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:5:{s:3:"tag";i:1;s:4:"time";i:2;s:3:"mva";i:1073741826;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"1";s:4:"time";s:10:"1196510400";s:3:"mva";a:3:{i:0;i:1;i:1;i:2;i:2;i:3;}s:8:"@groupby";s:6:"200712";s:6:"@count";s:1:"2";}}i:2;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"2";s:4:"time";s:10:"1146571200";s:3:"mva";a:3:{i:0;i:3;i:1;i:4;i:2;i:5;}s:8:"@groupby";s:6:"200605";s:6:"@count";s:1:"2";}}i:4;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"3";s:4:"time";s:10:"1120478400";s:3:"mva";a:3:{i:0;i:1;i:1;i:2;i:2;i:3;}s:8:"@groupby";s:6:"200507";s:6:"@count";s:1:"2";}}i:3;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"2";s:4:"time";s:10:"1117800000";s:3:"mva";a:3:{i:0;i:4;i:1;i:5;i:2;i:6;}s:8:"@groupby";s:6:"200506";s:6:"@count";s:1:"2";}}i:5;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag"
;s:1:"3";s:4:"time";s:10:"1062763200";s:3:"mva";a:2:{i:0;i:3;i:1;i:5;}s:8:"@groupby";s:6:"200309";s:6:"@count";s:1:"2";}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:5:"test*";a:2:{s:4:"docs";s:2:"10";s:4:"hits";s:2:"10";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"test*";}i:8;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:5:{s:3:"tag";i:1;s:4:"time";i:2;s:3:"mva";i:1073741826;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"1";s:4:"time";s:10:"1196510400";s:3:"mva";a:3:{i:0;i:1;i:1;i:2;i:2;i:3;}s:8:"@groupby";s:4:"2007";s:6:"@count";s:1:"2";}}i:2;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"2";s:4:"time";s:10:"1146571200";s:3:"mva";a:3:{i:0;i:3;i:1;i:4;i:2;i:5;}s:8:"@groupby";s:4:"2006";s:6:"@count";s:1:"2";}}i:3;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"2";s:4:"time";s:10:"1117800000";s:3:"mva";a:3:{i:0;i:4;i:1;i:5;i:2;i:6;}s:8:"@groupby";s:4:"2005";s:6:"@count";s:1:"4";}}i:5;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"3";s:4:"time";s:10:"1062763200";s:3:"mva";a:2:{i:0;i:3;i:1;i:5;}s:8:"@groupby";s:4:"2003";s:6:"@count";s:1:"2";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:5:"test*";a:2:{s:4:"docs";s:2:"10";s:4:"hits";s:2:"10";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"test*";}i:9;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:5:{s:3:"tag";i:1;s:4:"time";i:2;s:3:"mva";i:1073741826;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:6:{i:0;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"2";s:4:"time";s:10:"1117800000";s:3:"mva";a:3:{i:0;i:4;i:1;i:5;i:2;i:6;}s:8:"@groupby";s:1:"6";s:6:"@count";s:1:"2";}}i:1;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"2";s:4:"time";s:10:"1146571200";s:3:"mva";a:3:{i:0;i:3;i:1;i:4;i:2;i:5;}s:8:"@groupby";s:1:"5";s:6:"@count";s:1:"7";}}i:2;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"2";s:4:"time";s:10:"1146571200";s:3:"mva";a:3:{i:0;i:3;i:1;i:4;i:2;i:5;}s:8:"@groupby";s:1:"4";s:6:"@count";s:1:"6";}}i:3;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"1";s:4:"time";s:10:"1196510400";s:3:"mva";a:3:{i:0;i:1;i:1;i:2;i:2;i:3;}s:8:"@groupby";s:1:"3";s:6:"@count";s:1:"6";}}i:4;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"1";s:4:"time";s:10:"1196510400";s:3:"mva";a:3:{i:0;i:1;i:1;i:2;i:2;i:3;}s:8:"@groupby";s:1:"2";s:6:"@count";s:1:"2";}}i:5;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"1";s:4:"time";s:10:"1196510400";s:3:"mva";a:3:{i:0;i:1;i:1;i:2;i:2;i:3;}s:8:"@groupby";s:1:"1";s:6:"@count";s:1:"2";}}}s:5:"total";s:1:"6";s:11:"total_found";s:1:"6";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:5:"test*";a:2:{s:4:"docs";s:2:"10";s:4:"hits";s:2:"10";}}s:8:"resarray";i:1;s:8:"roundoff";i:0;s:5:"query";s:5:"test*";}i:10;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:5:{s:3:"tag";i:1;s:4:"time";i:2;s:3:"mva";i:1073741826;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:6:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"1";s:4:"time";s:10:"1196510400";s:3:"mva";a:3:{i:0;i:1;i:1;i:2;i:2;i:3;}s:8:"@groupby";s:1:"1";s:6:"@count"
;s:1:"2";}}i:1;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"1";s:4:"time";s:10:"1196510400";s:3:"mva";a:3:{i:0;i:1;i:1;i:2;i:2;i:3;}s:8:"@groupby";s:1:"2";s:6:"@count";s:1:"2";}}i:2;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"1";s:4:"time";s:10:"1196510400";s:3:"mva";a:3:{i:0;i:1;i:1;i:2;i:2;i:3;}s:8:"@groupby";s:1:"3";s:6:"@count";s:1:"6";}}i:3;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"2";s:4:"time";s:10:"1146571200";s:3:"mva";a:3:{i:0;i:3;i:1;i:4;i:2;i:5;}s:8:"@groupby";s:1:"4";s:6:"@count";s:1:"6";}}i:4;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"2";s:4:"time";s:10:"1146571200";s:3:"mva";a:3:{i:0;i:3;i:1;i:4;i:2;i:5;}s:8:"@groupby";s:1:"5";s:6:"@count";s:1:"7";}}i:5;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"2";s:4:"time";s:10:"1117800000";s:3:"mva";a:3:{i:0;i:4;i:1;i:5;i:2;i:6;}s:8:"@groupby";s:1:"6";s:6:"@count";s:1:"2";}}}s:5:"total";s:1:"6";s:11:"total_found";s:1:"6";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:5:"test*";a:2:{s:4:"docs";s:2:"10";s:4:"hits";s:2:"10";}}s:8:"resarray";i:1;s:8:"roundoff";i:0;s:5:"query";s:5:"test*";}i:11;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:6:{s:3:"tag";i:1;s:4:"time";i:2;s:3:"mva";i:1073741826;s:8:"@groupby";i:1;s:6:"@count";i:1;s:9:"@distinct";i:1;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:6:{s:3:"tag";s:1:"1";s:4:"time";s:10:"1196510400";s:3:"mva";a:3:{i:0;i:1;i:1;i:2;i:2;i:3;}s:8:"@groupby";s:4:"2007";s:6:"@count";s:1:"2";s:9:"@distinct";s:1:"2";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:6:{s:3:"tag";s:1:"2";s:4:"time";s:10:"1146571200";s:3:"mva";a:3:{i:0;i:3;i:1;i:4;i:2;i:5;}s:8:"@groupby";s:4:"2006";s:6:"@count";s:1:"2";s:9:"@distinct";s:1:"2";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:6:{s:3:"tag";s:1:"2";s:4:"time";s:10:"1117800000";s:3:"mva";a:3:{i:0;i:4;i:1;i:5;i:2;i:6;}s:8:"@groupby";s:4:"2005";s:6:"@count";s:1:"4";s:9:"@distinct";s:1:"3";}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:6:{s:3:"tag";s:1:"3";s:4:"time";s:10:"1062763200";s:3:"mva";a:2:{i:0;i:3;i:1;i:5;}s:8:"@groupby";s:4:"2003";s:6:"@count";s:1:"2";s:9:"@distinct";s:1:"1";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.001";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:12;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:6:{s:3:"tag";i:1;s:4:"time";i:2;s:3:"mva";i:1073741826;s:8:"@groupby";i:1;s:6:"@count";i:1;s:9:"@distinct";i:1;}s:7:"matches";a:4:{i:11;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:6:{s:3:"tag";s:2:"21";s:4:"time";s:10:"1196510400";s:3:"mva";a:3:{i:0;i:1;i:1;i:2;i:2;i:3;}s:8:"@groupby";s:4:"2007";s:6:"@count";s:1:"2";s:9:"@distinct";s:1:"2";}}i:12;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:6:{s:3:"tag";s:2:"22";s:4:"time";s:10:"1146571200";s:3:"mva";a:3:{i:0;i:3;i:1;i:4;i:2;i:5;}s:8:"@groupby";s:4:"2006";s:6:"@count";s:1:"2";s:9:"@distinct";s:1:"2";}}i:13;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:6:{s:3:"tag";s:2:"22";s:4:"time";s:10:"1117800000";s:3:"mva";a:3:{i:0;i:4;i:1;i:5;i:2;i:6;}s:8:"@groupby";s:4:"2005";s:6:"@count";s:1:"4";s:9:"@distinct";s:1:"3";}}i:15;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:6:{s:3:"tag";s:2:"23";s:4:"time";s:10:"1062763200";s:3:"mva";a:2:{i:0;i:3;i:1;i:5;}s:8:"@groupby";s:4:"2003";s:6:"@count";s:1:"2";s:9:"@distinct";s:1:"1";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:
"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:13;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:6:{s:3:"tag";i:1;s:4:"time";i:2;s:3:"mva";i:1073741826;s:8:"@groupby";i:1;s:6:"@count";i:1;s:9:"@distinct";i:1;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:6:{s:3:"tag";s:1:"1";s:4:"time";s:10:"1196510400";s:3:"mva";a:3:{i:0;i:1;i:1;i:2;i:2;i:3;}s:8:"@groupby";s:4:"2007";s:6:"@count";s:1:"4";s:9:"@distinct";s:1:"4";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:6:{s:3:"tag";s:1:"2";s:4:"time";s:10:"1146571200";s:3:"mva";a:3:{i:0;i:3;i:1;i:4;i:2;i:5;}s:8:"@groupby";s:4:"2006";s:6:"@count";s:1:"4";s:9:"@distinct";s:1:"4";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:6:{s:3:"tag";s:1:"2";s:4:"time";s:10:"1117800000";s:3:"mva";a:3:{i:0;i:4;i:1;i:5;i:2;i:6;}s:8:"@groupby";s:4:"2005";s:6:"@count";s:1:"8";s:9:"@distinct";s:1:"6";}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:6:{s:3:"tag";s:1:"3";s:4:"time";s:10:"1062763200";s:3:"mva";a:2:{i:0;i:3;i:1;i:5;}s:8:"@groupby";s:4:"2003";s:6:"@count";s:1:"4";s:9:"@distinct";s:1:"2";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.002";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:14;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:6:{s:3:"tag";i:1;s:4:"time";i:2;s:3:"mva";i:1073741826;s:8:"@groupby";i:1;s:6:"@count";i:1;s:9:"@distinct";i:1;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:6:{s:3:"tag";s:1:"1";s:4:"time";s:10:"1196510400";s:3:"mva";a:3:{i:0;i:1;i:1;i:2;i:2;i:3;}s:8:"@groupby";s:4:"2007";s:6:"@count";s:1:"3";s:9:"@distinct";s:1:"3";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:15;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:5:{s:3:"tag";i:1;s:4:"time";i:2;s:3:"mva";i:1073741826;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"1";s:4:"time";s:10:"1196510400";s:3:"mva";a:3:{i:0;i:1;i:1;i:2;i:2;i:3;}s:8:"@groupby";s:1:"1";s:6:"@count";s:1:"1";}}i:3;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"2";s:4:"time";s:10:"1117800000";s:3:"mva";a:3:{i:0;i:4;i:1;i:5;i:2;i:6;}s:8:"@groupby";s:1:"2";s:6:"@count";s:1:"2";}}i:5;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"3";s:4:"time";s:10:"1062763200";s:3:"mva";a:2:{i:0;i:3;i:1;i:5;}s:8:"@groupby";s:1:"3";s:6:"@count";s:1:"3";}}i:9;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:3:"tag";s:1:"4";s:4:"time";s:10:"1117800000";s:3:"mva";a:1:{i:0;i:4;}s:8:"@groupby";s:1:"4";s:6:"@count";s:1:"4";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:5:"test*";a:2:{s:4:"docs";s:2:"10";s:4:"hits";s:2:"10";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"test*";}i:16;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:6:{s:3:"tag";i:1;s:4:"time";i:2;s:3:"mva";i:1073741826;s:1:"q";i:6;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:6:{s:3:"tag";s:1:"1";s:4:"time";s:10:"1196510400";s:3:"mva";a:3:{i:0;i:1;i:1;i:2;i:2;i:3;}s:1:"q";i:10;s:8:"@groupby";s:1:"1";s:6:"@count";s:1:"1";}}i:3;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:6:{s:3:"tag";s:1:"2";s:4:"time";s:10:"1117800000";s:3:"mva";a:3:{i:0;i:4;i:1;i:5;i:
2;i:6;}s:1:"q";i:30;s:8:"@groupby";s:1:"2";s:6:"@count";s:1:"2";}}i:6;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:6:{s:3:"tag";s:1:"3";s:4:"time";s:10:"1062849600";s:3:"mva";a:2:{i:0;i:3;i:1;i:5;}s:1:"q";i:60;s:8:"@groupby";s:1:"3";s:6:"@count";s:1:"3";}}i:10;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:6:{s:3:"tag";s:1:"4";s:4:"time";s:10:"1120478400";s:3:"mva";a:3:{i:0;i:3;i:1;i:4;i:2;i:5;}s:1:"q";i:100;s:8:"@groupby";s:1:"4";s:6:"@count";s:1:"4";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:5:"test*";a:2:{s:4:"docs";s:2:"10";s:4:"hits";s:2:"10";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"test*";}i:17;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"dummy";}s:5:"attrs";a:3:{s:3:"tag";i:1;s:4:"mva1";i:1073741826;s:4:"mva2";i:1073741826;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:4:"mva1";a:5:{i:0;s:11:"10000000110";i:1;s:11:"10000000111";i:2;s:11:"10000000112";i:3;s:11:"10000000113";i:4;s:11:"10000000124";}s:4:"mva2";a:5:{i:0;s:11:"50000000110";i:1;s:11:"50000000111";i:2;s:11:"50000000112";i:3;s:11:"50000000113";i:4;s:11:"50000000124";}}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:4:"mva1";a:5:{i:0;s:11:"10000000210";i:1;s:11:"10000000213";i:2;s:11:"10000000221";i:3;s:11:"10000000224";i:4;s:11:"10000000242";}s:4:"mva2";a:5:{i:0;s:11:"50000000210";i:1;s:11:"50000000213";i:2;s:11:"50000000221";i:3;s:11:"50000000224";i:4;s:11:"50000000242";}}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:4:"mva1";a:5:{i:0;s:11:"10000000310";i:1;s:11:"10000000314";i:2;s:11:"10000000332";i:3;s:11:"10000000391";i:4;s:11:"10000000393";}s:4:"mva2";a:5:{i:0;s:11:"50000000310";i:1;s:11:"50000000314";i:2;s:11:"50000000332";i:3;s:11:"50000000391";i:4;s:11:"50000000393";}}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:4:"mva1";a:5:{i:0;s:11:"10000000410";i:1;s:11:"10000000413";i:2;s:11:"10000000424";i:3;s:11:"10000000442";i:4;s:11:"10000000461";}s:4:"mva2";a:5:{i:0;s:11:"50000000410";i:1;s:11:"50000000413";i:2;s:11:"50000000424";i:3;s:11:"50000000442";i:4;s:11:"50000000461";}}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:18;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"dummy";}s:5:"attrs";a:3:{s:3:"tag";i:1;s:4:"mva1";i:1073741826;s:4:"mva2";i:1073741826;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:4:"mva1";a:5:{i:0;s:11:"10000000110";i:1;s:11:"10000000111";i:2;s:11:"10000000112";i:3;s:11:"10000000113";i:4;s:11:"10000000124";}s:4:"mva2";a:5:{i:0;s:11:"50000000110";i:1;s:11:"50000000111";i:2;s:11:"50000000112";i:3;s:11:"50000000113";i:4;s:11:"50000000124";}}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:4:"mva1";a:5:{i:0;s:11:"10000000210";i:1;s:11:"10000000213";i:2;s:11:"10000000221";i:3;s:11:"10000000224";i:4;s:11:"10000000242";}s:4:"mva2";a:5:{i:0;s:11:"50000000210";i:1;s:11:"50000000213";i:2;s:11:"50000000221";i:3;s:11:"50000000224";i:4;s:11:"50000000242";}}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:4:"mva1";a:5:{i:0;s:11:"10000000310";i:1;s:11:"10000000314";i:2;s:11:"10000000332";i:3;s:11:"10000000391";i:4;s:11:"10000000393";}s:4:"mva2";a:5:{i:0;s:11:"50000000310";i:1;s:11:"50000000314";i:2;s:11:"50000000332";i:3;s:11:"50000000391";i:4;s:11:"50000000393";}}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"a
ttrs";a:3:{s:3:"tag";s:2:"11";s:4:"mva1";a:5:{i:0;s:11:"10000000410";i:1;s:11:"10000000413";i:2;s:11:"10000000424";i:3;s:11:"10000000442";i:4;s:11:"10000000461";}s:4:"mva2";a:5:{i:0;s:11:"50000000410";i:1;s:11:"50000000413";i:2;s:11:"50000000424";i:3;s:11:"50000000442";i:4;s:11:"50000000461";}}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:19;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"dummy";}s:5:"attrs";a:3:{s:3:"tag";i:1;s:4:"mva1";i:1073741826;s:4:"mva2";i:1073741826;}s:7:"matches";a:4:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:4:"mva1";a:5:{i:0;s:11:"10000000410";i:1;s:11:"10000000413";i:2;s:11:"10000000424";i:3;s:11:"10000000442";i:4;s:11:"10000000461";}s:4:"mva2";a:5:{i:0;s:11:"50000000410";i:1;s:11:"50000000413";i:2;s:11:"50000000424";i:3;s:11:"50000000442";i:4;s:11:"50000000461";}}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:4:"mva1";a:5:{i:0;s:11:"10000000310";i:1;s:11:"10000000314";i:2;s:11:"10000000332";i:3;s:11:"10000000391";i:4;s:11:"10000000393";}s:4:"mva2";a:5:{i:0;s:11:"50000000310";i:1;s:11:"50000000314";i:2;s:11:"50000000332";i:3;s:11:"50000000391";i:4;s:11:"50000000393";}}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:4:"mva1";a:5:{i:0;s:11:"10000000210";i:1;s:11:"10000000213";i:2;s:11:"10000000221";i:3;s:11:"10000000224";i:4;s:11:"10000000242";}s:4:"mva2";a:5:{i:0;s:11:"50000000210";i:1;s:11:"50000000213";i:2;s:11:"50000000221";i:3;s:11:"50000000224";i:4;s:11:"50000000242";}}}i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:4:"mva1";a:5:{i:0;s:11:"10000000110";i:1;s:11:"10000000111";i:2;s:11:"10000000112";i:3;s:11:"10000000113";i:4;s:11:"10000000124";}s:4:"mva2";a:5:{i:0;s:11:"50000000110";i:1;s:11:"50000000111";i:2;s:11:"50000000112";i:3;s:11:"50000000113";i:4;s:11:"50000000124";}}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:20;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"dummy";}s:5:"attrs";a:3:{s:3:"tag";i:1;s:3:"mva";i:1073741825;s:4:"mva2";i:1073741826;}s:7:"matches";a:10:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:3:"mva";a:3:{i:0;s:1:"1";i:1;s:1:"2";i:2;s:1:"3";}s:4:"mva2";a:5:{i:0;s:11:"50000000110";i:1;s:11:"50000000111";i:2;s:11:"50000000112";i:3;s:11:"50000000113";i:4;s:11:"50000000124";}}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:3:"mva";a:3:{i:0;s:1:"3";i:1;s:1:"4";i:2;s:1:"5";}s:4:"mva2";a:5:{i:0;s:11:"50000000210";i:1;s:11:"50000000213";i:2;s:11:"50000000221";i:3;s:11:"50000000224";i:4;s:11:"50000000242";}}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:3:"mva";a:3:{i:0;s:1:"4";i:1;s:1:"5";i:2;s:1:"6";}s:4:"mva2";a:5:{i:0;s:11:"50000000310";i:1;s:11:"50000000314";i:2;s:11:"50000000332";i:3;s:11:"50000000391";i:4;s:11:"50000000393";}}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:3:"mva";a:3:{i:0;s:1:"1";i:1;s:1:"2";i:2;s:1:"3";}s:4:"mva2";a:5:{i:0;s:11:"50000000410";i:1;s:11:"50000000413";i:2;s:11:"50000000424";i:3;s:11:"50000000442";i:4;s:11:"50000000461";}}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:3:"mva";a:2:{i:0;s:1:"3";i:1;s:1:"5";}s:4:"mva2";a:0:{}}}i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:3:"mva";a:2:{i:0;s:1:"3";i:1;s:1:"5";}s:4:"mva2";a:0:{}}}i:7;a:2:{s:
6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:3:"mva";a:2:{i:0;s:1:"4";i:1;s:1:"5";}s:4:"mva2";a:0:{}}}i:8;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:3:"mva";a:3:{i:0;s:1:"4";i:1;s:1:"5";i:2;s:1:"6";}s:4:"mva2";a:0:{}}}i:9;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:3:"mva";a:1:{i:0;s:1:"4";}s:4:"mva2";a:0:{}}}i:10;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:3:"mva";a:3:{i:0;s:1:"3";i:1;s:1:"4";i:2;s:1:"5";}s:4:"mva2";a:0:{}}}}s:5:"total";s:2:"10";s:11:"total_found";s:2:"10";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:21;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"dummy";}s:5:"attrs";a:3:{s:3:"tag";i:1;s:3:"mva";i:1073741825;s:4:"mva2";i:1073741826;}s:7:"matches";a:10:{i:10;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:3:"mva";a:3:{i:0;s:1:"3";i:1;s:1:"4";i:2;s:1:"5";}s:4:"mva2";a:0:{}}}i:9;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:3:"mva";a:1:{i:0;s:1:"4";}s:4:"mva2";a:0:{}}}i:8;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:3:"mva";a:3:{i:0;s:1:"4";i:1;s:1:"5";i:2;s:1:"6";}s:4:"mva2";a:0:{}}}i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:3:"mva";a:2:{i:0;s:1:"4";i:1;s:1:"5";}s:4:"mva2";a:0:{}}}i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:3:"mva";a:2:{i:0;s:1:"3";i:1;s:1:"5";}s:4:"mva2";a:0:{}}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:3:"mva";a:2:{i:0;s:1:"3";i:1;s:1:"5";}s:4:"mva2";a:0:{}}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:3:"mva";a:3:{i:0;s:1:"1";i:1;s:1:"2";i:2;s:1:"3";}s:4:"mva2";a:5:{i:0;s:11:"50000000410";i:1;s:11:"50000000413";i:2;s:11:"50000000424";i:3;s:11:"50000000442";i:4;s:11:"50000000461";}}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:3:"mva";a:3:{i:0;s:1:"4";i:1;s:1:"5";i:2;s:1:"6";}s:4:"mva2";a:5:{i:0;s:11:"50000000310";i:1;s:11:"50000000314";i:2;s:11:"50000000332";i:3;s:11:"50000000391";i:4;s:11:"50000000393";}}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:3:"mva";a:3:{i:0;s:1:"3";i:1;s:1:"4";i:2;s:1:"5";}s:4:"mva2";a:5:{i:0;s:11:"50000000210";i:1;s:11:"50000000213";i:2;s:11:"50000000221";i:3;s:11:"50000000224";i:4;s:11:"50000000242";}}}i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:2:"11";s:3:"mva";a:3:{i:0;s:1:"1";i:1;s:1:"2";i:2;s:1:"3";}s:4:"mva2";a:5:{i:0;s:11:"50000000110";i:1;s:11:"50000000111";i:2;s:11:"50000000112";i:3;s:11:"50000000113";i:4;s:11:"50000000124";}}}}s:5:"total";s:2:"10";s:11:"total_found";s:2:"10";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:22;a:3:{s:8:"sphinxql";s:19:"SELECT * FROM 
mva64";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:5:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:4:"mva1";s:59:"10000000110,10000000111,10000000112,10000000113,10000000124";s:4:"mva2";s:59:"50000000110,50000000111,50000000112,50000000113,50000000124";}i:1;a:5:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:4:"mva1";s:59:"10000000210,10000000213,10000000221,10000000224,10000000242";s:4:"mva2";s:59:"50000000210,50000000213,50000000221,50000000224,50000000242";}i:2;a:5:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:4:"mva1";s:59:"10000000310,10000000314,10000000332,10000000391,10000000393";s:4:"mva2";s:59:"50000000310,50000000314,50000000332,50000000391,50000000393";}i:3;a:5:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:4:"mva1";s:59:"10000000410,10000000413,10000000424,10000000442,10000000461";s:4:"mva2";s:59:"50000000410,50000000413,50000000424,50000000442,50000000461";}}}i:23;a:3:{s:8:"sphinxql";s:38:"SELECT * FROM mva64 ORDER BY mva1 desc";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:5:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:4:"mva1";s:59:"10000000410,10000000413,10000000424,10000000442,10000000461";s:4:"mva2";s:59:"50000000410,50000000413,50000000424,50000000442,50000000461";}i:1;a:5:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:4:"mva1";s:59:"10000000310,10000000314,10000000332,10000000391,10000000393";s:4:"mva2";s:59:"50000000310,50000000314,50000000332,50000000391,50000000393";}i:2;a:5:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:4:"mva1";s:59:"10000000210,10000000213,10000000221,10000000224,10000000242";s:4:"mva2";s:59:"50000000210,50000000213,50000000221,50000000224,50000000242";}i:3;a:5:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:4:"mva1";s:59:"10000000110,10000000111,10000000112,10000000113,10000000124";s:4:"mva2";s:59:"50000000110,50000000111,50000000112,50000000113,50000000124";}}}i:24;a:3:{s:8:"sphinxql";s:37:"SELECT * FROM mva64 ORDER BY mva2 asc";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:5:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:4:"mva1";s:59:"10000000110,10000000111,10000000112,10000000113,10000000124";s:4:"mva2";s:59:"50000000110,50000000111,50000000112,50000000113,50000000124";}i:1;a:5:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:4:"mva1";s:59:"10000000210,10000000213,10000000221,10000000224,10000000242";s:4:"mva2";s:59:"50000000210,50000000213,50000000221,50000000224,50000000242";}i:2;a:5:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:4:"mva1";s:59:"10000000310,10000000314,10000000332,10000000391,10000000393";s:4:"mva2";s:59:"50000000310,50000000314,50000000332,50000000391,50000000393";}i:3;a:5:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:4:"mva1";s:59:"10000000410,10000000413,10000000424,10000000442,10000000461";s:4:"mva2";s:59:"50000000410,50000000413,50000000424,50000000442,50000000461";}}}i:25;a:3:{s:8:"sphinxql";s:23:"SELECT * FROM 
mva_mixed";s:10:"total_rows";i:10;s:4:"rows";a:10:{i:0;a:5:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:5:"1,2,3";s:4:"mva2";s:59:"50000000110,50000000111,50000000112,50000000113,50000000124";}i:1;a:5:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:5:"3,4,5";s:4:"mva2";s:59:"50000000210,50000000213,50000000221,50000000224,50000000242";}i:2;a:5:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:5:"4,5,6";s:4:"mva2";s:59:"50000000310,50000000314,50000000332,50000000391,50000000393";}i:3;a:5:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:5:"1,2,3";s:4:"mva2";s:59:"50000000410,50000000413,50000000424,50000000442,50000000461";}i:4;a:5:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:3:"3,5";s:4:"mva2";s:0:"";}i:5;a:5:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:3:"3,5";s:4:"mva2";s:0:"";}i:6;a:5:{s:2:"id";s:1:"7";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:3:"4,5";s:4:"mva2";s:0:"";}i:7;a:5:{s:2:"id";s:1:"8";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:5:"4,5,6";s:4:"mva2";s:0:"";}i:8;a:5:{s:2:"id";s:1:"9";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:1:"4";s:4:"mva2";s:0:"";}i:9;a:5:{s:2:"id";s:2:"10";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:5:"3,4,5";s:4:"mva2";s:0:"";}}}i:26;a:3:{s:8:"sphinxql";s:55:"SELECT * FROM mva_mixed GROUP BY mva ORDER BY mva2 desc";s:10:"total_rows";i:6;s:4:"rows";a:6:{i:0;a:7:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:5:"4,5,6";s:4:"mva2";s:59:"50000000310,50000000314,50000000332,50000000391,50000000393";s:8:"@groupby";s:1:"6";s:6:"@count";s:1:"2";}i:1;a:7:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:5:"3,4,5";s:4:"mva2";s:59:"50000000210,50000000213,50000000221,50000000224,50000000242";s:8:"@groupby";s:1:"5";s:6:"@count";s:1:"7";}i:2;a:7:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:5:"3,4,5";s:4:"mva2";s:59:"50000000210,50000000213,50000000221,50000000224,50000000242";s:8:"@groupby";s:1:"4";s:6:"@count";s:1:"6";}i:3;a:7:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:5:"1,2,3";s:4:"mva2";s:59:"50000000110,50000000111,50000000112,50000000113,50000000124";s:8:"@groupby";s:1:"3";s:6:"@count";s:1:"6";}i:4;a:7:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:5:"1,2,3";s:4:"mva2";s:59:"50000000110,50000000111,50000000112,50000000113,50000000124";s:8:"@groupby";s:1:"2";s:6:"@count";s:1:"2";}i:5;a:7:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:5:"1,2,3";s:4:"mva2";s:59:"50000000110,50000000111,50000000112,50000000113,50000000124";s:8:"@groupby";s:1:"1";s:6:"@count";s:1:"2";}}}i:27;a:3:{s:8:"sphinxql";s:42:"SELECT * FROM mva_mixed ORDER BY mva2 
desc";s:10:"total_rows";i:10;s:4:"rows";a:10:{i:0;a:5:{s:2:"id";s:2:"10";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:5:"3,4,5";s:4:"mva2";s:0:"";}i:1;a:5:{s:2:"id";s:1:"9";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:1:"4";s:4:"mva2";s:0:"";}i:2;a:5:{s:2:"id";s:1:"8";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:5:"4,5,6";s:4:"mva2";s:0:"";}i:3;a:5:{s:2:"id";s:1:"7";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:3:"4,5";s:4:"mva2";s:0:"";}i:4;a:5:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:3:"3,5";s:4:"mva2";s:0:"";}i:5;a:5:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:3:"3,5";s:4:"mva2";s:0:"";}i:6;a:5:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:5:"1,2,3";s:4:"mva2";s:59:"50000000410,50000000413,50000000424,50000000442,50000000461";}i:7;a:5:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:5:"4,5,6";s:4:"mva2";s:59:"50000000310,50000000314,50000000332,50000000391,50000000393";}i:8;a:5:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:5:"3,4,5";s:4:"mva2";s:59:"50000000210,50000000213,50000000221,50000000224,50000000242";}i:9;a:5:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"tag";s:2:"11";s:3:"mva";s:5:"1,2,3";s:4:"mva2";s:59:"50000000110,50000000111,50000000112,50000000113,50000000124";}}}}}sphinx-2.0.4-release/test/test_102/0000755000176700017710000000000011724063141016262 5ustar deogardeogarsphinx-2.0.4-release/test/test_102/test.xml0000644000176700017710000000231411503513132017756 0ustar deogardeogar deadlock on threaded exit indexer { mem_limit = 16M } searchd { workers = threads binlog_path = } source srctest { type = mysql sql_query = SELECT id, t_zlib, at_1, t_mysql, plain, at_2, at_3, t_mysql_2 FROM test_table sql_attr_uint = at_1 sql_attr_uint = at_2 sql_attr_uint = at_3 } index test_idx { source = srctest path = /test } CREATE TABLE test_table ( id integer primary key not null auto_increment, t_zlib blob, t_mysql blob, t_mysql_2 blob, plain varchar(256), at_1 int not null default 1, at_2 int not null default 2, at_3 int not null default 3 ) ENGINE=MYISAM DROP TABLE IF EXISTS test_table; INSERT INTO test_table (t_zlib, t_mysql, t_mysql_2, plain) VALUES ( 'zlib', NULL, NULL, 'plain' ), ( NULL, 'mysql', NULL, NULL ), ( 'test', 'hello', 'world', '' ), ( 'malformed', 'broken', NULL, NULL ), ( '', '', '', '' ); zlib mysql hello world plain malformed broken sphinx-2.0.4-release/test/test_102/model.bin0000644000176700017710000000640511325131255020060 0ustar 
deogardeogara:1:{i:0;a:6:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:4:{i:0;s:6:"t_zlib";i:1;s:7:"t_mysql";i:2;s:5:"plain";i:3;s:9:"t_mysql_2";}s:5:"attrs";a:3:{s:4:"at_1";i:1;s:4:"at_2";i:1;s:4:"at_3";i:1;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:4:"at_1";i:1;s:4:"at_2";i:2;s:4:"at_3";i:3;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"zlib";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"zlib";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:4:{i:0;s:6:"t_zlib";i:1;s:7:"t_mysql";i:2;s:5:"plain";i:3;s:9:"t_mysql_2";}s:5:"attrs";a:3:{s:4:"at_1";i:1;s:4:"at_2";i:1;s:4:"at_3";i:1;}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:4:"at_1";i:1;s:4:"at_2";i:2;s:4:"at_3";i:3;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"mysql";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"mysql";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:4:{i:0;s:6:"t_zlib";i:1;s:7:"t_mysql";i:2;s:5:"plain";i:3;s:9:"t_mysql_2";}s:5:"attrs";a:3:{s:4:"at_1";i:1;s:4:"at_2";i:1;s:4:"at_3";i:1;}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:3:{s:4:"at_1";i:1;s:4:"at_2";i:2;s:4:"at_3";i:3;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:11:"hello world";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:4:{i:0;s:6:"t_zlib";i:1;s:7:"t_mysql";i:2;s:5:"plain";i:3;s:9:"t_mysql_2";}s:5:"attrs";a:3:{s:4:"at_1";i:1;s:4:"at_2";i:1;s:4:"at_3";i:1;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:4:"at_1";i:1;s:4:"at_2";i:2;s:4:"at_3";i:3;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"plain";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"plain";}i:4;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:4:{i:0;s:6:"t_zlib";i:1;s:7:"t_mysql";i:2;s:5:"plain";i:3;s:9:"t_mysql_2";}s:5:"attrs";a:3:{s:4:"at_1";i:1;s:4:"at_2";i:1;s:4:"at_3";i:1;}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:4:"at_1";i:1;s:4:"at_2";i:2;s:4:"at_3";i:3;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:9:"malformed";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:9:"malformed";}i:5;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:4:{i:0;s:6:"t_zlib";i:1;s:7:"t_mysql";i:2;s:5:"plain";i:3;s:9:"t_mysql_2";}s:5:"attrs";a:3:{s:4:"at_1";i:1;s:4:"at_2";i:1;s:4:"at_3";i:1;}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:4:"at_1";i:1;s:4:"at_2";i:2;s:4:"at_3";i:3;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"broken";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:6:"broken";}}}sphinx-2.0.4-release/test/test_074/0000755000176700017710000000000011724063141016272 5ustar deogardeogarsphinx-2.0.4-release/test/test_074/test.xml0000644000176700017710000000264411244023256020001 0ustar 
deogardeogar subtree caching (part 4) indexer { mem_limit = 16M } searchd { subtree_docs_cache = 0 subtree_docs_cache = 40 subtree_docs_cache = 10M subtree_hits_cache = 0 subtree_hits_cache = 40 subtree_hits_cache = 10M } source srctest { type = mysql sql_query = SELECT * FROM test_table } index test_idx { source = srctest path = /test charset_type = utf-8 } CREATE TABLE `test_table` ( `id` int(11) NOT NULL default '0', `body` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 1, 'one two three one' ), ( 2, 'one two three one two' ), ( 3, 'one two three one two three' ), ( 4, 'one two three four six' ), ( 5, 'one two three four five seven' ) AddQuery ('one two three four'); $client->AddQuery ('one two three five'); $client->AddQuery ('one two three six'); $client->AddQuery ('one twoo seven'); $results = $client->RunQueries (); for ( $i=0; $i<=3; $i++ ) if ( is_array($results) && is_array($results[$i]) ) unset ( $results[$i]["time"] ); ]]> sphinx-2.0.4-release/test/test_074/model.bin0000644000176700017710000004017011227074350020070 0ustar deogardeogara:9:{i:0;a:1:{i:0;a:4:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:4;a:2:{s:6:"weight";s:1:"4";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"4";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}s:4:"four";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}s:4:"five";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:2;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}s:3:"six";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:3;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:3:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:4:"twoo";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"seven";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}}}i:1;a:1:{i:0;a:4:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:4;a:2:{s:6:"weight";s:1:"4";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"4";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}s:4:"four";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status
";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}s:4:"five";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:2;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}s:3:"six";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:3;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:3:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:4:"twoo";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"seven";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}}}i:2;a:1:{i:0;a:4:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:4;a:2:{s:6:"weight";s:1:"4";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"4";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}s:4:"four";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}s:4:"five";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:2;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}s:3:"six";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:3;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:3:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:4:"twoo";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"seven";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}}}i:3;a:1:{i:0;a:4:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:4;a:2:{s:6:"weight";s:1:"4";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"4";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}s:4:"f
our";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}s:4:"five";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:2;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}s:3:"six";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:3;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:3:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:4:"twoo";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"seven";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}}}i:4;a:1:{i:0;a:4:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:4;a:2:{s:6:"weight";s:1:"4";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"4";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}s:4:"four";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}s:4:"five";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:2;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}s:3:"six";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:3;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:3:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:4:"twoo";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"seven";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}}}i:5;a:1:{i:0;a:4:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:4;a:2:{s:6:"weight";s:1:"4";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"4";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"
two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}s:4:"four";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}s:4:"five";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:2;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}s:3:"six";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:3;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:3:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:4:"twoo";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"seven";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}}}i:6;a:1:{i:0;a:4:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:4;a:2:{s:6:"weight";s:1:"4";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"4";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}s:4:"four";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}s:4:"five";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:2;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}s:3:"six";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:3;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:3:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:4:"twoo";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"seven";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}}}i:7;a:1:{i:0;a:4:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:4;a:2:{s:6:"weight";s:1:"4";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"4";s:5:"attrs";a:0:{}}}s:5:"total";
s:1:"2";s:11:"total_found";s:1:"2";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}s:4:"four";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}s:4:"five";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:2;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}s:3:"six";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:3;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:3:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:4:"twoo";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"seven";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}}}i:8;a:1:{i:0;a:4:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:4;a:2:{s:6:"weight";s:1:"4";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"4";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}s:4:"four";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}s:4:"five";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:2;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}s:3:"six";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:3;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:3:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:4:"twoo";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"seven";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}}}} sphinx-2.0.4-release/test/test_070/0000755000176700017710000000000011724063141016266 5ustar 
deogardeogarsphinx-2.0.4-release/test/test_070/test.xml0000644000176700017710000000263111244023256017771 0ustar deogardeogar subtree caching (part 1) indexer { mem_limit = 16M } searchd { subtree_docs_cache = 0 subtree_docs_cache = 40 subtree_docs_cache = 10M subtree_hits_cache = 0 subtree_hits_cache = 40 subtree_hits_cache = 10M } source srctest { type = mysql sql_query = SELECT * FROM test_table } index test_idx { source = srctest path = /test charset_type = utf-8 } CREATE TABLE `test_table` ( `id` int(11) NOT NULL default '0', `body` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 1, 'one two three one' ), ( 2, 'one two three one two' ), ( 3, 'one two three one two three' ), ( 4, 'one two three four six' ), ( 5, 'one two three four five seven' ) AddQuery ('(one and two) and three'); $client->AddQuery ('one and (two and three)'); $results = $client->RunQueries (); for ( $i=0; $i<=1; $i++ ) if ( is_array($results) && is_array($results[$i]) ) unset ( $results[$i]["time"] ); ]]> sphinx-2.0.4-release/test/test_070/model.bin0000644000176700017710000001637611227074350020077 0ustar deogardeogara:9:{i:0;a:1:{i:0;a:2:{i:0;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"and";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}}}i:1;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"and";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}}}}}i:1;a:1:{i:0;a:2:{i:0;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"and";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}}}i:1;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"and";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}}}}}i:2;a:1:{i:0;a:2:{i:0;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"and";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}}}i:1;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"and";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:3:"two";a:2:{s:4:"docs";s:1:"
5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}}}}}i:3;a:1:{i:0;a:2:{i:0;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"and";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}}}i:1;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"and";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}}}}}i:4;a:1:{i:0;a:2:{i:0;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"and";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}}}i:1;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"and";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}}}}}i:5;a:1:{i:0;a:2:{i:0;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"and";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}}}i:1;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"and";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}}}}}i:6;a:1:{i:0;a:2:{i:0;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"and";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}}}i:1;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"and";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}}}}}i:7;a:1:{i:0;a:2:{i:0;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s
:1:"0";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"and";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}}}i:1;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"and";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}}}}}i:8;a:1:{i:0;a:2:{i:0;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"and";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}}}i:1;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"and";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}}}}}}sphinx-2.0.4-release/test/test_065/0000755000176700017710000000000011724063141016272 5ustar deogardeogarsphinx-2.0.4-release/test/test_065/test.xml0000644000176700017710000000264011323711623017775 0ustar deogardeogar payload indexing indexer { mem_limit = 16M } searchd { } source test { type = mysql sql_query = select * from sph_test; sql_joined_field = tag from payload-query; select * from sph_test_tags; } index test { source = test path = /test charset_table = 0..9, A..Z->a..z, _, a..z } create table sph_test ( id int, text varchar(255) ); drop table if exists sph_test create table sph_test_tags ( id int, tag varchar(255), weight int ); drop table if exists sph_test_tags insert into sph_test (id, text) values ( 1, 'aaa' ), ( 2, 'aaa bbb' ), ( 3, 'bbb ccc' ); insert into sph_test_tags values ( 1, 'tag_1_a tag_1_b', 100 ), ( 1, 'tag_x', 200 ), ( 2, 'tag_2', 300 ), ( 2, 'tag_x', 400 ); tag_1_a tag_1_a aaa tag_1_b tag_1_a tag_1_b tag_x tag_1_a tag_x tag_2 sphinx-2.0.4-release/test/test_065/model.bin0000644000176700017710000000615111205556051020070 0ustar deogardeogara:1:{i:0;a:7:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:4:"text";i:1;s:3:"tag";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:6:"100680";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.065";s:5:"words";a:1:{s:7:"tag_1_a";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"tag_1_a";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:4:"text";i:1;s:3:"tag";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:6:"101590";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:7:"tag_1_a";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"aaa";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:11:"tag_1_a 
aaa";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:4:"text";i:1;s:3:"tag";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:6:"100680";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"tag_1_b";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"tag_1_b";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:4:"text";i:1;s:3:"tag";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:6:"200680";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:7:"tag_1_a";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:7:"tag_1_b";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:15:"tag_1_a tag_1_b";}i:4;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:4:"text";i:1;s:3:"tag";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:2;a:2:{s:6:"weight";s:6:"400500";s:5:"attrs";a:0:{}}i:1;a:2:{s:6:"weight";s:6:"200500";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"tag_x";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"tag_x";}i:5;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:4:"text";i:1;s:3:"tag";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:6:"300590";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:7:"tag_1_a";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"tag_x";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:13:"tag_1_a tag_x";}i:6;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:4:"text";i:1;s:3:"tag";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:6:"300680";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"tag_2";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"tag_2";}}}sphinx-2.0.4-release/test/test_159/0000755000176700017710000000000011724063141016276 5ustar deogardeogarsphinx-2.0.4-release/test/test_159/test.xml0000644000176700017710000000753711641335771020024 0ustar deogardeogar sorting vs string indexer { mem_limit = 16M } searchd { workers = threads } source src { type = mysql sql_query = SELECT id, idd1, str1, body FROM test_table sql_attr_uint = idd1 sql_attr_string = str1 } index plain { source = src docinfo = extern charset_type = sbcs path = /plain } index rt { type = rt docinfo = extern charset_type = sbcs path = /rt rt_attr_uint = IDD1 rt_attr_uint = iDD2 rt_attr_string = Str1 rt_attr_string = Str2 rt_field = body rt_mem_limit = 128K } index dist { type = distributed local = plain local = rt } CREATE TABLE `test_table` ( `id` int(11) NOT NULL default '0', `idd1` int(11) NOT NULL default '0', `idd2` int(11) NOT NULL default '0', `str1` varchar(1024) NOT NULL default '', `str2` varchar(1024) NOT NULL default '', `body` varchar(1024) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 1, 1, 9, 'aaa', 'h', 'the dog' ), ( 2, 11, 8, 'c cc', 'c', 'the cat' ), ( 3, 13, 7, 'a a', 'cc', 'the bird' ), ( 4, 11, 6, 'cc', 'cc', 'cat eats bird' ), ( 5, 3, 5, 'cc', 'c', 'dog eats cat' ), ( 
6, 11, 4, 'ddd', 'hh', 'bird' ), ( 7, 5, 4, 'a', 'cc', 'the' ), ( 8, 6, 4, '', 'cc', 'the' ), ( 9, 7, 4, 'a', '', 'the' ) insert into rt (id, Idd1, idd2, str1, str2, body) values ( 10, 5, 4, '', 'cc', 'the' ) insert into rt (id, idd1, IDD2, str1, str2, body) values ( 11, 5, 4, 'a', '', 'the' ) insert into rt (id, idd1, idd2, Str1, str2, body) values ( 12, 6, 4, 'a', 'cc', 'the' ) insert into rt (id, idd1, idd2, str1, STR2, body) values ( 13, 4, 4, 'ddd', 'hh', 'bird' ) insert into rt (id, idd1, idd2, str1, str2, Body) values ( 14, 3, 5, 'cc', 'c', 'dog eats cat' ) insert into rt (id, idd1, idd2, str1, str2, BODY) values ( 15, 2, 6, 'cc', 'cc', 'cat eats bird' ) insert into rt (id, idd1, idd2, str1, sTr2, body) values ( 16, 1, 7, 'a a', 'cc', 'the bird' ) insert into rt (id, idd1, idd2, stR1, str2, body) values ( 17, 1, 8, 'c cc', 'c', 'the cat' ) insert into rt (id, idd1, iDD2, str1, str2, body) values ( 18, 11, 9, 'aaa', 'h', 'the dog' ) select * from plain order by Str1 desc select * from rt order by sTR1 desc select * from rt order by STR2 asc, Str1 desc select * from dist order by Str1 desc select * from dist order by Str1 desc, IDD1 asc select * from dist where match ('the') order by Str1 desc select * from dist where match ('the') order by Str1 desc, iDD1 asc select * from dist where match ('cat | eats') order by Str1 asc select * from dist where match ('cat | eats') order by Str1 asc, sTr2 desc select * from dist where match ('cat | eats') order by Str1 asc, Idd1 asc select * from dist where match ('cat | eats') group by str1 within group order by id desc select * from dist group by str2 within group order by id desc select id, str1 from rt group by idd1 order by id asc limit 0,2 option max_matches=2 select id, str1 from rt group by idd1 order by id asc limit 2,2 option max_matches=4 sphinx-2.0.4-release/test/test_159/model.bin0000644000176700017710000003454111641335771020110 0ustar deogardeogara:1:{i:0;a:23:{i:0;a:2:{s:8:"sphinxql";s:86:"insert into rt (id, Idd1, idd2, str1, str2, body) values ( 10, 5, 4, '', 'cc', 'the' )";s:14:"total_affected";i:1;}i:1;a:2:{s:8:"sphinxql";s:85:"insert into rt (id, idd1, IDD2, str1, str2, body) values ( 11, 5, 4, 'a', '', 'the' )";s:14:"total_affected";i:1;}i:2;a:2:{s:8:"sphinxql";s:87:"insert into rt (id, idd1, idd2, Str1, str2, body) values ( 12, 6, 4, 'a', 'cc', 'the' )";s:14:"total_affected";i:1;}i:3;a:2:{s:8:"sphinxql";s:90:"insert into rt (id, idd1, idd2, str1, STR2, body) values ( 13, 4, 4, 'ddd', 'hh', 'bird' )";s:14:"total_affected";i:1;}i:4;a:2:{s:8:"sphinxql";s:96:"insert into rt (id, idd1, idd2, str1, str2, Body) values ( 14, 3, 5, 'cc', 'c', 'dog eats cat' )";s:14:"total_affected";i:1;}i:5;a:2:{s:8:"sphinxql";s:98:"insert into rt (id, idd1, idd2, str1, str2, BODY) values ( 15, 2, 6, 'cc', 'cc', 'cat eats bird' )";s:14:"total_affected";i:1;}i:6;a:2:{s:8:"sphinxql";s:94:"insert into rt (id, idd1, idd2, str1, sTr2, body) values ( 16, 1, 7, 'a a', 'cc', 'the bird' )";s:14:"total_affected";i:1;}i:7;a:2:{s:8:"sphinxql";s:93:"insert into rt (id, idd1, idd2, stR1, str2, body) values ( 17, 1, 8, 'c cc', 'c', 'the cat' )";s:14:"total_affected";i:1;}i:8;a:2:{s:8:"sphinxql";s:93:"insert into rt (id, idd1, iDD2, str1, str2, body) values ( 18, 11, 9, 'aaa', 'h', 'the dog' )";s:14:"total_affected";i:1;}i:9;a:3:{s:8:"sphinxql";s:38:"select * from plain order by Str1 
desc";s:10:"total_rows";i:9;s:4:"rows";a:9:{i:0;a:4:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"11";s:4:"str1";s:3:"ddd";}i:1;a:4:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"11";s:4:"str1";s:2:"cc";}i:2;a:4:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"3";s:4:"str1";s:2:"cc";}i:3;a:4:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"11";s:4:"str1";s:4:"c cc";}i:4;a:4:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"1";s:4:"str1";s:3:"aaa";}i:5;a:4:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"13";s:4:"str1";s:3:"a a";}i:6;a:4:{s:2:"id";s:1:"7";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"5";s:4:"str1";s:1:"a";}i:7;a:4:{s:2:"id";s:1:"9";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"7";s:4:"str1";s:1:"a";}i:8;a:4:{s:2:"id";s:1:"8";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"6";s:4:"str1";s:0:"";}}}i:10;a:3:{s:8:"sphinxql";s:35:"select * from rt order by sTR1 desc";s:10:"total_rows";i:9;s:4:"rows";a:9:{i:0;a:6:{s:2:"id";s:2:"13";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"4";s:4:"idd2";s:1:"4";s:4:"str1";s:3:"ddd";s:4:"str2";s:2:"hh";}i:1;a:6:{s:2:"id";s:2:"14";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"3";s:4:"idd2";s:1:"5";s:4:"str1";s:2:"cc";s:4:"str2";s:1:"c";}i:2;a:6:{s:2:"id";s:2:"15";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"2";s:4:"idd2";s:1:"6";s:4:"str1";s:2:"cc";s:4:"str2";s:2:"cc";}i:3;a:6:{s:2:"id";s:2:"17";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"1";s:4:"idd2";s:1:"8";s:4:"str1";s:4:"c cc";s:4:"str2";s:1:"c";}i:4;a:6:{s:2:"id";s:2:"18";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"11";s:4:"idd2";s:1:"9";s:4:"str1";s:3:"aaa";s:4:"str2";s:1:"h";}i:5;a:6:{s:2:"id";s:2:"16";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"1";s:4:"idd2";s:1:"7";s:4:"str1";s:3:"a a";s:4:"str2";s:2:"cc";}i:6;a:6:{s:2:"id";s:2:"11";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"5";s:4:"idd2";s:1:"4";s:4:"str1";s:1:"a";s:4:"str2";s:0:"";}i:7;a:6:{s:2:"id";s:2:"12";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"6";s:4:"idd2";s:1:"4";s:4:"str1";s:1:"a";s:4:"str2";s:2:"cc";}i:8;a:6:{s:2:"id";s:2:"10";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"5";s:4:"idd2";s:1:"4";s:4:"str1";s:0:"";s:4:"str2";s:2:"cc";}}}i:11;a:3:{s:8:"sphinxql";s:45:"select * from rt order by STR2 asc, Str1 desc";s:10:"total_rows";i:9;s:4:"rows";a:9:{i:0;a:6:{s:2:"id";s:2:"11";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"5";s:4:"idd2";s:1:"4";s:4:"str1";s:1:"a";s:4:"str2";s:0:"";}i:1;a:6:{s:2:"id";s:2:"14";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"3";s:4:"idd2";s:1:"5";s:4:"str1";s:2:"cc";s:4:"str2";s:1:"c";}i:2;a:6:{s:2:"id";s:2:"17";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"1";s:4:"idd2";s:1:"8";s:4:"str1";s:4:"c cc";s:4:"str2";s:1:"c";}i:3;a:6:{s:2:"id";s:2:"15";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"2";s:4:"idd2";s:1:"6";s:4:"str1";s:2:"cc";s:4:"str2";s:2:"cc";}i:4;a:6:{s:2:"id";s:2:"16";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"1";s:4:"idd2";s:1:"7";s:4:"str1";s:3:"a a";s:4:"str2";s:2:"cc";}i:5;a:6:{s:2:"id";s:2:"12";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"6";s:4:"idd2";s:1:"4";s:4:"str1";s:1:"a";s:4:"str2";s:2:"cc";}i:6;a:6:{s:2:"id";s:2:"10";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"5";s:4:"idd2";s:1:"4";s:4:"str1";s:0:"";s:4:"str2";s:2:"cc";}i:7;a:6:{s:2:"id";s:2:"18";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"11";s:4:"idd2";s:1:"9";s:4:"str1";s:3:"aaa";s:4:"str2";s:1:"h";}i:8;a:6:{s:2:"id";s:2:"13";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"4";s:4:"idd2";s:1:"4";s:4:"str1";s:3:"ddd";s:4:"str2";s:2:"hh";}}}i:12;a:3:{s:8:"sphinxql";s:37:"select * from dist order by Str1 
desc";s:10:"total_rows";i:18;s:4:"rows";a:18:{i:0;a:4:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"11";s:4:"str1";s:3:"ddd";}i:1;a:4:{s:2:"id";s:2:"13";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"4";s:4:"str1";s:3:"ddd";}i:2;a:4:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"11";s:4:"str1";s:2:"cc";}i:3;a:4:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"3";s:4:"str1";s:2:"cc";}i:4;a:4:{s:2:"id";s:2:"14";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"3";s:4:"str1";s:2:"cc";}i:5;a:4:{s:2:"id";s:2:"15";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"2";s:4:"str1";s:2:"cc";}i:6;a:4:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"11";s:4:"str1";s:4:"c cc";}i:7;a:4:{s:2:"id";s:2:"17";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"1";s:4:"str1";s:4:"c cc";}i:8;a:4:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"1";s:4:"str1";s:3:"aaa";}i:9;a:4:{s:2:"id";s:2:"18";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"11";s:4:"str1";s:3:"aaa";}i:10;a:4:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"13";s:4:"str1";s:3:"a a";}i:11;a:4:{s:2:"id";s:2:"16";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"1";s:4:"str1";s:3:"a a";}i:12;a:4:{s:2:"id";s:1:"7";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"5";s:4:"str1";s:1:"a";}i:13;a:4:{s:2:"id";s:1:"9";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"7";s:4:"str1";s:1:"a";}i:14;a:4:{s:2:"id";s:2:"11";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"5";s:4:"str1";s:1:"a";}i:15;a:4:{s:2:"id";s:2:"12";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"6";s:4:"str1";s:1:"a";}i:16;a:4:{s:2:"id";s:1:"8";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"6";s:4:"str1";s:0:"";}i:17;a:4:{s:2:"id";s:2:"10";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"5";s:4:"str1";s:0:"";}}}i:13;a:3:{s:8:"sphinxql";s:47:"select * from dist order by Str1 desc, IDD1 asc";s:10:"total_rows";i:18;s:4:"rows";a:18:{i:0;a:4:{s:2:"id";s:2:"13";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"4";s:4:"str1";s:3:"ddd";}i:1;a:4:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"11";s:4:"str1";s:3:"ddd";}i:2;a:4:{s:2:"id";s:2:"15";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"2";s:4:"str1";s:2:"cc";}i:3;a:4:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"3";s:4:"str1";s:2:"cc";}i:4;a:4:{s:2:"id";s:2:"14";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"3";s:4:"str1";s:2:"cc";}i:5;a:4:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"11";s:4:"str1";s:2:"cc";}i:6;a:4:{s:2:"id";s:2:"17";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"1";s:4:"str1";s:4:"c cc";}i:7;a:4:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"11";s:4:"str1";s:4:"c cc";}i:8;a:4:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"1";s:4:"str1";s:3:"aaa";}i:9;a:4:{s:2:"id";s:2:"18";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"11";s:4:"str1";s:3:"aaa";}i:10;a:4:{s:2:"id";s:2:"16";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"1";s:4:"str1";s:3:"a a";}i:11;a:4:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"13";s:4:"str1";s:3:"a a";}i:12;a:4:{s:2:"id";s:2:"11";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"5";s:4:"str1";s:1:"a";}i:13;a:4:{s:2:"id";s:1:"7";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"5";s:4:"str1";s:1:"a";}i:14;a:4:{s:2:"id";s:2:"12";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"6";s:4:"str1";s:1:"a";}i:15;a:4:{s:2:"id";s:1:"9";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"7";s:4:"str1";s:1:"a";}i:16;a:4:{s:2:"id";s:2:"10";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"5";s:4:"str1";s:0:"";}i:17;a:4:{s:2:"id";s:1:"8";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"6";s:4:"str1";s:0:"";}}}i:14;a:3:{s:8:"sphinxql";s:57:"select * from dist where match ('the') order by Str1 
desc";s:10:"total_rows";i:12;s:4:"rows";a:12:{i:0;a:4:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1459";s:4:"idd1";s:2:"11";s:4:"str1";s:4:"c cc";}i:1;a:4:{s:2:"id";s:2:"17";s:6:"weight";s:4:"1459";s:4:"idd1";s:1:"1";s:4:"str1";s:4:"c cc";}i:2;a:4:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1459";s:4:"idd1";s:1:"1";s:4:"str1";s:3:"aaa";}i:3;a:4:{s:2:"id";s:2:"18";s:6:"weight";s:4:"1459";s:4:"idd1";s:2:"11";s:4:"str1";s:3:"aaa";}i:4;a:4:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1459";s:4:"idd1";s:2:"13";s:4:"str1";s:3:"a a";}i:5;a:4:{s:2:"id";s:2:"16";s:6:"weight";s:4:"1459";s:4:"idd1";s:1:"1";s:4:"str1";s:3:"a a";}i:6;a:4:{s:2:"id";s:1:"7";s:6:"weight";s:4:"1459";s:4:"idd1";s:1:"5";s:4:"str1";s:1:"a";}i:7;a:4:{s:2:"id";s:1:"9";s:6:"weight";s:4:"1459";s:4:"idd1";s:1:"7";s:4:"str1";s:1:"a";}i:8;a:4:{s:2:"id";s:2:"11";s:6:"weight";s:4:"1459";s:4:"idd1";s:1:"5";s:4:"str1";s:1:"a";}i:9;a:4:{s:2:"id";s:2:"12";s:6:"weight";s:4:"1459";s:4:"idd1";s:1:"6";s:4:"str1";s:1:"a";}i:10;a:4:{s:2:"id";s:1:"8";s:6:"weight";s:4:"1459";s:4:"idd1";s:1:"6";s:4:"str1";s:0:"";}i:11;a:4:{s:2:"id";s:2:"10";s:6:"weight";s:4:"1459";s:4:"idd1";s:1:"5";s:4:"str1";s:0:"";}}}i:15;a:3:{s:8:"sphinxql";s:67:"select * from dist where match ('the') order by Str1 desc, iDD1 asc";s:10:"total_rows";i:12;s:4:"rows";a:12:{i:0;a:4:{s:2:"id";s:2:"17";s:6:"weight";s:4:"1459";s:4:"idd1";s:1:"1";s:4:"str1";s:4:"c cc";}i:1;a:4:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1459";s:4:"idd1";s:2:"11";s:4:"str1";s:4:"c cc";}i:2;a:4:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1459";s:4:"idd1";s:1:"1";s:4:"str1";s:3:"aaa";}i:3;a:4:{s:2:"id";s:2:"18";s:6:"weight";s:4:"1459";s:4:"idd1";s:2:"11";s:4:"str1";s:3:"aaa";}i:4;a:4:{s:2:"id";s:2:"16";s:6:"weight";s:4:"1459";s:4:"idd1";s:1:"1";s:4:"str1";s:3:"a a";}i:5;a:4:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1459";s:4:"idd1";s:2:"13";s:4:"str1";s:3:"a a";}i:6;a:4:{s:2:"id";s:2:"11";s:6:"weight";s:4:"1459";s:4:"idd1";s:1:"5";s:4:"str1";s:1:"a";}i:7;a:4:{s:2:"id";s:1:"7";s:6:"weight";s:4:"1459";s:4:"idd1";s:1:"5";s:4:"str1";s:1:"a";}i:8;a:4:{s:2:"id";s:2:"12";s:6:"weight";s:4:"1459";s:4:"idd1";s:1:"6";s:4:"str1";s:1:"a";}i:9;a:4:{s:2:"id";s:1:"9";s:6:"weight";s:4:"1459";s:4:"idd1";s:1:"7";s:4:"str1";s:1:"a";}i:10;a:4:{s:2:"id";s:2:"10";s:6:"weight";s:4:"1459";s:4:"idd1";s:1:"5";s:4:"str1";s:0:"";}i:11;a:4:{s:2:"id";s:1:"8";s:6:"weight";s:4:"1459";s:4:"idd1";s:1:"6";s:4:"str1";s:0:"";}}}i:16;a:3:{s:8:"sphinxql";s:63:"select * from dist where match ('cat | eats') order by Str1 asc";s:10:"total_rows";i:6;s:4:"rows";a:6:{i:0;a:4:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1541";s:4:"idd1";s:2:"11";s:4:"str1";s:4:"c cc";}i:1;a:4:{s:2:"id";s:2:"17";s:6:"weight";s:4:"1541";s:4:"idd1";s:1:"1";s:4:"str1";s:4:"c cc";}i:2;a:4:{s:2:"id";s:1:"4";s:6:"weight";s:4:"2610";s:4:"idd1";s:2:"11";s:4:"str1";s:2:"cc";}i:3;a:4:{s:2:"id";s:1:"5";s:6:"weight";s:4:"1610";s:4:"idd1";s:1:"3";s:4:"str1";s:2:"cc";}i:4;a:4:{s:2:"id";s:2:"14";s:6:"weight";s:4:"1610";s:4:"idd1";s:1:"3";s:4:"str1";s:2:"cc";}i:5;a:4:{s:2:"id";s:2:"15";s:6:"weight";s:4:"2610";s:4:"idd1";s:1:"2";s:4:"str1";s:2:"cc";}}}i:17;a:3:{s:8:"sphinxql";s:74:"select * from dist where match ('cat | eats') order by Str1 asc, sTr2 desc";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:6:{s:2:"id";s:2:"17";s:6:"weight";s:4:"1541";s:4:"idd1";s:1:"1";s:4:"idd2";s:1:"8";s:4:"str1";s:4:"c 
cc";s:4:"str2";s:1:"c";}i:1;a:6:{s:2:"id";s:2:"15";s:6:"weight";s:4:"2610";s:4:"idd1";s:1:"2";s:4:"idd2";s:1:"6";s:4:"str1";s:2:"cc";s:4:"str2";s:2:"cc";}i:2;a:6:{s:2:"id";s:2:"14";s:6:"weight";s:4:"1610";s:4:"idd1";s:1:"3";s:4:"idd2";s:1:"5";s:4:"str1";s:2:"cc";s:4:"str2";s:1:"c";}}}i:18;a:3:{s:8:"sphinxql";s:73:"select * from dist where match ('cat | eats') order by Str1 asc, Idd1 asc";s:10:"total_rows";i:6;s:4:"rows";a:6:{i:0;a:4:{s:2:"id";s:2:"17";s:6:"weight";s:4:"1541";s:4:"idd1";s:1:"1";s:4:"str1";s:4:"c cc";}i:1;a:4:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1541";s:4:"idd1";s:2:"11";s:4:"str1";s:4:"c cc";}i:2;a:4:{s:2:"id";s:2:"15";s:6:"weight";s:4:"2610";s:4:"idd1";s:1:"2";s:4:"str1";s:2:"cc";}i:3;a:4:{s:2:"id";s:1:"5";s:6:"weight";s:4:"1610";s:4:"idd1";s:1:"3";s:4:"str1";s:2:"cc";}i:4;a:4:{s:2:"id";s:2:"14";s:6:"weight";s:4:"1610";s:4:"idd1";s:1:"3";s:4:"str1";s:2:"cc";}i:5;a:4:{s:2:"id";s:1:"4";s:6:"weight";s:4:"2610";s:4:"idd1";s:2:"11";s:4:"str1";s:2:"cc";}}}i:19;a:3:{s:8:"sphinxql";s:89:"select * from dist where match ('cat | eats') group by str1 within group order by id desc";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:6:{s:2:"id";s:2:"15";s:6:"weight";s:4:"2610";s:4:"idd1";s:1:"2";s:4:"str1";s:2:"cc";s:8:"@groupby";s:18:"465688662386187669";s:6:"@count";s:1:"4";}i:1;a:6:{s:2:"id";s:2:"17";s:6:"weight";s:4:"1541";s:4:"idd1";s:1:"1";s:4:"str1";s:4:"c cc";s:8:"@groupby";s:20:"-1004237807469493610";s:6:"@count";s:1:"2";}}}i:20;a:3:{s:8:"sphinxql";s:63:"select * from dist group by str2 within group order by id desc";s:10:"total_rows";i:5;s:4:"rows";a:5:{i:0;a:8:{s:2:"id";s:2:"11";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"5";s:4:"idd2";s:1:"4";s:4:"str1";s:1:"a";s:4:"str2";s:0:"";s:8:"@groupby";s:1:"0";s:6:"@count";s:1:"1";}i:1;a:8:{s:2:"id";s:2:"13";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"4";s:4:"idd2";s:1:"4";s:4:"str1";s:3:"ddd";s:4:"str2";s:2:"hh";s:8:"@groupby";s:19:"9088884694429638853";s:6:"@count";s:1:"1";}i:2;a:8:{s:2:"id";s:2:"16";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"1";s:4:"idd2";s:1:"7";s:4:"str1";s:3:"a a";s:4:"str2";s:2:"cc";s:8:"@groupby";s:18:"465688662386187669";s:6:"@count";s:1:"4";}i:3;a:8:{s:2:"id";s:2:"17";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"1";s:4:"idd2";s:1:"8";s:4:"str1";s:4:"c cc";s:4:"str2";s:1:"c";s:8:"@groupby";s:20:"-1408039457890016602";s:6:"@count";s:1:"2";}i:4;a:8:{s:2:"id";s:2:"18";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"11";s:4:"idd2";s:1:"9";s:4:"str1";s:3:"aaa";s:4:"str2";s:1:"h";s:8:"@groupby";s:19:"5498991683371764381";s:6:"@count";s:1:"1";}}}i:21;a:3:{s:8:"sphinxql";s:84:"select id, str1 from rt group by idd1 order by id asc limit 0,2 option max_matches=2";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:3:{s:2:"id";s:2:"10";s:6:"weight";s:1:"1";s:4:"str1";s:0:"";}i:1;a:3:{s:2:"id";s:2:"12";s:6:"weight";s:1:"1";s:4:"str1";s:1:"a";}}}i:22;a:3:{s:8:"sphinxql";s:84:"select id, str1 from rt group by idd1 order by id asc limit 2,2 option max_matches=4";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:3:{s:2:"id";s:2:"13";s:6:"weight";s:1:"1";s:4:"str1";s:3:"ddd";}i:1;a:3:{s:2:"id";s:2:"14";s:6:"weight";s:1:"1";s:4:"str1";s:2:"cc";}}}}}sphinx-2.0.4-release/test/test_005/0000755000176700017710000000000011724063141016264 5ustar deogardeogarsphinx-2.0.4-release/test/test_005/test.xml0000644000176700017710000000400310744717627020002 0ustar deogardeogar prefix_fields/infix_fields (part 2) indexer { mem_limit = 16M } searchd { } source srclj { type = mysql sql_query = SELECT id, subject, body, author FROM test_table } index lj { source = srclj path = /lj charset_type = utf-8 
min_word_len = 0 enable_star = 1 min_infix_len = 3 min_prefix_len = 0 prefix_fields = prefix_fields = subject prefix_fields = body prefix_fields = body, author infix_fields = infix_fields = SUBJECT infix_fields = body infix_fields = subject, AUTHOR } enab* grea* Mak* *ble* *thor* *oda* CREATE TABLE `test_table` ( `id` int(11) NOT NULL default '0', `document_id` int(5) NOT NULL default '0', `subject` varchar(255) NOT NULL default '', `body` varchar(255) NOT NULL default '', `author` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES (1,1,'Problem with enable_star searches','Having star searches is great! Just what we needed','Maurice Makaay'), (2,2,'Problem with enable_star searches',' But the thing is, that I cannot search for authors anymore','admin'), (3,3,'Problem with enable_star searches','I will try to repro it here and update you','shodan') sphinx-2.0.4-release/test/test_005/model.bin0000644000176700017710000011102510725372550020065 0ustar deogardeogara:16:{i:0;a:6:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"enab*";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:5:"query";s:5:"enab*";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"grea*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"grea*";}i:2;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"mak*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"Mak*";}i:3;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*ble*";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"6";}}s:5:"query";s:5:"*ble*";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"*thor*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"*thor*";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*oda*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:
"1";}}s:5:"query";s:5:"*oda*";}}i:1;a:6:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"enab*";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:5:"query";s:5:"enab*";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"grea*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"grea*";}i:2;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"mak*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"Mak*";}i:3;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*ble*";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"6";}}s:5:"query";s:5:"*ble*";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"*thor*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"*thor*";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*oda*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"*oda*";}}i:2;a:6:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"enab*";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:5:"query";s:5:"enab*";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"grea*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"grea*";}i:2;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fi
elds";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"mak*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"Mak*";}i:3;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*ble*";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"6";}}s:5:"query";s:5:"*ble*";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"*thor*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"*thor*";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"*oda*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"*oda*";}}i:3;a:6:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"enab*";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:5:"query";s:5:"enab*";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"grea*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"grea*";}i:2;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"mak*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"Mak*";}i:3;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*ble*";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"6";}}s:5:"query";s:5:"*ble*";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weig
ht";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"*thor*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"*thor*";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*oda*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"*oda*";}}i:4;a:6:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"enab*";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:5:"query";s:5:"enab*";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"grea*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"grea*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"mak*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"Mak*";}i:3;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*ble*";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"6";}}s:5:"query";s:5:"*ble*";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"*thor*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*thor*";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*oda*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*oda*";}}i:5;a:6:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"enab*";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:5:"query";s:5:"enab*";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11
:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"grea*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"grea*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"mak*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"Mak*";}i:3;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*ble*";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"6";}}s:5:"query";s:5:"*ble*";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"*thor*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*thor*";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*oda*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*oda*";}}i:6;a:6:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"enab*";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:5:"query";s:5:"enab*";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"grea*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"grea*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"mak*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"Mak*";}i:3;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*ble*";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"6";}}s:5:"query";s:5:"*ble*";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"*thor*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*
thor*";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*oda*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*oda*";}}i:7;a:6:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"enab*";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:5:"query";s:5:"enab*";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"grea*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"grea*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"mak*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"Mak*";}i:3;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*ble*";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"6";}}s:5:"query";s:5:"*ble*";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"*thor*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*thor*";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*oda*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*oda*";}}i:8;a:6:{i:0;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"enab*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"enab*";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"grea*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"grea*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"
0.000";s:5:"words";a:1:{s:4:"mak*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"Mak*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*ble*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*ble*";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"*thor*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"*thor*";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*oda*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*oda*";}}i:9;a:6:{i:0;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"enab*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"enab*";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"grea*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"grea*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"mak*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"Mak*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*ble*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*ble*";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"*thor*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"*thor*";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*oda*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*oda*";}}i:10;a:6:{i:0;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:5:"enab*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:
"enab*";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"grea*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"grea*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"mak*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"Mak*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*ble*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*ble*";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:6:"*thor*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"*thor*";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*oda*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*oda*";}}i:11;a:6:{i:0;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:5:"enab*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"enab*";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"grea*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"grea*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"mak*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"Mak*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*ble*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*ble*";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"*thor*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"*thor*";}i:5;a:10:{s:5:"error";s:0:"";
s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*oda*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*oda*";}}i:12;a:6:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"enab*";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:5:"query";s:5:"enab*";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"grea*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"grea*";}i:2;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"mak*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"Mak*";}i:3;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*ble*";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"6";}}s:5:"query";s:5:"*ble*";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"*thor*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*thor*";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*oda*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"*oda*";}}i:13;a:6:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"enab*";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:5:"query";s:5:"enab*";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"grea*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"grea*";}i:2;a:11:{s:5:"e
rror";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"mak*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"Mak*";}i:3;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"*ble*";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"6";}}s:5:"query";s:5:"*ble*";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"*thor*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*thor*";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*oda*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"*oda*";}}i:14;a:6:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"enab*";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:5:"query";s:5:"enab*";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"grea*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"grea*";}i:2;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"mak*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"Mak*";}i:3;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*ble*";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"6";}}s:5:"query";s:5:"*ble*";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"*thor*";a:2:{s:4:"docs";
s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*thor*";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*oda*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"*oda*";}}i:15;a:6:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"enab*";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:5:"query";s:5:"enab*";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"grea*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"grea*";}i:2;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"mak*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"Mak*";}i:3;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*ble*";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"6";}}s:5:"query";s:5:"*ble*";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"*thor*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*thor*";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*oda*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"*oda*";}}}sphinx-2.0.4-release/test/test_160/0000755000176700017710000000000011724063141016266 5ustar deogardeogarsphinx-2.0.4-release/test/test_160/multiwordorms.txt0000644000176700017710000000020711503667117021765 0ustar deogardeogarfolded many > wrapped shortform multi > SomeWhatMoreLongFormThatWouldOverflowShortFormBuffer antediluvian clumsy shandrydan > fordt sphinx-2.0.4-release/test/test_160/test.xml0000644000176700017710000001352311703766306020005 0ustar deogardeogar snippets vs SPZ searchd { } source test { type = mysql sql_query = SELECT 1, 'text'; } index test { source = test path = /test charset_type = utf-8 html_strip = 1 index_sp = 1 index_zones = zone* } index multiform 
{ source = test path = /multiform charset_type = utf-8 wordforms = test_160/multiwordorms.txt } select 1; 'zone', 'limit'=>0 ); $docs = array(' these words and these to are cool '); $pass = 0; $results[] = '[pass] (query/result=q/r) (fast path=1/0) (query_mode=1/0)'; foreach ( array(0, 1) as $qmode ) { $opts['query_mode'] = $qmode; foreach ( $data as $q ) { $opts['limit_passages'] = 10; $results["[$pass]q"] = $q; $res = $client->BuildExcerpts($docs, 'test', $q, $opts ); $results["[$pass]r 0 $qmode"] = ( count($res)>0 ? $res[0] : ''); $opts['limit_passages'] = 0; $res = $client->BuildExcerpts($docs, 'test', $q, $opts ); $results["[$pass]r 1 $qmode"] = ( count($res)>0 ? $res[0] : '' ); $pass += 1; } } $results[] = $client->BuildExcerpts(array('small doc', 'one more'), 'multiform', 'crash on tokenizer from another doc', array ('query_mode'=>0) ); // another regression - zone transformed by stripper in result set $opts = array ( 'query_mode'=>1, 'limit_words'=>6 ); $opts['html_strip_mode'] = 'index'; $res = $client->BuildExcerpts($docs, 'test', 'these | are', $opts); $res['html_strip_mode'] = $opts['html_strip_mode']; $results[] = $res; $opts['html_strip_mode'] = 'strip'; $res= $client->BuildExcerpts($docs, 'test', 'these | are', $opts); $res['html_strip_mode'] = $opts['html_strip_mode']; $results[] = $res; $opts['html_strip_mode'] = 'index'; $opts['passage_boundary'] = 'zone'; $res = $client->BuildExcerpts($docs, 'test', 'these | are', $opts); $res['html_strip_mode'] = $opts['html_strip_mode']; $res['passage_boundary'] = $opts['passage_boundary']; $results[] = $res; $opts['html_strip_mode'] = 'strip'; $res= $client->BuildExcerpts($docs, 'test', 'these', $opts); $res['html_strip_mode'] = $opts['html_strip_mode']; $res['passage_boundary'] = $opts['passage_boundary']; $results[] = $res; // regression retain vs SPZ $docs = array( ' these words and these to are cool ', ' these words and these to are cool ' ); $opts = array ( 'limit'=>0, 'html_strip_mode'=>'retain', 'query_mode'=>1 ); $res = $client->BuildExcerpts($docs, 'test', 'ZONE:zoneZZ these', $opts); $res['zone'] = 'zoneZZ'; $results[] = $res; $res = $client->BuildExcerpts($docs, 'test', 'ZONE:zoneB these', $opts); $res['zone'] = 'ZoneB'; $results[] = $res; $res = $client->BuildExcerpts($docs, 'test', 'and PARAGRAPH words', $opts); $res['boundary'] = 'paragraph'; $results[] = $res; $results[] = $client->BuildExcerpts(array('tokenizer filter crash at lc'), 'multiform', 'crash at lc', array ('query_mode'=>1, 'html_strip_mode'=>'retain', 'limit'=>0) ); $results[] = $client->BuildExcerpts(array('dog dummy! as the house nearby the dog'), 'test', 'the. dog? as', array ('query_mode'=>1, 'html_strip_mode'=>'retain', 'limit'=>0) ); // regression head SPZ overgrow + non fast path SPZ $docs = array ( 'Ultra long stuff is going here then store sales, which were going before of store closes. Same store sales for the quarter increased as ultra long dust was here since univerce was born. ' ); $results[] = $client->BuildExcerpts ( $docs, 'test', '(store SENTENCE closes)', array ( 'query_mode'=>1, 'passage_boundary'=>'sentence', 'limit'=>90 ) ); $results[] = $client->BuildExcerpts ( $docs, 'test', '(store SENTENCE closes)', array ( 'query_mode'=>1, 'passage_boundary'=>'sentence', 'limit'=>0 ) ); $docs = array ( 'Ultra long stuff is going here then store sales, which were going before of store closes. Same store sales for the quarter increased as ultra long dust was here since univerce was born. 
And again store closes as usual ' ); $results[] = $client->BuildExcerpts ( $docs, 'test', '(store SENTENCE closes)', array ( 'query_mode'=>1, 'passage_boundary'=>'sentence', 'limit'=>90 ) ); $results[] = $client->BuildExcerpts ( $docs, 'test', '(store SENTENCE closes)', array ( 'query_mode'=>1, 'passage_boundary'=>'sentence', 'limit'=>0 ) ); // regression fast path SPZ vs html_strip_mode = retain $docs = array ( 'Ultra long stuff is going here then store sales, which were going before of store closes. Same store sales for the quarter increased as ultra long dust was here since univerce was born. And again store closes as usual ' ); $results[] = $client->BuildExcerpts ( $docs, 'test', '(store SENTENCE closes)', array ( 'query_mode'=>1, 'html_strip_mode'=>'retain', 'limit'=>0 ) ); $results[] = $client->BuildExcerpts ( $docs, 'test', '(store PARAGRAPH closes)', array ( 'query_mode'=>1, 'html_strip_mode'=>'retain', 'limit'=>0 ) ); $docs = array ( 'Ultra long stuff is going here then store sales, which were going before of store closes.

    Same store sales for the quarter increased as ultra long dust was here since univerce was born.

    And again store closes as usual ' ); $results[] = $client->BuildExcerpts ( $docs, 'test', '(store SENTENCE closes)', array ( 'query_mode'=>1, 'html_strip_mode'=>'retain', 'limit'=>0 ) ); $results[] = $client->BuildExcerpts ( $docs, 'test', '(store PARAGRAPH closes)', array ( 'query_mode'=>1, 'html_strip_mode'=>'retain', 'limit'=>0 ) ); ]]> sphinx-2.0.4-release/test/test_160/model.bin0000644000176700017710000001232311703766306020073 0ustar deogardeogara:1:{i:0;a:1:{i:0;a:55:{i:0;s:58:"[pass] (query/result=q/r) (fast path=1/0) (query_mode=1/0)";s:4:"[0]q";s:16:"ZONE:zoneA these";s:8:"[0]r 0 0";s:48:"these words and these to are cool ";s:8:"[0]r 1 0";s:48:"these words and these to are cool ";s:4:"[1]q";s:16:"ZONE:zoneB these";s:8:"[1]r 0 0";s:48:"these words and these to are cool ";s:8:"[1]r 1 0";s:48:"these words and these to are cool ";s:4:"[2]q";s:23:"ZONE:(zoneA, zoneB) th*";s:8:"[2]r 0 0";s:48:"these words and these to are cool ";s:8:"[2]r 1 0";s:48:"these words and these to are cool ";s:4:"[3]q";s:23:"ZONE:(zoneA, zoneB) are";s:8:"[3]r 0 0";s:41:"these words and these to are cool ";s:8:"[3]r 1 0";s:41:"these words and these to are cool ";s:4:"[4]q";s:20:"ZONE:zoneZZ a* cool$";s:8:"[4]r 0 0";s:55:"these words and these to are cool ";s:8:"[4]r 1 0";s:55:"these words and these to are cool ";s:4:"[5]q";s:18:"ZONE:zoneZZ to* a*";s:8:"[5]r 0 0";s:55:"these words and these to are cool ";s:8:"[5]r 1 0";s:55:"these words and these to are cool ";s:4:"[6]q";s:16:"ZONE:zoneA these";s:8:"[6]r 0 1";s:41:"these words and these to are cool ";s:8:"[6]r 1 1";s:41:"these words and these to are cool ";s:4:"[7]q";s:16:"ZONE:zoneB these";s:8:"[7]r 0 1";s:41:"these words and these to are cool ";s:8:"[7]r 1 1";s:41:"these words and these to are cool ";s:4:"[8]q";s:23:"ZONE:(zoneA, zoneB) th*";s:8:"[8]r 0 1";N;s:8:"[8]r 1 1";N;s:4:"[9]q";s:23:"ZONE:(zoneA, zoneB) are";s:8:"[9]r 0 1";N;s:8:"[9]r 1 1";N;s:5:"[10]q";s:20:"ZONE:zoneZZ a* cool$";s:9:"[10]r 0 1";s:55:"these words and these to are cool ";s:9:"[10]r 1 1";s:48:"these words and these to are cool ";s:5:"[11]q";s:18:"ZONE:zoneZZ to* a*";s:9:"[11]r 0 1";s:55:"these words and these to are cool ";s:9:"[11]r 1 1";s:55:"these words and these to are cool ";i:1;a:2:{i:0;s:16:"small doc";i:1;s:8:"one more";}i:2;a:2:{i:0;s:48:" ... words and these to are cool ";s:15:"html_strip_mode";s:5:"index";}i:3;a:2:{i:0;s:48:" ... words and these to are cool ";s:15:"html_strip_mode";s:5:"strip";}i:4;a:3:{i:0;s:75:" ... these words ... and these to ... are cool ... ";s:15:"html_strip_mode";s:5:"index";s:16:"passage_boundary";s:4:"zone";}i:5;a:3:{i:0;s:54:" ... these words ... and these to ... ";s:15:"html_strip_mode";s:5:"strip";s:16:"passage_boundary";s:4:"zone";}i:6;a:3:{i:0;s:98:" these words and these to are cool ";i:1;s:87:" these words and these to are cool ";s:4:"zone";s:6:"zoneZZ";}i:7;a:3:{i:0;s:91:" these words and these to are cool ";i:1;s:73:" these words and these to are cool ";s:4:"zone";s:5:"ZoneB";}i:8;a:3:{i:0;s:98:" these words and these to are cool ";i:1;s:73:" these words and these to are cool ";s:8:"boundary";s:9:"paragraph";}i:9;a:1:{i:0;s:35:"tokenizer filter crash at lc";}i:10;a:1:{i:0;s:59:"dog dummy! as the house nearby the dog";}i:11;a:1:{i:0;s:104:" ... is going here then store sales, which were going before of store closes. ... ";}i:12;a:1:{i:0;s:200:"Ultra long stuff is going here then store sales, which were going before of store closes. Same store sales for the quarter increased as ultra long dust was here since univerce was born. 
";}i:13;a:1:{i:0;s:140:" ... going here then store sales, which were going before of store closes ... store closes as usual ... ";}i:14;a:1:{i:0;s:239:"Ultra long stuff is going here then store sales, which were going before of store closes. Same store sales for the quarter increased as ultra long dust was here since univerce was born. And again store closes as usual ";}i:15;a:1:{i:0;s:239:"Ultra long stuff is going here then store sales, which were going before of store closes. Same store sales for the quarter increased as ultra long dust was here since univerce was born. And again store closes as usual ";}i:16;a:1:{i:0;s:246:"Ultra long stuff is going here then store sales, which were going before of store closes. Same store sales for the quarter increased as ultra long dust was here since univerce was born. And again store closes as usual ";}i:17;a:1:{i:0;s:247:"Ultra long stuff is going here then store sales, which were going before of store closes.

    Same store sales for the quarter increased as ultra long dust was here since univerce was born.

    And again store closes as usual ";}i:18;a:1:{i:0;s:247:"Ultra long stuff is going here then store sales, which were going before of store closes.

    Same store sales for the quarter increased as ultra long dust was here since univerce was born.

    And again store closes as usual ";}}}}sphinx-2.0.4-release/test/test_174/0000755000176700017710000000000011724063141016273 5ustar deogardeogarsphinx-2.0.4-release/test/test_174/readme.txt0000644000176700017710000000021211546323464020275 0ustar deogardeogarOn some OS (e.g. openSuse 10.3 ) on some PHP versions (e.g. 5.2.11) fread works incorrect if third argument ($length) is more than 8192. sphinx-2.0.4-release/test/test_174/test.xml0000644000176700017710000000157511546323464020015 0ustar deogardeogar php fread from socket test indexer { mem_limit = 32M } searchd { } source test { type = mysql sql_query = select *, 'garbage' as garbage from test_table sql_attr_string = garbage } index test { source = test path = /test } create table test_table ( id int not null, title varchar(255) not null ); drop table if exists test_table; hello sphinx-2.0.4-release/test/test_174/model.bin0000644000176700017710000024056111546323464020106 0ustar deogardeogara:1:{i:0;a:1:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:7:"garbage";i:7;}s:7:"matches";a:1000:{i:1;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:2;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:3;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:4;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:5;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:6;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:7;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:8;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:9;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:10;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:11;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:12;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:13;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:14;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:15;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:16;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:17;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:18;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:19;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:20;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:21;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:22;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:23;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:24;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:25;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:26;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:27;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:28;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:29;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:30;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i
:31;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:32;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:33;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:34;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:35;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:36;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:37;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:38;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:39;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:40;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:41;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:42;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:43;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:44;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:45;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:46;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:47;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:48;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:49;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:50;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:51;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:52;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:53;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:54;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:55;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:56;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:57;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:58;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:59;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:60;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:61;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:62;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:63;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:64;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:65;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:66;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:67;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:68;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:69;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:70;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:71;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:72;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:73;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:74;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"gar
bage";}}i:75;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:76;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:77;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:78;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:79;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:80;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:81;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:82;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:83;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:84;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:85;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:86;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:87;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:88;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:89;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:90;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:91;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:92;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:93;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:94;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:95;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:96;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:97;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:98;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:99;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:100;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:101;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:102;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:103;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:104;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:105;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:106;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:107;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:108;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:109;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:110;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:111;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:112;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:113;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:114;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:115;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:116;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:117;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:118;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs"
;a:1:{s:7:"garbage";s:7:"garbage";}}i:119;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:120;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:121;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:122;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:123;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:124;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:125;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:126;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:127;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:128;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:129;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:130;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:131;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:132;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:133;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:134;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:135;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:136;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:137;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:138;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:139;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:140;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:141;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:142;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:143;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:144;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:145;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:146;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:147;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:148;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:149;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:150;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:151;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:152;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:153;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:154;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:155;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:156;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:157;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:158;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:159;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:160;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:161;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garb
age";}}i:162;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:163;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:164;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:165;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:166;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:167;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:168;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:169;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:170;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:171;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:172;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:173;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:174;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:175;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:176;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:177;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:178;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:179;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:180;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:181;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:182;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:183;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:184;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:185;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:186;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:187;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:188;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:189;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:190;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:191;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:192;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:193;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:194;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:195;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:196;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:197;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:198;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:199;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:200;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:201;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:202;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:203;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:204;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:205;a:2:{s:6:"weight
";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:206;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:207;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:208;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:209;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:210;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:211;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:212;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:213;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:214;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:215;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:216;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:217;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:218;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:219;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:220;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:221;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:222;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:223;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:224;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:225;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:226;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:227;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:228;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:229;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:230;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:231;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:232;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:233;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:234;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:235;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:236;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:237;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:238;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:239;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:240;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:241;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:242;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:243;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:244;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:245;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:246;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:247;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:248;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:
{s:7:"garbage";s:7:"garbage";}}i:249;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:250;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:251;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:252;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:253;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:254;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:255;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:256;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:257;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:258;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:259;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:260;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:261;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:262;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:263;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:264;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:265;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:266;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:267;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:268;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:269;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:270;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:271;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:272;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:273;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:274;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:275;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:276;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:277;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:278;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:279;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:280;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:281;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:282;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:283;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:284;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:285;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:286;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:287;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:288;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:289;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:290;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:291;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";
}}i:292;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:293;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:294;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:295;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:296;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:297;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:298;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:299;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:300;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:301;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:302;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:303;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:304;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:305;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:306;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:307;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:308;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:309;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:310;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:311;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:312;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:313;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:314;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:315;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:316;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:317;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:318;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:319;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:320;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:321;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:322;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:323;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:324;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:325;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:326;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:327;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:328;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:329;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:330;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:331;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:332;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:333;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:334;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:335;a:2:{s:6:"weight";s:4
:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:336;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:337;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:338;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:339;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:340;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:341;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:342;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:343;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:344;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:345;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:346;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:347;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:348;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:349;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:350;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:351;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:352;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:353;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:354;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:355;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:356;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:357;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:358;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:359;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:360;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:361;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:362;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:363;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:364;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:365;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:366;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:367;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:368;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:369;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:370;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:371;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:372;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:373;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:374;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:375;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:376;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:377;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:378;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:
"garbage";s:7:"garbage";}}i:379;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:380;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:381;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:382;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:383;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:384;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:385;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:386;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:387;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:388;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:389;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:390;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:391;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:392;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:393;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:394;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:395;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:396;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:397;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:398;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:399;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:400;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:401;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:402;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:403;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:404;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:405;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:406;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:407;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:408;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:409;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:410;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:411;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:412;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:413;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:414;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:415;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:416;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:417;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:418;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:419;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:420;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:421;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:4
22;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:423;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:424;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:425;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:426;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:427;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:428;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:429;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:430;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:431;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:432;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:433;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:434;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:435;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:436;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:437;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:438;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:439;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:440;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:441;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:442;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:443;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:444;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:445;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:446;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:447;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:448;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:449;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:450;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:451;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:452;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:453;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:454;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:455;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:456;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:457;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:458;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:459;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:460;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:461;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:462;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:463;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:464;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:465;a:2:{s:6:"weight";s:4:"127
2";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:466;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:467;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:468;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:469;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:470;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:471;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:472;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:473;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:474;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:475;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:476;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:477;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:478;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:479;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:480;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:481;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:482;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:483;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:484;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:485;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:486;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:487;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:488;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:489;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:490;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:491;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:492;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:493;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:494;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:495;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:496;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:497;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:498;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:499;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:500;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:501;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:502;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:503;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:504;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:505;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:506;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:507;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:508;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garb
age";s:7:"garbage";}}i:509;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:510;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:511;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:512;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:513;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:514;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:515;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:516;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:517;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:518;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:519;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:520;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:521;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:522;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:523;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:524;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:525;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:526;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:527;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:528;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:529;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:530;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:531;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:532;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:533;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:534;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:535;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:536;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:537;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:538;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:539;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:540;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:541;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:542;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:543;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:544;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:545;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:546;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:547;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:548;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:549;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:550;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:551;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:552;a:
2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:553;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:554;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:555;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:556;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:557;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:558;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:559;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:560;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:561;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:562;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:563;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:564;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:565;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:566;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:567;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:568;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:569;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:570;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:571;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:572;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:573;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:574;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:575;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:576;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:577;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:578;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:579;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:580;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:581;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:582;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:583;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:584;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:585;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:586;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:587;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:588;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:589;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:590;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:591;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:592;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:593;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:594;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:595;a:2:{s:6:"weight";s:4:"1272";s:
5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:596;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:597;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:598;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:599;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:600;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:601;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:602;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:603;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:604;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:605;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:606;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:607;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:608;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:609;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:610;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:611;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:612;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:613;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:614;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:615;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:616;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:617;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:618;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:619;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:620;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:621;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:622;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:623;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:624;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:625;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:626;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:627;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:628;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:629;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:630;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:631;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:632;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:633;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:634;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:635;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:636;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:637;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:638;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";
s:7:"garbage";}}i:639;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:640;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:641;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:642;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:643;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:644;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:645;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:646;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:647;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:648;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:649;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:650;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:651;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:652;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:653;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:654;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:655;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:656;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:657;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:658;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:659;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:660;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:661;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:662;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:663;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:664;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:665;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:666;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:667;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:668;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:669;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:670;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:671;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:672;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:673;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:674;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:675;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:676;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:677;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:678;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:679;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:680;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:681;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:682;a:2:{s:
6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:683;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:684;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:685;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:686;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:687;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:688;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:689;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:690;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:691;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:692;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:693;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:694;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:695;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:696;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:697;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:698;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:699;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:700;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:701;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:702;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:703;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:704;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:705;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:706;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:707;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:708;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:709;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:710;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:711;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:712;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:713;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:714;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:715;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:716;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:717;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:718;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:719;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:720;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:721;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:722;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:723;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:724;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:725;a:2:{s:6:"weight";s:4:"1272";s:5:"at
trs";a:1:{s:7:"garbage";s:7:"garbage";}}i:726;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:727;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:728;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:729;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:730;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:731;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:732;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:733;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:734;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:735;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:736;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:737;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:738;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:739;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:740;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:741;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:742;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:743;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:744;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:745;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:746;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:747;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:748;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:749;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:750;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:751;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:752;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:753;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:754;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:755;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:756;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:757;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:758;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:759;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:760;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:761;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:762;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:763;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:764;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:765;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:766;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:767;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:768;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"
garbage";}}i:769;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:770;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:771;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:772;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:773;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:774;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:775;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:776;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:777;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:778;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:779;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:780;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:781;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:782;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:783;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:784;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:785;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:786;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:787;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:788;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:789;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:790;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:791;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:792;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:793;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:794;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:795;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:796;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:797;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:798;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:799;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:800;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:801;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:802;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:803;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:804;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:805;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:806;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:807;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:808;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:809;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:810;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:811;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:812;a:2:{s:6:"we
ight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:813;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:814;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:815;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:816;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:817;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:818;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:819;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:820;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:821;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:822;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:823;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:824;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:825;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:826;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:827;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:828;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:829;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:830;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:831;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:832;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:833;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:834;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:835;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:836;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:837;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:838;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:839;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:840;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:841;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:842;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:843;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:844;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:845;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:846;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:847;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:848;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:849;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:850;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:851;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:852;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:853;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:854;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:855;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";
a:1:{s:7:"garbage";s:7:"garbage";}}i:856;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:857;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:858;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:859;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:860;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:861;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:862;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:863;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:864;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:865;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:866;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:867;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:868;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:869;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:870;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:871;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:872;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:873;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:874;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:875;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:876;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:877;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:878;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:879;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:880;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:881;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:882;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:883;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:884;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:885;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:886;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:887;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:888;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:889;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:890;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:891;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:892;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:893;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:894;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:895;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:896;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:897;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:898;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garba
ge";}}i:899;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:900;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:901;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:902;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:903;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:904;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:905;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:906;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:907;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:908;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:909;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:910;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:911;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:912;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:913;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:914;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:915;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:916;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:917;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:918;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:919;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:920;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:921;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:922;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:923;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:924;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:925;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:926;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:927;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:928;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:929;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:930;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:931;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:932;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:933;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:934;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:935;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:936;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:937;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:938;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:939;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:940;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:941;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:942;a:2:{s:6:"weight"
;s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:943;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:944;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:945;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:946;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:947;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:948;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:949;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:950;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:951;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:952;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:953;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:954;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:955;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:956;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:957;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:958;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:959;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:960;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:961;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:962;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:963;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:964;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:965;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:966;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:967;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:968;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:969;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:970;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:971;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:972;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:973;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:974;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:975;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:976;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:977;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:978;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:979;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:980;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:981;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:982;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:983;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:984;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:985;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{
s:7:"garbage";s:7:"garbage";}}i:986;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:987;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:988;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:989;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:990;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:991;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:992;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:993;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:994;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:995;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:996;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:997;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:998;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:999;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}i:1000;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:1:{s:7:"garbage";s:7:"garbage";}}}s:5:"total";s:4:"1000";s:11:"total_found";s:4:"1000";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"hello";a:2:{s:4:"docs";s:4:"1000";s:4:"hits";s:4:"1000";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"hello";}}}sphinx-2.0.4-release/test/test_144/0000755000176700017710000000000011724063141016270 5ustar deogardeogarsphinx-2.0.4-release/test/test_144/test.xml0000644000176700017710000000351511611324334017774 0ustar deogardeogar query vs stack overflow searchd { workers = threads thread_stack = 191K binlog_path = } index test { type = rt path = /test rt_field = text rt_attr_uint = idd rt_mem_limit = 16M } CREATE TABLE `test` ( `document_id` int(11) NOT NULL default '0', `text` varchar(25) NOT NULL default '', `idd` int(11) NOT NULL default '0' ) DROP TABLE IF EXISTS `test` sphinx-2.0.4-release/test/test_144/model.bin0000644000176700017710000003016611444071270020071 0ustar deogardeogara:1:{i:0;a:1:{i:0;a:4:{i:0;s:47:"INSERT INTO test VALUES (1, 'word1 word10', 1 )";i:1;s:16:"total affected 1";i:2;s:12253:"SELECT * FROM test WHERE MATCH('"word1 word2"~30 | "word3 word4"~30 | "word4 word5"~30 | "word5 word6"~30 | "word6 word7"~30 | "word7 word8"~30 | "word8 word9"~30 | "word9 word10"~30 | "word10 word11"~30 | "word11 word12"~30 | "word12 word13"~30 | "word13 word14"~30 | "word14 word15"~30 | "word15 word16"~30 | "word16 word17"~30 | "word17 word18"~30 | "word18 word19"~30 | "word19 word20"~30 | "word20 word21"~30 | "word21 word22"~30 | "word22 word23"~30 | "word23 word24"~30 | "word24 word25"~30 | "word25 word26"~30 | "word26 word27"~30 | "word27 word28"~30 | "word28 word29"~30 | "word29 word30"~30 | "word30 word31"~30 | "word31 word32"~30 | "word32 word33"~30 | "word33 word34"~30 | "word34 word35"~30 | "word35 word36"~30 | "word36 word37"~30 | "word37 word38"~30 | "word38 word39"~30 | "word39 word40"~30 | "word40 word41"~30 | "word41 word42"~30 | "word42 word43"~30 | "word43 word44"~30 | "word44 word45"~30 | "word45 word46"~30 | "word46 word47"~30 | "word47 word48"~30 | "word48 word49"~30 | "word49 word50"~30 | "word50 word51"~30 | "word51 word52"~30 | "word52 word53"~30 | "word53 word54"~30 | "word54 word55"~30 | "word55 word56"~30 | "word56 word57"~30 | "word57 word58"~30 | "word58 word59"~30 | "word59 word60"~30 | "word60 word61"~30 | "word61 
word62"~30 | "word62 word63"~30 | "word63 word64"~30 | "word64 word65"~30 | "word65 word66"~30 | "word66 word67"~30 | "word67 word68"~30 | "word68 word69"~30 | "word69 word70"~30 | "word70 word71"~30 | "word71 word72"~30 | "word72 word73"~30 | "word73 word74"~30 | "word74 word75"~30 | "word75 word76"~30 | "word76 word77"~30 | "word77 word78"~30 | "word78 word79"~30 | "word79 word80"~30 | "word80 word81"~30 | "word81 word82"~30 | "word82 word83"~30 | "word83 word84"~30 | "word84 word85"~30 | "word85 word86"~30 | "word86 word87"~30 | "word87 word88"~30 | "word88 word89"~30 | "word89 word90"~30 | "word90 word91"~30 | "word91 word92"~30 | "word92 word93"~30 | "word93 word94"~30 | "word94 word95"~30 | "word95 word96"~30 | "word96 word97"~30 | "word97 word98"~30 | "word98 word99"~30 | "word99 word100"~30 | "word100 word101"~30 | "word101 word102"~30 | "word102 word103"~30 | "word103 word104"~30 | "word104 word105"~30 | "word105 word106"~30 | "word106 word107"~30 | "word107 word108"~30 | "word108 word109"~30 | "word109 word110"~30 | "word110 word111"~30 | "word111 word112"~30 | "word112 word113"~30 | "word113 word114"~30 | "word114 word115"~30 | "word115 word116"~30 | "word116 word117"~30 | "word117 word118"~30 | "word118 word119"~30 | "word119 word120"~30 | "word120 word121"~30 | "word121 word122"~30 | "word122 word123"~30 | "word123 word124"~30 | "word124 word125"~30 | "word125 word126"~30 | "word126 word127"~30 | "word127 word128"~30 | "word128 word129"~30 | "word129 word130"~30 | "word130 word131"~30 | "word131 word132"~30 | "word132 word133"~30 | "word133 word134"~30 | "word134 word135"~30 | "word135 word136"~30 | "word136 word137"~30 | "word137 word138"~30 | "word138 word139"~30 | "word139 word140"~30 | "word140 word141"~30 | "word141 word142"~30 | "word142 word143"~30 | "word143 word144"~30 | "word144 word145"~30 | "word145 word146"~30 | "word146 word147"~30 | "word147 word148"~30 | "word148 word149"~30 | "word149 word150"~30 | "word150 word151"~30 | "word151 word152"~30 | "word152 word153"~30 | "word153 word154"~30 | "word154 word155"~30 | "word155 word156"~30 | "word156 word157"~30 | "word157 word158"~30 | "word158 word159"~30 | "word159 word160"~30 | "word160 word161"~30 | "word161 word162"~30 | "word162 word163"~30 | "word163 word164"~30 | "word164 word165"~30 | "word165 word166"~30 | "word166 word167"~30 | "word167 word168"~30 | "word168 word169"~30 | "word169 word170"~30 | "word170 word171"~30 | "word171 word172"~30 | "word172 word173"~30 | "word173 word174"~30 | "word174 word175"~30 | "word175 word176"~30 | "word176 word177"~30 | "word177 word178"~30 | "word178 word179"~30 | "word179 word180"~30 | "word180 word181"~30 | "word181 word182"~30 | "word182 word183"~30 | "word183 word184"~30 | "word184 word185"~30 | "word185 word186"~30 | "word186 word187"~30 | "word187 word188"~30 | "word188 word189"~30 | "word189 word190"~30 | "word190 word191"~30 | "word191 word192"~30 | "word192 word193"~30 | "word193 word194"~30 | "word194 word195"~30 | "word195 word196"~30 | "word196 word197"~30 | "word197 word198"~30 | "word198 word199"~30 | "word199 word200"~30 | "word200 word201"~30 | "word201 word202"~30 | "word202 word203"~30 | "word203 word204"~30 | "word204 word205"~30 | "word205 word206"~30 | "word206 word207"~30 | "word207 word208"~30 | "word208 word209"~30 | "word209 word210"~30 | "word210 word211"~30 | "word211 word212"~30 | "word212 word213"~30 | "word213 word214"~30 | "word214 word215"~30 | "word215 word216"~30 | "word216 word217"~30 | "word217 word218"~30 | "word218 word219"~30 | 
"word219 word220"~30 | "word220 word221"~30 | "word221 word222"~30 | "word222 word223"~30 | "word223 word224"~30 | "word224 word225"~30 | "word225 word226"~30 | "word226 word227"~30 | "word227 word228"~30 | "word228 word229"~30 | "word229 word230"~30 | "word230 word231"~30 | "word231 word232"~30 | "word232 word233"~30 | "word233 word234"~30 | "word234 word235"~30 | "word235 word236"~30 | "word236 word237"~30 | "word237 word238"~30 | "word238 word239"~30 | "word239 word240"~30 | "word240 word241"~30 | "word241 word242"~30 | "word242 word243"~30 | "word243 word244"~30 | "word244 word245"~30 | "word245 word246"~30 | "word246 word247"~30 | "word247 word248"~30 | "word248 word249"~30 | "word249 word250"~30 | "word250 word251"~30 | "word251 word252"~30 | "word252 word253"~30 | "word253 word254"~30 | "word254 word255"~30 | "word255 word256"~30 | "word256 word257"~30 | "word257 word258"~30 | "word258 word259"~30 | "word259 word260"~30 | "word260 word261"~30 | "word261 word262"~30 | "word262 word263"~30 | "word263 word264"~30 | "word264 word265"~30 | "word265 word266"~30 | "word266 word267"~30 | "word267 word268"~30 | "word268 word269"~30 | "word269 word270"~30 | "word270 word271"~30 | "word271 word272"~30 | "word272 word273"~30 | "word273 word274"~30 | "word274 word275"~30 | "word275 word276"~30 | "word276 word277"~30 | "word277 word278"~30 | "word278 word279"~30 | "word279 word280"~30 | "word280 word281"~30 | "word281 word282"~30 | "word282 word283"~30 | "word283 word284"~30 | "word284 word285"~30 | "word285 word286"~30 | "word286 word287"~30 | "word287 word288"~30 | "word288 word289"~30 | "word289 word290"~30 | "word290 word291"~30 | "word291 word292"~30 | "word292 word293"~30 | "word293 word294"~30 | "word294 word295"~30 | "word295 word296"~30 | "word296 word297"~30 | "word297 word298"~30 | "word298 word299"~30 | "word299 word300"~30 | "word300 word301"~30 | "word301 word302"~30 | "word302 word303"~30 | "word303 word304"~30 | "word304 word305"~30 | "word305 word306"~30 | "word306 word307"~30 | "word307 word308"~30 | "word308 word309"~30 | "word309 word310"~30 | "word310 word311"~30 | "word311 word312"~30 | "word312 word313"~30 | "word313 word314"~30 | "word314 word315"~30 | "word315 word316"~30 | "word316 word317"~30 | "word317 word318"~30 | "word318 word319"~30 | "word319 word320"~30 | "word320 word321"~30 | "word321 word322"~30 | "word322 word323"~30 | "word323 word324"~30 | "word324 word325"~30 | "word325 word326"~30 | "word326 word327"~30 | "word327 word328"~30 | "word328 word329"~30 | "word329 word330"~30 | "word330 word331"~30 | "word331 word332"~30 | "word332 word333"~30 | "word333 word334"~30 | "word334 word335"~30 | "word335 word336"~30 | "word336 word337"~30 | "word337 word338"~30 | "word338 word339"~30 | "word339 word340"~30 | "word340 word341"~30 | "word341 word342"~30 | "word342 word343"~30 | "word343 word344"~30 | "word344 word345"~30 | "word345 word346"~30 | "word346 word347"~30 | "word347 word348"~30 | "word348 word349"~30 | "word349 word350"~30 | "word350 word351"~30 | "word351 word352"~30 | "word352 word353"~30 | "word353 word354"~30 | "word354 word355"~30 | "word355 word356"~30 | "word356 word357"~30 | "word357 word358"~30 | "word358 word359"~30 | "word359 word360"~30 | "word360 word361"~30 | "word361 word362"~30 | "word362 word363"~30 | "word363 word364"~30 | "word364 word365"~30 | "word365 word366"~30 | "word366 word367"~30 | "word367 word368"~30 | "word368 word369"~30 | "word369 word370"~30 | "word370 word371"~30 | "word371 word372"~30 | "word372 word373"~30 | "word373 
word374"~30 | "word374 word375"~30 | "word375 word376"~30 | "word376 word377"~30 | "word377 word378"~30 | "word378 word379"~30 | "word379 word380"~30 | "word380 word381"~30 | "word381 word382"~30 | "word382 word383"~30 | "word383 word384"~30 | "word384 word385"~30 | "word385 word386"~30 | "word386 word387"~30 | "word387 word388"~30 | "word388 word389"~30 | "word389 word390"~30 | "word390 word391"~30 | "word391 word392"~30 | "word392 word393"~30 | "word393 word394"~30 | "word394 word395"~30 | "word395 word396"~30 | "word396 word397"~30 | "word397 word398"~30 | "word398 word399"~30 | "word399 word400"~30 | "word400 word401"~30 | "word401 word402"~30 | "word402 word403"~30 | "word403 word404"~30 | "word404 word405"~30 | "word405 word406"~30 | "word406 word407"~30 | "word407 word408"~30 | "word408 word409"~30 | "word409 word410"~30 | "word410 word411"~30 | "word411 word412"~30 | "word412 word413"~30 | "word413 word414"~30 | "word414 word415"~30 | "word415 word416"~30 | "word416 word417"~30 | "word417 word418"~30 | "word418 word419"~30 | "word419 word420"~30 | "word420 word421"~30 | "word421 word422"~30 | "word422 word423"~30 | "word423 word424"~30 | "word424 word425"~30 | "word425 word426"~30 | "word426 word427"~30 | "word427 word428"~30 | "word428 word429"~30 | "word429 word430"~30 | "word430 word431"~30 | "word431 word432"~30 | "word432 word433"~30 | "word433 word434"~30 | "word434 word435"~30 | "word435 word436"~30 | "word436 word437"~30 | "word437 word438"~30 | "word438 word439"~30 | "word439 word440"~30 | "word440 word441"~30 | "word441 word442"~30 | "word442 word443"~30 | "word443 word444"~30 | "word444 word445"~30 | "word445 word446"~30 | "word446 word447"~30 | "word447 word448"~30 | "word448 word449"~30 | "word449 word450"~30 | "word450 word451"~30 | "word451 word452"~30 | "word452 word453"~30 | "word453 word454"~30 | "word454 word455"~30 | "word455 word456"~30 | "word456 word457"~30 | "word457 word458"~30 | "word458 word459"~30 | "word459 word460"~30 | "word460 word461"~30 | "word461 word462"~30 | "word462 word463"~30 | "word463 word464"~30 | "word464 word465"~30 | "word465 word466"~30 | "word466 word467"~30 | "word467 word468"~30 | "word468 word469"~30 | "word469 word470"~30 | "word470 word471"~30 | "word471 word472"~30 | "word472 word473"~30 | "word473 word474"~30 | "word474 word475"~30 | "word475 word476"~30 | "word476 word477"~30 | "word477 word478"~30 | "word478 word479"~30 | "word479 word480"~30 | "word480 word481"~30 | "word481 word482"~30 | "word482 word483"~30 | "word483 word484"~30 | "word484 word485"~30 | "word485 word486"~30 | "word486 word487"~30 | "word487 word488"~30 | "word488 word489"~30 | "word489 word490"~30 | "word490 word491"~30 | "word491 word492"~30 | "word492 word493"~30 | "word493 word494"~30 | "word494 word495"~30 | "word495 word496"~30 | "word496 word497"~30 | "word497 word498"~30 | "word498 word499"~30 | "word499 word500"~30 | "word500 word501"~30 | "word501 word502"~30 | "word502 word503"~30 | "word503 word504"~30 | "word504 word505"~30 | "word505 word506"~30 | "word506 word507"~30 | "word507 word508"~30 | "word508 word509"~30 | "word509 word510"~30 | "word510 word511"~30 | "word511 word512"~30 | "word512 word513"~30 | "word513 word514"~30 | "word514 word515"~30 | "word515 word516"~30 | "word516 word517"~30 | "word517 word518"~30 | "word518 word519"~30 | "word519 word520"~30 ')";i:3;s:13:"total found 0";}}}sphinx-2.0.4-release/test/test_077/0000755000176700017710000000000011724063141016275 5ustar 
deogardeogarsphinx-2.0.4-release/test/test_077/test.xml0000644000176700017710000000204511605620330017774 0ustar deogardeogar uint attrs indexer { mem_limit = 16M } searchd { } source test { type = mysql sql_query = SELECT id, n, 'text' FROM test_table; sql_attr_uint = n sql_attr_multi = uint mva from query; select id, n mva from test_table sql_attr_multi = bigint mva from query; select id, n mva from test_table } index test { source = test path = /test } CREATE TABLE test_table ( id INT NOT NULL, n INT UNSIGNED NOT NULL ); DROP TABLE IF EXISTS test_table; INSERT INTO test_table (id, n) VALUES ( 1, 2582995467 ), ( 2, 3650268775 ), ( 3, 1452351953 ), ( 4, 1022026391 ), ( 5, 3802901620 ), ( 6, 1329722356 ); n mva sphinx-2.0.4-release/test/test_077/model.bin0000644000176700017710000001466611605620330020101 0ustar deogardeogara:2:{i:0;a:3:{i:0;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:2:{s:1:"n";i:1;s:3:"mva";i:1073741825;}s:7:"matches";a:6:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"n";s:10:"2582995467";s:3:"mva";a:1:{i:0;s:10:"2582995467";}}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"n";s:10:"3650268775";s:3:"mva";a:1:{i:0;s:10:"3650268775";}}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"n";s:10:"1452351953";s:3:"mva";a:1:{i:0;s:10:"1452351953";}}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"n";s:10:"1022026391";s:3:"mva";a:1:{i:0;s:10:"1022026391";}}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"n";s:10:"3802901620";s:3:"mva";a:1:{i:0;s:10:"3802901620";}}}i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"n";s:10:"1329722356";s:3:"mva";a:1:{i:0;s:10:"1329722356";}}}}s:5:"total";s:1:"6";s:11:"total_found";s:1:"6";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:1;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:3:{s:1:"n";i:1;s:3:"mva";i:1073741825;s:5:"@expr";i:5;}s:7:"matches";a:6:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"n";s:10:"3802901620";s:3:"mva";a:1:{i:0;s:10:"3802901620";}s:5:"@expr";d:3802901504;}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"n";s:10:"3650268775";s:3:"mva";a:1:{i:0;s:10:"3650268775";}s:5:"@expr";d:3650268672;}}i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"n";s:10:"2582995467";s:3:"mva";a:1:{i:0;s:10:"2582995467";}s:5:"@expr";d:2582995456;}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"n";s:10:"1452351953";s:3:"mva";a:1:{i:0;s:10:"1452351953";}s:5:"@expr";d:1452352000;}}i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"n";s:10:"1329722356";s:3:"mva";a:1:{i:0;s:10:"1329722356";}s:5:"@expr";d:1329722368;}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"n";s:10:"1022026391";s:3:"mva";a:1:{i:0;s:10:"1022026391";}s:5:"@expr";d:1022026368;}}}s:5:"total";s:1:"6";s:11:"total_found";s:1:"6";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:2;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:3:{s:1:"n";i:1;s:3:"mva";i:1073741825;s:5:"@expr";i:5;}s:7:"matches";a:6:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"n";s:10:"1022026391";s:3:"mva";a:1:{i:0;s:10:"1022026391";}s:5:"@expr";d:-1022026368;}}i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"n";s:10:"1329722356";s:3:"mva";a:1:{i:0;s:10:"1329722356";}s:5:"@expr";d:-1329722368;}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"n";s:10:"1452351953";s:3:"mva";a:1:{i:0;s:10:"1452351953";}s:5:
"@expr";d:-1452352000;}}i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"n";s:10:"2582995467";s:3:"mva";a:1:{i:0;s:10:"2582995467";}s:5:"@expr";d:-2582995456;}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"n";s:10:"3650268775";s:3:"mva";a:1:{i:0;s:10:"3650268775";}s:5:"@expr";d:-3650268672;}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"n";s:10:"3802901620";s:3:"mva";a:1:{i:0;s:10:"3802901620";}s:5:"@expr";d:-3802901504;}}}s:5:"total";s:1:"6";s:11:"total_found";s:1:"6";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}}i:1;a:3:{i:0;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:2:{s:1:"n";i:1;s:3:"mva";i:1073741825;}s:7:"matches";a:6:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"n";s:10:"2582995467";s:3:"mva";a:1:{i:0;s:10:"2582995467";}}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"n";s:10:"3650268775";s:3:"mva";a:1:{i:0;s:10:"3650268775";}}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"n";s:10:"1452351953";s:3:"mva";a:1:{i:0;s:10:"1452351953";}}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"n";s:10:"1022026391";s:3:"mva";a:1:{i:0;s:10:"1022026391";}}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"n";s:10:"3802901620";s:3:"mva";a:1:{i:0;s:10:"3802901620";}}}i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"n";s:10:"1329722356";s:3:"mva";a:1:{i:0;s:10:"1329722356";}}}}s:5:"total";s:1:"6";s:11:"total_found";s:1:"6";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:1;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:3:{s:1:"n";i:1;s:3:"mva";i:1073741825;s:5:"@expr";i:5;}s:7:"matches";a:6:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"n";s:10:"3802901620";s:3:"mva";a:1:{i:0;s:10:"3802901620";}s:5:"@expr";d:3802901504;}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"n";s:10:"3650268775";s:3:"mva";a:1:{i:0;s:10:"3650268775";}s:5:"@expr";d:3650268672;}}i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"n";s:10:"2582995467";s:3:"mva";a:1:{i:0;s:10:"2582995467";}s:5:"@expr";d:2582995456;}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"n";s:10:"1452351953";s:3:"mva";a:1:{i:0;s:10:"1452351953";}s:5:"@expr";d:1452352000;}}i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"n";s:10:"1329722356";s:3:"mva";a:1:{i:0;s:10:"1329722356";}s:5:"@expr";d:1329722368;}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"n";s:10:"1022026391";s:3:"mva";a:1:{i:0;s:10:"1022026391";}s:5:"@expr";d:1022026368;}}}s:5:"total";s:1:"6";s:11:"total_found";s:1:"6";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:2;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:3:{s:1:"n";i:1;s:3:"mva";i:1073741825;s:5:"@expr";i:5;}s:7:"matches";a:6:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"n";s:10:"1022026391";s:3:"mva";a:1:{i:0;s:10:"1022026391";}s:5:"@expr";d:-1022026368;}}i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"n";s:10:"1329722356";s:3:"mva";a:1:{i:0;s:10:"1329722356";}s:5:"@expr";d:-1329722368;}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"n";s:10:"1452351953";s:3:"mva";a:1:{i:0;s:10:"1452351953";}s:5:"@expr";d:-1452352000;}}i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"n";s:10:"2582995467";s:3:"mva";a:1:{i:0;s:10:"2582995467";}s:5:"@expr";d:-2582995456;}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"n";s:10:"3650268775";s:3:"mva";a:1:{i:0;s:10:"365026
8775";}s:5:"@expr";d:-3650268672;}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"n";s:10:"3802901620";s:3:"mva";a:1:{i:0;s:10:"3802901620";}s:5:"@expr";d:-3802901504;}}}s:5:"total";s:1:"6";s:11:"total_found";s:1:"6";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}}}sphinx-2.0.4-release/test/test_076/0000755000176700017710000000000011724063141016274 5ustar deogardeogarsphinx-2.0.4-release/test/test_076/test.xml0000644000176700017710000000217211323636205020001 0ustar deogardeogar subtree cache reset on size limit indexer { mem_limit = 16M } searchd { subtree_docs_cache = 0 subtree_docs_cache = 40 subtree_docs_cache = 100 subtree_hits_cache = 1000 } source srctest { type = mysql sql_query = SELECT * FROM test_table } index test_idx { source = srctest path = /test charset_type = utf-8 } CREATE TABLE `test_table` ( `id` int(11) NOT NULL default '0', `body` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 1, 'a b c d' ), ( 2, 'a b e f' ) SetMatchMode ( SPH_MATCH_EXTENDED2 ); $client->AddQuery ("(a b)|(c d)"); $client->AddQuery ("(a b)|(e f)"); $results = $client->RunQueries (); for ( $i=0; $i<2; $i++ ) if ( is_array($results) && is_array($results[$i]) ) unset ( $results[$i]["time"] ); ]]> sphinx-2.0.4-release/test/test_076/model.bin0000644000176700017710000000613211232040563020065 0ustar deogardeogara:3:{i:0;a:1:{i:0;a:2:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:1;a:2:{s:6:"weight";s:4:"4500";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"2428";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:5:"words";a:4:{s:1:"a";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:1:"b";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:1:"c";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:1:"d";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:2;a:2:{s:6:"weight";s:4:"4500";s:5:"attrs";a:0:{}}i:1;a:2:{s:6:"weight";s:4:"2428";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:5:"words";a:4:{s:1:"a";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:1:"b";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:1:"e";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:1:"f";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}}}i:1;a:1:{i:0;a:2:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:1;a:2:{s:6:"weight";s:4:"4500";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"2428";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:5:"words";a:4:{s:1:"a";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:1:"b";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:1:"c";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:1:"d";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:2;a:2:{s:6:"weight";s:4:"4500";s:5:"attrs";a:0:{}}i:1;a:2:{s:6:"weight";s:4:"2428";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:5:"words";a:4:{s:1:"a";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:1:"b";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:1:"e";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:1:"f";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}}}i:2;a:1:{i:0;a:2:{i:0;a:9:{s:5:"error"
;s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:1;a:2:{s:6:"weight";s:4:"4500";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"2428";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:5:"words";a:4:{s:1:"a";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:1:"b";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:1:"c";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:1:"d";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:2;a:2:{s:6:"weight";s:4:"4500";s:5:"attrs";a:0:{}}i:1;a:2:{s:6:"weight";s:4:"2428";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:5:"words";a:4:{s:1:"a";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:1:"b";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:1:"e";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:1:"f";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}}}}sphinx-2.0.4-release/test/test_132/0000755000176700017710000000000011724063141016265 5ustar deogardeogarsphinx-2.0.4-release/test/test_132/test.xml0000644000176700017710000001474211674105325020003 0ustar deogardeogar MVA persistent attribute updates indexer { mem_limit = 16M } searchd { binlog_path = # workers = threads } source src { type = mysql sql_query = SELECT id, text, section, mva1 FROM test_table sql_attr_uint = section sql_attr_multi = uint mva1 from field mva1 sql_attr_multi = bigint mva1 from field mva1 } index idx { source = src path = /main132 charset_type = utf-8 docinfo = extern } index rt { type = rt path = /rt charset_type = utf-8 docinfo = extern rt_field = body rt_attr_multi = mva1 rt_attr_uint = gid rt_attr_multi = mva2 } index hung { type = rt path = /hung charset_type = utf-8 docinfo = extern rt_field = body rt_attr_multi = mva1 rt_attr_uint = gid rt_attr_multi = mva2 } index rt_mva { type = rt path = /rt_mva charset_type = utf-8 docinfo = extern rt_mem_limit = 128k rt_field = text rt_attr_multi = mva } $v ) { $line .= $k . " = " . $v . "\t"; } $results[] = $line; } else { $results[] = $row["Variable_name"] . " = " . $row["Value"]; } } @mysql_free_result ( $res ); } return $results; '); global $sd_address, $sd_sphinxql_port; $sockStr = "$sd_address:$sd_sphinxql_port"; if ($sd_address == "localhost") $sockStr = "127.0.0.1:$sd_sphinxql_port"; $sock = @mysql_connect ($sockStr,'','', true ); if ( $sock === false ) { $results[] = "error: can't connect to searchd: " . @mysql_errno ( $sock ) . " : " . @mysql_error ( $sock ); return; } @mysql_query ( "insert into rt (id, gid, mva1, mva2, body) values (1, 11, (1, 1), (2), 'dummy'), (3, 33, (3, 3), (3), 'dummy')" ); @mysql_close($sock); // update that block for ( $i = 0; $i < 4; $i++) { $results[] = sprintf( "iteration=%d", $i ); $up = $client->UpdateAttributes ( "idx", array("mva1"), array(1=>array(array(2,3,4)), 3=>array(array(6,7,8))),true); if ( $up >= 0 ) $results[] = sprintf("up.ok=%d", $up); else $results[] = sprintf("up.err=%s", $client->GetLastError()); $sock = @mysql_connect ($sockStr,'','', true ); if ( $sock === false ) { $results[] = "error: can't connect to searchd: " . @mysql_errno ( $sock ) . " : " . 
@mysql_error ( $sock ); return; } @mysql_query ( 'update idx set mva1=(3,2, 1, 2), mva1=(1, 2) where id=1' ); if ( @mysql_error() ) $results[] = @mysql_error(); @mysql_query ( 'update rt set mva1=(3,2, 1, 2), gid=3212, mva2=(1, 2, 3, 4, 5, 6), mva2=(3,4,5) where id=1' ); if ( @mysql_error() ) $results[] = @mysql_error(); @mysql_close($sock); StopSearchd ( 'config.conf', 'searchd.pid' ); usleep ( 50000 ); $error = ""; $startSta = StartSearchd ( 'config.conf', 'error.txt', 'searchd.pid', $error ); if ( $startSta == 0 || $startSta == 2 ) { $results[] = "started=ok"; } else $results[] = sprintf("start.err=%d local=%s client=%s", $startSta, $error, $client->GetLastError()); } // regression that rt deadlock on smart attributes update $sock = @mysql_connect ($sockStr,'','', true ); if ( $sock === false ) { $results[] = "error: can't connect to searchd: " . @mysql_errno ( $sock ) . " : " . @mysql_error ( $sock ); return; } for ( $i = 1; $i < 2001; $i++) { $gid = $i * 1000; $mva1 = $i * 1000 + 33; $mva2 = $i * 1000 - 11; @mysql_query ( "replace into hung (id, gid, mva1, mva2, body) values ($i, $gid, ($mva1, $mva2), ($mva1), 'dummy1')" ); if ( @mysql_error() ) $results[] = 'i=$i' . @mysql_error(); } @mysql_close($sock); // regression that rt dumps MVA to plain index wrong way $sock = @mysql_connect ($sockStr,'','', true ); if ( $sock === false ) { $results[] = "error: can't connect to searchd: " . @mysql_errno ( $sock ) . " : " . @mysql_error ( $sock ); return; } $q = "INSERT INTO rt_mva (id, text, mva) VALUES "; for ( $i = 1; $i <= 16000; $i++) { $q .= "( $i, ' ', ($i) )"; if ( ( $i%100 )!=0 ) $q.= ","; if ( ( $i%100 )==0 ) { @mysql_query ( $q ); $q = "INSERT INTO rt_mva (id, text, mva) VALUES "; } if ( @mysql_error() ) $results[] = 'i=$i' . @mysql_error(); } $results = array_merge ( $results, $query ( "select * from rt_mva order by id desc limit 3", $sock ) ); $results = array_merge ( $results, $query ( "show meta", $sock ) ); $results = array_merge ( $results, $query ( "select * from rt_mva where mva<17000 order by id asc limit 3", $sock ) ); $results = array_merge ( $results, $query ( "show meta", $sock ) ); // regressions: // crash on INSERT .. ( id, text, mva ) values ( 1, '', 15 ) // invalid syntax INSERT .. 
( id, text, mva ) values ( 1, '', () ) $results = array_merge ( $results, $query ( "replace into rt_mva (id, text, mva) values ( 1, ' ', 333 )", $sock ) ); $results = array_merge ( $results, $query ( "replace into rt_mva (id, text, mva) values ( 2, ' ', () )", $sock ) ); $results = array_merge ( $results, $query ( "select * from rt_mva order by id asc limit 3", $sock ) ); $results = array_merge ( $results, $query ( "show meta", $sock ) ); @mysql_close($sock); ]]> select * from idx where match('test3') select * from rt select * from idx update hung set mva1=(3,2, 1, 2) where id>2 select * from hung order by @id asc CREATE TABLE `test_table` ( `id` int(11) DEFAULT NULL, `text` varchar (255) NOT NULL, `section` int(11) DEFAULT NULL, `mva1` varchar(255) NOT NULL ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` (`id`, `text`, `section`, `mva1`) VALUES (1, 'test1', 101, '1001'), (2, 'test2', 102, '1002 1023 4456'), (3, 'test3', 103, '1003 1008 1010'), (4, 'test4', 104, '1004 1005 1006'); sphinx-2.0.4-release/test/test_132/model.bin0000644000176700017710000002272011674105325020067 0ustar deogardeogara:2:{i:0;a:6:{i:0;a:36:{i:0;s:11:"iteration=0";i:1;s:7:"up.ok=2";i:2;s:10:"started=ok";i:3;s:11:"iteration=1";i:4;s:7:"up.ok=2";i:5;s:10:"started=ok";i:6;s:11:"iteration=2";i:7;s:7:"up.ok=2";i:8;s:10:"started=ok";i:9;s:11:"iteration=3";i:10;s:7:"up.ok=2";i:11;s:10:"started=ok";i:12;s:45:"select * from rt_mva order by id desc limit 3";i:13;s:34:"id = 16000 weight = 1 mva = 16000 ";i:14;s:34:"id = 15999 weight = 1 mva = 15999 ";i:15;s:34:"id = 15998 weight = 1 mva = 15998 ";i:16;s:9:"show meta";i:17;s:12:"total = 1000";i:18;s:19:"total_found = 16000";i:19;s:60:"select * from rt_mva where mva<17000 order by id asc limit 3";i:20;s:26:"id = 1 weight = 1 mva = 1 ";i:21;s:26:"id = 2 weight = 1 mva = 2 ";i:22;s:26:"id = 3 weight = 1 mva = 3 ";i:23;s:9:"show meta";i:24;s:12:"total = 1000";i:25;s:19:"total_found = 16000";i:26;s:59:"replace into rt_mva (id, text, mva) values ( 1, ' ', 333 )";i:27;s:63:"1064; raw 1, column 3: non-MVA value specified for a MVA column";i:28;s:58:"replace into rt_mva (id, text, mva) values ( 2, ' ', () )";i:29;s:44:"select * from rt_mva order by id asc limit 3";i:30;s:26:"id = 1 weight = 1 mva = 1 ";i:31;s:25:"id = 2 weight = 1 mva = ";i:32;s:26:"id = 3 weight = 1 mva = 3 ";i:33;s:9:"show meta";i:34;s:12:"total = 1000";i:35;s:19:"total_found = 16000";}i:1;a:3:{s:8:"sphinxql";s:38:"select * from idx where match('test3')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:4:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1695";s:7:"section";s:3:"103";s:4:"mva1";s:5:"6,7,8";}}}i:2;a:3:{s:8:"sphinxql";s:16:"select * from rt";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:5:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"gid";s:4:"3212";s:4:"mva1";s:5:"1,2,3";s:4:"mva2";s:5:"3,4,5";}i:1;a:5:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"gid";s:2:"33";s:4:"mva1";s:1:"3";s:4:"mva2";s:1:"3";}}}i:3;a:3:{s:8:"sphinxql";s:17:"select * from idx";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:4:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:7:"section";s:3:"101";s:4:"mva1";s:3:"1,2";}i:1;a:4:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:7:"section";s:3:"102";s:4:"mva1";s:14:"1002,1023,4456";}i:2;a:4:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:7:"section";s:3:"103";s:4:"mva1";s:5:"6,7,8";}i:3;a:4:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:7:"section";s:3:"104";s:4:"mva1";s:14:"1004,1005,1006";}}}i:4;a:2:{s:8:"sphinxql";s:43:"update hung set mva1=(3,2, 1, 2) where 
id>2";s:14:"total_affected";i:1998;}i:5;a:3:{s:8:"sphinxql";s:35:"select * from hung order by @id asc";s:10:"total_rows";i:20;s:4:"rows";a:20:{i:0;a:5:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"gid";s:4:"1000";s:4:"mva1";s:8:"989,1033";s:4:"mva2";s:4:"1033";}i:1;a:5:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:3:"gid";s:4:"2000";s:4:"mva1";s:9:"1989,2033";s:4:"mva2";s:4:"2033";}i:2;a:5:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"gid";s:4:"3000";s:4:"mva1";s:5:"1,2,3";s:4:"mva2";s:4:"3033";}i:3;a:5:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:3:"gid";s:4:"4000";s:4:"mva1";s:5:"1,2,3";s:4:"mva2";s:4:"4033";}i:4;a:5:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:3:"gid";s:4:"5000";s:4:"mva1";s:5:"1,2,3";s:4:"mva2";s:4:"5033";}i:5;a:5:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:3:"gid";s:4:"6000";s:4:"mva1";s:5:"1,2,3";s:4:"mva2";s:4:"6033";}i:6;a:5:{s:2:"id";s:1:"7";s:6:"weight";s:1:"1";s:3:"gid";s:4:"7000";s:4:"mva1";s:5:"1,2,3";s:4:"mva2";s:4:"7033";}i:7;a:5:{s:2:"id";s:1:"8";s:6:"weight";s:1:"1";s:3:"gid";s:4:"8000";s:4:"mva1";s:5:"1,2,3";s:4:"mva2";s:4:"8033";}i:8;a:5:{s:2:"id";s:1:"9";s:6:"weight";s:1:"1";s:3:"gid";s:4:"9000";s:4:"mva1";s:5:"1,2,3";s:4:"mva2";s:4:"9033";}i:9;a:5:{s:2:"id";s:2:"10";s:6:"weight";s:1:"1";s:3:"gid";s:5:"10000";s:4:"mva1";s:5:"1,2,3";s:4:"mva2";s:5:"10033";}i:10;a:5:{s:2:"id";s:2:"11";s:6:"weight";s:1:"1";s:3:"gid";s:5:"11000";s:4:"mva1";s:5:"1,2,3";s:4:"mva2";s:5:"11033";}i:11;a:5:{s:2:"id";s:2:"12";s:6:"weight";s:1:"1";s:3:"gid";s:5:"12000";s:4:"mva1";s:5:"1,2,3";s:4:"mva2";s:5:"12033";}i:12;a:5:{s:2:"id";s:2:"13";s:6:"weight";s:1:"1";s:3:"gid";s:5:"13000";s:4:"mva1";s:5:"1,2,3";s:4:"mva2";s:5:"13033";}i:13;a:5:{s:2:"id";s:2:"14";s:6:"weight";s:1:"1";s:3:"gid";s:5:"14000";s:4:"mva1";s:5:"1,2,3";s:4:"mva2";s:5:"14033";}i:14;a:5:{s:2:"id";s:2:"15";s:6:"weight";s:1:"1";s:3:"gid";s:5:"15000";s:4:"mva1";s:5:"1,2,3";s:4:"mva2";s:5:"15033";}i:15;a:5:{s:2:"id";s:2:"16";s:6:"weight";s:1:"1";s:3:"gid";s:5:"16000";s:4:"mva1";s:5:"1,2,3";s:4:"mva2";s:5:"16033";}i:16;a:5:{s:2:"id";s:2:"17";s:6:"weight";s:1:"1";s:3:"gid";s:5:"17000";s:4:"mva1";s:5:"1,2,3";s:4:"mva2";s:5:"17033";}i:17;a:5:{s:2:"id";s:2:"18";s:6:"weight";s:1:"1";s:3:"gid";s:5:"18000";s:4:"mva1";s:5:"1,2,3";s:4:"mva2";s:5:"18033";}i:18;a:5:{s:2:"id";s:2:"19";s:6:"weight";s:1:"1";s:3:"gid";s:5:"19000";s:4:"mva1";s:5:"1,2,3";s:4:"mva2";s:5:"19033";}i:19;a:5:{s:2:"id";s:2:"20";s:6:"weight";s:1:"1";s:3:"gid";s:5:"20000";s:4:"mva1";s:5:"1,2,3";s:4:"mva2";s:5:"20033";}}}}i:1;a:6:{i:0;a:36:{i:0;s:11:"iteration=0";i:1;s:7:"up.ok=2";i:2;s:10:"started=ok";i:3;s:11:"iteration=1";i:4;s:7:"up.ok=2";i:5;s:10:"started=ok";i:6;s:11:"iteration=2";i:7;s:7:"up.ok=2";i:8;s:10:"started=ok";i:9;s:11:"iteration=3";i:10;s:7:"up.ok=2";i:11;s:10:"started=ok";i:12;s:45:"select * from rt_mva order by id desc limit 3";i:13;s:34:"id = 16000 weight = 1 mva = 16000 ";i:14;s:34:"id = 15999 weight = 1 mva = 15999 ";i:15;s:34:"id = 15998 weight = 1 mva = 15998 ";i:16;s:9:"show meta";i:17;s:12:"total = 1000";i:18;s:19:"total_found = 16000";i:19;s:60:"select * from rt_mva where mva<17000 order by id asc limit 3";i:20;s:26:"id = 1 weight = 1 mva = 1 ";i:21;s:26:"id = 2 weight = 1 mva = 2 ";i:22;s:26:"id = 3 weight = 1 mva = 3 ";i:23;s:9:"show meta";i:24;s:12:"total = 1000";i:25;s:19:"total_found = 16000";i:26;s:59:"replace into rt_mva (id, text, mva) values ( 1, ' ', 333 )";i:27;s:63:"1064; raw 1, column 3: non-MVA value specified for a MVA column";i:28;s:58:"replace into rt_mva (id, text, mva) values ( 2, ' ', () )";i:29;s:44:"select * from rt_mva order by id 
asc limit 3";i:30;s:26:"id = 1 weight = 1 mva = 1 ";i:31;s:25:"id = 2 weight = 1 mva = ";i:32;s:26:"id = 3 weight = 1 mva = 3 ";i:33;s:9:"show meta";i:34;s:12:"total = 1000";i:35;s:19:"total_found = 16000";}i:1;a:3:{s:8:"sphinxql";s:38:"select * from idx where match('test3')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:4:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1695";s:7:"section";s:3:"103";s:4:"mva1";s:5:"6,7,8";}}}i:2;a:3:{s:8:"sphinxql";s:16:"select * from rt";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:5:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"gid";s:4:"3212";s:4:"mva1";s:5:"1,2,3";s:4:"mva2";s:5:"3,4,5";}i:1;a:5:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"gid";s:2:"33";s:4:"mva1";s:1:"3";s:4:"mva2";s:1:"3";}}}i:3;a:3:{s:8:"sphinxql";s:17:"select * from idx";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:4:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:7:"section";s:3:"101";s:4:"mva1";s:3:"1,2";}i:1;a:4:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:7:"section";s:3:"102";s:4:"mva1";s:14:"1002,1023,4456";}i:2;a:4:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:7:"section";s:3:"103";s:4:"mva1";s:5:"6,7,8";}i:3;a:4:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:7:"section";s:3:"104";s:4:"mva1";s:14:"1004,1005,1006";}}}i:4;a:2:{s:8:"sphinxql";s:43:"update hung set mva1=(3,2, 1, 2) where id>2";s:14:"total_affected";i:1998;}i:5;a:3:{s:8:"sphinxql";s:35:"select * from hung order by @id asc";s:10:"total_rows";i:20;s:4:"rows";a:20:{i:0;a:5:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"gid";s:4:"1000";s:4:"mva1";s:8:"989,1033";s:4:"mva2";s:4:"1033";}i:1;a:5:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:3:"gid";s:4:"2000";s:4:"mva1";s:9:"1989,2033";s:4:"mva2";s:4:"2033";}i:2;a:5:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"gid";s:4:"3000";s:4:"mva1";s:5:"1,2,3";s:4:"mva2";s:4:"3033";}i:3;a:5:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:3:"gid";s:4:"4000";s:4:"mva1";s:5:"1,2,3";s:4:"mva2";s:4:"4033";}i:4;a:5:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:3:"gid";s:4:"5000";s:4:"mva1";s:5:"1,2,3";s:4:"mva2";s:4:"5033";}i:5;a:5:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:3:"gid";s:4:"6000";s:4:"mva1";s:5:"1,2,3";s:4:"mva2";s:4:"6033";}i:6;a:5:{s:2:"id";s:1:"7";s:6:"weight";s:1:"1";s:3:"gid";s:4:"7000";s:4:"mva1";s:5:"1,2,3";s:4:"mva2";s:4:"7033";}i:7;a:5:{s:2:"id";s:1:"8";s:6:"weight";s:1:"1";s:3:"gid";s:4:"8000";s:4:"mva1";s:5:"1,2,3";s:4:"mva2";s:4:"8033";}i:8;a:5:{s:2:"id";s:1:"9";s:6:"weight";s:1:"1";s:3:"gid";s:4:"9000";s:4:"mva1";s:5:"1,2,3";s:4:"mva2";s:4:"9033";}i:9;a:5:{s:2:"id";s:2:"10";s:6:"weight";s:1:"1";s:3:"gid";s:5:"10000";s:4:"mva1";s:5:"1,2,3";s:4:"mva2";s:5:"10033";}i:10;a:5:{s:2:"id";s:2:"11";s:6:"weight";s:1:"1";s:3:"gid";s:5:"11000";s:4:"mva1";s:5:"1,2,3";s:4:"mva2";s:5:"11033";}i:11;a:5:{s:2:"id";s:2:"12";s:6:"weight";s:1:"1";s:3:"gid";s:5:"12000";s:4:"mva1";s:5:"1,2,3";s:4:"mva2";s:5:"12033";}i:12;a:5:{s:2:"id";s:2:"13";s:6:"weight";s:1:"1";s:3:"gid";s:5:"13000";s:4:"mva1";s:5:"1,2,3";s:4:"mva2";s:5:"13033";}i:13;a:5:{s:2:"id";s:2:"14";s:6:"weight";s:1:"1";s:3:"gid";s:5:"14000";s:4:"mva1";s:5:"1,2,3";s:4:"mva2";s:5:"14033";}i:14;a:5:{s:2:"id";s:2:"15";s:6:"weight";s:1:"1";s:3:"gid";s:5:"15000";s:4:"mva1";s:5:"1,2,3";s:4:"mva2";s:5:"15033";}i:15;a:5:{s:2:"id";s:2:"16";s:6:"weight";s:1:"1";s:3:"gid";s:5:"16000";s:4:"mva1";s:5:"1,2,3";s:4:"mva2";s:5:"16033";}i:16;a:5:{s:2:"id";s:2:"17";s:6:"weight";s:1:"1";s:3:"gid";s:5:"17000";s:4:"mva1";s:5:"1,2,3";s:4:"mva2";s:5:"17033";}i:17;a:5:{s:2:"id";s:2:"18";s:6:"weight";s:1:"1";s:3:"gid";s:5:"18000";s:4:"mva1";s:5:"1,2,3";s:4:"mva2";s:5:"18033";}i:18;a:5:{s:2:"id";s:2:"19";s:6:"weight
";s:1:"1";s:3:"gid";s:5:"19000";s:4:"mva1";s:5:"1,2,3";s:4:"mva2";s:5:"19033";}i:19;a:5:{s:2:"id";s:2:"20";s:6:"weight";s:1:"1";s:3:"gid";s:5:"20000";s:4:"mva1";s:5:"1,2,3";s:4:"mva2";s:5:"20033";}}}}}sphinx-2.0.4-release/test/test_175/0000755000176700017710000000000011724063141016274 5ustar deogardeogarsphinx-2.0.4-release/test/test_175/test.xml0000644000176700017710000001167711703565761020025 0ustar deogardeogar hitbuffer edge cases indexer { mem_limit = 16M } searchd { workers = threads binlog_path = # subtree_docs_cache = 128k subtree_hits_cache = 128k } source test1 { type = mysql sql_query = select * from test_table1 sql_attr_uint = gid } index test1 { source = test1 path = /test1 } source test2 { type = mysql sql_query = select * from test_table2 sql_attr_uint = gid } index test2 { source = test2 path = /test2 docinfo = extern html_strip = 1 index_sp = 1 } source test3 { type = mysql sql_query = select * from test_table3 sql_attr_uint = gid } index test3 { source = test3 path = /test3 docinfo = extern } index rt { type = rt charset_type = utf-8 path = /rt rt_field = content morphology = none } create table test_table1 ( id int not null, gid int not null, title varchar(16384) not null ); create table test_table2 ( id int not null, gid int not null, title varchar(256) not null ); create table test_table3 ( id int not null, gid int not null, title varchar(64) not null ); drop table if exists test_table1; drop table if exists test_table2; drop table if exists test_table3; SetMatchMode ( SPH_MATCH_EXTENDED2 ); // legacy $res = $client->Query ( "aaa | bbb", "test1" ); unset ( $res["time"] ); $results[] = $res; $res = $client->Query ( "box SENTENCE good", "test2" ); unset ( $res["time"] ); $results[] = $res; $results[] = 'common subtree crash'; $client->SetFilter ( "@id", array ( 999999 ) ); $client->AddQuery ( "aaa -bbb", "test3" ); $client->ResetFilters (); $client->AddQuery ( "aaa -bbb", "test3" ); $res = $client->RunQueries(); if ( !$res ) { $results[] = $client->GetLastError(); return; } foreach ( $res as $r ) { unset ( $r["time"] ); $results[] = $r; } // regression RT dictionary lost words on merge with ID64 $query = create_function ( '$q, $sock',' $results = array( $q ); $res = @mysql_query ( $q, $sock ); if ( $res===false ) { $results[] = mysql_errno( $sock ) . "; " . mysql_error ( $sock ); } else { while ($row = @mysql_fetch_array($res, MYSQL_ASSOC)) { if ( array_key_exists ( "Variable_name", $row ) && $row["Variable_name"]=="time" ) { continue; } if ( !array_key_exists ( "Variable_name", $row ) || !array_key_exists ( "Value", $row ) ) { $line = ""; foreach ( $row as $k=>$v ) { $line .= $k . " = " . $v . "\t"; } $results[] = $line; } else { $results[] = $row["Variable_name"] . " = " . 
$row["Value"]; } } @mysql_free_result ( $res ); } return $results; '); global $sd_address, $sd_sphinxql_port; $sockStr = "$sd_address:$sd_sphinxql_port"; if ($sd_address == "localhost") $sockStr = "127.0.0.1:$sd_sphinxql_port"; $sock = @mysql_connect ($sockStr,"","", true ); for ( $doc=1; $doc<121; $doc++ ) { $text = 'dummy text is going here'; if ( $doc==31 ) $text = 'ACT I.'; else if ( $doc==100 ) $text = 'And let us once again assail your ears, basketball'; $insert = " REPLACE INTO rt ( id, content ) VALUES ( $doc, '$text' ) "; $query ( $insert, $sock ); } $results = array_merge ( $results, $query ( "select * from rt limit 0,2", $sock ) ); $results = array_merge ( $results, $query ( "show meta", $sock ) ); $results = array_merge ( $results, $query ( "select * from rt where match ( 'basketball' )", $sock ) ); $results = array_merge ( $results, $query ( "show meta", $sock ) ); @mysql_close ( $sock ); ]]> sphinx-2.0.4-release/test/test_175/model.bin0000644000176700017710000000664011703565761020110 0ustar deogardeogara:1:{i:0;a:1:{i:0;a:19:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:2:{i:11;a:2:{s:6:"weight";s:4:"2269";s:5:"attrs";a:1:{s:3:"gid";s:1:"1";}}i:10;a:2:{s:6:"weight";s:4:"1221";s:5:"attrs";a:1:{s:3:"gid";s:1:"1";}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:5:"words";a:2:{s:3:"aaa";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:3:"515";}s:3:"bbb";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"6";}}}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:701;a:2:{s:6:"weight";s:4:"1500";s:5:"attrs";a:1:{s:3:"gid";s:1:"1";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:2:{s:3:"box";a:2:{s:4:"docs";s:3:"600";s:4:"hits";s:3:"600";}s:4:"good";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:2;s:20:"common subtree 
crash";i:3;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:2:{s:3:"aaa";a:2:{s:4:"docs";s:3:"600";s:4:"hits";s:3:"600";}s:3:"bbb";a:2:{s:4:"docs";s:3:"400";s:4:"hits";s:3:"400";}}}i:4;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:20:{i:402;a:2:{s:6:"weight";s:4:"1386";s:5:"attrs";a:1:{s:3:"gid";s:3:"402";}}i:403;a:2:{s:6:"weight";s:4:"1386";s:5:"attrs";a:1:{s:3:"gid";s:3:"403";}}i:404;a:2:{s:6:"weight";s:4:"1386";s:5:"attrs";a:1:{s:3:"gid";s:3:"404";}}i:405;a:2:{s:6:"weight";s:4:"1386";s:5:"attrs";a:1:{s:3:"gid";s:3:"405";}}i:406;a:2:{s:6:"weight";s:4:"1386";s:5:"attrs";a:1:{s:3:"gid";s:3:"406";}}i:407;a:2:{s:6:"weight";s:4:"1386";s:5:"attrs";a:1:{s:3:"gid";s:3:"407";}}i:408;a:2:{s:6:"weight";s:4:"1386";s:5:"attrs";a:1:{s:3:"gid";s:3:"408";}}i:409;a:2:{s:6:"weight";s:4:"1386";s:5:"attrs";a:1:{s:3:"gid";s:3:"409";}}i:410;a:2:{s:6:"weight";s:4:"1386";s:5:"attrs";a:1:{s:3:"gid";s:3:"410";}}i:411;a:2:{s:6:"weight";s:4:"1386";s:5:"attrs";a:1:{s:3:"gid";s:3:"411";}}i:412;a:2:{s:6:"weight";s:4:"1386";s:5:"attrs";a:1:{s:3:"gid";s:3:"412";}}i:413;a:2:{s:6:"weight";s:4:"1386";s:5:"attrs";a:1:{s:3:"gid";s:3:"413";}}i:414;a:2:{s:6:"weight";s:4:"1386";s:5:"attrs";a:1:{s:3:"gid";s:3:"414";}}i:415;a:2:{s:6:"weight";s:4:"1386";s:5:"attrs";a:1:{s:3:"gid";s:3:"415";}}i:416;a:2:{s:6:"weight";s:4:"1386";s:5:"attrs";a:1:{s:3:"gid";s:3:"416";}}i:417;a:2:{s:6:"weight";s:4:"1386";s:5:"attrs";a:1:{s:3:"gid";s:3:"417";}}i:418;a:2:{s:6:"weight";s:4:"1386";s:5:"attrs";a:1:{s:3:"gid";s:3:"418";}}i:419;a:2:{s:6:"weight";s:4:"1386";s:5:"attrs";a:1:{s:3:"gid";s:3:"419";}}i:420;a:2:{s:6:"weight";s:4:"1386";s:5:"attrs";a:1:{s:3:"gid";s:3:"420";}}i:421;a:2:{s:6:"weight";s:4:"1386";s:5:"attrs";a:1:{s:3:"gid";s:3:"421";}}}s:5:"total";s:3:"200";s:11:"total_found";s:3:"200";s:5:"words";a:2:{s:3:"aaa";a:2:{s:4:"docs";s:3:"600";s:4:"hits";s:3:"600";}s:3:"bbb";a:2:{s:4:"docs";s:3:"400";s:4:"hits";s:3:"400";}}}i:5;s:26:"select * from rt limit 0,2";i:6;s:18:"id = 1 weight = 1 ";i:7;s:18:"id = 2 weight = 1 ";i:8;s:9:"show meta";i:9;s:11:"total = 120";i:10;s:17:"total_found = 120";i:11;s:45:"select * from rt where match ( 'basketball' )";i:12;s:23:"id = 100 weight = 1726 ";i:13;s:9:"show meta";i:14;s:9:"total = 1";i:15;s:15:"total_found = 1";i:16;s:23:"keyword[0] = basketball";i:17;s:11:"docs[0] = 1";i:18;s:11:"hits[0] = 1";}}}sphinx-2.0.4-release/test/test_151/0000755000176700017710000000000011724063141016266 5ustar deogardeogarsphinx-2.0.4-release/test/test_151/test.xml0000644000176700017710000000327011537271550020000 0ustar deogardeogar html_stripper vs hit position indexer { mem_limit = 16M } searchd { } source src { type = mysql sql_query = SELECT * FROM test_table sql_attr_uint = idd } index test { source = src path = /test charset_type = utf-8 html_strip = 1 index_sp = 1 index_zones = zone_* } CREATE TABLE test_table ( id INTEGER NOT NULL, idd INTEGER NOT NULL, title VARCHAR(255) NOT NULL ) DROP TABLE IF EXISTS `test_table`

    para' );]]>

    zone' );]]>

    broken para' );]]>

    couple

    of trooper' );]]> select * from test where match( '"the para"~2' ) select * from test where match( '"the para"~3' ) select * from test where match( '"the zone"~2' ) select * from test where match( 'ZONE:zone_A para' ) select * from test where match( 'couple PARAGRAPH trooper' ) select * from test where match( 'of PARAGRAPH trooper' ) select * from test where match( 'ZONE:zone_A "couple of"~2' ) sphinx-2.0.4-release/test/test_151/model.bin0000644000176700017710000000251711455516446020101 0ustar deogardeogara:1:{i:0;a:7:{i:0;a:3:{s:8:"sphinxql";s:48:"select * from test where match( '"the para"~2' )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1500";s:3:"idd";s:1:"1";}}}i:1;a:3:{s:8:"sphinxql";s:48:"select * from test where match( '"the para"~3' )";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1500";s:3:"idd";s:1:"1";}i:1;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1500";s:3:"idd";s:1:"3";}}}i:2;a:3:{s:8:"sphinxql";s:48:"select * from test where match( '"the zone"~2' )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1569";s:3:"idd";s:1:"2";}}}i:3;a:3:{s:8:"sphinxql";s:52:"select * from test where match( 'ZONE:zone_A para' )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1557";s:3:"idd";s:1:"3";}}}i:4;a:2:{s:8:"sphinxql";s:60:"select * from test where match( 'couple PARAGRAPH trooper' )";s:10:"total_rows";i:0;}i:5;a:3:{s:8:"sphinxql";s:56:"select * from test where match( 'of PARAGRAPH trooper' )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"4";s:6:"weight";s:4:"2695";s:3:"idd";s:1:"4";}}}i:6;a:3:{s:8:"sphinxql";s:61:"select * from test where match( 'ZONE:zone_A "couple of"~2' )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"4";s:6:"weight";s:4:"1695";s:3:"idd";s:1:"4";}}}}}sphinx-2.0.4-release/test/test_068/0000755000176700017710000000000011724063141016275 5ustar deogardeogarsphinx-2.0.4-release/test/test_068/data2.xml0000644000176700017710000000100411217257342020012 0ustar deogardeogar 2 3 123 test one updated with new content and gid test five newly inserted sphinx-2.0.4-release/test/test_068/data1.xml0000644000176700017710000000133511217257342020020 0ustar deogardeogar test one this is my test document number one. also checking search within phrases. 
test two this is my test document number two another doc this is another group doc number four this is to test groups sphinx-2.0.4-release/test/test_068/test.xml0000644000176700017710000000114611323414741020001 0ustar deogardeogar xmlpipe2 indexing indexer { mem_limit = 16M } searchd { } source src1 { type = xmlpipe2 xmlpipe_command = cat /data1.xml } source src2 { type = xmlpipe2 xmlpipe_command = cat /data2.xml } index idx1 { source = src1 path = /idx1 charset_type = utf-8 } index idx2 { source = src2 path = /idx2 charset_type = utf-8 } test test test sphinx-2.0.4-release/test/test_068/model.bin0000644000176700017710000000322711217257342020100 0ustar deogardeogara:1:{i:0;a:3:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:7:"content";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:1:{s:3:"gid";s:1:"1";}}i:2;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:1:{s:3:"gid";s:1:"1";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"gid";s:1:"1";}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"test";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"5";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"test";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:7:"content";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:2:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"gid";s:3:"123";}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"gid";s:1:"1";}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"test";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"test";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:7:"content";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"gid";s:3:"123";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"gid";s:1:"1";}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"gid";s:1:"1";}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.003";s:5:"words";a:1:{s:4:"test";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"test";}}}sphinx-2.0.4-release/test/test_118/0000755000176700017710000000000011724063141016271 5ustar deogardeogarsphinx-2.0.4-release/test/test_118/test.xml0000644000176700017710000000145311421075337020001 0ustar deogardeogar RT: order vs ram chanks indexer { mem_limit = 16M } searchd { workers = threads binlog_path = } index test { type = rt path = data/test rt_attr_uint = idd rt_field = content } insert into test (id,idd,content) values (1,1,'content'),(2,2,'you') insert into test (id,idd,content) values (10,10,'dog cat fish') insert into test (id,idd,content) values (11,11,'dog cat') select * from test select * from test where match( '(dog | cat) -fish' ) sphinx-2.0.4-release/test/test_118/model.bin0000644000176700017710000000225411455516446020102 0ustar deogardeogara:1:{i:0;a:6:{i:0;a:2:{s:8:"sphinxql";s:68:"insert into test (id,idd,content) values (1,1,'content'),(2,2,'you')";s:14:"total_affected";i:2;}i:1;a:2:{s:8:"sphinxql";s:63:"insert into test (id,idd,content) values (10,10,'dog cat fish')";s:14:"total_affected";i:1;}i:2;a:2:{s:8:"sphinxql";s:58:"insert into test (id,idd,content) values (11,11,'dog cat')";s:14:"total_affected";i:1;}i:3;a:3:{s:8:"sphinxql";s:18:"select * from 
test";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"idd";s:1:"1";}i:1;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:3:"idd";s:1:"2";}i:2;a:3:{s:2:"id";s:2:"10";s:6:"weight";s:1:"1";s:3:"idd";s:2:"10";}i:3;a:3:{s:2:"id";s:2:"11";s:6:"weight";s:1:"1";s:3:"idd";s:2:"11";}}}i:4;a:3:{s:8:"sphinxql";s:46:"select * from test where match( 'dog << cat' )";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:3:{s:2:"id";s:2:"10";s:6:"weight";s:4:"2528";s:3:"idd";s:2:"10";}i:1;a:3:{s:2:"id";s:2:"11";s:6:"weight";s:4:"2528";s:3:"idd";s:2:"11";}}}i:5;a:3:{s:8:"sphinxql";s:53:"select * from test where match( '(dog | cat) -fish' )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:2:"11";s:6:"weight";s:4:"2538";s:3:"idd";s:2:"11";}}}}}sphinx-2.0.4-release/test/test_168/0000755000176700017710000000000011724063141016276 5ustar deogardeogarsphinx-2.0.4-release/test/test_168/test.xml0000644000176700017710000000523611552066541020013 0ustar deogardeogar snippet vs blended indexer { mem_limit = 16M } searchd { } source src { type = mysql sql_query = SELECT 1, 'dummy', 1 as idd sql_attr_uint = idd } index idx { source = src docinfo = extern path = /main charset_type = utf-8 blend_chars = . } index idx2 { source = src docinfo = extern path = /idx2 morphology = stem_en charset_type = utf-8 charset_table = 0..9, a..z, A..Z->a..z blend_chars = U+0028, U+0029, U+002f, U+002b blend_mode = trim_none, trim_head, trim_tail, trim_both, skip_pure } CALL SNIPPETS ('and here we go 2524460. 989561 as you said', 'idx', '=2524460.', 1 as query_mode, 4 as limit_words, 2 as around) CALL SNIPPETS ('What are you need here (exactly)? are dude friendly? As I know this dude\/buddy.', 'idx2', '\(exactly\) | =dude\/buddy', 1 as query_mode, 60 as limit, 2 as around) CALL SNIPPETS ('What are you need here (exactly)? are dude friendly? As I know this dude\/buddy.', 'idx2', '\(exactly\) dude\/buddy', 0 as query_mode, 2 as around) CALL SNIPPETS ('What are you need here (exactly)? are dude friendly? As I know this dude\/buddy.', 'idx2', '\(exactly\) | dude\/buddy', 1 as query_mode, 2 as around) CALL SNIPPETS ('What are you need here (exactly)? are dude friendly? As I know this dude\/buddy.', 'idx2', '\(exactly\) buddy', 0 as query_mode, 2 as around) CALL SNIPPETS ('What are you need here (exactly)? are dude friendly? As I know this dude\/buddy.', 'idx2', '\(exactly\) | buddy', 1 as query_mode, 2 as around) CALL SNIPPETS ('What are you need here+ (exactly)?', 'idx2', '\(exactly\)', 0 as query_mode, 3 as limit_passages, 2 as around) CALL SNIPPETS ('What are you need here+ (exactly)?', 'idx2', '\(exactly\)', 0 as query_mode, 0 as around, 0 as limit) CALL SNIPPETS ('What are you need here+ (exactly)?', 'idx2', '\(exactly\)', 1 as query_mode, 3 as limit_passages, 2 as around) CALL SNIPPETS ('What are you need here+ (exactly)?', 'idx2', '\(exactly\)', 1 as query_mode, 0 as around, 0 as limit) CREATE TABLE test_table ( id INTEGER AUTO_INCREMENT PRIMARY KEY NOT NULL, title VARCHAR(255) NOT NULL ); INSERT INTO `test_table` VALUES ( 1, 'dummy' ) DROP TABLE IF EXISTS test_table sphinx-2.0.4-release/test/test_168/model.bin0000644000176700017710000000615111560651166020103 0ustar deogardeogara:1:{i:0;a:10:{i:0;a:3:{s:8:"sphinxql";s:128:"CALL SNIPPETS ('and here we go 2524460. 989561 as you said', 'idx', '=2524460.', 1 as query_mode, 4 as limit_words, 2 as around)";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:39:" ... we go 2524460. 989561 ... 
";}}}i:1;a:3:{s:8:"sphinxql";s:179:"CALL SNIPPETS ('What are you need here (exactly)? are dude friendly? As I know this dude\/buddy.', 'idx2', '\(exactly\) | =dude\/buddy', 1 as query_mode, 60 as limit, 2 as around)";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:83:" ... need here (exactly)? are dude ... know this dude/buddy.";}}}i:2;a:3:{s:8:"sphinxql";s:163:"CALL SNIPPETS ('What are you need here (exactly)? are dude friendly? As I know this dude\/buddy.', 'idx2', '\(exactly\) dude\/buddy', 0 as query_mode, 2 as around)";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:107:"What are you need here (exactly)? are dude friendly? As I know this dude/buddy.";}}}i:3;a:3:{s:8:"sphinxql";s:165:"CALL SNIPPETS ('What are you need here (exactly)? are dude friendly? As I know this dude\/buddy.', 'idx2', '\(exactly\) | dude\/buddy', 1 as query_mode, 2 as around)";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:93:"What are you need here (exactly)? are dude friendly? As I know this dude/buddy.";}}}i:4;a:3:{s:8:"sphinxql";s:157:"CALL SNIPPETS ('What are you need here (exactly)? are dude friendly? As I know this dude\/buddy.', 'idx2', '\(exactly\) buddy', 0 as query_mode, 2 as around)";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:93:"What are you need here (exactly)? are dude friendly? As I know this dude/buddy.";}}}i:5;a:3:{s:8:"sphinxql";s:159:"CALL SNIPPETS ('What are you need here (exactly)? are dude friendly? As I know this dude\/buddy.', 'idx2', '\(exactly\) | buddy', 1 as query_mode, 2 as around)";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:93:"What are you need here (exactly)? are dude friendly? As I know this dude/buddy.";}}}i:6;a:3:{s:8:"sphinxql";s:126:"CALL SNIPPETS ('What are you need here+ (exactly)?', 'idx2', '\(exactly\)', 0 as query_mode, 3 as limit_passages, 2 as around)";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:41:"What are you need here+ (exactly)?";}}}i:7;a:3:{s:8:"sphinxql";s:117:"CALL SNIPPETS ('What are you need here+ (exactly)?', 'idx2', '\(exactly\)', 0 as query_mode, 0 as around, 0 as limit)";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:41:"What are you need here+ (exactly)?";}}}i:8;a:3:{s:8:"sphinxql";s:126:"CALL SNIPPETS ('What are you need here+ (exactly)?', 'idx2', '\(exactly\)', 1 as query_mode, 3 as limit_passages, 2 as around)";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:41:"What are you need here+ (exactly)?";}}}i:9;a:3:{s:8:"sphinxql";s:117:"CALL SNIPPETS ('What are you need here+ (exactly)?', 'idx2', '\(exactly\)', 1 as query_mode, 0 as around, 0 as limit)";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:41:"What are you need here+ (exactly)?";}}}}}sphinx-2.0.4-release/test/test_124/0000755000176700017710000000000011724063141016266 5ustar deogardeogarsphinx-2.0.4-release/test/test_124/field_124_match.txt0000644000176700017710000000000511403507370021650 0ustar deogardeogarmatchsphinx-2.0.4-release/test/test_124/test.xml0000644000176700017710000000231111607330411017761 0ustar deogardeogar indexing vs sql_file_field failed processing indexer { mem_limit = 16M on_file_field_error = ignore_field on_file_field_error = skip_document on_file_field_error = fail_index } searchd { } source srctest { type = mysql sql_query = SELECT * FROM test_table sql_attr_uint = idd sql_file_field = title } index test_idx { source = srctest path = /test charset_type = utf-8 } CREATE TABLE test_table ( id INTEGER NOT NULL, idd INTEGER NOT 
NULL, title VARCHAR(255) NOT NULL, text VARCHAR(255) NOT NULL ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 1, 1, './test_124/field_124_match.txt', 'on all' ), ( 2, 2, './test_124/field_124_match.txt', 'on none' ), ( 3, 3, './test_124/field_124_matching.txt', 'on empty' ), ( 4, 4, './test_124/field_124_match.txt', 'on empty' ) all none match on sphinx-2.0.4-release/test/test_124/model.bin0000644000176700017710000001216711607330411020064 0ustar deogardeogara:3:{i:0;a:5:{i:0;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"text";}s:5:"attrs";a:1:{s:3:"idd";i:1;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"idd";s:1:"1";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"idd";s:1:"2";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"idd";s:1:"3";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"idd";s:1:"4";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"text";}s:5:"attrs";a:1:{s:3:"idd";i:1;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"idd";s:1:"1";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"all";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:3:"all";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"text";}s:5:"attrs";a:1:{s:3:"idd";i:1;}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"idd";s:1:"2";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"none";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"none";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"text";}s:5:"attrs";a:1:{s:3:"idd";i:1;}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"idd";s:1:"1";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"idd";s:1:"2";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"idd";s:1:"4";}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"match";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"match";}i:4;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"text";}s:5:"attrs";a:1:{s:3:"idd";i:1;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"idd";s:1:"1";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"idd";s:1:"2";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"idd";s:1:"3";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"idd";s:1:"4";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"on";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:2:"on";}}i:1;a:5:{i:0;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"text";}s:5:"attrs";a:1:{s:3:"idd";i:1;}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"idd";s:1:"1";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"idd";s:1:"2";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"idd";s:1:"4";}}}s:5:"total";s:1:"
3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"text";}s:5:"attrs";a:1:{s:3:"idd";i:1;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"idd";s:1:"1";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"all";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:3:"all";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"text";}s:5:"attrs";a:1:{s:3:"idd";i:1;}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"idd";s:1:"2";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"none";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"none";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"text";}s:5:"attrs";a:1:{s:3:"idd";i:1;}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"idd";s:1:"1";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"idd";s:1:"2";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"idd";s:1:"4";}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"match";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"match";}i:4;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"text";}s:5:"attrs";a:1:{s:3:"idd";i:1;}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"idd";s:1:"1";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"idd";s:1:"2";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"idd";s:1:"4";}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"on";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:2:"on";}}i:2;a:1:{i:0;s:6:"failed";}}sphinx-2.0.4-release/test/test_056/0000755000176700017710000000000011724063141016272 5ustar deogardeogarsphinx-2.0.4-release/test/test_056/test.xml0000644000176700017710000000457511323636205020010 0ustar deogardeogar snippets vs boundaries, utf-8 searchd { } source test { type = mysql sql_query = SELECT 1, 'text'; } index index_utf8 { source = test path = /index_utf8 morphology = stem_enru min_word_len = 3 min_prefix_len = 0 min_infix_len = 0 phrase_boundary = . phrase_boundary_step = 100 charset_type = utf-8 } select 1; BuildExcerpts($docs, 'index_utf8', 'шел', $opts); // 2 $docs = array(); $docs[0] = 'С другом в Ñаду Ñ Ñидел. Мокрый ÑклонилÑÑ Ð±Ð°Ð¼Ð±ÑƒÐº. Я шел по Ñклону Фудзи. Старую женщину Ñ. Шел не Ð·Ð½Ð°Ñ ÐºÑƒÐ´Ð°. 
'; $opts = array(); $opts['use_boundaries'] = true; $opts['single_passage'] = false; $opts['chunk_separator'] = '###'; $opts['weight_order'] = false; $opts['exact_phrase'] = true; $opts['limit'] = 25; $results[] = $client->BuildExcerpts($docs, 'index_utf8', 'шел по Ñклону', $opts); // 3 $opts = array(); $opts['use_boundaries'] = true; $opts['single_passage'] = false; $opts['chunk_separator'] = '###'; $opts['weight_order'] = true; $opts['exact_phrase'] = true; $opts['limit'] = 25; $results[] = $client->BuildExcerpts($docs, 'index_utf8', 'шел по Ñклону', $opts); // 4 $opts = array(); $opts['use_boundaries'] = true; $opts['single_passage'] = false; $opts['chunk_separator'] = '###'; $opts['weight_order'] = true; $opts['exact_phrase'] = false; $opts['limit'] = 75; $results[] = $client->BuildExcerpts($docs, 'index_utf8', 'шел по Ñклону', $opts); ]]> sphinx-2.0.4-release/test/test_056/model.bin0000644000176700017710000000075511153676616020110 0ustar deogardeogara:1:{i:0;a:1:{i:0;a:4:{i:0;a:2:{i:0;s:53:"### Я шел по Ñклону Фудзи.###";i:1;s:49:"Я шел по Ñклону Фудзи.###";}i:1;a:1:{i:0;s:53:"### Я шел по Ñклону Фудзи.###";}i:2;a:1:{i:0;s:53:"### Я шел по Ñклону Фудзи.###";}i:3;a:1:{i:0;s:158:"### Я шел по Ñклону Фудзи.### Мокрый ÑклонилÑÑ Ð±Ð°Ð¼Ð±ÑƒÐº.### Шел не Ð·Ð½Ð°Ñ ÐºÑƒÐ´Ð°.###";}}}}sphinx-2.0.4-release/test/test_080/0000755000176700017710000000000011724063141016267 5ustar deogardeogarsphinx-2.0.4-release/test/test_080/test.xml0000644000176700017710000000301511503513132017762 0ustar deogardeogar merge vs write buffer corruption indexer { mem_limit = 16M write_buffer = 300000 } searchd { } source main { type = mysql sql_query = select 2, 'X' as first, 'Y' as second; } index main { source = main path = /main } source delta { type = mysql sql_query = select id, first, second from sph_test } index delta { source = delta path = /delta } create table sph_test ( id int not null, first text(1048576) not null, second text(1048576) not null ) ENGINE=MYISAM drop table if exists sph_test; --merge main delta C @second[299992] B @second[299993] B @second[299994] B "C B A A A" "C B A A A$" A A$ X Y sphinx-2.0.4-release/test/test_080/model.bin0000644000176700017710000001034311236424437020071 0ustar deogardeogara:1:{i:0;a:10:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"first";i:1;s:6:"second";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1815";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:1:"c";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:6:"299992";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:1:"C";}i:1;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"first";i:1;s:6:"second";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:1:"b";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:17:"@second[299992] B";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"first";i:1;s:6:"second";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1643";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:1:"b";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:17:"@second[299993] 
B";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"first";i:1;s:6:"second";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1643";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:1:"b";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:17:"@second[299994] B";}i:4;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"first";i:1;s:6:"second";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"5728";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.040";s:5:"words";a:3:{s:1:"c";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:6:"299992";}s:1:"b";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:1:"a";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:11:""C B A A A"";}i:5;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"first";i:1;s:6:"second";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"5728";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.040";s:5:"words";a:3:{s:1:"c";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:6:"299992";}s:1:"b";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:1:"a";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:12:""C B A A A$"";}i:6;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"first";i:1;s:6:"second";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1725";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:1:"a";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:1:"A";}i:7;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"first";i:1;s:6:"second";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1725";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:1:"a";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:2:"A$";}i:8;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"first";i:1;s:6:"second";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:4:"1643";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:1:"x";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:1:"X";}i:9;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"first";i:1;s:6:"second";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:4:"1643";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:1:"y";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:1:"Y";}}}sphinx-2.0.4-release/test/test_063/0000755000176700017710000000000011724063141016270 5ustar deogardeogarsphinx-2.0.4-release/test/test_063/test.xml0000644000176700017710000001321711712020702017766 0ustar deogardeogar blend characters indexer { mem_limit = 16M } searchd { } source test { type = mysql sql_query = SELECT * FROM test_table } index test { source = test path = /test blend_chars = @, |, +, ., *, !, (, ), [, ], {, } 
charset_type = utf-8 charset_type = sbcs } source test1 { type = mysql sql_query = SELECT 1 as document_id, 'zzzzzz buzzzz' as text } index test1 { source = test1 path = /test1 blend_chars = ., @, charset_type = utf-8 min_word_len = 2 } source test2 { type = mysql sql_query = SELECT 1 as document_id, 'dummy a11-22 text' as text UNION SELECT 2 as document_id, 'dummy 11-22 text' as text } index test2 { source = test2 path = /test2 blend_chars = -, @, charset_type = utf-8 } index star { source = test path = /star blend_chars = @, ., !, (, ), [, ], {, } charset_type = utf-8 min_infix_len = 1 enable_star = 1 } CREATE TABLE test_table ( document_id INT NOT NULL, text VARCHAR(255) NOT NULL ); DROP TABLE IF EXISTS test_table INSERT INTO test_table VALUES ( 1, 'aaa bbb@ccc ddd @eee fff@ggg@hhh iii@ kkk' ), ( 2, 'ggg@hhh' ), ( 3, 'xxx @ yyy' ), ( 4, 'aaa@bbb+ccc@ddd' ), ( 5, 'aaa|eee|ccc' ), ( 6, 'a+b+c+d e+f|g' ), ( 7, 'aaa bbb*ccc ddd eee fff*ggg*hhh iii' ), ( 8, 'ggg*hhh' ), ( 9, 'aaa ddd ggg hhh' ), ( 10, 'aaa bbb ccc ddd' ), ( 11, 'hello, world. how is this gonna work if we need to handle U.S.A' ), ( 20, 'its a must!!!' ), ( 21, 'hey @dude wassup' ), ( 30, 'posse' ), ( 31, 'posse()' ), ( 32, 'posse[]' ), ( 33, 'posse{}' ); bbb|ccc bbb@ccc bbb\|ccc bbb\@ccc bbb ccc "bbb ccc" aaa|eee|ccc aaa\|eee\|ccc "aaa|eee|ccc" "aaa\|eee\|ccc" a+b+c+d a+b+c+d e "a+b+c+d e f" "a+b+c+d e+f|g" "bbb@ccc ddd @eee" "ddd @eee fff@ggg@hhh" "fff@ggg@hhh iii@ kkk" @ \@ @eee kkk \@eee kkk aaa\*ccc aaa*ccc bbb\*ccc bbb*ccc bbb ccc "bbb ccc" fff\*ggg fff*ggg fff*ggg*hhh "bbb*ccc ddd" "bbb*ccc ccc ddd" bbb*ccc << ddd hello "hello world" hello U.S.A must must\!\!\! dude \@dude posse posse\(\) posse\[\] posse\{\} "posse()" "posse[]" "posse{}" ^ggg*hhh$ ^ggg*hhh ggg*hhh$ ^ggg hhh$ ggg$ ^hhh ^gg* *hh$ ^*g* ^*h* *c*$ select * from test1 where match ( 'zzzzzz .(buzzzz)' ) select * from test1 where match ( 'a+b' ) select * from test2 where match ( 'a11-22' ) select * from test2 where match ( '11-22' ) sphinx-2.0.4-release/test/test_063/model.bin0000644000176700017710000014751211712020702020065 0ustar 
deogardeogara:2:{i:0;a:63:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:4:"2586";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:4:"2586";s:5:"attrs";a:0:{}}i:7;a:2:{s:6:"weight";s:4:"2586";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:4:"2586";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:4:"1537";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:4:"time";s:5:"0.003";s:5:"words";a:2:{s:3:"bbb";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:3:"ccc";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"bbb|ccc";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:7:"bbb@ccc";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"bbb@ccc";}i:2;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"bbb|ccc";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:8:"bbb\|ccc";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:7:"bbb@ccc";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:8:"bbb\@ccc";}i:4;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:4:"2586";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:4:"2586";s:5:"attrs";a:0:{}}i:7;a:2:{s:6:"weight";s:4:"2586";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:4:"2586";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.003";s:5:"words";a:2:{s:3:"bbb";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:3:"ccc";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"bbb ccc";}i:5;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:4:"2586";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:4:"2586";s:5:"attrs";a:0:{}}i:7;a:2:{s:6:"weight";s:4:"2586";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:4:"2586";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.003";s:5:"words";a:2:{s:3:"bbb";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:3:"ccc";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:9:""bbb 
ccc"";}i:6;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:6:{i:5;a:2:{s:6:"weight";s:4:"3585";s:5:"attrs";a:0:{}}i:1;a:2:{s:6:"weight";s:4:"2585";s:5:"attrs";a:0:{}}i:7;a:2:{s:6:"weight";s:4:"2585";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:4:"2543";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:4:"2543";s:5:"attrs";a:0:{}}i:9;a:2:{s:6:"weight";s:4:"1518";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"6";s:11:"total_found";s:1:"6";s:4:"time";s:5:"0.003";s:5:"words";a:3:{s:3:"aaa";a:2:{s:4:"docs";s:1:"6";s:4:"hits";s:1:"6";}s:3:"eee";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}s:3:"ccc";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:11:"aaa|eee|ccc";}i:7;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:11:"aaa|eee|ccc";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:13:"aaa\|eee\|ccc";}i:8;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:11:"aaa|eee|ccc";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:13:""aaa|eee|ccc"";}i:9;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:11:"aaa|eee|ccc";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:15:""aaa\|eee\|ccc"";}i:10;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"a+b+c+d";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"a+b+c+d";}i:11;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"2722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:7:"a+b+c+d";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:1:"e";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:9:"a+b+c+d e";}i:12;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"3722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:3:{s:7:"a+b+c+d";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:1:"e";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:1:"f";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:13:""a+b+c+d e 
f"";}i:13;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"2722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:7:"a+b+c+d";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"e+f|g";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:15:""a+b+c+d e+f|g"";}i:14;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"3673";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:3:{s:7:"bbb@ccc";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"ddd";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}s:4:"@eee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:18:""bbb@ccc ddd @eee"";}i:15;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"3673";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:3:{s:3:"ddd";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}s:4:"@eee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:11:"fff@ggg@hhh";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:22:""ddd @eee fff@ggg@hhh"";}i:16;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"3722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:3:{s:11:"fff@ggg@hhh";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:4:"iii@";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"kkk";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:22:""fff@ggg@hhh iii@ kkk"";}i:17;a:6:{s:5:"query";s:1:"@";s:5:"error";s:49:"index test: syntax error, unexpected $end near ''";s:7:"warning";s:0:"";s:5:"total";i:0;s:11:"total_found";i:0;s:4:"time";i:0;}i:18;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:1:"@";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:2:"\@";}i:19;a:6:{s:5:"query";s:8:"@eee kkk";s:5:"error";s:55:"index test: query error: no field 'eee' found in schema";s:7:"warning";s:0:"";s:5:"total";i:0;s:11:"total_found";i:0;s:4:"time";i:0;}i:20;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:4:"@eee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"kkk";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:9:"\@eee 
kkk";}i:21;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"aaa*ccc";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:8:"aaa\*ccc";}i:22;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"aaa*ccc";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"aaa*ccc";}i:23;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"bbb*ccc";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:8:"bbb\*ccc";}i:24;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"bbb*ccc";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"bbb*ccc";}i:25;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:4:"2586";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:4:"2586";s:5:"attrs";a:0:{}}i:7;a:2:{s:6:"weight";s:4:"2586";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:4:"2586";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:3:"bbb";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:3:"ccc";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:8:" bbb ccc";}i:26;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:4:"2586";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:4:"2586";s:5:"attrs";a:0:{}}i:7;a:2:{s:6:"weight";s:4:"2586";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:4:"2586";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:3:"bbb";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:3:"ccc";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:9:""bbb 
ccc"";}i:27;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"fff*ggg";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:8:"fff\*ggg";}i:28;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"fff*ggg";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"fff*ggg";}i:29;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:11:"fff*ggg*hhh";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:11:"fff*ggg*hhh";}i:30;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"2648";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:7:"bbb*ccc";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"ddd";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:13:""bbb*ccc ddd"";}i:31;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.002";s:5:"words";a:3:{s:7:"bbb*ccc";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"ccc";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}s:3:"ddd";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:17:""bbb*ccc ccc ddd"";}i:32;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"2611";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:7:"bbb*ccc";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"ddd";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:14:"bbb*ccc << ddd";}i:33;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:11;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"hello";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"hello";}i:34;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:11;a:2:{s:6:"weight";s:4:"2722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:13:""hello 
world"";}i:35;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:11;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"u.s.a";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:11:"hello U.S.A";}i:36;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:20;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"must";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"must";}i:37;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:20;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"must!!!";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:10:"must\!\!\!";}i:38;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:21;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"dude";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"dude";}i:39;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:21;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"@dude";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:6:"\@dude";}i:40;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:4:{i:30;a:2:{s:6:"weight";s:4:"1598";s:5:"attrs";a:0:{}}i:31;a:2:{s:6:"weight";s:4:"1598";s:5:"attrs";a:0:{}}i:32;a:2:{s:6:"weight";s:4:"1598";s:5:"attrs";a:0:{}}i:33;a:2:{s:6:"weight";s:4:"1598";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"posse";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"posse";}i:41;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:31;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"posse()";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:9:"posse\(\)";}i:42;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:32;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"posse[]";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:9:"posse\[\]";}i:43;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"
attrs";a:0:{}s:7:"matches";a:1:{i:33;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"posse{}";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:9:"posse\{\}";}i:44;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:31;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"posse()";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:9:""posse()"";}i:45;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:32;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"posse[]";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:9:""posse[]"";}i:46;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:33;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"posse{}";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:9:""posse{}"";}i:47;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:8;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"ggg*hhh";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:9:"^ggg*hhh$";}i:48;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:8;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"ggg*hhh";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:8:"^ggg*hhh";}i:49;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:8;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"ggg*hhh";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:8:"ggg*hhh$";}i:50;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:2;a:2:{s:6:"weight";s:4:"1575";s:5:"attrs";a:0:{}}i:8;a:2:{s:6:"weight";s:4:"1575";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:3:"ggg";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"^ggg";}i:51;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:2;a:2:{s:6:"weight";s:4:"1575";s:5:"attrs";a:0:{}}i:8;a:2:{s:6:"weight";s:4:"1575";s:5:"attrs";a:0:{}}i:9;a:2:{s:6:"weight";s:4:"1575";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.001";s:5:"words"
;a:1:{s:3:"hhh";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"hhh$";}i:52;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:3:"ggg";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"ggg$";}i:53;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:3:"hhh";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"^hhh";}i:54;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:2;a:2:{s:6:"weight";s:4:"1575";s:5:"attrs";a:0:{}}i:8;a:2:{s:6:"weight";s:4:"1575";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:3:"gg*";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"^gg*";}i:55;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:2;a:2:{s:6:"weight";s:4:"1603";s:5:"attrs";a:0:{}}i:8;a:2:{s:6:"weight";s:4:"1575";s:5:"attrs";a:0:{}}i:9;a:2:{s:6:"weight";s:4:"1575";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:3:"*hh";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"*hh$";}i:56;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:2;a:2:{s:6:"weight";s:4:"1535";s:5:"attrs";a:0:{}}i:8;a:2:{s:6:"weight";s:4:"1535";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:3:"*g*";a:2:{s:4:"docs";s:1:"7";s:4:"hits";s:1:"8";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"^*g*";}i:57;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:11;a:2:{s:6:"weight";s:4:"1560";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"1548";s:5:"attrs";a:0:{}}i:21;a:2:{s:6:"weight";s:4:"1535";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:3:"*h*";a:2:{s:4:"docs";s:1:"7";s:4:"hits";s:2:"12";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"^*h*";}i:58;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:4;a:2:{s:6:"weight";s:4:"1554";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:4:"1554";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:3:"*c*";a:2:{s:4:"docs";s:1:"6";s:4:"hits";s:1:"7";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"*c*$";}i:59;a:3:{s:8:"sphinxql";s:55:"select * from test1 where match ( 'zzzzzz .(buzzzz)' )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:2:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1500";}}}i:60;a:2:{s:8:"sphinxql";s:41:"select * from test1 where match ( 'a+b' )";s:10:"total_rows";i:0;}i:61;a:3:{s:8:"sphinxql";s:44:"select * from test2 where match ( 'a11-22' 
)";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:2:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1643";}}}i:62;a:3:{s:8:"sphinxql";s:43:"select * from test2 where match ( '11-22' )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:2:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1643";}}}}i:1;a:63:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:4:"2586";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:4:"2586";s:5:"attrs";a:0:{}}i:7;a:2:{s:6:"weight";s:4:"2586";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:4:"2586";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:4:"1537";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:3:"bbb";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:3:"ccc";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"bbb|ccc";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"bbb@ccc";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"bbb@ccc";}i:2;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"bbb|ccc";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:8:"bbb\|ccc";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"bbb@ccc";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:8:"bbb\@ccc";}i:4;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:4:"2586";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:4:"2586";s:5:"attrs";a:0:{}}i:7;a:2:{s:6:"weight";s:4:"2586";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:4:"2586";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:3:"bbb";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:3:"ccc";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"bbb ccc";}i:5;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:4:"2586";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:4:"2586";s:5:"attrs";a:0:{}}i:7;a:2:{s:6:"weight";s:4:"2586";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:4:"2586";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:3:"bbb";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:3:"ccc";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:9:""bbb 
ccc"";}i:6;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:6:{i:5;a:2:{s:6:"weight";s:4:"3585";s:5:"attrs";a:0:{}}i:1;a:2:{s:6:"weight";s:4:"2585";s:5:"attrs";a:0:{}}i:7;a:2:{s:6:"weight";s:4:"2585";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:4:"2543";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:4:"2543";s:5:"attrs";a:0:{}}i:9;a:2:{s:6:"weight";s:4:"1518";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"6";s:11:"total_found";s:1:"6";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:3:"aaa";a:2:{s:4:"docs";s:1:"6";s:4:"hits";s:1:"6";}s:3:"eee";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}s:3:"ccc";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:11:"aaa|eee|ccc";}i:7;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:11:"aaa|eee|ccc";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:13:"aaa\|eee\|ccc";}i:8;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:11:"aaa|eee|ccc";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:13:""aaa|eee|ccc"";}i:9;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:11:"aaa|eee|ccc";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:15:""aaa\|eee\|ccc"";}i:10;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"a+b+c+d";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"a+b+c+d";}i:11;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"2722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:7:"a+b+c+d";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:1:"e";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:9:"a+b+c+d e";}i:12;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"3722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:7:"a+b+c+d";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:1:"e";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:1:"f";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:13:""a+b+c+d e 
f"";}i:13;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"2722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:7:"a+b+c+d";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"e+f|g";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:15:""a+b+c+d e+f|g"";}i:14;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"3673";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:7:"bbb@ccc";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"ddd";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}s:4:"@eee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:18:""bbb@ccc ddd @eee"";}i:15;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"3673";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:3:"ddd";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}s:4:"@eee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:11:"fff@ggg@hhh";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:22:""ddd @eee fff@ggg@hhh"";}i:16;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"3722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:11:"fff@ggg@hhh";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:4:"iii@";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"kkk";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:22:""fff@ggg@hhh iii@ kkk"";}i:17;a:6:{s:5:"query";s:1:"@";s:5:"error";s:49:"index test: syntax error, unexpected $end near ''";s:7:"warning";s:0:"";s:5:"total";i:0;s:11:"total_found";i:0;s:4:"time";i:0;}i:18;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:1:"@";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:2:"\@";}i:19;a:6:{s:5:"query";s:8:"@eee kkk";s:5:"error";s:55:"index test: query error: no field 'eee' found in schema";s:7:"warning";s:0:"";s:5:"total";i:0;s:11:"total_found";i:0;s:4:"time";i:0;}i:20;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:4:"@eee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"kkk";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:9:"\@eee 
kkk";}i:21;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"aaa*ccc";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:8:"aaa\*ccc";}i:22;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"aaa*ccc";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"aaa*ccc";}i:23;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"bbb*ccc";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:8:"bbb\*ccc";}i:24;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"bbb*ccc";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"bbb*ccc";}i:25;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:4:"2586";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:4:"2586";s:5:"attrs";a:0:{}}i:7;a:2:{s:6:"weight";s:4:"2586";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:4:"2586";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:3:"bbb";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:3:"ccc";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:8:" bbb ccc";}i:26;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:4:"2586";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:4:"2586";s:5:"attrs";a:0:{}}i:7;a:2:{s:6:"weight";s:4:"2586";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:4:"2586";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:3:"bbb";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:3:"ccc";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:9:""bbb 
ccc"";}i:27;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"fff*ggg";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:8:"fff\*ggg";}i:28;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"fff*ggg";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"fff*ggg";}i:29;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:11:"fff*ggg*hhh";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:11:"fff*ggg*hhh";}i:30;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"2648";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:7:"bbb*ccc";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"ddd";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:13:""bbb*ccc ddd"";}i:31;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:7:"bbb*ccc";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"ccc";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}s:3:"ddd";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:17:""bbb*ccc ccc ddd"";}i:32;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"2611";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:7:"bbb*ccc";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"ddd";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:14:"bbb*ccc << ddd";}i:33;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:11;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"hello";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"hello";}i:34;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:11;a:2:{s:6:"weight";s:4:"2722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:13:""hello 
world"";}i:35;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:11;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"u.s.a";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:11:"hello U.S.A";}i:36;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:20;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"must";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"must";}i:37;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:20;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"must!!!";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:10:"must\!\!\!";}i:38;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:21;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dude";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"dude";}i:39;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:21;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"@dude";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:6:"\@dude";}i:40;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:4:{i:30;a:2:{s:6:"weight";s:4:"1598";s:5:"attrs";a:0:{}}i:31;a:2:{s:6:"weight";s:4:"1598";s:5:"attrs";a:0:{}}i:32;a:2:{s:6:"weight";s:4:"1598";s:5:"attrs";a:0:{}}i:33;a:2:{s:6:"weight";s:4:"1598";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"posse";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"posse";}i:41;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:31;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"posse()";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:9:"posse\(\)";}i:42;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:32;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"posse[]";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:9:"posse\[\]";}i:43;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"
attrs";a:0:{}s:7:"matches";a:1:{i:33;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"posse{}";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:9:"posse\{\}";}i:44;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:31;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"posse()";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:9:""posse()"";}i:45;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:32;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"posse[]";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:9:""posse[]"";}i:46;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:33;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"posse{}";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:9:""posse{}"";}i:47;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:8;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"ggg*hhh";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:9:"^ggg*hhh$";}i:48;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:8;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"ggg*hhh";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:8:"^ggg*hhh";}i:49;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:8;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"ggg*hhh";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:8:"ggg*hhh$";}i:50;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:2;a:2:{s:6:"weight";s:4:"1575";s:5:"attrs";a:0:{}}i:8;a:2:{s:6:"weight";s:4:"1575";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"ggg";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"^ggg";}i:51;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:2;a:2:{s:6:"weight";s:4:"1575";s:5:"attrs";a:0:{}}i:8;a:2:{s:6:"weight";s:4:"1575";s:5:"attrs";a:0:{}}i:9;a:2:{s:6:"weight";s:4:"1575";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words"
;a:1:{s:3:"hhh";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"hhh$";}i:52;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"ggg";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"ggg$";}i:53;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"hhh";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"^hhh";}i:54;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:2;a:2:{s:6:"weight";s:4:"1575";s:5:"attrs";a:0:{}}i:8;a:2:{s:6:"weight";s:4:"1575";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"gg*";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"^gg*";}i:55;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:2;a:2:{s:6:"weight";s:4:"1603";s:5:"attrs";a:0:{}}i:8;a:2:{s:6:"weight";s:4:"1575";s:5:"attrs";a:0:{}}i:9;a:2:{s:6:"weight";s:4:"1575";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:3:"*hh";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"*hh$";}i:56;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:2;a:2:{s:6:"weight";s:4:"1535";s:5:"attrs";a:0:{}}i:8;a:2:{s:6:"weight";s:4:"1535";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:3:"*g*";a:2:{s:4:"docs";s:1:"7";s:4:"hits";s:1:"8";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"^*g*";}i:57;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:11;a:2:{s:6:"weight";s:4:"1560";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"1548";s:5:"attrs";a:0:{}}i:21;a:2:{s:6:"weight";s:4:"1535";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:3:"*h*";a:2:{s:4:"docs";s:1:"7";s:4:"hits";s:2:"12";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"^*h*";}i:58;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:4;a:2:{s:6:"weight";s:4:"1554";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:4:"1554";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:3:"*c*";a:2:{s:4:"docs";s:1:"6";s:4:"hits";s:1:"7";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"*c*$";}i:59;a:3:{s:8:"sphinxql";s:55:"select * from test1 where match ( 'zzzzzz .(buzzzz)' )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:2:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1500";}}}i:60;a:2:{s:8:"sphinxql";s:41:"select * from test1 where match ( 'a+b' )";s:10:"total_rows";i:0;}i:61;a:3:{s:8:"sphinxql";s:44:"select * from test2 where match ( 'a11-22' 
)";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:2:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1643";}}}i:62;a:3:{s:8:"sphinxql";s:43:"select * from test2 where match ( '11-22' )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:2:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1643";}}}}}sphinx-2.0.4-release/test/test_098/0000755000176700017710000000000011724063141016300 5ustar deogardeogarsphinx-2.0.4-release/test/test_098/wordforms.txt0000644000176700017710000000002211551523771021065 0ustar deogardeogarrun-time > runer sphinx-2.0.4-release/test/test_098/wf3.txt0000644000176700017710000000024211556004362017541 0ustar deogardeogarword11-11word11-11 > word11-11word11-11 word2word2 > word11-11word11-11 word2word2 word4word4 > word2word2 word11-11word11-11 word5word5 > word11-11word11-11 sphinx-2.0.4-release/test/test_098/test.xml0000644000176700017710000000602611556004362020010 0ustar deogardeogar specials vs charset_table vs match_mode indexer { mem_limit = 16M } searchd { } source src { type = mysql sql_query = SELECT * FROM test_table WHERE document_id IN (1,2,3,4) } index main { source = src path = /main charset_table = 0..9, A..Z->a..z, _, a..z, (, ), $ } source src_wf { type = mysql sql_query = SELECT * FROM test_table WHERE document_id>4 } index wf1 { source = src_wf path = /wf1 morphology = stem_en charset_type = utf-8 charset_table = 0..9, A..Z->a..z, a..z, wordforms = test_098/wordforms.txt } index wf2 { source = src_wf path = /wf2 morphology = stem_en charset_type = utf-8 charset_table = 0..9, A..Z->a..z, a..z, - wordforms = test_098/wordforms.txt } source src_wf3 { type = mysql sql_query = SELECT * FROM test_wf3 } index wf3 { source = src_wf3 path = /wf3 morphology = stem_en charset_type = utf-8 charset_table = -, 0..9, A..Z->a..z, _, a..z wordforms = test_098/wf3.txt } google$com foo(bar) foo\(bar\) google$com foo(bar) foo\(bar\) "foo(bar)" run run run\-time run\-time run time run time CALL KEYWORDS ('run-time (run time) issue', 'wf1') CALL KEYWORDS ('run-time (run time) issue', 'wf2') SELECT * from wf3 where match ('word2word2') CREATE TABLE test_table ( document_id INTEGER NOT NULL PRIMARY KEY, title VARCHAR(255) NOT NULL ) CREATE TABLE test_wf3 ( document_id INTEGER NOT NULL PRIMARY KEY, title VARCHAR(255) NOT NULL ) DROP TABLE IF EXISTS test_table DROP TABLE IF EXISTS test_wf3 INSERT INTO test_table VALUES ( 1, 'google dot com' ), ( 2, 'foo beyond all bars' ), ( 3, 'mangled google$com link' ), ( 4, 'no idea why search for foo(bar), but they do' ), ( 5, 'run-time error' ), ( 6, 'running without trouble' ) INSERT INTO test_wf3 VALUES ( 1, 'word2word2 word4word4' ) sphinx-2.0.4-release/test/test_098/model.bin0000644000176700017710000001401111556004362020072 0ustar 
deogardeogara:1:{i:0;a:16:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:10:"google$com";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:10:"google$com";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:8:"foo(bar)";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:8:"foo(bar)";}i:2;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.002";s:5:"words";a:3:{s:3:"foo";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:4:"(bar";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:1:")";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:10:"foo\(bar\)";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:4:"1695";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:10:"google$com";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:10:"google$com";}i:4;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:3:"foo";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"bar";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:8:"foo(bar)";}i:5;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:4:"1695";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:8:"foo(bar)";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:10:"foo\(bar\)";}i:6;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:4:"1695";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:8:"foo(bar)";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:10:""foo(bar)"";}i:7;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1643";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:3:"run";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:3:"run";}i:8;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1643";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:
1:{s:3:"run";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:3:"run";}i:9;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1643";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"runer";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:9:"run\-time";}i:10;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1643";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"run-tim";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:9:"run\-time";}i:11;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1643";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"runer";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:8:"run time";}i:12;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:3:"run";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:4:"time";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:8:"run time";}i:13;a:3:{s:8:"sphinxql";s:50:"CALL KEYWORDS ('run-time (run time) issue', 'wf1')";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:2:{s:9:"tokenized";s:8:"run time";s:10:"normalized";s:5:"runer";}i:1;a:2:{s:9:"tokenized";s:8:"run time";s:10:"normalized";s:5:"runer";}i:2;a:2:{s:9:"tokenized";s:5:"issue";s:10:"normalized";s:4:"issu";}}}i:14;a:3:{s:8:"sphinxql";s:50:"CALL KEYWORDS ('run-time (run time) issue', 'wf2')";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:2:{s:9:"tokenized";s:8:"run-time";s:10:"normalized";s:7:"run-tim";}i:1;a:2:{s:9:"tokenized";s:3:"run";s:10:"normalized";s:3:"run";}i:2;a:2:{s:9:"tokenized";s:4:"time";s:10:"normalized";s:4:"time";}i:3;a:2:{s:9:"tokenized";s:5:"issue";s:10:"normalized";s:4:"issu";}}}i:15;a:3:{s:8:"sphinxql";s:44:"SELECT * from wf3 where match ('word2word2')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:2:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1500";}}}}}sphinx-2.0.4-release/test/test_163/0000755000176700017710000000000011724063141016271 5ustar deogardeogarsphinx-2.0.4-release/test/test_163/test.xml0000644000176700017710000001473611640147445020014 0ustar deogardeogar string attribute vs different indexes, attributes UNION 3 indexer { mem_limit = 16M } searchd { workers = threads compat_sphinxql_magics = 1 compat_sphinxql_magics = 0 } source src_main { type = mysql sql_query = SELECT id, body, CONCAT(str1, ' main') AS str1, idd1, 100 as tag FROM test_table sql_attr_uint = idd1 sql_attr_uint = tag sql_attr_string = str1 } source src_delta1 { type = mysql sql_query = SELECT id, body, CONCAT(str1, ' delta1') AS str1, idd1, 101 as tag FROM test_table where id>3 sql_attr_uint = idd1 sql_attr_uint = tag sql_attr_string = str1 sql_query_killlist = SELECT 2 } source src_delta2 { type = mysql sql_query = SELECT id, body, 'delta2' as str1, CONCAT(str1, ' delta2') AS str2, idd1, 102 as tag 
FROM test_table where id>4 sql_attr_uint = idd1 sql_attr_uint = tag sql_attr_string = str1 sql_attr_string = str2 sql_query_killlist = SELECT 2 } index main { source = src_main docinfo = extern charset_type = sbcs path = /main } index delta1 { source = src_delta1 docinfo = extern charset_type = sbcs path = /delta1 } index delta2 { source = src_delta2 docinfo = extern charset_type = sbcs path = /delta2 } index loc_dist1 { type = distributed local = main local = delta1 } index loc_dist2 { type = distributed local = main local = delta2 } index dist1 { type = distributed local = main agent = :agent1_delta2 } index dist2 { type = distributed local = delta2 agent = :agent2_delta1 } index dist3 { type = distributed agent = :agent1_main agent = :agent2_delta1 } source src_bool { type = mysql sql_query = SELECT id, body, idd1>2 as attr FROM test_table sql_attr_bool = attr } source src_uint { type = mysql sql_query = SELECT id, body, idd1*100 as attr FROM test_table sql_attr_uint = attr } source src_float { type = mysql sql_query = SELECT id, body, idd1/1.73 as attr FROM test_table sql_attr_float = attr } source src_bigint { type = mysql sql_query = SELECT id, body, idd1*100000000000 as attr FROM test_table sql_attr_bigint = attr } index u_bool { source = src_bool docinfo = extern path = /u_bool } index u_uint { source = src_uint docinfo = extern path = /u_uint } index u_float { source = src_float docinfo = extern path = /u_float } index u_bigint { source = src_bigint docinfo = extern path = /u_bigint } index u_dist10 { type = distributed local = u_bool local = u_float } index u_dist11 { type = distributed local = u_float local = u_bool } index u_dist20 { type = distributed local = u_bool local = u_uint local = u_bigint } index u_dist21 { type = distributed local = u_bool local = u_bigint local = u_uint } index u_dist22 { type = distributed local = u_bigint local = u_bool local = u_uint } index u_dist23 { type = distributed local = u_bigint local = u_uint local = u_bool } index agent1_main { source = src_main docinfo = extern charset_type = sbcs path = /agent1_main } index agent1_delta2 { source = src_delta2 docinfo = extern charset_type = sbcs path = /agent1_delta2 } index agent2_delta1 { source = src_delta1 docinfo = extern charset_type = sbcs path = /agent2_delta1 } CREATE TABLE `test_table` ( `id` int(11) NOT NULL default '0', `idd1` int(11) NOT NULL default '0', `str1` varchar(255) NOT NULL default '', `body` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 1, 1, 'a aa', 'the dog' ), ( 2, 11, 'c cc', 'the cat' ), ( 3, 11, 'a a', 'the bird' ), ( 4, 11, 'cc c', 'cat eats bird' ), ( 5, 3, 'a a', 'dog eats cat' ), ( 6, 1, 'c cc', 'bird' ), ( 7, 1, 'a a', 'the cat' ), ( 8, 1, 'a a', 'eats' ), ( 9, 3, 'c cc', 'the' ) select * from loc_dist1 order by str1 asc select * from loc_dist1 order by str1 desc select * from loc_dist2 order by str1 asc, idd1 desc select * from loc_dist2 order by str1 desc, idd1 asc select * from loc_dist2 order by str2 asc select * from loc_dist2 order by str2 desc select * from loc_dist2 order by str2 desc, idd1 desc select * from dist1 order by str2 asc, idd1 desc select * from dist1 order by str2 desc, idd1 asc select * from dist2 order by str1 asc select * from dist2 order by str1 desc select * from dist3 order by str1 asc select * from dist3 order by str1 desc, idd1 desc select * from u_bool, u_float select * from u_float, u_bool select * from u_bool, u_uint, u_bigint select * from u_uint, u_bool, u_bigint select * from 
u_uint, u_bigint, u_bool select * from u_bigint, u_uint, u_bool select * from u_dist10 select * from u_dist11 select * from u_dist20 select * from u_dist21 select * from u_dist22 select * from u_dist23 select * from u_float, u_uint select * from u_float, u_bigint select * from u_uint, u_float select * from u_bigint, u_float sphinx-2.0.4-release/test/test_163/model.bin0000644000176700017710000012156711640147445020106 0ustar deogardeogara:2:{i:0;a:29:{i:0;a:3:{s:8:"sphinxql";s:41:"select * from loc_dist1 order by str1 asc";s:10:"total_rows";i:8;s:4:"rows";a:8:{i:0;a:5:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:4:"str1";s:10:"a a delta1";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"101";}i:1;a:5:{s:2:"id";s:1:"7";s:6:"weight";s:1:"1";s:4:"str1";s:10:"a a delta1";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"101";}i:2;a:5:{s:2:"id";s:1:"8";s:6:"weight";s:1:"1";s:4:"str1";s:10:"a a delta1";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"101";}i:3;a:5:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:4:"str1";s:8:"a a main";s:4:"idd1";s:2:"11";s:3:"tag";s:3:"100";}i:4;a:5:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:4:"str1";s:9:"a aa main";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"100";}i:5;a:5:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:4:"str1";s:11:"c cc delta1";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"101";}i:6;a:5:{s:2:"id";s:1:"9";s:6:"weight";s:1:"1";s:4:"str1";s:11:"c cc delta1";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"101";}i:7;a:5:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:4:"str1";s:11:"cc c delta1";s:4:"idd1";s:2:"11";s:3:"tag";s:3:"101";}}}i:1;a:3:{s:8:"sphinxql";s:42:"select * from loc_dist1 order by str1 desc";s:10:"total_rows";i:8;s:4:"rows";a:8:{i:0;a:5:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:4:"str1";s:11:"cc c delta1";s:4:"idd1";s:2:"11";s:3:"tag";s:3:"101";}i:1;a:5:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:4:"str1";s:11:"c cc delta1";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"101";}i:2;a:5:{s:2:"id";s:1:"9";s:6:"weight";s:1:"1";s:4:"str1";s:11:"c cc delta1";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"101";}i:3;a:5:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:4:"str1";s:9:"a aa main";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"100";}i:4;a:5:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:4:"str1";s:8:"a a main";s:4:"idd1";s:2:"11";s:3:"tag";s:3:"100";}i:5;a:5:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:4:"str1";s:10:"a a delta1";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"101";}i:6;a:5:{s:2:"id";s:1:"7";s:6:"weight";s:1:"1";s:4:"str1";s:10:"a a delta1";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"101";}i:7;a:5:{s:2:"id";s:1:"8";s:6:"weight";s:1:"1";s:4:"str1";s:10:"a a delta1";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"101";}}}i:2;a:3:{s:8:"sphinxql";s:52:"select * from loc_dist2 order by str1 asc, idd1 desc";s:10:"total_rows";i:8;s:4:"rows";a:8:{i:0;a:5:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:4:"str1";s:8:"a a main";s:4:"idd1";s:2:"11";s:3:"tag";s:3:"100";}i:1;a:5:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:4:"str1";s:9:"a aa main";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"100";}i:2;a:5:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:4:"str1";s:9:"cc c 
main";s:4:"idd1";s:2:"11";s:3:"tag";s:3:"100";}i:3;a:5:{s:2:"id";s:1:"9";s:6:"weight";s:1:"1";s:4:"str1";s:6:"delta2";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"102";}i:4;a:5:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:4:"str1";s:6:"delta2";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"102";}i:5;a:5:{s:2:"id";s:1:"8";s:6:"weight";s:1:"1";s:4:"str1";s:6:"delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}i:6;a:5:{s:2:"id";s:1:"7";s:6:"weight";s:1:"1";s:4:"str1";s:6:"delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}i:7;a:5:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:4:"str1";s:6:"delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}}}i:3;a:3:{s:8:"sphinxql";s:52:"select * from loc_dist2 order by str1 desc, idd1 asc";s:10:"total_rows";i:8;s:4:"rows";a:8:{i:0;a:5:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:4:"str1";s:6:"delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}i:1;a:5:{s:2:"id";s:1:"8";s:6:"weight";s:1:"1";s:4:"str1";s:6:"delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}i:2;a:5:{s:2:"id";s:1:"7";s:6:"weight";s:1:"1";s:4:"str1";s:6:"delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}i:3;a:5:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:4:"str1";s:6:"delta2";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"102";}i:4;a:5:{s:2:"id";s:1:"9";s:6:"weight";s:1:"1";s:4:"str1";s:6:"delta2";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"102";}i:5;a:5:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:4:"str1";s:9:"cc c main";s:4:"idd1";s:2:"11";s:3:"tag";s:3:"100";}i:6;a:5:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:4:"str1";s:9:"a aa main";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"100";}i:7;a:5:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:4:"str1";s:8:"a a main";s:4:"idd1";s:2:"11";s:3:"tag";s:3:"100";}}}i:4;a:3:{s:8:"sphinxql";s:41:"select * from loc_dist2 order by str2 asc";s:10:"total_rows";i:5;s:4:"rows";a:5:{i:0;a:6:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:4:"str1";s:6:"delta2";s:4:"str2";s:10:"a a delta2";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"102";}i:1;a:6:{s:2:"id";s:1:"7";s:6:"weight";s:1:"1";s:4:"str1";s:6:"delta2";s:4:"str2";s:10:"a a delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}i:2;a:6:{s:2:"id";s:1:"8";s:6:"weight";s:1:"1";s:4:"str1";s:6:"delta2";s:4:"str2";s:10:"a a delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}i:3;a:6:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:4:"str1";s:6:"delta2";s:4:"str2";s:11:"c cc delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}i:4;a:6:{s:2:"id";s:1:"9";s:6:"weight";s:1:"1";s:4:"str1";s:6:"delta2";s:4:"str2";s:11:"c cc delta2";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"102";}}}i:5;a:3:{s:8:"sphinxql";s:42:"select * from loc_dist2 order by str2 desc";s:10:"total_rows";i:5;s:4:"rows";a:5:{i:0;a:6:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:4:"str1";s:6:"delta2";s:4:"str2";s:11:"c cc delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}i:1;a:6:{s:2:"id";s:1:"9";s:6:"weight";s:1:"1";s:4:"str1";s:6:"delta2";s:4:"str2";s:11:"c cc delta2";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"102";}i:2;a:6:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:4:"str1";s:6:"delta2";s:4:"str2";s:10:"a a delta2";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"102";}i:3;a:6:{s:2:"id";s:1:"7";s:6:"weight";s:1:"1";s:4:"str1";s:6:"delta2";s:4:"str2";s:10:"a a delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}i:4;a:6:{s:2:"id";s:1:"8";s:6:"weight";s:1:"1";s:4:"str1";s:6:"delta2";s:4:"str2";s:10:"a a delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}}}i:6;a:3:{s:8:"sphinxql";s:53:"select * from loc_dist2 order by str2 desc, idd1 desc";s:10:"total_rows";i:5;s:4:"rows";a:5:{i:0;a:6:{s:2:"id";s:1:"9";s:6:"weight";s:1:"1";s:4:"str1";s:6:"delta2";s:4:"str2";s:11:"c cc 
delta2";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"102";}i:1;a:6:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:4:"str1";s:6:"delta2";s:4:"str2";s:11:"c cc delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}i:2;a:6:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:4:"str1";s:6:"delta2";s:4:"str2";s:10:"a a delta2";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"102";}i:3;a:6:{s:2:"id";s:1:"8";s:6:"weight";s:1:"1";s:4:"str1";s:6:"delta2";s:4:"str2";s:10:"a a delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}i:4;a:6:{s:2:"id";s:1:"7";s:6:"weight";s:1:"1";s:4:"str1";s:6:"delta2";s:4:"str2";s:10:"a a delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}}}i:7;a:3:{s:8:"sphinxql";s:48:"select * from dist1 order by str2 asc, idd1 desc";s:10:"total_rows";i:5;s:4:"rows";a:5:{i:0;a:6:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:4:"str1";s:6:"delta2";s:4:"str2";s:10:"a a delta2";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"102";}i:1;a:6:{s:2:"id";s:1:"7";s:6:"weight";s:1:"1";s:4:"str1";s:6:"delta2";s:4:"str2";s:10:"a a delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}i:2;a:6:{s:2:"id";s:1:"8";s:6:"weight";s:1:"1";s:4:"str1";s:6:"delta2";s:4:"str2";s:10:"a a delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}i:3;a:6:{s:2:"id";s:1:"9";s:6:"weight";s:1:"1";s:4:"str1";s:6:"delta2";s:4:"str2";s:11:"c cc delta2";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"102";}i:4;a:6:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:4:"str1";s:6:"delta2";s:4:"str2";s:11:"c cc delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}}}i:8;a:3:{s:8:"sphinxql";s:48:"select * from dist1 order by str2 desc, idd1 asc";s:10:"total_rows";i:5;s:4:"rows";a:5:{i:0;a:6:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:4:"str1";s:6:"delta2";s:4:"str2";s:11:"c cc delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}i:1;a:6:{s:2:"id";s:1:"9";s:6:"weight";s:1:"1";s:4:"str1";s:6:"delta2";s:4:"str2";s:11:"c cc delta2";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"102";}i:2;a:6:{s:2:"id";s:1:"7";s:6:"weight";s:1:"1";s:4:"str1";s:6:"delta2";s:4:"str2";s:10:"a a delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}i:3;a:6:{s:2:"id";s:1:"8";s:6:"weight";s:1:"1";s:4:"str1";s:6:"delta2";s:4:"str2";s:10:"a a delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}i:4;a:6:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:4:"str1";s:6:"delta2";s:4:"str2";s:10:"a a delta2";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"102";}}}i:9;a:3:{s:8:"sphinxql";s:37:"select * from dist2 order by str1 asc";s:10:"total_rows";i:6;s:4:"rows";a:6:{i:0;a:5:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:4:"str1";s:11:"cc c delta1";s:4:"idd1";s:2:"11";s:3:"tag";s:3:"101";}i:1;a:5:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:4:"str1";s:6:"delta2";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"102";}i:2;a:5:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:4:"str1";s:6:"delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}i:3;a:5:{s:2:"id";s:1:"7";s:6:"weight";s:1:"1";s:4:"str1";s:6:"delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}i:4;a:5:{s:2:"id";s:1:"8";s:6:"weight";s:1:"1";s:4:"str1";s:6:"delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}i:5;a:5:{s:2:"id";s:1:"9";s:6:"weight";s:1:"1";s:4:"str1";s:6:"delta2";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"102";}}}i:10;a:3:{s:8:"sphinxql";s:38:"select * from dist2 order by str1 
desc";s:10:"total_rows";i:6;s:4:"rows";a:6:{i:0;a:5:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:4:"str1";s:6:"delta2";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"102";}i:1;a:5:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:4:"str1";s:6:"delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}i:2;a:5:{s:2:"id";s:1:"7";s:6:"weight";s:1:"1";s:4:"str1";s:6:"delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}i:3;a:5:{s:2:"id";s:1:"8";s:6:"weight";s:1:"1";s:4:"str1";s:6:"delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}i:4;a:5:{s:2:"id";s:1:"9";s:6:"weight";s:1:"1";s:4:"str1";s:6:"delta2";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"102";}i:5;a:5:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:4:"str1";s:11:"cc c delta1";s:4:"idd1";s:2:"11";s:3:"tag";s:3:"101";}}}i:11;a:3:{s:8:"sphinxql";s:37:"select * from dist3 order by str1 asc";s:10:"total_rows";i:9;s:4:"rows";a:9:{i:0;a:5:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:4:"str1";s:10:"a a delta1";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"101";}i:1;a:5:{s:2:"id";s:1:"7";s:6:"weight";s:1:"1";s:4:"str1";s:10:"a a delta1";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"101";}i:2;a:5:{s:2:"id";s:1:"8";s:6:"weight";s:1:"1";s:4:"str1";s:10:"a a delta1";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"101";}i:3;a:5:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:4:"str1";s:8:"a a main";s:4:"idd1";s:2:"11";s:3:"tag";s:3:"100";}i:4;a:5:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:4:"str1";s:9:"a aa main";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"100";}i:5;a:5:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:4:"str1";s:11:"c cc delta1";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"101";}i:6;a:5:{s:2:"id";s:1:"9";s:6:"weight";s:1:"1";s:4:"str1";s:11:"c cc delta1";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"101";}i:7;a:5:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:4:"str1";s:9:"c cc main";s:4:"idd1";s:2:"11";s:3:"tag";s:3:"100";}i:8;a:5:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:4:"str1";s:11:"cc c delta1";s:4:"idd1";s:2:"11";s:3:"tag";s:3:"101";}}}i:12;a:3:{s:8:"sphinxql";s:49:"select * from dist3 order by str1 desc, idd1 desc";s:10:"total_rows";i:9;s:4:"rows";a:9:{i:0;a:5:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:4:"str1";s:11:"cc c delta1";s:4:"idd1";s:2:"11";s:3:"tag";s:3:"101";}i:1;a:5:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:4:"str1";s:9:"c cc main";s:4:"idd1";s:2:"11";s:3:"tag";s:3:"100";}i:2;a:5:{s:2:"id";s:1:"9";s:6:"weight";s:1:"1";s:4:"str1";s:11:"c cc delta1";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"101";}i:3;a:5:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:4:"str1";s:11:"c cc delta1";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"101";}i:4;a:5:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:4:"str1";s:9:"a aa main";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"100";}i:5;a:5:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:4:"str1";s:8:"a a main";s:4:"idd1";s:2:"11";s:3:"tag";s:3:"100";}i:6;a:5:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:4:"str1";s:10:"a a delta1";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"101";}i:7;a:5:{s:2:"id";s:1:"8";s:6:"weight";s:1:"1";s:4:"str1";s:10:"a a delta1";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"101";}i:8;a:5:{s:2:"id";s:1:"7";s:6:"weight";s:1:"1";s:4:"str1";s:10:"a a delta1";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"101";}}}i:13;a:3:{s:8:"sphinxql";s:29:"select * from u_bool, 
u_float";s:10:"total_rows";i:9;s:4:"rows";a:9:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:4:"attr";s:8:"0.578000";}i:1;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:4:"attr";s:8:"6.358400";}i:2;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:4:"attr";s:8:"6.358400";}i:3;a:3:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:4:"attr";s:8:"6.358400";}i:4;a:3:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:4:"attr";s:8:"1.734100";}i:5;a:3:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:4:"attr";s:8:"0.578000";}i:6;a:3:{s:2:"id";s:1:"7";s:6:"weight";s:1:"1";s:4:"attr";s:8:"0.578000";}i:7;a:3:{s:2:"id";s:1:"8";s:6:"weight";s:1:"1";s:4:"attr";s:8:"0.578000";}i:8;a:3:{s:2:"id";s:1:"9";s:6:"weight";s:1:"1";s:4:"attr";s:8:"1.734100";}}}i:14;a:3:{s:8:"sphinxql";s:29:"select * from u_float, u_bool";s:10:"total_rows";i:9;s:4:"rows";a:9:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:4:"attr";s:8:"0.000000";}i:1;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:4:"attr";s:8:"1.000000";}i:2;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:4:"attr";s:8:"1.000000";}i:3;a:3:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:4:"attr";s:8:"1.000000";}i:4;a:3:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:4:"attr";s:8:"1.000000";}i:5;a:3:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:4:"attr";s:8:"0.000000";}i:6;a:3:{s:2:"id";s:1:"7";s:6:"weight";s:1:"1";s:4:"attr";s:8:"0.000000";}i:7;a:3:{s:2:"id";s:1:"8";s:6:"weight";s:1:"1";s:4:"attr";s:8:"0.000000";}i:8;a:3:{s:2:"id";s:1:"9";s:6:"weight";s:1:"1";s:4:"attr";s:8:"1.000000";}}}i:15;a:3:{s:8:"sphinxql";s:38:"select * from u_bool, u_uint, u_bigint";s:10:"total_rows";i:9;s:4:"rows";a:9:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:4:"attr";s:12:"100000000000";}i:1;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:4:"attr";s:13:"1100000000000";}i:2;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:4:"attr";s:13:"1100000000000";}i:3;a:3:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:4:"attr";s:13:"1100000000000";}i:4;a:3:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:4:"attr";s:12:"300000000000";}i:5;a:3:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:4:"attr";s:12:"100000000000";}i:6;a:3:{s:2:"id";s:1:"7";s:6:"weight";s:1:"1";s:4:"attr";s:12:"100000000000";}i:7;a:3:{s:2:"id";s:1:"8";s:6:"weight";s:1:"1";s:4:"attr";s:12:"100000000000";}i:8;a:3:{s:2:"id";s:1:"9";s:6:"weight";s:1:"1";s:4:"attr";s:12:"300000000000";}}}i:16;a:3:{s:8:"sphinxql";s:38:"select * from u_uint, u_bool, u_bigint";s:10:"total_rows";i:9;s:4:"rows";a:9:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:4:"attr";s:12:"100000000000";}i:1;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:4:"attr";s:13:"1100000000000";}i:2;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:4:"attr";s:13:"1100000000000";}i:3;a:3:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:4:"attr";s:13:"1100000000000";}i:4;a:3:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:4:"attr";s:12:"300000000000";}i:5;a:3:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:4:"attr";s:12:"100000000000";}i:6;a:3:{s:2:"id";s:1:"7";s:6:"weight";s:1:"1";s:4:"attr";s:12:"100000000000";}i:7;a:3:{s:2:"id";s:1:"8";s:6:"weight";s:1:"1";s:4:"attr";s:12:"100000000000";}i:8;a:3:{s:2:"id";s:1:"9";s:6:"weight";s:1:"1";s:4:"attr";s:12:"300000000000";}}}i:17;a:3:{s:8:"sphinxql";s:38:"select * from u_uint, u_bigint, 
u_bool";s:10:"total_rows";i:9;s:4:"rows";a:9:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:4:"attr";s:1:"0";}i:1;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:4:"attr";s:1:"1";}i:2;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:4:"attr";s:1:"1";}i:3;a:3:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:4:"attr";s:1:"1";}i:4;a:3:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:4:"attr";s:1:"1";}i:5;a:3:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:4:"attr";s:1:"0";}i:6;a:3:{s:2:"id";s:1:"7";s:6:"weight";s:1:"1";s:4:"attr";s:1:"0";}i:7;a:3:{s:2:"id";s:1:"8";s:6:"weight";s:1:"1";s:4:"attr";s:1:"0";}i:8;a:3:{s:2:"id";s:1:"9";s:6:"weight";s:1:"1";s:4:"attr";s:1:"1";}}}i:18;a:3:{s:8:"sphinxql";s:39:"select * from u_bigint, u_uint, u_bool ";s:10:"total_rows";i:9;s:4:"rows";a:9:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:4:"attr";s:1:"0";}i:1;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:4:"attr";s:1:"1";}i:2;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:4:"attr";s:1:"1";}i:3;a:3:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:4:"attr";s:1:"1";}i:4;a:3:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:4:"attr";s:1:"1";}i:5;a:3:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:4:"attr";s:1:"0";}i:6;a:3:{s:2:"id";s:1:"7";s:6:"weight";s:1:"1";s:4:"attr";s:1:"0";}i:7;a:3:{s:2:"id";s:1:"8";s:6:"weight";s:1:"1";s:4:"attr";s:1:"0";}i:8;a:3:{s:2:"id";s:1:"9";s:6:"weight";s:1:"1";s:4:"attr";s:1:"1";}}}i:19;a:3:{s:8:"sphinxql";s:22:"select * from u_dist10";s:10:"total_rows";i:9;s:4:"rows";a:9:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:4:"attr";s:8:"0.578000";}i:1;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:4:"attr";s:8:"6.358400";}i:2;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:4:"attr";s:8:"6.358400";}i:3;a:3:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:4:"attr";s:8:"6.358400";}i:4;a:3:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:4:"attr";s:8:"1.734100";}i:5;a:3:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:4:"attr";s:8:"0.578000";}i:6;a:3:{s:2:"id";s:1:"7";s:6:"weight";s:1:"1";s:4:"attr";s:8:"0.578000";}i:7;a:3:{s:2:"id";s:1:"8";s:6:"weight";s:1:"1";s:4:"attr";s:8:"0.578000";}i:8;a:3:{s:2:"id";s:1:"9";s:6:"weight";s:1:"1";s:4:"attr";s:8:"1.734100";}}}i:20;a:3:{s:8:"sphinxql";s:22:"select * from u_dist11";s:10:"total_rows";i:9;s:4:"rows";a:9:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:4:"attr";s:8:"0.000000";}i:1;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:4:"attr";s:8:"1.000000";}i:2;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:4:"attr";s:8:"1.000000";}i:3;a:3:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:4:"attr";s:8:"1.000000";}i:4;a:3:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:4:"attr";s:8:"1.000000";}i:5;a:3:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:4:"attr";s:8:"0.000000";}i:6;a:3:{s:2:"id";s:1:"7";s:6:"weight";s:1:"1";s:4:"attr";s:8:"0.000000";}i:7;a:3:{s:2:"id";s:1:"8";s:6:"weight";s:1:"1";s:4:"attr";s:8:"0.000000";}i:8;a:3:{s:2:"id";s:1:"9";s:6:"weight";s:1:"1";s:4:"attr";s:8:"1.000000";}}}i:21;a:3:{s:8:"sphinxql";s:22:"select * from 
u_dist20";s:10:"total_rows";i:9;s:4:"rows";a:9:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:4:"attr";s:12:"100000000000";}i:1;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:4:"attr";s:13:"1100000000000";}i:2;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:4:"attr";s:13:"1100000000000";}i:3;a:3:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:4:"attr";s:13:"1100000000000";}i:4;a:3:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:4:"attr";s:12:"300000000000";}i:5;a:3:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:4:"attr";s:12:"100000000000";}i:6;a:3:{s:2:"id";s:1:"7";s:6:"weight";s:1:"1";s:4:"attr";s:12:"100000000000";}i:7;a:3:{s:2:"id";s:1:"8";s:6:"weight";s:1:"1";s:4:"attr";s:12:"100000000000";}i:8;a:3:{s:2:"id";s:1:"9";s:6:"weight";s:1:"1";s:4:"attr";s:12:"300000000000";}}}i:22;a:3:{s:8:"sphinxql";s:22:"select * from u_dist21";s:10:"total_rows";i:9;s:4:"rows";a:9:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:4:"attr";s:3:"100";}i:1;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:4:"attr";s:4:"1100";}i:2;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:4:"attr";s:4:"1100";}i:3;a:3:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:4:"attr";s:4:"1100";}i:4;a:3:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:4:"attr";s:3:"300";}i:5;a:3:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:4:"attr";s:3:"100";}i:6;a:3:{s:2:"id";s:1:"7";s:6:"weight";s:1:"1";s:4:"attr";s:3:"100";}i:7;a:3:{s:2:"id";s:1:"8";s:6:"weight";s:1:"1";s:4:"attr";s:3:"100";}i:8;a:3:{s:2:"id";s:1:"9";s:6:"weight";s:1:"1";s:4:"attr";s:3:"300";}}}i:23;a:3:{s:8:"sphinxql";s:22:"select * from u_dist22";s:10:"total_rows";i:9;s:4:"rows";a:9:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:4:"attr";s:3:"100";}i:1;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:4:"attr";s:4:"1100";}i:2;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:4:"attr";s:4:"1100";}i:3;a:3:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:4:"attr";s:4:"1100";}i:4;a:3:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:4:"attr";s:3:"300";}i:5;a:3:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:4:"attr";s:3:"100";}i:6;a:3:{s:2:"id";s:1:"7";s:6:"weight";s:1:"1";s:4:"attr";s:3:"100";}i:7;a:3:{s:2:"id";s:1:"8";s:6:"weight";s:1:"1";s:4:"attr";s:3:"100";}i:8;a:3:{s:2:"id";s:1:"9";s:6:"weight";s:1:"1";s:4:"attr";s:3:"300";}}}i:24;a:3:{s:8:"sphinxql";s:22:"select * from u_dist23";s:10:"total_rows";i:9;s:4:"rows";a:9:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:4:"attr";s:1:"0";}i:1;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:4:"attr";s:1:"1";}i:2;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:4:"attr";s:1:"1";}i:3;a:3:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:4:"attr";s:1:"1";}i:4;a:3:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:4:"attr";s:1:"1";}i:5;a:3:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:4:"attr";s:1:"0";}i:6;a:3:{s:2:"id";s:1:"7";s:6:"weight";s:1:"1";s:4:"attr";s:1:"0";}i:7;a:3:{s:2:"id";s:1:"8";s:6:"weight";s:1:"1";s:4:"attr";s:1:"0";}i:8;a:3:{s:2:"id";s:1:"9";s:6:"weight";s:1:"1";s:4:"attr";s:1:"1";}}}i:25;a:3:{s:8:"sphinxql";s:29:"select * from u_float, u_uint";s:10:"total_rows";i:9;s:4:"rows";a:9:{i:0;a:2:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";}i:1;a:2:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";}i:2;a:2:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";}i:3;a:2:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";}i:4;a:2:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";}i:5;a:2:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";}i:6;a:2:{s:2:"id";s:1:"7";s:6:"weight";s:1:"1";}i:7;a:2:{s:2:"id";s:1:"8";s:6:"weight";s:1:"1";}i:8;a:2:{s:2:"id";s:1:"9";s:6:"weight";s:1:"1";}}}i:26;a:3:{s:8:"sphinxql";s:31:"select * from u_float, 
u_bigint";s:10:"total_rows";i:9;s:4:"rows";a:9:{i:0;a:2:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";}i:1;a:2:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";}i:2;a:2:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";}i:3;a:2:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";}i:4;a:2:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";}i:5;a:2:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";}i:6;a:2:{s:2:"id";s:1:"7";s:6:"weight";s:1:"1";}i:7;a:2:{s:2:"id";s:1:"8";s:6:"weight";s:1:"1";}i:8;a:2:{s:2:"id";s:1:"9";s:6:"weight";s:1:"1";}}}i:27;a:3:{s:8:"sphinxql";s:29:"select * from u_uint, u_float";s:10:"total_rows";i:9;s:4:"rows";a:9:{i:0;a:2:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";}i:1;a:2:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";}i:2;a:2:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";}i:3;a:2:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";}i:4;a:2:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";}i:5;a:2:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";}i:6;a:2:{s:2:"id";s:1:"7";s:6:"weight";s:1:"1";}i:7;a:2:{s:2:"id";s:1:"8";s:6:"weight";s:1:"1";}i:8;a:2:{s:2:"id";s:1:"9";s:6:"weight";s:1:"1";}}}i:28;a:3:{s:8:"sphinxql";s:31:"select * from u_bigint, u_float";s:10:"total_rows";i:9;s:4:"rows";a:9:{i:0;a:2:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";}i:1;a:2:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";}i:2;a:2:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";}i:3;a:2:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";}i:4;a:2:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";}i:5;a:2:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";}i:6;a:2:{s:2:"id";s:1:"7";s:6:"weight";s:1:"1";}i:7;a:2:{s:2:"id";s:1:"8";s:6:"weight";s:1:"1";}i:8;a:2:{s:2:"id";s:1:"9";s:6:"weight";s:1:"1";}}}}i:1;a:29:{i:0;a:3:{s:8:"sphinxql";s:41:"select * from loc_dist1 order by str1 asc";s:10:"total_rows";i:8;s:4:"rows";a:8:{i:0;a:4:{s:2:"id";s:1:"5";s:4:"str1";s:10:"a a delta1";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"101";}i:1;a:4:{s:2:"id";s:1:"7";s:4:"str1";s:10:"a a delta1";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"101";}i:2;a:4:{s:2:"id";s:1:"8";s:4:"str1";s:10:"a a delta1";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"101";}i:3;a:4:{s:2:"id";s:1:"3";s:4:"str1";s:8:"a a main";s:4:"idd1";s:2:"11";s:3:"tag";s:3:"100";}i:4;a:4:{s:2:"id";s:1:"1";s:4:"str1";s:9:"a aa main";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"100";}i:5;a:4:{s:2:"id";s:1:"6";s:4:"str1";s:11:"c cc delta1";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"101";}i:6;a:4:{s:2:"id";s:1:"9";s:4:"str1";s:11:"c cc delta1";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"101";}i:7;a:4:{s:2:"id";s:1:"4";s:4:"str1";s:11:"cc c delta1";s:4:"idd1";s:2:"11";s:3:"tag";s:3:"101";}}}i:1;a:3:{s:8:"sphinxql";s:42:"select * from loc_dist1 order by str1 desc";s:10:"total_rows";i:8;s:4:"rows";a:8:{i:0;a:4:{s:2:"id";s:1:"4";s:4:"str1";s:11:"cc c delta1";s:4:"idd1";s:2:"11";s:3:"tag";s:3:"101";}i:1;a:4:{s:2:"id";s:1:"6";s:4:"str1";s:11:"c cc delta1";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"101";}i:2;a:4:{s:2:"id";s:1:"9";s:4:"str1";s:11:"c cc delta1";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"101";}i:3;a:4:{s:2:"id";s:1:"1";s:4:"str1";s:9:"a aa main";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"100";}i:4;a:4:{s:2:"id";s:1:"3";s:4:"str1";s:8:"a a main";s:4:"idd1";s:2:"11";s:3:"tag";s:3:"100";}i:5;a:4:{s:2:"id";s:1:"5";s:4:"str1";s:10:"a a delta1";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"101";}i:6;a:4:{s:2:"id";s:1:"7";s:4:"str1";s:10:"a a delta1";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"101";}i:7;a:4:{s:2:"id";s:1:"8";s:4:"str1";s:10:"a a delta1";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"101";}}}i:2;a:3:{s:8:"sphinxql";s:52:"select * from loc_dist2 order by str1 asc, idd1 desc";s:10:"total_rows";i:8;s:4:"rows";a:8:{i:0;a:4:{s:2:"id";s:1:"3";s:4:"str1";s:8:"a a 
main";s:4:"idd1";s:2:"11";s:3:"tag";s:3:"100";}i:1;a:4:{s:2:"id";s:1:"1";s:4:"str1";s:9:"a aa main";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"100";}i:2;a:4:{s:2:"id";s:1:"4";s:4:"str1";s:9:"cc c main";s:4:"idd1";s:2:"11";s:3:"tag";s:3:"100";}i:3;a:4:{s:2:"id";s:1:"9";s:4:"str1";s:6:"delta2";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"102";}i:4;a:4:{s:2:"id";s:1:"5";s:4:"str1";s:6:"delta2";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"102";}i:5;a:4:{s:2:"id";s:1:"8";s:4:"str1";s:6:"delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}i:6;a:4:{s:2:"id";s:1:"7";s:4:"str1";s:6:"delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}i:7;a:4:{s:2:"id";s:1:"6";s:4:"str1";s:6:"delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}}}i:3;a:3:{s:8:"sphinxql";s:52:"select * from loc_dist2 order by str1 desc, idd1 asc";s:10:"total_rows";i:8;s:4:"rows";a:8:{i:0;a:4:{s:2:"id";s:1:"6";s:4:"str1";s:6:"delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}i:1;a:4:{s:2:"id";s:1:"8";s:4:"str1";s:6:"delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}i:2;a:4:{s:2:"id";s:1:"7";s:4:"str1";s:6:"delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}i:3;a:4:{s:2:"id";s:1:"5";s:4:"str1";s:6:"delta2";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"102";}i:4;a:4:{s:2:"id";s:1:"9";s:4:"str1";s:6:"delta2";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"102";}i:5;a:4:{s:2:"id";s:1:"4";s:4:"str1";s:9:"cc c main";s:4:"idd1";s:2:"11";s:3:"tag";s:3:"100";}i:6;a:4:{s:2:"id";s:1:"1";s:4:"str1";s:9:"a aa main";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"100";}i:7;a:4:{s:2:"id";s:1:"3";s:4:"str1";s:8:"a a main";s:4:"idd1";s:2:"11";s:3:"tag";s:3:"100";}}}i:4;a:3:{s:8:"sphinxql";s:41:"select * from loc_dist2 order by str2 asc";s:10:"total_rows";i:5;s:4:"rows";a:5:{i:0;a:5:{s:2:"id";s:1:"5";s:4:"str1";s:6:"delta2";s:4:"str2";s:10:"a a delta2";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"102";}i:1;a:5:{s:2:"id";s:1:"7";s:4:"str1";s:6:"delta2";s:4:"str2";s:10:"a a delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}i:2;a:5:{s:2:"id";s:1:"8";s:4:"str1";s:6:"delta2";s:4:"str2";s:10:"a a delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}i:3;a:5:{s:2:"id";s:1:"6";s:4:"str1";s:6:"delta2";s:4:"str2";s:11:"c cc delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}i:4;a:5:{s:2:"id";s:1:"9";s:4:"str1";s:6:"delta2";s:4:"str2";s:11:"c cc delta2";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"102";}}}i:5;a:3:{s:8:"sphinxql";s:42:"select * from loc_dist2 order by str2 desc";s:10:"total_rows";i:5;s:4:"rows";a:5:{i:0;a:5:{s:2:"id";s:1:"6";s:4:"str1";s:6:"delta2";s:4:"str2";s:11:"c cc delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}i:1;a:5:{s:2:"id";s:1:"9";s:4:"str1";s:6:"delta2";s:4:"str2";s:11:"c cc delta2";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"102";}i:2;a:5:{s:2:"id";s:1:"5";s:4:"str1";s:6:"delta2";s:4:"str2";s:10:"a a delta2";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"102";}i:3;a:5:{s:2:"id";s:1:"7";s:4:"str1";s:6:"delta2";s:4:"str2";s:10:"a a delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}i:4;a:5:{s:2:"id";s:1:"8";s:4:"str1";s:6:"delta2";s:4:"str2";s:10:"a a delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}}}i:6;a:3:{s:8:"sphinxql";s:53:"select * from loc_dist2 order by str2 desc, idd1 desc";s:10:"total_rows";i:5;s:4:"rows";a:5:{i:0;a:5:{s:2:"id";s:1:"9";s:4:"str1";s:6:"delta2";s:4:"str2";s:11:"c cc delta2";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"102";}i:1;a:5:{s:2:"id";s:1:"6";s:4:"str1";s:6:"delta2";s:4:"str2";s:11:"c cc delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}i:2;a:5:{s:2:"id";s:1:"5";s:4:"str1";s:6:"delta2";s:4:"str2";s:10:"a a delta2";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"102";}i:3;a:5:{s:2:"id";s:1:"8";s:4:"str1";s:6:"delta2";s:4:"str2";s:10:"a a 
delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}i:4;a:5:{s:2:"id";s:1:"7";s:4:"str1";s:6:"delta2";s:4:"str2";s:10:"a a delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}}}i:7;a:3:{s:8:"sphinxql";s:48:"select * from dist1 order by str2 asc, idd1 desc";s:10:"total_rows";i:5;s:4:"rows";a:5:{i:0;a:5:{s:2:"id";s:1:"5";s:4:"str1";s:6:"delta2";s:4:"str2";s:10:"a a delta2";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"102";}i:1;a:5:{s:2:"id";s:1:"7";s:4:"str1";s:6:"delta2";s:4:"str2";s:10:"a a delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}i:2;a:5:{s:2:"id";s:1:"8";s:4:"str1";s:6:"delta2";s:4:"str2";s:10:"a a delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}i:3;a:5:{s:2:"id";s:1:"9";s:4:"str1";s:6:"delta2";s:4:"str2";s:11:"c cc delta2";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"102";}i:4;a:5:{s:2:"id";s:1:"6";s:4:"str1";s:6:"delta2";s:4:"str2";s:11:"c cc delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}}}i:8;a:3:{s:8:"sphinxql";s:48:"select * from dist1 order by str2 desc, idd1 asc";s:10:"total_rows";i:5;s:4:"rows";a:5:{i:0;a:5:{s:2:"id";s:1:"6";s:4:"str1";s:6:"delta2";s:4:"str2";s:11:"c cc delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}i:1;a:5:{s:2:"id";s:1:"9";s:4:"str1";s:6:"delta2";s:4:"str2";s:11:"c cc delta2";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"102";}i:2;a:5:{s:2:"id";s:1:"7";s:4:"str1";s:6:"delta2";s:4:"str2";s:10:"a a delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}i:3;a:5:{s:2:"id";s:1:"8";s:4:"str1";s:6:"delta2";s:4:"str2";s:10:"a a delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}i:4;a:5:{s:2:"id";s:1:"5";s:4:"str1";s:6:"delta2";s:4:"str2";s:10:"a a delta2";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"102";}}}i:9;a:3:{s:8:"sphinxql";s:37:"select * from dist2 order by str1 asc";s:10:"total_rows";i:6;s:4:"rows";a:6:{i:0;a:4:{s:2:"id";s:1:"4";s:4:"str1";s:11:"cc c delta1";s:4:"idd1";s:2:"11";s:3:"tag";s:3:"101";}i:1;a:4:{s:2:"id";s:1:"5";s:4:"str1";s:6:"delta2";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"102";}i:2;a:4:{s:2:"id";s:1:"6";s:4:"str1";s:6:"delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}i:3;a:4:{s:2:"id";s:1:"7";s:4:"str1";s:6:"delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}i:4;a:4:{s:2:"id";s:1:"8";s:4:"str1";s:6:"delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}i:5;a:4:{s:2:"id";s:1:"9";s:4:"str1";s:6:"delta2";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"102";}}}i:10;a:3:{s:8:"sphinxql";s:38:"select * from dist2 order by str1 desc";s:10:"total_rows";i:6;s:4:"rows";a:6:{i:0;a:4:{s:2:"id";s:1:"5";s:4:"str1";s:6:"delta2";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"102";}i:1;a:4:{s:2:"id";s:1:"6";s:4:"str1";s:6:"delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}i:2;a:4:{s:2:"id";s:1:"7";s:4:"str1";s:6:"delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}i:3;a:4:{s:2:"id";s:1:"8";s:4:"str1";s:6:"delta2";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"102";}i:4;a:4:{s:2:"id";s:1:"9";s:4:"str1";s:6:"delta2";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"102";}i:5;a:4:{s:2:"id";s:1:"4";s:4:"str1";s:11:"cc c delta1";s:4:"idd1";s:2:"11";s:3:"tag";s:3:"101";}}}i:11;a:3:{s:8:"sphinxql";s:37:"select * from dist3 order by str1 asc";s:10:"total_rows";i:9;s:4:"rows";a:9:{i:0;a:4:{s:2:"id";s:1:"5";s:4:"str1";s:10:"a a delta1";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"101";}i:1;a:4:{s:2:"id";s:1:"7";s:4:"str1";s:10:"a a delta1";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"101";}i:2;a:4:{s:2:"id";s:1:"8";s:4:"str1";s:10:"a a delta1";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"101";}i:3;a:4:{s:2:"id";s:1:"3";s:4:"str1";s:8:"a a main";s:4:"idd1";s:2:"11";s:3:"tag";s:3:"100";}i:4;a:4:{s:2:"id";s:1:"1";s:4:"str1";s:9:"a aa main";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"100";}i:5;a:4:{s:2:"id";s:1:"6";s:4:"str1";s:11:"c cc 
delta1";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"101";}i:6;a:4:{s:2:"id";s:1:"9";s:4:"str1";s:11:"c cc delta1";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"101";}i:7;a:4:{s:2:"id";s:1:"2";s:4:"str1";s:9:"c cc main";s:4:"idd1";s:2:"11";s:3:"tag";s:3:"100";}i:8;a:4:{s:2:"id";s:1:"4";s:4:"str1";s:11:"cc c delta1";s:4:"idd1";s:2:"11";s:3:"tag";s:3:"101";}}}i:12;a:3:{s:8:"sphinxql";s:49:"select * from dist3 order by str1 desc, idd1 desc";s:10:"total_rows";i:9;s:4:"rows";a:9:{i:0;a:4:{s:2:"id";s:1:"4";s:4:"str1";s:11:"cc c delta1";s:4:"idd1";s:2:"11";s:3:"tag";s:3:"101";}i:1;a:4:{s:2:"id";s:1:"2";s:4:"str1";s:9:"c cc main";s:4:"idd1";s:2:"11";s:3:"tag";s:3:"100";}i:2;a:4:{s:2:"id";s:1:"9";s:4:"str1";s:11:"c cc delta1";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"101";}i:3;a:4:{s:2:"id";s:1:"6";s:4:"str1";s:11:"c cc delta1";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"101";}i:4;a:4:{s:2:"id";s:1:"1";s:4:"str1";s:9:"a aa main";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"100";}i:5;a:4:{s:2:"id";s:1:"3";s:4:"str1";s:8:"a a main";s:4:"idd1";s:2:"11";s:3:"tag";s:3:"100";}i:6;a:4:{s:2:"id";s:1:"5";s:4:"str1";s:10:"a a delta1";s:4:"idd1";s:1:"3";s:3:"tag";s:3:"101";}i:7;a:4:{s:2:"id";s:1:"8";s:4:"str1";s:10:"a a delta1";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"101";}i:8;a:4:{s:2:"id";s:1:"7";s:4:"str1";s:10:"a a delta1";s:4:"idd1";s:1:"1";s:3:"tag";s:3:"101";}}}i:13;a:3:{s:8:"sphinxql";s:29:"select * from u_bool, u_float";s:10:"total_rows";i:9;s:4:"rows";a:9:{i:0;a:2:{s:2:"id";s:1:"1";s:4:"attr";s:8:"0.578000";}i:1;a:2:{s:2:"id";s:1:"2";s:4:"attr";s:8:"6.358400";}i:2;a:2:{s:2:"id";s:1:"3";s:4:"attr";s:8:"6.358400";}i:3;a:2:{s:2:"id";s:1:"4";s:4:"attr";s:8:"6.358400";}i:4;a:2:{s:2:"id";s:1:"5";s:4:"attr";s:8:"1.734100";}i:5;a:2:{s:2:"id";s:1:"6";s:4:"attr";s:8:"0.578000";}i:6;a:2:{s:2:"id";s:1:"7";s:4:"attr";s:8:"0.578000";}i:7;a:2:{s:2:"id";s:1:"8";s:4:"attr";s:8:"0.578000";}i:8;a:2:{s:2:"id";s:1:"9";s:4:"attr";s:8:"1.734100";}}}i:14;a:3:{s:8:"sphinxql";s:29:"select * from u_float, u_bool";s:10:"total_rows";i:9;s:4:"rows";a:9:{i:0;a:2:{s:2:"id";s:1:"1";s:4:"attr";s:8:"0.000000";}i:1;a:2:{s:2:"id";s:1:"2";s:4:"attr";s:8:"1.000000";}i:2;a:2:{s:2:"id";s:1:"3";s:4:"attr";s:8:"1.000000";}i:3;a:2:{s:2:"id";s:1:"4";s:4:"attr";s:8:"1.000000";}i:4;a:2:{s:2:"id";s:1:"5";s:4:"attr";s:8:"1.000000";}i:5;a:2:{s:2:"id";s:1:"6";s:4:"attr";s:8:"0.000000";}i:6;a:2:{s:2:"id";s:1:"7";s:4:"attr";s:8:"0.000000";}i:7;a:2:{s:2:"id";s:1:"8";s:4:"attr";s:8:"0.000000";}i:8;a:2:{s:2:"id";s:1:"9";s:4:"attr";s:8:"1.000000";}}}i:15;a:3:{s:8:"sphinxql";s:38:"select * from u_bool, u_uint, u_bigint";s:10:"total_rows";i:9;s:4:"rows";a:9:{i:0;a:2:{s:2:"id";s:1:"1";s:4:"attr";s:12:"100000000000";}i:1;a:2:{s:2:"id";s:1:"2";s:4:"attr";s:13:"1100000000000";}i:2;a:2:{s:2:"id";s:1:"3";s:4:"attr";s:13:"1100000000000";}i:3;a:2:{s:2:"id";s:1:"4";s:4:"attr";s:13:"1100000000000";}i:4;a:2:{s:2:"id";s:1:"5";s:4:"attr";s:12:"300000000000";}i:5;a:2:{s:2:"id";s:1:"6";s:4:"attr";s:12:"100000000000";}i:6;a:2:{s:2:"id";s:1:"7";s:4:"attr";s:12:"100000000000";}i:7;a:2:{s:2:"id";s:1:"8";s:4:"attr";s:12:"100000000000";}i:8;a:2:{s:2:"id";s:1:"9";s:4:"attr";s:12:"300000000000";}}}i:16;a:3:{s:8:"sphinxql";s:38:"select * from u_uint, u_bool, 
u_bigint";s:10:"total_rows";i:9;s:4:"rows";a:9:{i:0;a:2:{s:2:"id";s:1:"1";s:4:"attr";s:12:"100000000000";}i:1;a:2:{s:2:"id";s:1:"2";s:4:"attr";s:13:"1100000000000";}i:2;a:2:{s:2:"id";s:1:"3";s:4:"attr";s:13:"1100000000000";}i:3;a:2:{s:2:"id";s:1:"4";s:4:"attr";s:13:"1100000000000";}i:4;a:2:{s:2:"id";s:1:"5";s:4:"attr";s:12:"300000000000";}i:5;a:2:{s:2:"id";s:1:"6";s:4:"attr";s:12:"100000000000";}i:6;a:2:{s:2:"id";s:1:"7";s:4:"attr";s:12:"100000000000";}i:7;a:2:{s:2:"id";s:1:"8";s:4:"attr";s:12:"100000000000";}i:8;a:2:{s:2:"id";s:1:"9";s:4:"attr";s:12:"300000000000";}}}i:17;a:3:{s:8:"sphinxql";s:38:"select * from u_uint, u_bigint, u_bool";s:10:"total_rows";i:9;s:4:"rows";a:9:{i:0;a:2:{s:2:"id";s:1:"1";s:4:"attr";s:1:"0";}i:1;a:2:{s:2:"id";s:1:"2";s:4:"attr";s:1:"1";}i:2;a:2:{s:2:"id";s:1:"3";s:4:"attr";s:1:"1";}i:3;a:2:{s:2:"id";s:1:"4";s:4:"attr";s:1:"1";}i:4;a:2:{s:2:"id";s:1:"5";s:4:"attr";s:1:"1";}i:5;a:2:{s:2:"id";s:1:"6";s:4:"attr";s:1:"0";}i:6;a:2:{s:2:"id";s:1:"7";s:4:"attr";s:1:"0";}i:7;a:2:{s:2:"id";s:1:"8";s:4:"attr";s:1:"0";}i:8;a:2:{s:2:"id";s:1:"9";s:4:"attr";s:1:"1";}}}i:18;a:3:{s:8:"sphinxql";s:39:"select * from u_bigint, u_uint, u_bool ";s:10:"total_rows";i:9;s:4:"rows";a:9:{i:0;a:2:{s:2:"id";s:1:"1";s:4:"attr";s:1:"0";}i:1;a:2:{s:2:"id";s:1:"2";s:4:"attr";s:1:"1";}i:2;a:2:{s:2:"id";s:1:"3";s:4:"attr";s:1:"1";}i:3;a:2:{s:2:"id";s:1:"4";s:4:"attr";s:1:"1";}i:4;a:2:{s:2:"id";s:1:"5";s:4:"attr";s:1:"1";}i:5;a:2:{s:2:"id";s:1:"6";s:4:"attr";s:1:"0";}i:6;a:2:{s:2:"id";s:1:"7";s:4:"attr";s:1:"0";}i:7;a:2:{s:2:"id";s:1:"8";s:4:"attr";s:1:"0";}i:8;a:2:{s:2:"id";s:1:"9";s:4:"attr";s:1:"1";}}}i:19;a:3:{s:8:"sphinxql";s:22:"select * from u_dist10";s:10:"total_rows";i:9;s:4:"rows";a:9:{i:0;a:2:{s:2:"id";s:1:"1";s:4:"attr";s:8:"0.578000";}i:1;a:2:{s:2:"id";s:1:"2";s:4:"attr";s:8:"6.358400";}i:2;a:2:{s:2:"id";s:1:"3";s:4:"attr";s:8:"6.358400";}i:3;a:2:{s:2:"id";s:1:"4";s:4:"attr";s:8:"6.358400";}i:4;a:2:{s:2:"id";s:1:"5";s:4:"attr";s:8:"1.734100";}i:5;a:2:{s:2:"id";s:1:"6";s:4:"attr";s:8:"0.578000";}i:6;a:2:{s:2:"id";s:1:"7";s:4:"attr";s:8:"0.578000";}i:7;a:2:{s:2:"id";s:1:"8";s:4:"attr";s:8:"0.578000";}i:8;a:2:{s:2:"id";s:1:"9";s:4:"attr";s:8:"1.734100";}}}i:20;a:3:{s:8:"sphinxql";s:22:"select * from u_dist11";s:10:"total_rows";i:9;s:4:"rows";a:9:{i:0;a:2:{s:2:"id";s:1:"1";s:4:"attr";s:8:"0.000000";}i:1;a:2:{s:2:"id";s:1:"2";s:4:"attr";s:8:"1.000000";}i:2;a:2:{s:2:"id";s:1:"3";s:4:"attr";s:8:"1.000000";}i:3;a:2:{s:2:"id";s:1:"4";s:4:"attr";s:8:"1.000000";}i:4;a:2:{s:2:"id";s:1:"5";s:4:"attr";s:8:"1.000000";}i:5;a:2:{s:2:"id";s:1:"6";s:4:"attr";s:8:"0.000000";}i:6;a:2:{s:2:"id";s:1:"7";s:4:"attr";s:8:"0.000000";}i:7;a:2:{s:2:"id";s:1:"8";s:4:"attr";s:8:"0.000000";}i:8;a:2:{s:2:"id";s:1:"9";s:4:"attr";s:8:"1.000000";}}}i:21;a:3:{s:8:"sphinxql";s:22:"select * from u_dist20";s:10:"total_rows";i:9;s:4:"rows";a:9:{i:0;a:2:{s:2:"id";s:1:"1";s:4:"attr";s:12:"100000000000";}i:1;a:2:{s:2:"id";s:1:"2";s:4:"attr";s:13:"1100000000000";}i:2;a:2:{s:2:"id";s:1:"3";s:4:"attr";s:13:"1100000000000";}i:3;a:2:{s:2:"id";s:1:"4";s:4:"attr";s:13:"1100000000000";}i:4;a:2:{s:2:"id";s:1:"5";s:4:"attr";s:12:"300000000000";}i:5;a:2:{s:2:"id";s:1:"6";s:4:"attr";s:12:"100000000000";}i:6;a:2:{s:2:"id";s:1:"7";s:4:"attr";s:12:"100000000000";}i:7;a:2:{s:2:"id";s:1:"8";s:4:"attr";s:12:"100000000000";}i:8;a:2:{s:2:"id";s:1:"9";s:4:"attr";s:12:"300000000000";}}}i:22;a:3:{s:8:"sphinxql";s:22:"select * from 
u_dist21";s:10:"total_rows";i:9;s:4:"rows";a:9:{i:0;a:2:{s:2:"id";s:1:"1";s:4:"attr";s:3:"100";}i:1;a:2:{s:2:"id";s:1:"2";s:4:"attr";s:4:"1100";}i:2;a:2:{s:2:"id";s:1:"3";s:4:"attr";s:4:"1100";}i:3;a:2:{s:2:"id";s:1:"4";s:4:"attr";s:4:"1100";}i:4;a:2:{s:2:"id";s:1:"5";s:4:"attr";s:3:"300";}i:5;a:2:{s:2:"id";s:1:"6";s:4:"attr";s:3:"100";}i:6;a:2:{s:2:"id";s:1:"7";s:4:"attr";s:3:"100";}i:7;a:2:{s:2:"id";s:1:"8";s:4:"attr";s:3:"100";}i:8;a:2:{s:2:"id";s:1:"9";s:4:"attr";s:3:"300";}}}i:23;a:3:{s:8:"sphinxql";s:22:"select * from u_dist22";s:10:"total_rows";i:9;s:4:"rows";a:9:{i:0;a:2:{s:2:"id";s:1:"1";s:4:"attr";s:3:"100";}i:1;a:2:{s:2:"id";s:1:"2";s:4:"attr";s:4:"1100";}i:2;a:2:{s:2:"id";s:1:"3";s:4:"attr";s:4:"1100";}i:3;a:2:{s:2:"id";s:1:"4";s:4:"attr";s:4:"1100";}i:4;a:2:{s:2:"id";s:1:"5";s:4:"attr";s:3:"300";}i:5;a:2:{s:2:"id";s:1:"6";s:4:"attr";s:3:"100";}i:6;a:2:{s:2:"id";s:1:"7";s:4:"attr";s:3:"100";}i:7;a:2:{s:2:"id";s:1:"8";s:4:"attr";s:3:"100";}i:8;a:2:{s:2:"id";s:1:"9";s:4:"attr";s:3:"300";}}}i:24;a:3:{s:8:"sphinxql";s:22:"select * from u_dist23";s:10:"total_rows";i:9;s:4:"rows";a:9:{i:0;a:2:{s:2:"id";s:1:"1";s:4:"attr";s:1:"0";}i:1;a:2:{s:2:"id";s:1:"2";s:4:"attr";s:1:"1";}i:2;a:2:{s:2:"id";s:1:"3";s:4:"attr";s:1:"1";}i:3;a:2:{s:2:"id";s:1:"4";s:4:"attr";s:1:"1";}i:4;a:2:{s:2:"id";s:1:"5";s:4:"attr";s:1:"1";}i:5;a:2:{s:2:"id";s:1:"6";s:4:"attr";s:1:"0";}i:6;a:2:{s:2:"id";s:1:"7";s:4:"attr";s:1:"0";}i:7;a:2:{s:2:"id";s:1:"8";s:4:"attr";s:1:"0";}i:8;a:2:{s:2:"id";s:1:"9";s:4:"attr";s:1:"1";}}}i:25;a:3:{s:8:"sphinxql";s:29:"select * from u_float, u_uint";s:10:"total_rows";i:9;s:4:"rows";a:9:{i:0;a:1:{s:2:"id";s:1:"1";}i:1;a:1:{s:2:"id";s:1:"2";}i:2;a:1:{s:2:"id";s:1:"3";}i:3;a:1:{s:2:"id";s:1:"4";}i:4;a:1:{s:2:"id";s:1:"5";}i:5;a:1:{s:2:"id";s:1:"6";}i:6;a:1:{s:2:"id";s:1:"7";}i:7;a:1:{s:2:"id";s:1:"8";}i:8;a:1:{s:2:"id";s:1:"9";}}}i:26;a:3:{s:8:"sphinxql";s:31:"select * from u_float, u_bigint";s:10:"total_rows";i:9;s:4:"rows";a:9:{i:0;a:1:{s:2:"id";s:1:"1";}i:1;a:1:{s:2:"id";s:1:"2";}i:2;a:1:{s:2:"id";s:1:"3";}i:3;a:1:{s:2:"id";s:1:"4";}i:4;a:1:{s:2:"id";s:1:"5";}i:5;a:1:{s:2:"id";s:1:"6";}i:6;a:1:{s:2:"id";s:1:"7";}i:7;a:1:{s:2:"id";s:1:"8";}i:8;a:1:{s:2:"id";s:1:"9";}}}i:27;a:3:{s:8:"sphinxql";s:29:"select * from u_uint, u_float";s:10:"total_rows";i:9;s:4:"rows";a:9:{i:0;a:1:{s:2:"id";s:1:"1";}i:1;a:1:{s:2:"id";s:1:"2";}i:2;a:1:{s:2:"id";s:1:"3";}i:3;a:1:{s:2:"id";s:1:"4";}i:4;a:1:{s:2:"id";s:1:"5";}i:5;a:1:{s:2:"id";s:1:"6";}i:6;a:1:{s:2:"id";s:1:"7";}i:7;a:1:{s:2:"id";s:1:"8";}i:8;a:1:{s:2:"id";s:1:"9";}}}i:28;a:3:{s:8:"sphinxql";s:31:"select * from u_bigint, u_float";s:10:"total_rows";i:9;s:4:"rows";a:9:{i:0;a:1:{s:2:"id";s:1:"1";}i:1;a:1:{s:2:"id";s:1:"2";}i:2;a:1:{s:2:"id";s:1:"3";}i:3;a:1:{s:2:"id";s:1:"4";}i:4;a:1:{s:2:"id";s:1:"5";}i:5;a:1:{s:2:"id";s:1:"6";}i:6;a:1:{s:2:"id";s:1:"7";}i:7;a:1:{s:2:"id";s:1:"8";}i:8;a:1:{s:2:"id";s:1:"9";}}}}}sphinx-2.0.4-release/test/test_117/0000755000176700017710000000000011724063141016270 5ustar deogardeogarsphinx-2.0.4-release/test/test_117/test.xml0000644000176700017710000000507611421075337020005 0ustar deogardeogar RT: ram vs disk chunk search searchd { workers = threads binlog_path = } index rt0 { docinfo = extern morphology = none charset_type = utf-8 type = rt rt_field = title rt_field = content rt_attr_uint = id1 rt_attr_uint = id2 path = /index rt_mem_limit = 128K } drop table if exists rt0 create table rt0 ( id int not null, title varchar(255) not null, content varchar(255) not null, id1 int, id2 int ); insert into rt0 values ( 
1000000, 'first title', 'first content', 1000000, 1000000 ); $value) $foo[$key] = $value; $results[] = $foo; } } } @mysql_close($sock); ]]> sphinx-2.0.4-release/test/test_117/model.bin0000644000176700017710000000060611455516446020100 0ustar deogardeogara:1:{i:0;a:1:{i:0;a:5:{i:0;s:44:"query 0: total_rows=2 str=match('submarine')";i:1;a:4:{s:2:"id";s:1:"1";s:6:"weight";s:4:"2774";s:3:"id1";s:2:"11";s:3:"id2";s:2:"11";}i:2;a:4:{s:2:"id";s:3:"301";s:6:"weight";s:4:"2774";s:3:"id1";s:2:"77";s:3:"id2";s:2:"77";}i:3;s:30:"query 1: total_rows=1 str=id=1";i:4;a:4:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"id1";s:2:"11";s:3:"id2";s:2:"11";}}}}sphinx-2.0.4-release/test/test_129/0000755000176700017710000000000011724063141016273 5ustar deogardeogarsphinx-2.0.4-release/test/test_129/test.xml0000644000176700017710000000340311605620330017771 0ustar deogardeogar MVA updates indexer { mem_limit = 16M } searchd { } source src { type = mysql sql_query = SELECT id, text, mva, tag FROM test_table sql_attr_multi = uint mva from field sql_attr_multi = bigint mva from field sql_attr_uint = tag } index idx { source = src path = /main charset_type = utf-8 docinfo = extern } Query ( "" ); if ( !$results[0] ) { $results = false; // maybe its temporary; lets retry return; } for ( $i=0; $i<2; $i++ ) { $updata = null; if ( $i==0 ) $updata = array(1=>array(1001), 3=>array(3001), 2=>array(2001)); else $updata = array(1=>array(array(101)), 3=>array(array(301, 302, 303)),2=>array(array(201, 202))); $up = $client->UpdateAttributes ( "idx", array("mva"), $updata, $i==1 ); $results[] = sprintf("up.count=%d", $up); $results[] = $client->Query ( "" ); } // final fixup; we don't want to compare times for ( $i=0; $i CREATE TABLE `test_table` ( `id` int(11), `text` varchar(255) NOT NULL, `mva` varchar(255) NOT NULL, `tag` int(11) ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 1, 'test1', '1', 11 ), ( 2, 'test2', '2,3', 22 ), ( 3, 'test3', '4,5,6', 33 ), ( 4, 'test4', '7,8,9,10', 44 ), ( 5, 'test5', '', 55 ) sphinx-2.0.4-release/test/test_129/model.bin0000644000176700017710000001033011605620330020057 0ustar 
deogardeogara:2:{i:0;a:1:{i:0;a:5:{i:0;a:5:{s:5:"error";s:0:"";s:5:"attrs";a:2:{s:3:"mva";i:1073741825;s:3:"tag";i:1;}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"mva";a:1:{i:0;s:1:"1";}s:3:"tag";s:2:"11";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"mva";a:2:{i:0;s:1:"2";i:1;s:1:"3";}s:3:"tag";s:2:"22";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"mva";a:3:{i:0;s:1:"4";i:1;s:1:"5";i:2;s:1:"6";}s:3:"tag";s:2:"33";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"mva";a:4:{i:0;s:1:"7";i:1;s:1:"8";i:2;s:1:"9";i:3;s:2:"10";}s:3:"tag";s:2:"44";}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"mva";a:0:{}s:3:"tag";s:2:"55";}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";}i:1;s:11:"up.count=-1";i:2;a:5:{s:5:"error";s:0:"";s:5:"attrs";a:2:{s:3:"mva";i:1073741825;s:3:"tag";i:1;}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"mva";a:1:{i:0;s:1:"1";}s:3:"tag";s:2:"11";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"mva";a:2:{i:0;s:1:"2";i:1;s:1:"3";}s:3:"tag";s:2:"22";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"mva";a:3:{i:0;s:1:"4";i:1;s:1:"5";i:2;s:1:"6";}s:3:"tag";s:2:"33";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"mva";a:4:{i:0;s:1:"7";i:1;s:1:"8";i:2;s:1:"9";i:3;s:2:"10";}s:3:"tag";s:2:"44";}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"mva";a:0:{}s:3:"tag";s:2:"55";}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";}i:3;s:10:"up.count=3";i:4;a:5:{s:5:"error";s:0:"";s:5:"attrs";a:2:{s:3:"mva";i:1073741825;s:3:"tag";i:1;}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"mva";a:1:{i:0;s:3:"101";}s:3:"tag";s:2:"11";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"mva";a:2:{i:0;s:3:"201";i:1;s:3:"202";}s:3:"tag";s:2:"22";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"mva";a:3:{i:0;s:3:"301";i:1;s:3:"302";i:2;s:3:"303";}s:3:"tag";s:2:"33";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"mva";a:4:{i:0;s:1:"7";i:1;s:1:"8";i:2;s:1:"9";i:3;s:2:"10";}s:3:"tag";s:2:"44";}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"mva";a:0:{}s:3:"tag";s:2:"55";}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";}}}i:1;a:1:{i:0;a:5:{i:0;a:5:{s:5:"error";s:0:"";s:5:"attrs";a:2:{s:3:"mva";i:1073741825;s:3:"tag";i:1;}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"mva";a:1:{i:0;s:1:"1";}s:3:"tag";s:2:"11";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"mva";a:2:{i:0;s:1:"2";i:1;s:1:"3";}s:3:"tag";s:2:"22";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"mva";a:3:{i:0;s:1:"4";i:1;s:1:"5";i:2;s:1:"6";}s:3:"tag";s:2:"33";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"mva";a:4:{i:0;s:1:"7";i:1;s:1:"8";i:2;s:1:"9";i:3;s:2:"10";}s:3:"tag";s:2:"44";}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"mva";a:0:{}s:3:"tag";s:2:"55";}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";}i:1;s:11:"up.count=-1";i:2;a:5:{s:5:"error";s:0:"";s:5:"attrs";a:2:{s:3:"mva";i:1073741825;s:3:"tag";i:1;}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"mva";a:1:{i:0;s:1:"1";}s:3:"tag";s:2:"11";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"mva";a:2:{i:0;s:1:"2";i:1;s:1:"3";}s:3:"tag";s:2:"22";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"mva";a:3:{i:0;s:1:"4";i:1;s:1:"5";i:2;s:1:"6";}s:3:"tag";s:2:"33";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"mva";a:4:{i:0;s:1:"7";i:1;s:1:"8";i:2;s:1:"9";i:3;s:2:"10";}s:3:"tag";s:2:"44";}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"mva";a:0:{}s:3:"tag";s:2:"55";}}}s:5:"tot
al";s:1:"5";s:11:"total_found";s:1:"5";}i:3;s:10:"up.count=3";i:4;a:5:{s:5:"error";s:0:"";s:5:"attrs";a:2:{s:3:"mva";i:1073741825;s:3:"tag";i:1;}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"mva";a:1:{i:0;s:3:"101";}s:3:"tag";s:2:"11";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"mva";a:2:{i:0;s:3:"201";i:1;s:3:"202";}s:3:"tag";s:2:"22";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"mva";a:3:{i:0;s:3:"301";i:1;s:3:"302";i:2;s:3:"303";}s:3:"tag";s:2:"33";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"mva";a:4:{i:0;s:1:"7";i:1;s:1:"8";i:2;s:1:"9";i:3;s:2:"10";}s:3:"tag";s:2:"44";}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"mva";a:0:{}s:3:"tag";s:2:"55";}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";}}}}sphinx-2.0.4-release/test/test_109/0000755000176700017710000000000011724063141016271 5ustar deogardeogarsphinx-2.0.4-release/test/test_109/test.xml0000644000176700017710000000650611550552201017776 0ustar deogardeogar RT: kill-list and segments merge searchd { workers = threads binlog_path = } index test { type = rt path = /testrt123 rt_mem_limit = 128K rt_attr_uint = group_id rt_field = title rt_field = content } index seg1 { type = rt path = /seg1 rt_mem_limit = 32K rt_attr_uint = gid rt_field = body } indexer { mem_limit = 16M } source dummysrc { type = mysql sql_query = SELECT * FROM test_table where id=1 } index dummysrcmain { source = dummysrc path = /dummysrcmain } CREATE TABLE test_table ( id INTEGER PRIMARY KEY NOT NULL AUTO_INCREMENT, title varchar(255) NOT NULL ); DROP TABLE IF EXISTS test_table; $value) $foo[$key] = $value; $results[] = $foo; } } } $replaced = 0; $queried = 0; for ( $i=0; $i<50; $i++ ) { $res = array (); $res[] = @mysql_query ( "REPLACE INTO seg1 ( id, gid, body ) VALUES ( $i, $i, 'text $i' )" ); $res[] = @mysql_query ( "REPLACE INTO seg1 ( id, gid, body ) VALUES ( $i, $i, 'text $i' )" ); foreach ( $res as $r ) { if ($r===true) $replaced++; } $rq = @mysql_query ( "SELECT * FROM seg1 WHERE MATCH('text')" ); $queried += mysql_num_rows ($rq); } $results[] = "segments merge: replaced=$replaced, queried=$queried"; @mysql_close($sock); ]]> sphinx-2.0.4-release/test/test_109/model.bin0000644000176700017710000000054011550552201020057 0ustar deogardeogara:1:{i:0;a:1:{i:0;a:6:{i:0;s:18:"total inserted=949";i:1;s:27:"id=21 query 0: total_rows=1";i:2;a:3:{s:2:"id";s:2:"21";s:6:"weight";s:1:"1";s:8:"group_id";s:5:"10900";}i:3;s:37:"match('gooddy') query 1: total_rows=1";i:4;a:3:{s:2:"id";s:2:"21";s:6:"weight";s:4:"1727";s:8:"group_id";s:5:"10900";}i:5;s:41:"segments merge: replaced=100, queried=790";}}}sphinx-2.0.4-release/test/test_011/0000755000176700017710000000000011724063141016261 5ustar deogardeogarsphinx-2.0.4-release/test/test_011/test.xml0000644000176700017710000000332711565476572020012 0ustar deogardeogar postfixes indexer { mem_limit = 16M } searchd { } source srclj { type = mysql sql_query = SELECT id, subject, body, author FROM test_table } index lj { source = srclj path = /lj charset_type = utf-8 min_infix_len = 0 min_infix_len = 1 min_infix_len = 3 enable_star = 0 enable_star = 1 } admin *eat* *search *dmin *rep * CREATE TABLE `test_table` ( `id` int(11) NOT NULL default '0', `document_id` int(5) NOT NULL default '0', `subject` varchar(255) NOT NULL default '', `body` varchar(255) NOT NULL default '', `author` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES (1,1,'Problem with enable_star searches','Having star searches is great! 
Just what we needed','Maurice Makaay'), (2,2,'Problem with enable_star searches',' But the thing is, that I cannot search for authors anymore','admin'), (3,3,'Problem with enable_star searches','I will try to repro it here and update you','shodan') sphinx-2.0.4-release/test/test_011/model.bin0000644000176700017710000003373411565476572020110 0ustar deogardeogara:6:{i:0;a:6:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"admin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"admin";}i:1;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"eat";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"*eat*";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"search";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"*search";}i:3;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dmin";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"*dmin";}i:4;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"rep";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"*rep";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:1:"*";}}i:1;a:6:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"admin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"admin";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:3:"eat";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"*eat*";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0
;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"search";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"5";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"*search";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"dmin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"*dmin";}i:4;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:3:"rep";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"*rep";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:1:"*";}}i:2;a:6:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"admin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"admin";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:3:"eat";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"*eat*";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"search";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"5";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"*search";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dmin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"*dmin";}i:4;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attr
s";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"rep";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"*rep";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:1:"*";}}i:3;a:6:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"admin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"admin";}i:1;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"eat";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"*eat*";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"search";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"*search";}i:3;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dmin";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"*dmin";}i:4;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"rep";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"*rep";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:1:"*";}}i:4;a:6:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"admin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"admin";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"*e
at*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"*eat*";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"*search";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"*search";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*dmin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"*dmin";}i:4;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"*rep";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"*rep";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:1:"*";}}i:5;a:6:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"admin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"admin";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"*eat*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"*eat*";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"*search";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"*search";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"*dmin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"*dmin";}i:4;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s
:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"*rep";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"*rep";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:1:"*";}}}sphinx-2.0.4-release/test/test_097/0000755000176700017710000000000011724063141016277 5ustar deogardeogarsphinx-2.0.4-release/test/test_097/test.xml0000644000176700017710000000317211301364756020012 0ustar deogardeogar cutoff indexer { mem_limit = 16M } searchd { } source srctest { type = mysql sql_query = SELECT document_id, document_id+10 gid, title FROM test_table sql_attr_uint = gid } index test { source = srctest path = /test } test test CREATE TABLE `test_table` ( `document_id` int(11) NOT NULL default '0', `title` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO test_table VALUES ( 1, 'test' ), ( 2, 'test' ), ( 3, 'test' ), ( 4, 'it' ) INSERT INTO test_table SELECT document_id+4, title FROM test_table INSERT INTO test_table SELECT document_id+8, title FROM test_table INSERT INTO test_table SELECT document_id+16, title FROM test_table INSERT INTO test_table SELECT document_id+32, title FROM test_table INSERT INTO test_table SELECT document_id+64, title FROM test_table INSERT INTO test_table SELECT document_id+128, title FROM test_table INSERT INTO test_table SELECT document_id+256, title FROM test_table INSERT INTO test_table SELECT document_id+512, title FROM test_table sphinx-2.0.4-release/test/test_097/model.bin0000644000176700017710000001024511301364756020102 0ustar deogardeogara:1:{i:0;a:4:{i:0;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:20:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"gid";s:2:"11";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"gid";s:2:"12";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"gid";s:2:"13";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"gid";s:2:"14";}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"gid";s:2:"15";}}i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"gid";s:2:"16";}}i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"gid";s:2:"17";}}i:8;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"gid";s:2:"18";}}i:9;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"gid";s:2:"19";}}i:10;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"gid";s:2:"20";}}i:11;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"gid";s:2:"21";}}i:12;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"gid";s:2:"22";}}i:13;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"gid";s:2:"23";}}i:14;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"gid";s:2:"24";}}i:15;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"gid";s:2:"25";}}i:16;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"gid";s:2:"26";}}i:17;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"gid";s:2:"27";}}i:18;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"gid";s:2:"28";}}i:19;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"gid";s:2:"29";}}i:20;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"gid";s:2:"30";}}}s:5:"total";s:4:"1000";s:11:"total_found";s:4:"1024";s:4:"time";s:5:"0.008";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title"
;}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:20:{i:1;a:2:{s:6:"weight";s:4:"1464";s:5:"attrs";a:1:{s:3:"gid";s:2:"11";}}i:2;a:2:{s:6:"weight";s:4:"1464";s:5:"attrs";a:1:{s:3:"gid";s:2:"12";}}i:3;a:2:{s:6:"weight";s:4:"1464";s:5:"attrs";a:1:{s:3:"gid";s:2:"13";}}i:5;a:2:{s:6:"weight";s:4:"1464";s:5:"attrs";a:1:{s:3:"gid";s:2:"15";}}i:6;a:2:{s:6:"weight";s:4:"1464";s:5:"attrs";a:1:{s:3:"gid";s:2:"16";}}i:7;a:2:{s:6:"weight";s:4:"1464";s:5:"attrs";a:1:{s:3:"gid";s:2:"17";}}i:9;a:2:{s:6:"weight";s:4:"1464";s:5:"attrs";a:1:{s:3:"gid";s:2:"19";}}i:10;a:2:{s:6:"weight";s:4:"1464";s:5:"attrs";a:1:{s:3:"gid";s:2:"20";}}i:11;a:2:{s:6:"weight";s:4:"1464";s:5:"attrs";a:1:{s:3:"gid";s:2:"21";}}i:13;a:2:{s:6:"weight";s:4:"1464";s:5:"attrs";a:1:{s:3:"gid";s:2:"23";}}i:14;a:2:{s:6:"weight";s:4:"1464";s:5:"attrs";a:1:{s:3:"gid";s:2:"24";}}i:15;a:2:{s:6:"weight";s:4:"1464";s:5:"attrs";a:1:{s:3:"gid";s:2:"25";}}i:17;a:2:{s:6:"weight";s:4:"1464";s:5:"attrs";a:1:{s:3:"gid";s:2:"27";}}i:18;a:2:{s:6:"weight";s:4:"1464";s:5:"attrs";a:1:{s:3:"gid";s:2:"28";}}i:19;a:2:{s:6:"weight";s:4:"1464";s:5:"attrs";a:1:{s:3:"gid";s:2:"29";}}i:21;a:2:{s:6:"weight";s:4:"1464";s:5:"attrs";a:1:{s:3:"gid";s:2:"31";}}i:22;a:2:{s:6:"weight";s:4:"1464";s:5:"attrs";a:1:{s:3:"gid";s:2:"32";}}i:23;a:2:{s:6:"weight";s:4:"1464";s:5:"attrs";a:1:{s:3:"gid";s:2:"33";}}i:25;a:2:{s:6:"weight";s:4:"1464";s:5:"attrs";a:1:{s:3:"gid";s:2:"35";}}i:26;a:2:{s:6:"weight";s:4:"1464";s:5:"attrs";a:1:{s:3:"gid";s:2:"36";}}}s:5:"total";s:3:"768";s:11:"total_found";s:3:"768";s:4:"time";s:5:"0.007";s:5:"words";a:1:{s:4:"test";a:2:{s:4:"docs";s:3:"768";s:4:"hits";s:3:"768";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"test";}i:2;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"gid";s:2:"11";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1464";s:5:"attrs";a:1:{s:3:"gid";s:2:"11";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:4:"test";a:2:{s:4:"docs";s:3:"768";s:4:"hits";s:3:"768";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"test";}}}sphinx-2.0.4-release/test/test_009/0000755000176700017710000000000011724063141016270 5ustar deogardeogarsphinx-2.0.4-release/test/test_009/test.xml0000644000176700017710000001125211503513132017765 0ustar deogardeogar attributes over network indexer { mem_limit = 16M } searchd { } source src1 { type = mysql sql_query = SELECT id, adtext, heading, postcode, lng, lat, section, make_id, transmission_id FROM test_table sql_attr_float = lng sql_attr_float = lat sql_attr_uint = section sql_attr_uint = make_id sql_attr_uint = transmission_id } source src2 { type = mysql sql_query = SELECT id, adtext, heading, postcode, lng, lat, section, make_id, transmission_id FROM test_table sql_attr_float = lng sql_attr_float = lat sql_attr_uint = section sql_attr_uint = make_id sql_attr_uint = transmission_id } source src3 { type = mysql sql_query = SELECT id, adtext, heading, postcode, lng, lat, section, make_id, transmission_id FROM test_table sql_attr_float = lng sql_attr_float = lat sql_attr_uint = section sql_attr_uint = make_id sql_attr_uint = transmission_id } 
source src4 { type = mysql sql_query = SELECT id, adtext, heading, postcode, lng, lat, section, make_id, transmission_id FROM test_table sql_attr_float = lng sql_attr_float = lat sql_attr_uint = section sql_attr_uint = make_id sql_attr_uint = transmission_id } index block1 { source = src1 path = /block1 docinfo = extern charset_type = utf-8 min_word_len = 1 charset_type = sbcs } index block2 { source = src2 path = /block2 docinfo = extern charset_type = utf-8 min_word_len = 1 charset_type = sbcs } index block3 { source = src3 path = /block3 docinfo = extern charset_type = utf-8 min_word_len = 1 charset_type = sbcs } index block4 { source = src4 path = /block4 docinfo = extern charset_type = utf-8 min_word_len = 1 charset_type = sbcs } index dist { type = distributed local = block1 agent = :block2 agent_connect_timeout = 1000 agent_query_timeout = 3000 } 2 lng lat @geodist $client->SetGeoAnchor ('lng','lat',-0.0798578,0.937717 ); $client->SetSortMode ( SPH_SORT_EXTENDED, '@geodist DESC' ); $results = $client->Query ( $query, 'dist' ); CREATE TABLE `test_table` ( `id` int(11) NOT NULL auto_increment, `section` int(11) NOT NULL, `system_id` tinyint(4) NOT NULL, `adtext` varchar(255) NOT NULL, `heading` varchar(500) NOT NULL, `price` int(11) NOT NULL default '0', `postcode` varchar(10) NOT NULL, `gre` int(11) NOT NULL, `grn` int(11) NOT NULL, `str_at1` varchar(255) NOT NULL, `str_at2` varchar(255) NOT NULL, `str_at3` varchar(255) NOT NULL, `str_at4` varchar(255) NOT NULL, `str_at5` varchar(255) NOT NULL, `int_at1` int(11) default NULL, `int_at2` int(11) default NULL, `int_at3` int(11) default NULL, `int_at4` int(11) default NULL, `int_at5` int(11) default NULL, `float_at1` float default NULL, `float_at2` float default NULL, `float_at3` float default NULL, `lng` float NOT NULL default '0', `lat` float NOT NULL default '0', `make_id` int(11) NOT NULL, `transmission_id` tinyint(4) NOT NULL, PRIMARY KEY (`id`) ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` (`id`, `section`, `system_id`, `adtext`, `heading`, `price`, `postcode`, `gre`, `grn`, `str_at1`, `str_at2`, `str_at3`, `str_at4`, `str_at5`, `int_at1`, `int_at2`, `int_at3`, `int_at4`, `int_at5`, `float_at1`, `float_at2`, `float_at3`, `lng`, `lat`, `make_id`, `transmission_id`) VALUES (1, 1, 2, 'FORD', 'Ford KA', 2790, 'EN3 5BT', 535000, 197400, 'Ford', 'KA', 'Grey', 'Diesel', '', 18662, NULL, NULL, NULL, NULL, NULL, NULL, NULL, -0.0798578, 0.937717, 8, 1), (2, 1, 0, 'until', 'Vauxhall Corsa', 5800, 'BN42 4RN', 524000, 106100, 'Vauxhall', 'Corsa', 'Red', 'Petrol', '', 19296, NULL, NULL, NULL, NULL, NULL, NULL, NULL, -0.0799989, 0.891975, 5, 0), (211250, 0, 1, 'Quattro Roadster', 'Audi TT', 13995, 'E9 7DG', 535600, 184200, '', '', '', '', '', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, -0.0721455, 0.926761, 29, 1); sphinx-2.0.4-release/test/test_009/model.bin0000644000176700017710000000211011455516446020070 0ustar 
deogardeogara:1:{i:0;a:1:{i:0;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:6:"adtext";i:1;s:7:"heading";i:2;s:8:"postcode";}s:5:"attrs";a:6:{s:3:"lng";i:5;s:3:"lat";i:5;s:7:"section";i:1;s:7:"make_id";i:1;s:15:"transmission_id";i:1;s:8:"@geodist";i:5;}s:7:"matches";a:3:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:6:{s:3:"lng";d:-0.079998902976512908935546875;s:3:"lat";d:0.891974985599517822265625;s:7:"section";s:1:"1";s:7:"make_id";s:1:"5";s:15:"transmission_id";s:1:"0";s:8:"@geodist";d:291086.09375;}}i:211250;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:6:{s:3:"lng";d:-0.072145499289035797119140625;s:3:"lat";d:0.926760971546173095703125;s:7:"section";s:1:"0";s:7:"make_id";s:2:"29";s:15:"transmission_id";s:1:"1";s:8:"@geodist";d:85369.6015625;}}i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:6:{s:3:"lng";d:-0.0798577964305877685546875;s:3:"lat";d:0.937717020511627197265625;s:7:"section";s:1:"1";s:7:"make_id";s:1:"8";s:15:"transmission_id";s:1:"1";s:8:"@geodist";d:0;}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.026";s:5:"query";s:0:"";}}}sphinx-2.0.4-release/test/test_090/0000755000176700017710000000000011724063141016270 5ustar deogardeogarsphinx-2.0.4-release/test/test_090/test.xml0000644000176700017710000000255011421075337017777 0ustar deogardeogar SphinxQL: minus sign padding indexer { mem_limit = 16M } searchd { workers = threads binlog_path = } index test { type = rt path = data/test rt_attr_uint = intt rt_field = body } insert into test (id,body) values (1,'value') select (3-1) as a from test select (3 -1) as a from test select (3- 1) as a from test select (3-0.1) as a from test select (3- 0.1) as a from test select (3 -0.1) as a from test select (3--1) as a from test select (3 --1) as a from test select (3-- 1) as a from test select (3--0.1) as a from test select (3-- 0.1) as a from test select (3 --0.1) as a from test select (3- -1) as a from test select (3 - -1) as a from test select (3- - 1) as a from test select (3- -0.1) as a from test select (3- - 0.1) as a from test select (3 - -0.1) as a from test sphinx-2.0.4-release/test/test_090/model.bin0000644000176700017710000000614711455516446020106 0ustar deogardeogara:1:{i:0;a:19:{i:0;a:2:{s:8:"sphinxql";s:45:"insert into test (id,body) values (1,'value')";s:14:"total_affected";i:1;}i:1;a:3:{s:8:"sphinxql";s:27:"select (3-1) as a from test";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:1:"a";s:1:"2";}}}i:2;a:3:{s:8:"sphinxql";s:28:"select (3 -1) as a from test";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:1:"a";s:1:"2";}}}i:3;a:3:{s:8:"sphinxql";s:28:"select (3- 1) as a from test";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:1:"a";s:1:"2";}}}i:4;a:3:{s:8:"sphinxql";s:29:"select (3-0.1) as a from test";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:1:"a";s:8:"2.900000";}}}i:5;a:3:{s:8:"sphinxql";s:30:"select (3- 0.1) as a from test";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:1:"a";s:8:"2.900000";}}}i:6;a:3:{s:8:"sphinxql";s:30:"select (3 -0.1) as a from test";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:1:"a";s:8:"2.900000";}}}i:7;a:3:{s:8:"sphinxql";s:28:"select (3--1) as a from test";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:1:"a";s:1:"4";}}}i:8;a:3:{s:8:"sphinxql";s:29:"select (3 --1) as 
a from test";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:1:"a";s:1:"4";}}}i:9;a:3:{s:8:"sphinxql";s:29:"select (3-- 1) as a from test";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:1:"a";s:1:"4";}}}i:10;a:3:{s:8:"sphinxql";s:30:"select (3--0.1) as a from test";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:1:"a";s:8:"3.100000";}}}i:11;a:3:{s:8:"sphinxql";s:31:"select (3-- 0.1) as a from test";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:1:"a";s:8:"3.100000";}}}i:12;a:3:{s:8:"sphinxql";s:31:"select (3 --0.1) as a from test";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:1:"a";s:8:"3.100000";}}}i:13;a:3:{s:8:"sphinxql";s:29:"select (3- -1) as a from test";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:1:"a";s:1:"4";}}}i:14;a:3:{s:8:"sphinxql";s:30:"select (3 - -1) as a from test";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:1:"a";s:1:"4";}}}i:15;a:3:{s:8:"sphinxql";s:30:"select (3- - 1) as a from test";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:1:"a";s:1:"4";}}}i:16;a:3:{s:8:"sphinxql";s:31:"select (3- -0.1) as a from test";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:1:"a";s:8:"3.100000";}}}i:17;a:3:{s:8:"sphinxql";s:32:"select (3- - 0.1) as a from test";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:1:"a";s:8:"3.100000";}}}i:18;a:3:{s:8:"sphinxql";s:32:"select (3 - -0.1) as a from test";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:1:"a";s:8:"3.100000";}}}}}sphinx-2.0.4-release/test/test_130/0000755000176700017710000000000011724063141016263 5ustar deogardeogarsphinx-2.0.4-release/test/test_130/load_file.txt0000644000176700017710000000006711417330440020743 0ustar deogardeogarThat paper also reminds me end point as my cool friend.sphinx-2.0.4-release/test/test_130/test.xml0000644000176700017710000000120611662472433017774 0ustar deogardeogar snippets vs load_files searchd { } source test { type = mysql sql_query = SELECT 1, 'text'; } index test { source = test path = /test charset_type = utf-8 } select 1; true, 'limit'=>0 ); $results[] = $client->BuildExcerpts($docs, 'test', 'end point', $opts ); $results[] = $client->BuildExcerpts($docs, 'test', 'not_found', $opts ); ]]> sphinx-2.0.4-release/test/test_130/model.bin0000644000176700017710000000030211417330440020046 0ustar deogardeogara:1:{i:0;a:1:{i:0;a:2:{i:0;a:1:{i:0;s:69:"That paper also reminds me end point as my cool friend.";}i:1;a:1:{i:0;s:55:"That paper also reminds me end point as my cool friend.";}}}}sphinx-2.0.4-release/test/test_114/0000755000176700017710000000000011724063141016265 5ustar deogardeogarsphinx-2.0.4-release/test/test_114/test.xml0000644000176700017710000000224611503513132017765 0ustar deogardeogar phrase matching vs hit buffer boundary indexer { mem_limit = 16M } searchd { } source src { type = mysql sql_query = SELECT * FROM test_table } index idx { source = src path = /main charset_type = utf-8 } "aaaa bbbb" "cccc dddd" CREATE TABLE test_table ( id INTEGER AUTO_INCREMENT PRIMARY KEY NOT NULL, title TEXT NOT NULL ) ENGINE=MYISAM DROP TABLE IF EXISTS test_table sphinx-2.0.4-release/test/test_114/model.bin0000644000176700017710000000543611342762072020074 0ustar 
deogardeogara:1:{i:0;a:2:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:0:{}s:7:"matches";a:20:{i:511;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:8;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:9;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:17;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:18;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:19;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:3:"511";s:11:"total_found";s:3:"511";s:4:"time";s:5:"0.004";s:5:"words";a:2:{s:4:"aaaa";a:2:{s:4:"docs";s:3:"511";s:4:"hits";s:3:"512";}s:4:"bbbb";a:2:{s:4:"docs";s:3:"511";s:4:"hits";s:3:"512";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:11:""aaaa bbbb"";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:0:{}s:7:"matches";a:20:{i:511;a:2:{s:6:"weight";s:3:"520";s:5:"attrs";a:0:{}}i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:8;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:9;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:17;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:18;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:19;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:3:"511";s:11:"total_found";s:3:"511";s:4:"time";s:5:"0.004";s:5:"words";a:2:{s:4:"cccc";a:2:{s:4:"docs";s:3:"511";s:4:"hits";s:4:"1030";}s:4:"dddd";a:2:{s:4:"docs";s:3:"511";s:4:"hits";s:4:"1030";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:11:""cccc dddd"";}}}sphinx-2.0.4-release/test/test_094/0000755000176700017710000000000011724063141016274 5ustar deogardeogarsphinx-2.0.4-release/test/test_094/test.xml0000644000176700017710000000147411503513132017776 0ustar deogardeogar proximity queries indexer { mem_limit = 16M } searchd { } source srctest { type = mysql sql_query = SELECT * FROM test_table } index test { source = srctest path = /test charset_type = utf-8 } "one two"~10 CREATE TABLE `test_table` ( `document_id` int(11) NOT NULL default '0', `body` VARCHAR(16384) NOT NULL ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` SELECT 1, REPEAT('one two ',257) UNION SELECT 2, 'two one' 
sphinx-2.0.4-release/test/test_094/model.bin0000644000176700017710000000102411301013275020055 0ustar deogardeogara:1:{i:0;a:1:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:1;a:2:{s:6:"weight";s:6:"255186";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"1356";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:3:"one";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:3:"258";}s:3:"two";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:3:"258";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:12:""one two"~10";}}}sphinx-2.0.4-release/test/test_073/0000755000176700017710000000000011724063141016271 5ustar deogardeogarsphinx-2.0.4-release/test/test_073/test.xml0000644000176700017710000000265611244023256020003 0ustar deogardeogar subtree caching (part 3) indexer { mem_limit = 16M } searchd { subtree_docs_cache = 0 subtree_docs_cache = 40 subtree_docs_cache = 10M subtree_hits_cache = 0 subtree_hits_cache = 40 subtree_hits_cache = 10M } source srctest { type = mysql sql_query = SELECT * FROM test_table } index test_idx { source = srctest path = /test charset_type = utf-8 } CREATE TABLE `test_table` ( `id` int(11) NOT NULL default '0', `body` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 1, 'one' ), ( 2, 'one two' ), ( 3, 'one two three' ), ( 4, 'one two three four' ), ( 5, 'one two three four five' ) AddQuery ('one'); $client->AddQuery ('one two'); $client->AddQuery ('one two three'); $client->AddQuery ('two three four'); $client->AddQuery ('four five'); $results = $client->RunQueries (); for ( $i=0; $i<=4; $i++ ) if ( is_array($results) && is_array($results[$i]) ) unset ( $results[$i]["time"] ); ]]> sphinx-2.0.4-release/test/test_073/model.bin0000644000176700017710000005161611227074350020076 0ustar 
deogardeogara:9:{i:0;a:1:{i:0;a:5:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:5:"words";a:1:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}}}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:4:{i:2;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:5:"words";a:2:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}s:3:"two";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}}}i:2;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:3;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:5:"words";a:3:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}s:3:"two";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:5:"three";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}}i:3;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:4;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:5:"words";a:3:{s:3:"two";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:5:"three";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}s:4:"four";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}}i:4;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:2:{s:4:"four";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:4:"five";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}}}i:1;a:1:{i:0;a:5:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:5:"words";a:1:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}}}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:4:{i:2;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:5:"words";a:2:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}s:3:"two";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}}}i:2;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a
:3:{i:3;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:5:"words";a:3:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}s:3:"two";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:5:"three";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}}i:3;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:4;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:5:"words";a:3:{s:3:"two";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:5:"three";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}s:4:"four";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}}i:4;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:2:{s:4:"four";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:4:"five";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}}}i:2;a:1:{i:0;a:5:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:5:"words";a:1:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}}}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:4:{i:2;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:5:"words";a:2:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}s:3:"two";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}}}i:2;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:3;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:5:"words";a:3:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}s:3:"two";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:5:"three";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}}i:3;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:4;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:5:"words";a:3:{s:3:"two";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:5:"three";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}s:4:"four";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}}i:4;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:2:{s:4:"four";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:4:"five";a:2:{s:4:"doc
s";s:1:"1";s:4:"hits";s:1:"1";}}}}}i:3;a:1:{i:0;a:5:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:5:"words";a:1:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}}}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:4:{i:2;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:5:"words";a:2:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}s:3:"two";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}}}i:2;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:3;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:5:"words";a:3:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}s:3:"two";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:5:"three";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}}i:3;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:4;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:5:"words";a:3:{s:3:"two";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:5:"three";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}s:4:"four";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}}i:4;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:2:{s:4:"four";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:4:"five";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}}}i:4;a:1:{i:0;a:5:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:5:"words";a:1:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}}}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:4:{i:2;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:5:"words";a:2:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}s:3:"two";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}}}i:2;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0
:{}s:7:"matches";a:3:{i:3;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:5:"words";a:3:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}s:3:"two";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:5:"three";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}}i:3;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:4;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:5:"words";a:3:{s:3:"two";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:5:"three";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}s:4:"four";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}}i:4;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:2:{s:4:"four";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:4:"five";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}}}i:5;a:1:{i:0;a:5:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:5:"words";a:1:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}}}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:4:{i:2;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:5:"words";a:2:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}s:3:"two";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}}}i:2;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:3;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:5:"words";a:3:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}s:3:"two";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:5:"three";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}}i:3;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:4;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:5:"words";a:3:{s:3:"two";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:5:"three";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}s:4:"four";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}}i:4;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:2:{s:4:"four";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:4:"f
ive";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}}}i:6;a:1:{i:0;a:5:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:5:"words";a:1:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}}}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:4:{i:2;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:5:"words";a:2:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}s:3:"two";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}}}i:2;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:3;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:5:"words";a:3:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}s:3:"two";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:5:"three";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}}i:3;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:4;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:5:"words";a:3:{s:3:"two";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:5:"three";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}s:4:"four";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}}i:4;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:2:{s:4:"four";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:4:"five";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}}}i:7;a:1:{i:0;a:5:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:5:"words";a:1:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}}}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:4:{i:2;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:5:"words";a:2:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}s:3:"two";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}}}i:2;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body
";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:3;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:5:"words";a:3:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}s:3:"two";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:5:"three";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}}i:3;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:4;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:5:"words";a:3:{s:3:"two";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:5:"three";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}s:4:"four";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}}i:4;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:2:{s:4:"four";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:4:"five";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}}}i:8;a:1:{i:0;a:5:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:5:"words";a:1:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}}}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:4:{i:2;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:5:"words";a:2:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}s:3:"two";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}}}i:2;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:3;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:5:"words";a:3:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}s:3:"two";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:5:"three";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}}i:3;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:4;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:5:"words";a:3:{s:3:"two";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:5:"three";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}s:4:"four";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}}i:4;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:2:{s:4:"four";a:2:{s:4:"docs";s:1:"2";s:4:"hit
s";s:1:"2";}s:4:"five";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}}}}sphinx-2.0.4-release/test/test_119/0000755000176700017710000000000011724063141016272 5ustar deogardeogarsphinx-2.0.4-release/test/test_119/test.xml0000644000176700017710000000655511641335771020017 0ustar deogardeogar select expressions vs eval stages indexer { mem_limit = 16M } searchd { workers = threads binlog_path = } source srctest { type = mysql sql_query = SELECT * FROM test_table WHERE id IN (1,2, 3, 4 ) sql_attr_uint = ival sql_attr_float = fval } index test { source = srctest path = /test } source src_mva { type = mysql sql_query = SELECT id, 1 as idd, title as mva, 'test' FROM test_table WHERE id=10 sql_attr_uint = idd sql_attr_multi = uint mva from field sql_attr_multi = bigint mva from field } index mva { source = src_mva path = /mva } source src_final { type = mysql sql_query = SELECT id, ival as idd1, id*1000 as idd2, 'test' FROM test_table sql_attr_uint = idd1 sql_attr_uint = idd2 } index final { source = src_final path = /final docinfo = extern } CREATE TABLE test_table ( id INTEGER NOT NULL, ival INTEGER NOT NULL, fval INTEGER NOT NULL, title VARCHAR(255) NOT NULL ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 1, 11, 10, 'test one' ), ( 2, 11, 20, 'test two' ), ( 3, 11, -30, 'test three' ), ( 4, 22, -40, 'test four' ), ( 10, 1, 1, '10,11,12,13' ), ( 20, 200, 1, '1' ), ( 21, 201, 1, '1' ), ( 22, 202, 1, '1' ), ( 23, 203, 1, '1' ), ( 24, 204, 1, '1' ), ( 25, 205, 1, '1' ), ( 30, 300, 1, '1' ), ( 31, 301, 1, '1' ), ( 32, 302, 1, '1' ), ( 33, 303, 1, '1' ), ( 34, 304, 1, '1' ), ( 35, 305, 1, '1' ) select * from test where match('test') order by fval asc select *, fval+1 as f1 from test where match('test') order by f1 desc select * from test group by fval select *, ival-1 as i1 from test group by i1 select *, @weight+ival as i1 from test where match('test') select *, 10+ival as i1, 50+i1 as i2 from test where match('test') select *, 10+ival as i1, 50+i1 as i2 from test select *, 10+ival as i1, 50+i1 as i2, i2*10 as i3 from test select *, 10+ival as i1, 50+i1 as i2, i2*10-i1 as i3 from test select *, 10+ival as i1, 50+i1 as i2, i2*fval as i3 from test select *, 10+ival as i1, 50+i1 as i2, i2*fval as i3 from test order by i3 asc select *, 10+ival as i1, 50+i1 as i2, i2*fval as i3 from test order by i1 asc, i3 desc select *, 10+ival as i1, 50+i1 as i2, i2*fval as i3 from test group by i2 select *, 10+ival as i1, 50+i1 as i2, i2*@id as i3 from test group by i3 select *, IN ( mva, 11 ) as cnd1 from mva where cnd1=1 select id, idd1, idd2, idd1 + idd2 as i2 from final group by idd1 order by id desc limit 0,2 option max_matches=4 select id, idd1, idd2, idd1 + idd2 as i2 from final group by idd1 order by id desc limit 2,2 option max_matches=4 sphinx-2.0.4-release/test/test_119/model.bin0000644000176700017710000005145011641335771020102 0ustar deogardeogara:2:{i:0;a:17:{i:0;a:3:{s:8:"sphinxql";s:56:"select * from test where match('test') order by fval asc";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:4:{s:2:"id";s:1:"4";s:6:"weight";s:4:"1304";s:4:"ival";s:2:"22";s:4:"fval";s:10:"-40.000000";}i:1;a:4:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1304";s:4:"ival";s:2:"11";s:4:"fval";s:10:"-30.000000";}i:2;a:4:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1304";s:4:"ival";s:2:"11";s:4:"fval";s:9:"10.000000";}i:3;a:4:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1304";s:4:"ival";s:2:"11";s:4:"fval";s:9:"20.000000";}}}i:1;a:3:{s:8:"sphinxql";s:69:"select *, fval+1 as f1 from test where match('test') order by f1 
desc";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:5:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1304";s:4:"ival";s:2:"11";s:4:"fval";s:9:"20.000000";s:2:"f1";s:9:"21.000000";}i:1;a:5:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1304";s:4:"ival";s:2:"11";s:4:"fval";s:9:"10.000000";s:2:"f1";s:9:"11.000000";}i:2;a:5:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1304";s:4:"ival";s:2:"11";s:4:"fval";s:10:"-30.000000";s:2:"f1";s:10:"-29.000000";}i:3;a:5:{s:2:"id";s:1:"4";s:6:"weight";s:4:"1304";s:4:"ival";s:2:"22";s:4:"fval";s:10:"-40.000000";s:2:"f1";s:10:"-39.000000";}}}i:2;a:3:{s:8:"sphinxql";s:32:"select * from test group by fval";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:6:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:9:"10.000000";s:8:"@groupby";s:10:"1092616192";s:6:"@count";s:1:"1";}i:1;a:6:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:9:"20.000000";s:8:"@groupby";s:10:"1101004800";s:6:"@count";s:1:"1";}i:2;a:6:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:10:"-30.000000";s:8:"@groupby";s:10:"3253731328";s:6:"@count";s:1:"1";}i:3;a:6:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:4:"ival";s:2:"22";s:4:"fval";s:10:"-40.000000";s:8:"@groupby";s:10:"3256877056";s:6:"@count";s:1:"1";}}}i:3;a:3:{s:8:"sphinxql";s:44:"select *, ival-1 as i1 from test group by i1";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:7:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:9:"10.000000";s:2:"i1";s:2:"10";s:8:"@groupby";s:2:"10";s:6:"@count";s:1:"3";}i:1;a:7:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:4:"ival";s:2:"22";s:4:"fval";s:10:"-40.000000";s:2:"i1";s:2:"21";s:8:"@groupby";s:2:"21";s:6:"@count";s:1:"1";}}}i:4;a:3:{s:8:"sphinxql";s:58:"select *, @weight+ival as i1 from test where match('test')";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:5:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1304";s:4:"ival";s:2:"11";s:4:"fval";s:9:"10.000000";s:2:"i1";s:4:"1315";}i:1;a:5:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1304";s:4:"ival";s:2:"11";s:4:"fval";s:9:"20.000000";s:2:"i1";s:4:"1315";}i:2;a:5:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1304";s:4:"ival";s:2:"11";s:4:"fval";s:10:"-30.000000";s:2:"i1";s:4:"1315";}i:3;a:5:{s:2:"id";s:1:"4";s:6:"weight";s:4:"1304";s:4:"ival";s:2:"22";s:4:"fval";s:10:"-40.000000";s:2:"i1";s:4:"1326";}}}i:5;a:3:{s:8:"sphinxql";s:66:"select *, 10+ival as i1, 50+i1 as i2 from test where match('test')";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:6:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1304";s:4:"ival";s:2:"11";s:4:"fval";s:9:"10.000000";s:2:"i1";s:2:"21";s:2:"i2";s:2:"71";}i:1;a:6:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1304";s:4:"ival";s:2:"11";s:4:"fval";s:9:"20.000000";s:2:"i1";s:2:"21";s:2:"i2";s:2:"71";}i:2;a:6:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1304";s:4:"ival";s:2:"11";s:4:"fval";s:10:"-30.000000";s:2:"i1";s:2:"21";s:2:"i2";s:2:"71";}i:3;a:6:{s:2:"id";s:1:"4";s:6:"weight";s:4:"1304";s:4:"ival";s:2:"22";s:4:"fval";s:10:"-40.000000";s:2:"i1";s:2:"32";s:2:"i2";s:2:"82";}}}i:6;a:3:{s:8:"sphinxql";s:46:"select *, 10+ival as i1, 50+i1 as i2 from 
test";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:6:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:9:"10.000000";s:2:"i1";s:2:"21";s:2:"i2";s:2:"71";}i:1;a:6:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:9:"20.000000";s:2:"i1";s:2:"21";s:2:"i2";s:2:"71";}i:2;a:6:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:10:"-30.000000";s:2:"i1";s:2:"21";s:2:"i2";s:2:"71";}i:3;a:6:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:4:"ival";s:2:"22";s:4:"fval";s:10:"-40.000000";s:2:"i1";s:2:"32";s:2:"i2";s:2:"82";}}}i:7;a:3:{s:8:"sphinxql";s:59:"select *, 10+ival as i1, 50+i1 as i2, i2*10 as i3 from test";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:7:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:9:"10.000000";s:2:"i1";s:2:"21";s:2:"i2";s:2:"71";s:2:"i3";s:3:"710";}i:1;a:7:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:9:"20.000000";s:2:"i1";s:2:"21";s:2:"i2";s:2:"71";s:2:"i3";s:3:"710";}i:2;a:7:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:10:"-30.000000";s:2:"i1";s:2:"21";s:2:"i2";s:2:"71";s:2:"i3";s:3:"710";}i:3;a:7:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:4:"ival";s:2:"22";s:4:"fval";s:10:"-40.000000";s:2:"i1";s:2:"32";s:2:"i2";s:2:"82";s:2:"i3";s:3:"820";}}}i:8;a:3:{s:8:"sphinxql";s:62:"select *, 10+ival as i1, 50+i1 as i2, i2*10-i1 as i3 from test";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:7:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:9:"10.000000";s:2:"i1";s:2:"21";s:2:"i2";s:2:"71";s:2:"i3";s:3:"689";}i:1;a:7:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:9:"20.000000";s:2:"i1";s:2:"21";s:2:"i2";s:2:"71";s:2:"i3";s:3:"689";}i:2;a:7:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:10:"-30.000000";s:2:"i1";s:2:"21";s:2:"i2";s:2:"71";s:2:"i3";s:3:"689";}i:3;a:7:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:4:"ival";s:2:"22";s:4:"fval";s:10:"-40.000000";s:2:"i1";s:2:"32";s:2:"i2";s:2:"82";s:2:"i3";s:3:"788";}}}i:9;a:3:{s:8:"sphinxql";s:61:"select *, 10+ival as i1, 50+i1 as i2, i2*fval as i3 from test";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:7:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:9:"10.000000";s:2:"i1";s:2:"21";s:2:"i2";s:2:"71";s:2:"i3";s:10:"710.000000";}i:1;a:7:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:9:"20.000000";s:2:"i1";s:2:"21";s:2:"i2";s:2:"71";s:2:"i3";s:11:"1420.000000";}i:2;a:7:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:10:"-30.000000";s:2:"i1";s:2:"21";s:2:"i2";s:2:"71";s:2:"i3";s:12:"-2130.000000";}i:3;a:7:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:4:"ival";s:2:"22";s:4:"fval";s:10:"-40.000000";s:2:"i1";s:2:"32";s:2:"i2";s:2:"82";s:2:"i3";s:12:"-3280.000000";}}}i:10;a:3:{s:8:"sphinxql";s:77:"select *, 10+ival as i1, 50+i1 as i2, i2*fval as i3 from test order by i3 
asc";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:7:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:4:"ival";s:2:"22";s:4:"fval";s:10:"-40.000000";s:2:"i1";s:2:"32";s:2:"i2";s:2:"82";s:2:"i3";s:12:"-3280.000000";}i:1;a:7:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:10:"-30.000000";s:2:"i1";s:2:"21";s:2:"i2";s:2:"71";s:2:"i3";s:12:"-2130.000000";}i:2;a:7:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:9:"10.000000";s:2:"i1";s:2:"21";s:2:"i2";s:2:"71";s:2:"i3";s:10:"710.000000";}i:3;a:7:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:9:"20.000000";s:2:"i1";s:2:"21";s:2:"i2";s:2:"71";s:2:"i3";s:11:"1420.000000";}}}i:11;a:3:{s:8:"sphinxql";s:86:"select *, 10+ival as i1, 50+i1 as i2, i2*fval as i3 from test order by i1 asc, i3 desc";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:7:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:9:"20.000000";s:2:"i1";s:2:"21";s:2:"i2";s:2:"71";s:2:"i3";s:11:"1420.000000";}i:1;a:7:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:9:"10.000000";s:2:"i1";s:2:"21";s:2:"i2";s:2:"71";s:2:"i3";s:10:"710.000000";}i:2;a:7:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:10:"-30.000000";s:2:"i1";s:2:"21";s:2:"i2";s:2:"71";s:2:"i3";s:12:"-2130.000000";}i:3;a:7:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:4:"ival";s:2:"22";s:4:"fval";s:10:"-40.000000";s:2:"i1";s:2:"32";s:2:"i2";s:2:"82";s:2:"i3";s:12:"-3280.000000";}}}i:12;a:3:{s:8:"sphinxql";s:73:"select *, 10+ival as i1, 50+i1 as i2, i2*fval as i3 from test group by i2";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:9:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:9:"10.000000";s:2:"i1";s:2:"21";s:2:"i2";s:2:"71";s:2:"i3";s:10:"710.000000";s:8:"@groupby";s:2:"71";s:6:"@count";s:1:"3";}i:1;a:9:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:4:"ival";s:2:"22";s:4:"fval";s:10:"-40.000000";s:2:"i1";s:2:"32";s:2:"i2";s:2:"82";s:2:"i3";s:12:"-3280.000000";s:8:"@groupby";s:2:"82";s:6:"@count";s:1:"1";}}}i:13;a:3:{s:8:"sphinxql";s:72:"select *, 10+ival as i1, 50+i1 as i2, i2*@id as i3 from test group by i3";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:9:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:9:"10.000000";s:2:"i1";s:2:"21";s:2:"i2";s:2:"71";s:2:"i3";s:2:"71";s:8:"@groupby";s:2:"71";s:6:"@count";s:1:"1";}i:1;a:9:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:9:"20.000000";s:2:"i1";s:2:"21";s:2:"i2";s:2:"71";s:2:"i3";s:3:"142";s:8:"@groupby";s:3:"142";s:6:"@count";s:1:"1";}i:2;a:9:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:10:"-30.000000";s:2:"i1";s:2:"21";s:2:"i2";s:2:"71";s:2:"i3";s:3:"213";s:8:"@groupby";s:3:"213";s:6:"@count";s:1:"1";}i:3;a:9:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:4:"ival";s:2:"22";s:4:"fval";s:10:"-40.000000";s:2:"i1";s:2:"32";s:2:"i2";s:2:"82";s:2:"i3";s:3:"328";s:8:"@groupby";s:3:"328";s:6:"@count";s:1:"1";}}}i:14;a:3:{s:8:"sphinxql";s:54:"select *, IN ( mva, 11 ) as cnd1 from mva where cnd1=1";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:5:{s:2:"id";s:2:"10";s:6:"weight";s:1:"1";s:3:"idd";s:1:"1";s:3:"mva";s:11:"10,11,12,13";s:4:"cnd1";s:1:"1";}}}i:15;a:3:{s:8:"sphinxql";s:113:"select id, idd1, idd2, idd1 + idd2 as i2 from final group by idd1 order by id desc limit 0,2 option 
max_matches=4";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:5:{s:2:"id";s:2:"35";s:6:"weight";s:1:"1";s:4:"idd1";s:3:"305";s:4:"idd2";s:5:"35000";s:2:"i2";s:5:"35305";}i:1;a:5:{s:2:"id";s:2:"34";s:6:"weight";s:1:"1";s:4:"idd1";s:3:"304";s:4:"idd2";s:5:"34000";s:2:"i2";s:5:"34304";}}}i:16;a:3:{s:8:"sphinxql";s:113:"select id, idd1, idd2, idd1 + idd2 as i2 from final group by idd1 order by id desc limit 2,2 option max_matches=4";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:5:{s:2:"id";s:2:"33";s:6:"weight";s:1:"1";s:4:"idd1";s:3:"303";s:4:"idd2";s:5:"33000";s:2:"i2";s:5:"33303";}i:1;a:5:{s:2:"id";s:2:"32";s:6:"weight";s:1:"1";s:4:"idd1";s:3:"302";s:4:"idd2";s:5:"32000";s:2:"i2";s:5:"32302";}}}}i:1;a:17:{i:0;a:3:{s:8:"sphinxql";s:56:"select * from test where match('test') order by fval asc";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:4:{s:2:"id";s:1:"4";s:6:"weight";s:4:"1304";s:4:"ival";s:2:"22";s:4:"fval";s:10:"-40.000000";}i:1;a:4:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1304";s:4:"ival";s:2:"11";s:4:"fval";s:10:"-30.000000";}i:2;a:4:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1304";s:4:"ival";s:2:"11";s:4:"fval";s:9:"10.000000";}i:3;a:4:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1304";s:4:"ival";s:2:"11";s:4:"fval";s:9:"20.000000";}}}i:1;a:3:{s:8:"sphinxql";s:69:"select *, fval+1 as f1 from test where match('test') order by f1 desc";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:5:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1304";s:4:"ival";s:2:"11";s:4:"fval";s:9:"20.000000";s:2:"f1";s:9:"21.000000";}i:1;a:5:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1304";s:4:"ival";s:2:"11";s:4:"fval";s:9:"10.000000";s:2:"f1";s:9:"11.000000";}i:2;a:5:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1304";s:4:"ival";s:2:"11";s:4:"fval";s:10:"-30.000000";s:2:"f1";s:10:"-29.000000";}i:3;a:5:{s:2:"id";s:1:"4";s:6:"weight";s:4:"1304";s:4:"ival";s:2:"22";s:4:"fval";s:10:"-40.000000";s:2:"f1";s:10:"-39.000000";}}}i:2;a:3:{s:8:"sphinxql";s:32:"select * from test group by fval";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:6:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:9:"10.000000";s:8:"@groupby";s:10:"1092616192";s:6:"@count";s:1:"1";}i:1;a:6:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:9:"20.000000";s:8:"@groupby";s:10:"1101004800";s:6:"@count";s:1:"1";}i:2;a:6:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:10:"-30.000000";s:8:"@groupby";s:10:"3253731328";s:6:"@count";s:1:"1";}i:3;a:6:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:4:"ival";s:2:"22";s:4:"fval";s:10:"-40.000000";s:8:"@groupby";s:10:"3256877056";s:6:"@count";s:1:"1";}}}i:3;a:3:{s:8:"sphinxql";s:44:"select *, ival-1 as i1 from test group by i1";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:7:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:9:"10.000000";s:2:"i1";s:2:"10";s:8:"@groupby";s:2:"10";s:6:"@count";s:1:"3";}i:1;a:7:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:4:"ival";s:2:"22";s:4:"fval";s:10:"-40.000000";s:2:"i1";s:2:"21";s:8:"@groupby";s:2:"21";s:6:"@count";s:1:"1";}}}i:4;a:3:{s:8:"sphinxql";s:58:"select *, @weight+ival as i1 from test where 
match('test')";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:5:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1304";s:4:"ival";s:2:"11";s:4:"fval";s:9:"10.000000";s:2:"i1";s:4:"1315";}i:1;a:5:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1304";s:4:"ival";s:2:"11";s:4:"fval";s:9:"20.000000";s:2:"i1";s:4:"1315";}i:2;a:5:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1304";s:4:"ival";s:2:"11";s:4:"fval";s:10:"-30.000000";s:2:"i1";s:4:"1315";}i:3;a:5:{s:2:"id";s:1:"4";s:6:"weight";s:4:"1304";s:4:"ival";s:2:"22";s:4:"fval";s:10:"-40.000000";s:2:"i1";s:4:"1326";}}}i:5;a:3:{s:8:"sphinxql";s:66:"select *, 10+ival as i1, 50+i1 as i2 from test where match('test')";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:6:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1304";s:4:"ival";s:2:"11";s:4:"fval";s:9:"10.000000";s:2:"i1";s:2:"21";s:2:"i2";s:2:"71";}i:1;a:6:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1304";s:4:"ival";s:2:"11";s:4:"fval";s:9:"20.000000";s:2:"i1";s:2:"21";s:2:"i2";s:2:"71";}i:2;a:6:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1304";s:4:"ival";s:2:"11";s:4:"fval";s:10:"-30.000000";s:2:"i1";s:2:"21";s:2:"i2";s:2:"71";}i:3;a:6:{s:2:"id";s:1:"4";s:6:"weight";s:4:"1304";s:4:"ival";s:2:"22";s:4:"fval";s:10:"-40.000000";s:2:"i1";s:2:"32";s:2:"i2";s:2:"82";}}}i:6;a:3:{s:8:"sphinxql";s:46:"select *, 10+ival as i1, 50+i1 as i2 from test";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:6:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:9:"10.000000";s:2:"i1";s:2:"21";s:2:"i2";s:2:"71";}i:1;a:6:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:9:"20.000000";s:2:"i1";s:2:"21";s:2:"i2";s:2:"71";}i:2;a:6:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:10:"-30.000000";s:2:"i1";s:2:"21";s:2:"i2";s:2:"71";}i:3;a:6:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:4:"ival";s:2:"22";s:4:"fval";s:10:"-40.000000";s:2:"i1";s:2:"32";s:2:"i2";s:2:"82";}}}i:7;a:3:{s:8:"sphinxql";s:59:"select *, 10+ival as i1, 50+i1 as i2, i2*10 as i3 from test";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:7:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:9:"10.000000";s:2:"i1";s:2:"21";s:2:"i2";s:2:"71";s:2:"i3";s:3:"710";}i:1;a:7:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:9:"20.000000";s:2:"i1";s:2:"21";s:2:"i2";s:2:"71";s:2:"i3";s:3:"710";}i:2;a:7:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:10:"-30.000000";s:2:"i1";s:2:"21";s:2:"i2";s:2:"71";s:2:"i3";s:3:"710";}i:3;a:7:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:4:"ival";s:2:"22";s:4:"fval";s:10:"-40.000000";s:2:"i1";s:2:"32";s:2:"i2";s:2:"82";s:2:"i3";s:3:"820";}}}i:8;a:3:{s:8:"sphinxql";s:62:"select *, 10+ival as i1, 50+i1 as i2, i2*10-i1 as i3 from test";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:7:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:9:"10.000000";s:2:"i1";s:2:"21";s:2:"i2";s:2:"71";s:2:"i3";s:3:"689";}i:1;a:7:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:9:"20.000000";s:2:"i1";s:2:"21";s:2:"i2";s:2:"71";s:2:"i3";s:3:"689";}i:2;a:7:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:10:"-30.000000";s:2:"i1";s:2:"21";s:2:"i2";s:2:"71";s:2:"i3";s:3:"689";}i:3;a:7:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:4:"ival";s:2:"22";s:4:"fval";s:10:"-40.000000";s:2:"i1";s:2:"32";s:2:"i2";s:2:"82";s:2:"i3";s:3:"788";}}}i:9;a:3:{s:8:"sphinxql";s:61:"select *, 10+ival as i1, 50+i1 as i2, i2*fval as i3 from 
test";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:7:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:9:"10.000000";s:2:"i1";s:2:"21";s:2:"i2";s:2:"71";s:2:"i3";s:10:"710.000000";}i:1;a:7:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:9:"20.000000";s:2:"i1";s:2:"21";s:2:"i2";s:2:"71";s:2:"i3";s:11:"1420.000000";}i:2;a:7:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:10:"-30.000000";s:2:"i1";s:2:"21";s:2:"i2";s:2:"71";s:2:"i3";s:12:"-2130.000000";}i:3;a:7:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:4:"ival";s:2:"22";s:4:"fval";s:10:"-40.000000";s:2:"i1";s:2:"32";s:2:"i2";s:2:"82";s:2:"i3";s:12:"-3280.000000";}}}i:10;a:3:{s:8:"sphinxql";s:77:"select *, 10+ival as i1, 50+i1 as i2, i2*fval as i3 from test order by i3 asc";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:7:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:4:"ival";s:2:"22";s:4:"fval";s:10:"-40.000000";s:2:"i1";s:2:"32";s:2:"i2";s:2:"82";s:2:"i3";s:12:"-3280.000000";}i:1;a:7:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:10:"-30.000000";s:2:"i1";s:2:"21";s:2:"i2";s:2:"71";s:2:"i3";s:12:"-2130.000000";}i:2;a:7:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:9:"10.000000";s:2:"i1";s:2:"21";s:2:"i2";s:2:"71";s:2:"i3";s:10:"710.000000";}i:3;a:7:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:9:"20.000000";s:2:"i1";s:2:"21";s:2:"i2";s:2:"71";s:2:"i3";s:11:"1420.000000";}}}i:11;a:3:{s:8:"sphinxql";s:86:"select *, 10+ival as i1, 50+i1 as i2, i2*fval as i3 from test order by i1 asc, i3 desc";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:7:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:9:"20.000000";s:2:"i1";s:2:"21";s:2:"i2";s:2:"71";s:2:"i3";s:11:"1420.000000";}i:1;a:7:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:9:"10.000000";s:2:"i1";s:2:"21";s:2:"i2";s:2:"71";s:2:"i3";s:10:"710.000000";}i:2;a:7:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:10:"-30.000000";s:2:"i1";s:2:"21";s:2:"i2";s:2:"71";s:2:"i3";s:12:"-2130.000000";}i:3;a:7:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:4:"ival";s:2:"22";s:4:"fval";s:10:"-40.000000";s:2:"i1";s:2:"32";s:2:"i2";s:2:"82";s:2:"i3";s:12:"-3280.000000";}}}i:12;a:3:{s:8:"sphinxql";s:73:"select *, 10+ival as i1, 50+i1 as i2, i2*fval as i3 from test group by i2";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:9:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:9:"10.000000";s:2:"i1";s:2:"21";s:2:"i2";s:2:"71";s:2:"i3";s:10:"710.000000";s:8:"@groupby";s:2:"71";s:6:"@count";s:1:"3";}i:1;a:9:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:4:"ival";s:2:"22";s:4:"fval";s:10:"-40.000000";s:2:"i1";s:2:"32";s:2:"i2";s:2:"82";s:2:"i3";s:12:"-3280.000000";s:8:"@groupby";s:2:"82";s:6:"@count";s:1:"1";}}}i:13;a:3:{s:8:"sphinxql";s:72:"select *, 10+ival as i1, 50+i1 as i2, i2*@id as i3 from test group by 
i3";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:9:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:9:"10.000000";s:2:"i1";s:2:"21";s:2:"i2";s:2:"71";s:2:"i3";s:2:"71";s:8:"@groupby";s:2:"71";s:6:"@count";s:1:"1";}i:1;a:9:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:9:"20.000000";s:2:"i1";s:2:"21";s:2:"i2";s:2:"71";s:2:"i3";s:3:"142";s:8:"@groupby";s:3:"142";s:6:"@count";s:1:"1";}i:2;a:9:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:4:"ival";s:2:"11";s:4:"fval";s:10:"-30.000000";s:2:"i1";s:2:"21";s:2:"i2";s:2:"71";s:2:"i3";s:3:"213";s:8:"@groupby";s:3:"213";s:6:"@count";s:1:"1";}i:3;a:9:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:4:"ival";s:2:"22";s:4:"fval";s:10:"-40.000000";s:2:"i1";s:2:"32";s:2:"i2";s:2:"82";s:2:"i3";s:3:"328";s:8:"@groupby";s:3:"328";s:6:"@count";s:1:"1";}}}i:14;a:3:{s:8:"sphinxql";s:54:"select *, IN ( mva, 11 ) as cnd1 from mva where cnd1=1";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:5:{s:2:"id";s:2:"10";s:6:"weight";s:1:"1";s:3:"idd";s:1:"1";s:3:"mva";s:11:"10,11,12,13";s:4:"cnd1";s:1:"1";}}}i:15;a:3:{s:8:"sphinxql";s:113:"select id, idd1, idd2, idd1 + idd2 as i2 from final group by idd1 order by id desc limit 0,2 option max_matches=4";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:5:{s:2:"id";s:2:"35";s:6:"weight";s:1:"1";s:4:"idd1";s:3:"305";s:4:"idd2";s:5:"35000";s:2:"i2";s:5:"35305";}i:1;a:5:{s:2:"id";s:2:"34";s:6:"weight";s:1:"1";s:4:"idd1";s:3:"304";s:4:"idd2";s:5:"34000";s:2:"i2";s:5:"34304";}}}i:16;a:3:{s:8:"sphinxql";s:113:"select id, idd1, idd2, idd1 + idd2 as i2 from final group by idd1 order by id desc limit 2,2 option max_matches=4";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:5:{s:2:"id";s:2:"33";s:6:"weight";s:1:"1";s:4:"idd1";s:3:"303";s:4:"idd2";s:5:"33000";s:2:"i2";s:5:"33303";}i:1;a:5:{s:2:"id";s:2:"32";s:6:"weight";s:1:"1";s:4:"idd1";s:3:"302";s:4:"idd2";s:5:"32000";s:2:"i2";s:5:"32302";}}}}}sphinx-2.0.4-release/test/test_087/0000755000176700017710000000000011724063141016276 5ustar deogardeogarsphinx-2.0.4-release/test/test_087/test.xml0000644000176700017710000000213211421075337020001 0ustar deogardeogar SphinxQL: INSERT with column list indexer { mem_limit = 16M } searchd { workers = threads binlog_path = } index test { type = rt path = data/test rt_attr_uint = gid rt_field = title rt_field = content } insert into test values (1,'title one','content one',3) insert into test (id,content) values (2,'query defined') select * from test where match('content') select * from test where match('query') select * from test insert into test values (5,6,7,8) insert into test values (5,6,7,8,9,10,11,12) insert into test ( id, gid, gid) values ( 123, 456, 789 ) insert into test (id, title, fakeint, fakefloat, fakestring) values (10, 'title10',1,.34,'fuck') sphinx-2.0.4-release/test/test_087/model.bin0000644000176700017710000000271611455516446020112 0ustar deogardeogara:1:{i:0;a:9:{i:0;a:2:{s:8:"sphinxql";s:55:"insert into test values (1,'title one','content one',3)";s:14:"total_affected";i:1;}i:1;a:2:{s:8:"sphinxql";s:56:"insert into test (id,content) values (2,'query defined')";s:14:"total_affected";i:1;}i:2;a:3:{s:8:"sphinxql";s:41:"select * from test where match('content')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1643";s:3:"gid";s:1:"3";}}}i:3;a:3:{s:8:"sphinxql";s:39:"select * from test where match('query')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1643";s:3:"gid";s:1:"0";}}}i:4;a:3:{s:8:"sphinxql";s:18:"select * from 
test";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"gid";s:1:"3";}i:1;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:3:"gid";s:1:"0";}}}i:5;a:3:{s:8:"sphinxql";s:33:"insert into test values (5,6,7,8)";s:5:"error";s:32:"row 1, column 2: string expected";s:5:"errno";i:1064;}i:6;a:3:{s:8:"sphinxql";s:44:"insert into test values (5,6,7,8,9,10,11,12)";s:5:"error";s:54:"column count does not match schema (expected 4, got 8)";s:5:"errno";i:1064;}i:7;a:3:{s:8:"sphinxql";s:57:"insert into test ( id, gid, gid) values ( 123, 456, 789 )";s:5:"error";s:28:"column 'gid' specified twice";s:5:"errno";i:1110;}i:8;a:3:{s:8:"sphinxql";s:96:"insert into test (id, title, fakeint, fakefloat, fakestring) values (10, 'title10',1,.34,'fuck')";s:5:"error";s:25:"unknown column: 'fakeint'";s:5:"errno";i:1064;}}}sphinx-2.0.4-release/test/helpers.inc0000644000176700017710000017062011723113351017062 0ustar deogardeogar $error_file\"", "r" ); pclose ( $process ); } else system ( "$path --config $config_file $test_mode > $error_file", $retval ); // wait until pid appears for ( $i=0; $i<$action_retries && !file_exists($pidfile); $i++ ) usleep ( $action_wait_timeout ); if ( !file_exists($pidfile) ) { $error = "PID file ($pidfile) was not created"; return 1; } // check for early crash $error = CheckSearchdLog ( $error_file, $retval ); // on windows, searchd starts *fully* async // so lets also wait until pidfile gets real data // (meaning that index precaching is actually done) if ( $retval!=1 && $windows ) { $STARTUP_TRIES = 1000; $STARTUP_TICK = 50000; // msec // FIXME! add a better check that searchd is still alive than just file_exists for ( $i=0; $i<$STARTUP_TRIES && file_exists($pidfile); $i++ ) { $pid = file($pidfile); if ( count($pid) ) break; usleep ( $STARTUP_TICK ); } } // // we've got a pid file; but lets check the log file for startup errors // for ( $i=0; $i<$action_retries && !file_exists($error_file); $i++ ) // usleep ( $action_wait_timeout ); // lets wait when daemon is ready to accept connections if ( $retval==0 ) { if ( !$addr ) $addr = $sd_address; if ( !$port ) $port = $sd_port; $cl = new SphinxClient (); $cl->SetServer ( $addr, $port ); $cl->SetConnectTimeout ( 10 ); $ok = false; $start = MyMicrotime(); for ( $i=0; $i<300; $i++ ) { if ( $cl->Open() ) { $cl->Close(); $ok = true; break; } usleep ( 500 ); } if ( !$ok ) { $tm = ( MyMicrotime() - $start ); printf ( "\nWARNING: can't connect to daemon on startup for %.1f sec\t\t\t\n", $tm ); } } if ( $retval==0 && !empty($error) ) $retval = 2; // no errors, but there were warnings return $retval; } function StopSearchd ( $config, $pidfile ) { global $g_locals, $action_retries, $action_wait_timeout; if ( file_exists($pidfile) && count(file($pidfile)) ) { $path = $g_locals['searchd']; exec ( "$path --config $config --stop" ); $i = 0; while ( file_exists ( $pidfile ) && $i < $action_retries ) { usleep ( $action_wait_timeout ); $i++; } } } function StopWaitSearchd ( $config, $pidfile ) { global $g_locals, $action_retries, $action_wait_timeout; $ret = 0; if ( file_exists($pidfile) && count(file($pidfile)) ) { $path = $g_locals['searchd']; $dummy = array(); exec ( "$path --config $config --stopwait", $dummy, $ret ); } return $ret; } function KillSearchd ( $config, $pidfile, $signal, $unlinkpid=True ) { global $windows, $action_wait_timeout; if ( file_exists($pidfile) && count(file($pidfile)) ) { if ( !$windows ) { $fp = fopen($pidfile,"r"); $pid = fread ( $fp, filesize ( $pidfile ) ); fclose ($fp); exec ("kill -s $signal 
$pid"); if ( $unlinkpid && file_exists ( $pidfile ) ) { usleep ( $action_wait_timeout ); unlink ( $pidfile ); } } else StopSearchd ($config, $pidfile); } } function IsModelGenMode () { global $g_model; return $g_model; } function CompareResultSetFixup ( &$set, $roundoff, $variants_match ) { global $g_ignore_weights; if ( !is_array($set) ) return; if ( $roundoff && !@$set["resarray"] ) // FIXME! support resarray too foreach ( $set["attrs"] as $name=>$type ) if ( $type==SPH_ATTR_FLOAT ) { foreach ( $set["matches"] as $id=>$match ) $set["matches"][$id]["attrs"][$name] = sprintf ( "%.{$roundoff}f", $set["matches"][$id]["attrs"][$name] ); } if ( $g_ignore_weights ) { if ( isset($set["matches"]) ) { if ( @$set["resarray"] ) { for ( $i=0; $i$match ) unset ( $set["matches"][$id]["weight"] ); } } if ( @$set["words"] ) foreach ( $set["words"] as $word=>$info ) $set["words"][$word] = array ( "hits"=>-1, "docs"=>-1 ); } //foreach ( preg_split ( "/\\W+/", "time warning status fields resarray roundoff words" ) as $key ) foreach ( preg_split ( "/\\W+/", "time warning status fields resarray roundoff" ) as $key ) unset ( $set[$key] ); if ( $variants_match && isset ( $set["attrs"] ) ) { foreach ( $set["attrs"] as $k=>$v ) { if ( $v==SPH_ATTR_MULTI64 ) $set["attrs"][$k] = SPH_ATTR_MULTI; } } } function ChildrenArray ( $node, $name="" ) { $res = array (); if ( !empty($node) && $node->hasChildNodes() ) for ( $i=0; $i<$node->childNodes->length; $i++ ) { $child = $node->childNodes->item ( $i ); if ( $name=="" || strtolower($child->nodeName)==$name ) $res[] = $child; } return $res; } function GetFirstChild ( $node, $name ) { $children = ChildrenArray ( $node, $name ); return empty($children) ? NULL : $children[0]; } function GetFirstChildValue ( $node, $name, $default="" ) { $child = GetFirstChild ( $node, $name ); return is_null($child) ? 
$default : $child->nodeValue; } class SphinxConfig { private $_name; private $_db_create; private $_db_drop; private $_db_insert; private $_custom_insert; private $_counters; private $_dynamic_entries; private $_queries; private $_sphqueries; private $_query_settings; private $_query_attributes; private $_indexer_runs; private $_custom_test; private $_sd_address; private $_sd_port; private $_sd_sphinxql_port; private $_sd_pid_file; private $_num_agents; private $_subtest; private $_subtestcount; private $_results; private $_results_model; private $_prereqs; private $_config; ///< config DOM node private $_use_sphinxql; ///< true, if sphinxql queries exist private $_indexdata; ///< data for use "insert into" instead of run indexer private $_connection; ///< mysql connection (since we cound use mysql ans sqphinxql together) private $_testdir; ///< the path to the directory with current test (namely for accessing data without knowing the test name) function SetConnection ( $connection ) { $this->_connection = $connection; } function SphinxConfig () { global $sd_address, $sd_port, $sd_sphinxql_port, $sd_pid_file; $this->_counters = array (); $this->_dynamic_entries = array (); $this->_queries = array (); $this->_sphqueries = array (); $this->_results = array (); $this->_results_model = array (); $this->_query_attributes = array (); $this->_indexer_runs = array (); $this->_db_create = array (); $this->_db_drop = array (); $this->_db_insert = array (); $this->_custom_insert = array (); $this->_num_agents = 1; $this->_subtest = 0; $this->_subtestcount = 0; $this->_sd_address = $sd_address; $this->_sd_port = $sd_port; $this->_sd_sphinxql_port = $sd_sphinxql_port; $this->_sd_pid_file = $sd_pid_file; $this->_custom_test = ""; $this->_compat098 = false; $this->_skip_indexer = false; $this->_use_sphinxql = false; $this->_indexdata = array (); $this->_connection = false; $this->_testdir = ""; } function EnableCompat098 () { $this->_compat098 = true; } function SubtestNo () { return $this->_subtest; } function SubtestCount () { return $this->_subtestcount; } function Name () { return $this->_name; } function DB_Drop () { return $this->_db_drop; } function DB_Create () { return $this->_db_create; } function DB_Insert () { return $this->_db_insert; } function DB_CustomInsert () { return $this->_custom_insert; } function NumAgents () { return $this->_num_agents; } function AddressAPI () { return $this->_sd_address; } function Port () { return $this->_sd_port; } function Requires ( $name ) { return isset($this->_prereqs[$name]); } function IsQueryTest () { return strlen ( $this->_custom_test ) == 0; } function IsSphinxqlTest () { return $this->_use_sphinxql; } function IsNeedDB() { return ! 
( empty ( $this->_db_drop ) && empty ( $this->_db_create ) && empty ( $this->_db_insert ) ); } function IsRt() { global $g_locals; if ( !array_key_exists ('rt_mode', $g_locals) ) return false; return $g_locals['rt_mode']; } function NeedIndexerEx () { return count ( $this->_indexer_runs ) > 0; } function Results () { return $this->_results; } function GetQuery ( $i ) { return $this->_queries[$i]; } function IsSkipIndexer () { return $this->_skip_indexer; } function SetTestDir ( $dir ) { $this->_testdir = $dir; } function GetLocal ( $key ) { global $g_locals; if ( !array_key_exists ( $key, $g_locals ) ) { printf ( "FATAL: unbound local variable '%s' (go add it at ~/.sphinx).\n", $key ); exit ( 1 ); } return $g_locals[$key]; } function CreateNextConfig () { return $this->GenNextCfg ( 0 ); } function SubtestFinished () { $this->_subtest++; } function SubtestFailed () { $this->_subtest++; $failed = array (); array_push ( $failed, "failed" ); if ( IsModelGenMode () ) array_push ( $this->_results_model, $failed ); } function ModelSubtestFailed () { $failed = array (); array_push ( $failed, "failed" ); return $this->_results_model [$this->SubtestNo ()] == $failed; } function SetAgent ( $agent ) { if ( !is_array ( $agent ) ) return; $this->_sd_address = $agent ["address"]; $this->_sd_port = $agent ["port"]; $this->_sd_sphinxql_port = $agent ["sqlport"]; } function SetPIDFile ( $pidfile ) { $this->_sd_pid_file = $pidfile; } function GenNextCfg ( $i ) { if ( count ( $this->_dynamic_entries ) == 0 ) return FALSE; $num_variants = count ( ChildrenArray ( $this->_dynamic_entries[$i], "variant" ) ); if ( $this->_counters [$i] == $num_variants - 1 ) { if ( $i == count ( $this->_dynamic_entries ) - 1 ) return FALSE; else { $this->_counters [$i] = 0; return $this->GenNextCfg ( $i + 1 ); } } else $this->_counters [$i]++; return TRUE; } function WriteCustomTestResults ( $fp ) { $res_fmt = $this->FormatResultSet ( 0, $this->_results ); fwrite ( $fp, $res_fmt ); } function GatherEntities ( $node, &$array ) { foreach ( ChildrenArray($node) as $child ) if ( $child->nodeType == XML_ELEMENT_NODE ) array_push ( $array, $child->nodeValue ); } function GatherNodes ( $node ) { if ( $node->nodeType != XML_TEXT_NODE && $node->nodeType != XML_DOCUMENT_NODE && strtolower ( $node->nodeName ) == "dynamic" ) { $node->id = count ( $this->_dynamic_entries ); array_push ( $this->_dynamic_entries, $node ); array_push ( $this->_counters, 0 ); } for ( $i = 0; !is_null ( $node->childNodes ) && $i < $node->childNodes->length; $i++ ) $this->GatherNodes ( $node->childNodes->item ( $i ) ); } function ParseRange ( $range ) { if ( !$range ) return false; $values = explode ( ' ', $range ); if ( count($values) != 2 ) { printf ( "ERROR: malformed range attribute: '%s'\n", $range ); return false; } return array ( 'min' => $values[0], 'max' => $values[1] ); } function ParseIndexWeights ( $weights ) { if ( !$weights ) return false; $result = array(); preg_match_all ( '/([^\s]+):(\d+)/', $weights, $matches, PREG_SET_ORDER ); foreach ( $matches as $match ) $result [ $match[1] ] = (int)$match[2]; return $result; } function Load ( $config_file ) { // load the file $doc = new DOMDocument ( "1.0", "utf-8" ); if ( !$doc->load ( $config_file ) ) return false; // check for proper root node if ( !$doc->hasChildNodes() ) return false; $xml = $doc->childNodes->item(0); if ( strtolower($xml->nodeName)!="test" ) return false; $custom = GetFirstChild ( $xml, "custom_test" ); if ( $custom ) { $this->_custom_test = $custom->nodeValue; if ( $doc->encoding != 
'utf-8' ) $this->_custom_test = iconv ( 'utf-8', $doc->encoding, $this->_custom_test ); } // extract indexer run params $indexer_run = GetFirstChild ( $xml, "indexer" ); if ( $indexer_run ) { foreach ( ChildrenArray ( $indexer_run, "run" ) as $run ) $this->_indexer_runs [] = $run->nodeValue; } // extract queries $qs = GetFirstChild ( $xml, "queries" ); if ( $qs ) { // new and cool foreach ( ChildrenArray ( $qs, "query" ) as $q ) { $res = array ( "query" => array ( $q->nodeValue ) ); // parse query mode $mode = 0; $mode_s = $q->getAttribute("mode"); switch ( $mode_s ) { case "": $mode_s = "(default)"; break; case "all": $mode = SPH_MATCH_ALL; break; case "any": $mode = SPH_MATCH_ANY; break; case "phrase": $mode = SPH_MATCH_PHRASE; break; case "extended": $mode = SPH_MATCH_EXTENDED; break; case "extended2": $mode = SPH_MATCH_EXTENDED2; break; default: printf ( "$config_file: unknown matching mode '%s'\n", $mode_s ); return false; } $res["mode"] = $mode; $res["mode_s"] = $mode_s; // parse ranker $ranker = 0; $ranker_s = $q->getAttribute("ranker"); if ( empty($ranker_s) ) { $ranker_s = "(default)"; } else { $ranker = @constant("SPH_RANK_" . strtoupper($ranker_s)); if ( $ranker===NULL ) { printf ( "$config_file: unknown ranker '%s'\n", $ranker_s ); return false; } } $res["ranker"] = $ranker; $res["ranker_s"] = $ranker_s; // parse filter $res["filter"] = $q->getAttribute("filter"); $res["filter_value"] = $q->getAttribute("filter_value" ); $res["filter_range"] = $this->ParseRange ( $q->getAttribute("filter_range" ) ); // parse sort mode and get clause $sortmode = 0; $sortmode_s = $q->getAttribute("sortmode"); switch ( $sortmode_s ) { case "": $sortmode_s = "(default)"; break; case "extended": $sortmode = SPH_SORT_EXTENDED; break; case "expr": $sortmode = SPH_SORT_EXPR; break; default: printf ( "$config_file: unknown sorting mode '%s'\n", $sortmode_s ); return false; } $res["sortmode"] = $sortmode; $res["sortmode_s" ] = $sortmode_s; $res["sortby"] = $q->getAttribute("sortby"); // groupby $groupfunc = 0; $groupfunc_s = $q->getAttribute("groupfunc"); switch ( $groupfunc_s ) { case "": $groupfunc = SPH_GROUPBY_ATTR; $groupfunc_s = "attr"; break; case "day": $groupfunc = SPH_GROUPBY_DAY; break; case "week": $groupfunc = SPH_GROUPBY_WEEK; break; case "month": $groupfunc = SPH_GROUPBY_MONTH; break; case "year": $groupfunc = SPH_GROUPBY_YEAR; break; case "attr": $groupfunc = SPH_GROUPBY_ATTR; break; case "attrpair": $groupfunc = SPH_GROUPBY_ATTRPAIR; break; default: printf ( "$config_file: unknown groupby func '%s'\n", $groupfunc_s ); return false; } $res["groupfunc"] = $groupfunc; $res["groupfunc_s"] = $groupfunc_s; $res["groupattr"] = $q->getAttribute("groupattr"); $groupsort = $q->getAttribute("groupsort"); if ( $groupsort == "" ) $groupsort = "@group desc"; $res["groupsort"] = $groupsort; $res["groupdistinct"] = $q->getAttribute("groupdistinct"); $res["resarray"] = $q->getAttribute("resarray"); $res["index"] = $q->getAttribute("index"); $res["select"] = $q->getAttribute("select"); $res["id_range"] = $this->ParseRange ( $q->getAttribute("id_range") ); $res["index_weights"] = $this->ParseIndexWeights ( $q->getAttribute("index_weights") ); $res["roundoff"] = $q->getAttribute("roundoff"); $res["expect_error"] = $q->getAttribute("expect_error"); $res["tag"] = $q->getAttribute("tag"); $res["cutoff"] = $q->getAttribute("cutoff"); $res["limits"] = $q->getAttribute("limits"); // add query if ( $q->getAttribute("source") ) { $source = $q->getAttribute("source"); if ( substr ( $source, 0, 6 ) == "local:" ) 
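// (editorial note, inferred from the surrounding harness code and hedged accordingly:)
// a query "source" attribute written with a "local:" prefix is not a literal path; the
// remainder of the value is resolved through GetLocal(), i.e. the per-machine locals the
// tester keeps in ~/.sphinx, and the resolved file is then read line by line as queries,
// optionally truncated by the 'qlimit' local.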
$source = $this->GetLocal ( substr ( $source, 6 ) ); if ( !is_readable($source) ) { printf ( "FATAL: query source file '%s' not found.\n", $source ); exit ( 1 ); } $queries = file ( $source, FILE_IGNORE_NEW_LINES ); $limit = $this->GetLocal('qlimit'); $res["query"] = $limit ? array_slice( $queries, 0, $limit ) : $queries; } $this->_queries[] = $res; } } else { // legacy $qs = array (); $this->GatherEntities ( GetFirstChild ( $xml, "query" ), $qs ); foreach ( $qs as $q ) { $this->_queries[] = array ( "query" => array ( $q ), "mode" => 0, "mode_s" => "(default)", "ranker" => 0, "ranker_s" => "(default)" ); } } // extract queries $qs = GetFirstChild ( $xml, "sphqueries" ); if ( $qs ) { $this->_use_sphinxql = true; // new and cool foreach ( ChildrenArray ( $qs, "sphinxql" ) as $q ) $this->_sphqueries[] = array ( "query" => $q->nodeValue ); } // extract my settings $this->_config = GetFirstChild ( $xml, "config" ); $this->GatherNodes ( $this->_config ); $this->GatherEntities ( GetFirstChild ( $xml, "query_attributes" ), $this->_query_attributes ); foreach ( ChildrenArray ( $xml, "db_create" ) as $node ) $this->_db_create []=$node->nodeValue; foreach ( ChildrenArray ( $xml, "db_drop" ) as $node ) $this->_db_drop []=$node->nodeValue; foreach ( ChildrenArray ( $xml, "db_insert" ) as $node ) $this->_db_insert []=$node->nodeValue; foreach ( ChildrenArray ( $xml, "custom_insert" ) as $node ) $this->_custom_insert []=$node->nodeValue; $this->_name = GetFirstChildValue ( $xml, "name" ); $this->_query_settings = GetFirstChildValue ( $xml, "query_settings" ); $this->_num_agents = GetFirstChildValue ( $xml, "num_agents", 1 ); $this->_skip_indexer = GetFirstChildValue ( $xml, "skip_indexer", false )!==false; $this->_prereqs = array(); $prereqs = GetFirstChild ( $xml, "requires", false ); if ( $prereqs ) foreach ( ChildrenArray ( $prereqs ) as $node ) $this->_prereqs [ $node->nodeName ] = 1; // precalc subtests count $this->_subtestcount = 1; foreach ( $this->_dynamic_entries as $entry ) { $variants = count ( ChildrenArray ( $entry, "variant" ) ); $this->_subtestcount *= max ( $variants, 1 ); } return true; } function RunIndexerEx ( &$error ) { foreach ( $this->_indexer_runs as $param ) { $retval = RunIndexer ( $error, $param ); if ( $retval != 0 ) return $retval; } return 0; } function RunQuerySphinxQL ( &$error, $bench=false ) { global $sd_address, $sd_sphinxql_port, $action_retries, $action_wait_timeout; $total = $done = 0; if ($sd_address == "localhost") $connect_string = "127.0.0.1:$sd_sphinxql_port"; else $connect_string = "$sd_address:$sd_sphinxql_port"; $connection = @mysql_connect ( $connect_string ); if ( $connection === false ) return false; $qcount = count($this->_sphqueries); if ( $bench === true ) $qcount *= 2; for ( $n=0; $n<$qcount; $n++ ) { $query = 'show meta'; if ( $bench === false ) $query = $this->_sphqueries[$n]["query"]; else if ( ($n%2)==0 ) $query = $this->_sphqueries[$n/2]["query"]; $query_result = array (); $no_time = false; if ( $bench===false && ( stripos ( $query, 'show' )!==false ) && ( stripos ( $query, 'meta' )!==false ) ) $no_time = true; $query_result["sphinxql"]=$query; $result = mysql_wr ($query,$connection); if ($result===true) $query_result["total_affected"] = mysql_affected_rows($connection); else if ($result===false) { $query_result["error"] = mysql_error( $connection ); $query_result["errno"] = mysql_errno( $connection ); } else { $query_result["total_rows"] = mysql_num_rows($result); while ($row = mysql_fetch_array($result, MYSQL_ASSOC)) { if ( $no_time===true && 
array_key_exists ( 'Variable_name', $row ) && $row['Variable_name']=='time' ) continue; $query_result["rows"][] = $row; } mysql_free_result($result); } $this->_results[] = $query_result; if ( IsModelGenMode () ) { $this->_results_model[$this->SubtestNo ()][] = $query_result; } } mysql_close ( $connection ); return true; } function RunQuery ( $index, &$error, $benchmark = null ) { global $sd_address, $sd_port, $action_retries, $action_wait_timeout; $query_results = array (); $total = $done = 0; if ( $benchmark ) { foreach ( $this->_queries as $qinfo ) $total += count($qinfo['query']); $prefix = $benchmark; $compact = true; $tm = 0; $start = MyMicrotime(); } else $compact = false; $cl = new SphinxClient; $pconn = $benchmark && method_exists ( $cl, 'Open' ); if ( $pconn ) { $cl = new SphinxClient (); $cl->SetServer ( $sd_address, $sd_port ); $cl->Open (); } $retries = 1; if ( !$benchmark ) $retries = $action_retries; for ( $n=0; $n_queries); $n++ ) { $qinfo = $this->_queries[$n]; foreach ( $qinfo['query'] as $query ) { if ( $benchmark && MyMicrotime() > $tm ) { $tm = MyMicrotime(); $est = $done ? ( ( $tm - $start ) / $done ) * ( $total - $done ) : 0 ; $qps = $done / ( $tm - $start ); printf ( "\r$prefix %d/%d (est. %s, qps %.1f)", $done, $total, sphFormatTime($est), $qps ); $tm += 1; } $bOk = FALSE; for ( $i=0; $i<$retries && !$bOk; $i++ ) { if ( !$pconn ) { $cl = new SphinxClient (); $cl->SetServer ( $sd_address, $sd_port ); } else { $cl->ResetFilters (); $cl->ResetGroupBy (); } $results = 0; if ( empty($this->_query_settings) ) { $my_index = $index; if ( @$qinfo["mode"] ) $cl->SetMatchMode ( $qinfo["mode"] ); if ( @$qinfo["ranker"] ) $cl->SetRankingMode ( $qinfo["ranker"] ); if ( @$qinfo["sortmode"] ) $cl->SetSortMode ( $qinfo["sortmode"], $qinfo["sortby"] ); if ( @$qinfo["groupattr"] ) $cl->SetGroupBy ( $qinfo["groupattr"], $qinfo["groupfunc"], $qinfo["groupsort"] ); if ( @$qinfo["groupdistinct"] ) $cl->SetGroupDistinct ( $qinfo["groupdistinct"] ); if ( @$qinfo["resarray"] ) $cl->SetArrayResult ( true ); if ( @$qinfo["select"] ) $cl->SetSelect ( $qinfo["select"] ); if ( @$qinfo["id_range"] ) $cl->SetIDRange ( $qinfo["id_range"]["min"], $qinfo["id_range"]["max"] ); if ( @$qinfo["index"] ) $my_index = $qinfo["index"]; if ( @$qinfo["index_weights"] ) $cl->SetIndexWeights ( $qinfo["index_weights"] ); if ( @$qinfo["cutoff"] ) $cl->SetLimits ( 0, 20, 0, $qinfo["cutoff"] ); if ( @$qinfo["limits"] ) $cl->SetLimits ( 0, (int)$qinfo["limits"] ); if ( @$qinfo["filter"] ) { $name = $qinfo["filter"]; if ( @$qinfo["filter_value"] ) $cl->SetFilter ( $name, array ( $qinfo["filter_value"] ) ); elseif ( @$qinfo["filter_range"] ) { $range = $qinfo["filter_range"]; $cl->SetFilterRange ( $name, $range['min'], $range['max'] ); } } $results = $cl->Query ( $query, $my_index, "run".(1+$this->SubtestNo()) ); if ( is_array($results) ) { $results["resarray"] = (int)@$qinfo["resarray"]; $results["roundoff"] = (int)@$qinfo["roundoff"]; } } else { $run_func = create_function( '$client, $query, $index, &$results', $this->_query_settings ); $run_func ( $cl, $query, $index, $results ); } if ( $results ) { // let also work with "array of arrays" result if ( array_key_exists ( "error",$results ) ) { $bOk = TRUE; if ( $compact ) $results = array ( $n, $results['total'], $results['total_found'], $results['time'] ); else $results ["query"] = $query; array_push ( $query_results, $results ); } else foreach ( $results as $result ) { $bOk = TRUE; if ( $compact ) $result = array ( $n, $result['total'], $result['total_found'], 
$result['time'] ); else $result ["query"] = $query; array_push ( $query_results, $result ); } } else if ( @$qinfo["expect_error"] && !$cl->IsConnectError() ) { $bOk = true; array_push ( $query_results, array ( "query" => $query, "error" => $cl->GetLastError(), "warning" => "", "total" => 0, "total_found" => 0, "time" => 0 ) ); } else { if ( method_exists ( $cl, 'IsConnectError' ) && $cl->IsConnectError() ) usleep ( $action_wait_timeout ); else if ( $benchmark && $done ) { array_push ( $query_results, array ( $n, -1, -1, 0 ) ); $bOk = true; } else break; } } $done++; if ( !$bOk ) { $error = sprintf ( "query %d/%d: %s", $n+1, count($this->_queries), $cl->GetLastError() ); return FALSE; } } } $this->_results = $query_results; if ( IsModelGenMode () && count($this->_queries) !=0 ) array_push ( $this->_results_model, $query_results ); if ( $benchmark ) printf ( " - done in %s\n", sphFormatTime ( MyMicrotime() - $start ) ); if ( $pconn ) $cl->Close (); return TRUE; } function RunCustomTest ( & $error ) { global $sd_address, $sd_port, $action_retries, $action_wait_timeout, $g_locals; $bOk = false; $results = false; for ( $i = 0; $i < $action_retries && !$bOk; $i++ ) { $cl = new SphinxClient (); $cl->SetServer ( $sd_address, $sd_port ); $results = false; $run_func = create_function( '$client, &$results', $this->_custom_test ); if ( !@mysql_connect ( $g_locals['db-host'].":".$g_locals['db-port'], $g_locals['db-user'], $g_locals['db-password'] ) || !@mysql_select_db ( $g_locals['db-name'] ) ) return FALSE; $GLOBALS["this_test"] = $this->_testdir; $run_func ( $cl, $results ); @mysql_close(); if ( $results ) $bOk = TRUE; else usleep ( $action_wait_timeout ); } if ( !$bOk ) { $error = $cl->GetLastError (); return FALSE; } $my_results = array (); $my_results [] = $results; $this->_results = $my_results; if ( IsModelGenMode () ) array_push ( $this->_results_model, $my_results ); return TRUE; } function FixKeys ( $v ) { if ( is_array($v) ) { $result = array(); foreach ( $v as $key=>$value ) { if ( $key==PHP_INT_MAX || $key==-PHP_INT_MAX-1 ) $key = (int)$key; $result[$key] = $this->FixKeys ( $value ); } return $result; } else return $v; } function LoadModel ( $filename ) { if ( ! IsModelGenMode () ) { if ( ! file_exists ( $filename ) ) return -1; $contents = file_get_contents ( $filename ); if ( ! 
$contents ) return 0; $this->_results_model = $this->FixKeys ( unserialize ( $contents ) ); } return 1; } function CompareToModel () { return $this->CompareResults ( $this->FixKeys ( $this->_results ), $this->_results_model [$this->SubtestNo ()] ); } function CompareResultSets ( $set1, $set2 ) { $roundoff = 0; if ( isset($set1["roundoff"]) ) $roundoff = $set1["roundoff"]; if ( isset($set2["roundoff"]) ) $roundoff = $set2["roundoff"]; $variants_match = $this->Requires("variant_match"); CompareResultSetFixup ( $set1, $roundoff, $variants_match ); CompareResultSetFixup ( $set2, $roundoff, $variants_match ); return $set1==$set2; } function CompareResults ( $query1, $query2 ) { if ( count($query1)!=count($query2) ) return false; for ( $i=0; $iCompareResultSets ( $query1[$i], $query2[$i] ) ) return false; return true; } /// returns false if everything is okay /// returns error messages if something failed function CheckVariants ( $output_path ) { if ( !$this->Requires("variant_match") ) return false; $total = count ( $this->_results_model ); if ( $total==1 ) return "variant match required, but there are no variants"; else if ( !$this->IsQueryTest() ) return "variant match is not supported with custom tests"; $failed = false; $output = ''; for ( $i=1; $i<$total; $i++ ) { $nqueries = count ( $this->_results_model[0] ); for ( $k=0; $k<$nqueries; $k++ ) if ( !$this->CompareResultSets ( $this->_results_model[0][$k], $this->_results_model[$i][$k] ) ) { $first = $this->FormatResultSet ( $k+1, $this->_results_model[0][$k] ); $current = $this->FormatResultSet ( $k+1, $this->_results_model[$i][$k] ); file_put_contents ( "first", $first ); file_put_contents ( "current", $current ); system ( "diff --unified=3 first current > diff.txt" ); $diff = file_get_contents ( "diff.txt" ); unlink ( "current" ); unlink ( "first" ); unlink ( "diff.txt" ); $output .= $diff . 
"\n"; $failed = true; } } if ( $failed ) { file_put_contents ( $output_path, $output ); return "variants mismatch; see $output_path for details"; } // all ok, indicated by false ("no error") return false; } function WriteReportHeader ( $fp ) { fprintf ( $fp, "==== Run %d ====\n", $this->SubtestNo () + 1 ); fwrite ( $fp, "Settings:\n" ); $this->WriteDiff ( $fp ); fwrite ( $fp, "\n" ); if ( !empty ( $this->_query_settings ) ) fprintf ( $fp, "Query settings:\n%s\n", $this->_query_settings ); } function FormatResultSet ( $nquery, $result, $opts=array() ) { global $sd_skip_indexer; if ( !$this->IsQueryTest () || !is_array($result) ) return var_export ( $result, true )."\n"; if ( array_key_exists ("sphinxql", $result) ) { $str = "sphinxql> $result[sphinxql];\n"; if ( array_key_exists ("total_affected", $result) ) { $str .= "Query OK, $result[total_affected] rows affected\n"; } else if ( array_key_exists ("error", $result) ) { $str .= "ERROR $result[errno]: $result[error]\n"; } else if (array_key_exists ("rows", $result) ) { foreach ( $result["rows"][0] as $key=>$s ) $str .= "\t$key"; $str .= "\n"; foreach ($result["rows"] as $row) { foreach ($row as $value) $str .= "\t$value"; $str .="\n"; } $str .="$result[total_rows] rows in set\n"; } else if ( isset($result["total_rows"]) ) { $str .= "$result[total_rows] rows in set\n"; } return $str."\n"; } // format header $qinfo = @$this->_queries[$nquery-1]; if ( @array_key_exists ( "index", $qinfo ) && $qinfo ["index"] != '*' ) $str = "--- Query $nquery (mode=$qinfo[mode_s],ranker=$qinfo[ranker_s],index=$qinfo[index]) ---\n"; else $str = "--- Query $nquery (mode=$qinfo[mode_s],ranker=$qinfo[ranker_s]) ---\n"; if ( @$qinfo["groupattr"] ) $str .= "GroupBy: attr: '".$qinfo["groupattr"]."' func: '".$qinfo["groupfunc_s"]."' sort: '".$qinfo["groupsort"]."'\n"; if ( @$qinfo["sortmode"] == SPH_SORT_EXPR ) $str .= "Sort: expr: ".$qinfo["sortby"]."\n"; $str .= @"Query '$result[query]': retrieved $result[total_found] of $result[total] matches in $result[time] sec.\n"; if ( $result["error"] ) $str .= "Error: $result[error]\n"; if ( $result["warning"] ) $str .= "Warning: $result[warning]\n"; $array_result = @$result["resarray"]; // format keywords if ( isset($result["words"]) && is_array($result["words"]) ) { $str .= "Word stats:\n"; foreach ( $result ["words"] as $word => $word_result ) { $hits = $word_result ["hits"]; $docs = $word_result ["docs"]; $str .= "\t'$word' found $hits times in $docs documents\n"; } } // format attribute types if ( @$opts["format_attrs"] ) { $typenames = array ( SPH_ATTR_INTEGER => "int", SPH_ATTR_TIMESTAMP=> "timestamp", SPH_ATTR_ORDINAL => "ordinal", SPH_ATTR_BOOL => "bool", SPH_ATTR_FLOAT => "float", SPH_ATTR_BIGINT => "bigint", SPH_ATTR_STRING => "string", SPH_ATTR_MULTI => "mva", SPH_ATTR_MULTI64 => "mva" ); // !COMMIT $n = 1; $str .= "Result set attributes:\n"; foreach ( $result["attrs"] as $name=>$type ) { $typename = "type-$type"; if ( $typenames[$type] ) $typename = $typenames[$type]; $str .= "\tattr $n: $typename $name\n"; $n++; } } // check our table for well-known id column names $idcol = ""; if ( $this->IsNeedDB() ) $r = mysql_query ( "DESC test_table", $this->_connection ); else $r = false; if ( $r ) { while ( $row = mysql_fetch_assoc($r) ) { $idcand = strtolower ( $row["Field"] ); if ( in_array ( $idcand, array ( "id", "document_id" ) ) ) { $idcol = $idcand; break; } } } // format matches $str .= "\n"; if ( isset($result["matches"]) && is_array($result["matches"]) ) { $n = 1; $str .= "Matches:"; foreach ( $result 
["matches"] as $doc => $docinfo ) { $doc_id = $array_result ? $docinfo["id"] : $doc; $weight = $docinfo["weight"]; $str .= "\n$n. doc_id=$doc_id, weight=$weight"; $n++; // only format specified attrs if requested if ( !empty ( $this->_query_attributes ) ) { foreach ( $this->_query_attributes as $attr ) if ( isset($docinfo ["attrs"][$attr]) ) { $val = $docinfo["attrs"][$attr]; if ( is_array ( $val ) ) $val = join ( " ", $val ); $str .= " $attr=$val"; } continue; } // fetch and format fields from db by default if ( $idcol ) { if ( $this->IsNeedDB() ) $query_res = mysql_query ( "select * from test_table where $idcol = $doc_id", $this->_connection); else $query_res = false; if ( $query_res ) { $row = mysql_fetch_assoc ( $query_res ); if ( $row ) foreach ( $row as $col_name => $col_content ) if ( array_search ( $col_name, $result["fields"] )!==false ) $str .= " $col_name=\"$col_content\""; } } // format attrs foreach ( $docinfo["attrs"] as $attr=>$val ) { if ( is_array($val) ) $val = join ( ",", $val ); $str .= " $attr=\"$val\""; } } $str .= "\n\n"; } return $str . "\n"; } /// format and write a single result set into log file function WriteQuery ( $fp, $nquery, $result ) { $res_fmt = $this->FormatResultSet ( $nquery, $result ); fwrite ( $fp, $res_fmt ); } /// write all the result sets function WriteResults ( $fp ) { if ( $this->IsQueryTest () ) { $nquery = 1; foreach ( $this->_results as $result ) $this->WriteQuery ( $fp, $nquery++, $result ); } else $this->WriteCustomTestResults ( $fp ); } /// write difference from the reference result sets function WriteReferenceResultsDiff ( $fp ) { global $windows; $nquery = 0; if ( !is_array ( $this->_results_model [ $this->SubtestNo() ] ) ) return; foreach ( $this->_results_model [ $this->SubtestNo() ] as $ref ) { $cur = $this->_results[$nquery]; if ( $this->CompareResultSets ( $ref, $cur ) ) { $nquery++; continue; } $opts = array(); if ( isset($cur["attrs"]) || isset($ref["attrs"]) ) if ( @$cur["attrs"]!=@$ref["attrs"] ) $opts["format_attrs"] = 1; $result_f_cur = $this->FormatResultSet ( $nquery+1, $this->_results[$nquery], $opts ); $result_f_ref = $this->FormatResultSet ( $nquery+1, $ref, $opts ); file_put_contents ( "current", $result_f_cur ); file_put_contents ( "reference", $result_f_ref ); system ( "diff --unified=3 reference current > diffed.txt" ); $diffed = file_get_contents ( "diffed.txt" ); unlink ( "current" ); unlink ( "reference" ); unlink ( "diffed.txt" ); $nquery++; fwrite ( $fp, "=== query $nquery diff start ===\n" ); fwrite ( $fp, $diffed ); fwrite ( $fp, "=== query $nquery diff end ===\n" ); } $nref = count ( array_keys ( $this->_results_model [ $this->SubtestNo() ] ) ); $nres = count ( array_keys ( $this->_results ) ); if ( $nres > $nref ) { $delta = $nres - $nref; fwrite ( $fp, "$delta result set(s) missing from model!\n" ); } } function EraseIndexFiles ( $path ) { $dh = glob ( "./$path.*" ); foreach ( $dh as $entry ) { if ( is_file ($entry) ) unlink ($entry); } } function WriteConfig ( $filename, $agentid, &$msg, $collectdata = true ) { global $g_locals; $fp = fopen ( $filename, 'w' ); if ( !$fp ) { $msg = "Can't open file $filename for writing"; return FALSE; } $this->Dump ( $this->_config, $fp, false, $agentid ); fclose ( $fp ); $fp = fopen ( $filename, 'r' ); if ( !$fp ) { $msg = "Can't open file $filename for reading"; return FALSE; } $config = fread ( $fp, filesize ( $filename ) ); fclose ( $fp ); // for rt case - extract the schema from the config // and make the new config, making the index as rt instead if ( $this->IsRt() ) 
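// (editorial sketch of the RT-mode rewrite below, inferred from the code itself and hedged:)
// when the harness runs in RT mode, the block that follows rewrites the freshly written
// config in place. Every "source" section is parsed out with a regex, its sql_query_pre
// statements and sql_query are replayed against MySQL to capture the test rows (non-mysql
// sources are rejected), and the plain-index attribute declarations are mapped onto their
// RT counterparts, roughly:
//
//   sql_attr_uint      => rt_attr_uint
//   sql_attr_bigint    => rt_attr_bigint
//   sql_attr_float     => rt_attr_float
//   sql_attr_timestamp => rt_attr_timestamp
//   (remaining SELECT columns become rt_field)
//
// Each "index" section that referenced such a source is then rewritten as "type = rt",
// its stale files are erased via EraseIndexFiles(), and the captured rows are inserted
// later by InsertIntoIndexer() over the SphinxQL (mysql41) listener.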
{ $body = 1; $srcname = 2; $parent = 4; $content = 5; $epilog = 6; $pattern = "/.*?(source\s+(\S*?)(\s*\:\s*(\S*?))?\s*\{(.*?)\})(.*?)/s"; preg_match_all ( $pattern, $config, $matches, PREG_SET_ORDER | PREG_OFFSET_CAPTURE ); $schemas = array(); $shift = 0; $newconfig = ""; // parse sources foreach ( $matches as $match ) { $lines = explode("\n", $match[$content][0]); $insert_schema = array(); $insert_types = array(); if ( $match[$parent][0] != "" ) $insert_types = $schemas[$match[$parent][0]]['types']; $sql_query_pre = array(); $sql_query = ""; foreach ( $lines as $line ) { // skip comment lines (if any) if ( preg_match ( "/\s*#/" , $line ) > 0 ) continue; // extract config key/value pairs $eq = strpos ( $line,"=" ); if ($eq == 0) continue; $key = strtolower ( trim ( substr($line,0,$eq), " \t" ) ); $value = trim ( substr($line,$eq+1), " \t" ); // handle known keys switch ( $key ) { case "type": if ( $value != "mysql" ) { $msg = "non-mysql source (type=$value), skipping..."; return FALSE; } break; case "sql_attr_uint": $insert_types[$value] = "rt_attr_uint"; break; case "sql_attr_bigint": $insert_types[$value] = "rt_attr_bigint"; break; case "sql_attr_float": $insert_types[$value] = "rt_attr_float"; break; case "sql_attr_timestamp": $insert_types[$value] = "rt_attr_timestamp"; break; // case "sql_attr_multi" TBD // case "sql_attr_string" TBD case "sql_query_pre": $sql_query_pre[] = $value; break; case "sql_query": $sql_query = $value; break; } } // query is kinda mandatory if ( !$sql_query ) { $msg = "missing sql_query"; return false; } // now let's connect to MySQL, run the query, and fetch the values $conn = ConnectDB(); if ( !$conn ) { $msg = "can't connect or select the database"; return false; } // gotta run pre-queries first! foreach ( $sql_query_pre as $q ) { if ( mysql_wr ( $q, $conn ) ) continue; $msg = sprintf ( "sql_query_pre failed (query=%s, error=%s)", $q, mysql_error ( $conn ) ); mysql_close ( $conn ); return false; } // run main query $res = mysql_wr ( $sql_query, $conn ); if ( !$res ) { $msg = sprintf ( "sql_query failed (query=%s, error=%s)", $sql_query, mysql_error ( $conn ) ); $msg = "sql_query can't fetch test data: " . 
mysql_error ( $conn ); mysql_close ( $conn ); return false; } // fetch fields $insert_schema = array ( "id" => 0 ); for ( $i=1; $i < mysql_num_fields($res); $i++ ) $insert_schema [ mysql_fetch_field ( $res, $i )->name ] = $i; // fetch data $insert_values = array(); while ( $row = mysql_fetch_row($res) ) $insert_values[] = array_values ( $row ); // cleanup mysql_free_result ( $res ); mysql_close ( $conn ); // store $schema = array(); $schema['types'] = $insert_types; if ( $match[$parent][0] != "" ) $schema['orders'] = $schemas[$match[$parent][0]]['orders']; else $schema['orders'] = $insert_schema; $schema['values'] = $insert_values; $schema['sqlport'] = $this->_sd_sphinxql_port; $schemas[$match[$srcname][0]] = $schema; $srclen = $match[$epilog][1] - $match[$body][1]; $config = substr_replace ( $config, "", $match[$body][1]-$shift,$srclen ); $shift += $srclen; } $body = 1; $idxname = 2; $parent = 4; $content = 5; $epilog = 6; $pattern = "/.*?(index\s+(\S*?)(\s*\:\s*(\S*?))?\s*\{(.*?)\})(.*?)/s"; preg_match_all ( $pattern, $config, $matches, PREG_SET_ORDER | PREG_OFFSET_CAPTURE ); $shift = 0; // parse indexes $indexes = array(); foreach ( $matches as $match ) { $idx = "index ".$match[$idxname][0]; if ( $match[$parent][0] != "" ) $idx .= " : ".$match[$parent][0]; $idx .= "\n{\n"; $lines = explode("\n", $match[$content][0]); $justcopy = false; $rtcopy = false; $idxbody = ""; foreach ($lines as $line) { // skip comment lines (if any) if ( preg_match ( "/\s*#/" , $line ) > 0 ) continue; $eq = strpos ( $line,"=" ); if ($eq == 0) continue; $key = strtolower ( trim ( substr($line,0,$eq), " \t" ) ); $value = trim ( substr($line,$eq+1), " \t" ); switch ( $key ) { case "type": if ($value=="rt") $rtcopy = true; else $justcopy = true; break; case "source"; { $idxbody .= "\ttype\t= rt\n"; if ( $collectdata ) $indexes[$match[$idxname][0]] = $schemas[$value]; foreach ( array_keys( $schemas[$value]['orders'] ) as $key ) if ( $key != "id" && $key != "document_id" ) { if ( array_key_exists ( $key, $schemas[$value]['types'] ) ) $idxbody .= "\t".$schemas[$value]['types'][$key]."\t= $key\n"; else $idxbody .= "\trt_field\t= $key\n"; } break; } case "path": $this->EraseIndexFiles($value); if ($rtcopy) $justcopy = true; // no break! default: $idxbody .= "\t$key\t= $value\n"; } if ( $justcopy ) // explicitly defined type, don't transform to rt. { $idxbody = $match[$content][0]; break; } } $idx .= "$idxbody\n}\n"; $srclen = $match[$epilog][1] - $match[$body][1]; $config = substr_replace ($config, $idx, $match[$body][1]-$shift,$srclen ); $shift += $srclen-strlen($idx); } if ( $collectdata ) foreach ($indexes as $key => $value) $this->_indexdata[$key] = $value; $fp = fopen ( $filename, 'w' ); if ( !$fp ) { $msg = "Can't open $filename for writing"; return FALSE; } fwrite ( $fp, $config ); fclose ( $fp ); } else // for rt indexes we need to clean up all index files before the run. 
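// (editorial note on the else-branch below, inferred from the code and hedged:) when the
// config is used as-is (no RT rewrite), each "index" section is still scanned for
// "type = rt" and its "path"; when both are found, the on-disk files for that path are
// removed via EraseIndexFiles() so every subtest starts from an empty RT index.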
{ $pattern = "/.*?index\s+\S*?(\s*\:\s*\S*?)?\s*\{(.*?)\}.*?/s"; preg_match_all ( $pattern, $config, $matches, PREG_SET_ORDER | PREG_OFFSET_CAPTURE ); // parse indexes $indexes = array(); foreach ( $matches as $match ) { $lines = explode("\n", $match[2][0]); $path = ""; $isrt = false; foreach ($lines as $line) { // skip comment lines (if any) if ( preg_match ( "/\s*#/" , $line ) > 0 ) continue; $eq = strpos ( $line,"=" ); if ($eq == 0) continue; $key = strtolower ( trim ( substr($line,0,$eq), " \t" ) ); $value = trim ( substr($line,$eq+1), " \t" ); switch ( $key ) { case "type": if ($value=="rt") $isrt = true; break; case "path": $path = $value; } if ( $isrt && $path!="" ) { $this->EraseIndexFiles($path); break; } } } } return TRUE; } function InsertIntoIndexer ( &$error ) { global $sd_address, $sd_sphinxql_port, $action_retries, $action_wait_timeout; $address = $sd_address; if ($address == "localhost") $address = "127.0.0.1"; $cn = false; $port = 0; foreach ( $this->_indexdata as $name => $data ) { if ( $port != $data["sqlport"] ) { $port = $data["sqlport"]; $connect_string = "$address:$port"; if ( $cn !== false ) mysql_close ( $cn ); $cn = @mysql_connect ( $connect_string ); } if ( $cn === false ) return false; $cols = join ( ", ", array_keys ( $data["orders"] ) ); $prefix = "INSERT INTO $name ($cols) VALUES "; $accum = ""; foreach ($data['values'] as $row) { $query = ""; foreach ( $row as $column ) { if ( $query!="" ) $query .=","; $query .="'".$column."'"; } if ( ( strlen ($accum) + strlen ($query) ) > 1024000 ) ///Dump ( $this->_config, $fp, true, "all" ); } function WriteModel ( $filename ) { if ( IsModelGenMode () ) file_put_contents ( $filename, serialize ( $this->_results_model ) ); } function WriteSearchdSettings ( $fp ) { global $sd_log, $sd_query_log, $sd_read_timeout, $sd_max_children, $sd_pid_file, $sd_max_matches; if ( $this->_compat098 ) { fwrite ( $fp, "\taddress = {$this->_sd_address}\n" ); fwrite ( $fp, "\tport = {$this->_sd_port}\n" ); } else { fwrite ( $fp, "\tlisten = {$this->_sd_address}:{$this->_sd_port}\n" ); fwrite ( $fp, "\tlisten = {$this->_sd_address}:{$this->_sd_sphinxql_port}:mysql41\n" ); } fwrite ( $fp, "\tlog = $sd_log\n" ); fwrite ( $fp, "\tquery_log = $sd_query_log\n" ); fwrite ( $fp, "\tread_timeout = $sd_read_timeout\n" ); fwrite ( $fp, "\tmax_children = $sd_max_children\n" ); fwrite ( $fp, "\tpid_file = ".$this->_sd_pid_file."\n" ); fwrite ( $fp, "\tmax_matches = $sd_max_matches\n" ); if ( $this->IsRt() ) fwrite ( $fp, "\tworkers = threads\n" ); } function WriteSqlSettings ( $fp, $attributes ) { global $g_locals; fwrite ( $fp, "\tsql_host = " . $g_locals['db-host'] . "\n" ); fwrite ( $fp, "\tsql_user = " . $g_locals['db-user'] . "\n" ); fwrite ( $fp, "\tsql_pass = " . $g_locals['db-password'] . "\n" ); fwrite ( $fp, "\tsql_port = " . $g_locals['db-port'] . "\n" ); $node = $attributes->getNamedItem('sql_db'); fprintf ( $fp, "\tsql_db = %s\n", $node ? 
$node->nodeValue : $g_locals['db-name'] ); } function Dump ( $node, $fp, $dynamic_only, $agentid ) { global $index_data_path, $agents; $nodename = strtolower ( $node->nodeName ); if ( !$dynamic_only ) switch ( $nodename ) { case "#text": fwrite ( $fp, $node->nodeValue ); return; case "static": fwrite ( $fp, $node->nodeValue ); return; case "searchd_settings": $this->WriteSearchdSettings ( $fp ); return; case "sql_settings": $this->WriteSqlSettings ( $fp, $node->attributes ); return; case "my_address": case "agent0_address": fwrite ( $fp, $agents[0]["address"].":".$agents[0]["port"] ); return; case "agent_address": case "agent1_address": fwrite ( $fp, $agents[1]["address"].":".$agents[1]["port"] ); return; case "agent2_address": fwrite ( $fp, $agents[2]["address"].":".$agents[2]["port"] ); return; case "data_path": fwrite ( $fp, $index_data_path ); return; case "local": fwrite ( $fp, $this->GetLocal ( $node->nodeValue ) ); return; case "test_root": fwrite ( $fp, dirname(__FILE__) ); return; case "this_test": fwrite ( $fp, $this->_testdir ); return; } if ( $nodename=="variant" ) { fwrite ( $fp, "$node->nodeValue\n" ); } else if ( $nodename=="dynamic" ) { if ( !is_null($node->id) ) { $variants = ChildrenArray ( $node,"variant" ); $this->Dump ( $variants[$this->_counters[$node->id]], $fp, $dynamic_only, $agentid ); } } else if ( strpos ( $nodename, "agent" )===0 ) { if ( $agentid==="all" || $nodename=="agent$agentid" ) foreach ( ChildrenArray($node) as $child ) $this->Dump ( $child, $fp, $dynamic_only, $agentid ); } else { foreach ( ChildrenArray($node) as $child ) $this->Dump ( $child, $fp, $dynamic_only, $agentid ); } } } function HandleFailure ( $config, $report, $error, &$nfailed ) { $ret = true; if ( !IsModelGenMode() && !$config->ModelSubtestFailed () ) { $nfailed++; $ret = false; fwrite ( $report, "SUBTEST FAILED, UNEXPECTED ERROR:\n" ); } fwrite ( $report, "$error\n" ); $config->SubtestFailed (); return $ret; } function EraseDirContents ( $path ) { $fp = opendir ( $path ); if ( $fp ) { while ( ( $file = readdir ( $fp ) ) !== false ) { if ( $file != "." && $file != ".." && !is_dir ( $file ) ) unlink ( "$path/$file" ); } closedir ( $fp ); } } function CopyDirContents ( $from, $to ) { $ffrom = opendir ( $from ); if ( $ffrom && is_dir ( $to ) ) { while ( ( $file = readdir ( $ffrom ) ) !== false ) { if ( $file != "." && $file != ".." 
&& !is_dir ( $file ) ) copy ( "$from/$file", "$to/$file" ); } closedir ( $ffrom ); } } function CheckConfig ( $config, $path ) { global $g_id64, $windows; if ( $config->Requires("id64") && !$g_id64 ) { printf ( "SKIPPING %s, %s - enable id64 to run this test\n", $path, $config->Name () ); return false; } if ( $config->Requires("id32") && $g_id64 ) { printf ( "SKIPPING %s, %s - disable id64 to run this test\n", $path, $config->Name () ); return false; } if ( $config->Requires("non-windows") && $windows ) { printf ( "SKIPPING %s, %s - use non-Windows system to run this test\n", $path, $config->Name () ); return false; } if ( $config->Requires("non-rt") && $config->IsRt() ) { printf ( "SKIPPING %s, %s - explicitly non-RT test skipped in RT mode\n", $path, $config->Name () ); return false; } if ( $config->NeedIndexerEx() && $config->IsRt() ) { printf ( "SKIPPING %s, %s - non-RT test that uses indexer skipped in RT mode\n", $path, $config->Name () ); return false; } return true; } function MarkTest ( $logfile, $test_dir ) { $log = fopen ( $logfile, "a" ); fwrite ( $log, "*** in test $test_dir ***\n"); fclose ( $log ); } function RunTest ( $test_dir, $skipdemo, $usemarks ) { global $indexer_data_path, $agents, $sd_pid_file, $sd_managed_searchd, $sd_skip_indexer, $g_id64, $windows, $g_locals, $sd_log, $sd_query_log; $model_file = $test_dir."/model.bin"; $conf_dir = $test_dir."/Conf"; $config = new SphinxConfig; $lmodel = $config->LoadModel ( $model_file ); $isdemo = false; if ( $lmodel==-1 ) { if ( $skipdemo ) { printf ( "Skipping %s, - this is demo or bugreport (no model.bin file)\n", $test_dir ); return array ( "tests_total"=>0, "tests_failed"=>0, "tests_skipped"=>1 ); } $isdemo = true; } if ( !$config->Load ( $test_dir."/test.xml" ) ) return; $config->SetTestDir ( $test_dir ); $prefix = sprintf ( "testing %s, %s...", $test_dir, $config->Name () ); if ( !CheckConfig ( $config, $test_dir ) ) return array ( "tests_total"=>0, "tests_failed"=>0, "tests_skipped"=>1 ); if ( $lmodel==0 ) { printf ( "$prefix FAILED, error loading model\n" ); return; } if ( $config->IsNeedDB() ) { $connection = CreateDB ( $config->DB_Drop(), $config->DB_Create(), $config->DB_Insert(), $config->DB_CustomInsert(), $sd_skip_indexer ); if ( $connection === false ) { printf ( "$prefix FAILED, error creating test DB: %s\n", mysql_error() ); return; } $config->SetConnection($connection); } if ( !file_exists ( $conf_dir ) ) mkdir ( $conf_dir ); $report_path = "$test_dir/report"; $report_file = "$report_path.txt"; $report = fopen ( $report_file, "w" ); $nfailed = 0; $error = ""; $log = ""; // subtest failures log $nsubtests = $config->SubtestCount(); // config to pid hash, instances to stop // static is only to workaround PHP braindamage, otherwise $stop gets reset (at least on 5.2.2 under win32) static $stop = array(); $oldlog = ''; $oldquerylog = ''; if ( $isdemo ) { $oldlog = $sd_log; $oldquerylog = $sd_query_log; $sd_log = "$test_dir/searchd.log"; $sd_query_log = "$test_dir/query.log"; if (file_exists($sd_log)) unlink ($sd_log); if (file_exists($sd_query_log)) unlink ($sd_query_log); } if ( $usemarks ) { MarkTest($sd_log,$test_dir); MarkTest($sd_query_log,$test_dir); } do { // stop them all if ( !$sd_managed_searchd ) foreach ( $stop as $conf=>$pid ) StopSearchd ( $conf, $pid ); $stop = array(); // do the dew $subtest = $config->SubtestNo()+1; print ( "$prefix $subtest/$nsubtests\r" ); $config->WriteReportHeader ( $report ); $config->SetAgent ( $agents [0] ); $msg = ''; if (!$config->WriteConfig ( 
$conf_dir."/"."config_".$config->SubtestNo ().".conf", "all", $msg, false)) { print ("Interrupted, $msg\n"); continue; } $config->WriteConfig ( "config.conf", "all", $msg, $config->NumAgents () < 2 ); EraseDirContents ( $indexer_data_path ); if ( $config->Requires( "pre_copy_ref" ) ) CopyDirContents ( $test_dir . "/refdata/", "data/" ); if ( $config->IsSkipIndexer()===false && $sd_managed_searchd===false && $sd_skip_indexer===false ) { // standard run if ( !$config->IsRt() ) { $indexer_ret = RunIndexer ( $error, "--all" ); if ( $indexer_ret==2 ) { fwrite ( $report, "$error\n" ); } else if ( $indexer_ret!=0 ) { if ( !HandleFailure ( $config, $report, $error, $nfailed ) ) $log .= "\tsubtest $subtest: error running indexer with code $indexer_ret; see $report_file\n"; continue; } } // additional optional runs (eg for merge tests) $indexer_ret = $config->RunIndexerEx ( $error ); if ( $indexer_ret==2 ) { fwrite ( $report, "$error\n" ); } else if ( $indexer_ret!=0 ) { if ( !HandleFailure ( $config, $report, $error, $nfailed ) ) $log .= "\tsubtest $subtest: error running indexer with code $indexer_ret; see $report_file\n"; continue; } } $searchd_error = FALSE; if ( $config->NumAgents () == 1 ) { if ( $sd_managed_searchd ) $searchd_ret = 0; else $searchd_ret = StartSearchd ( "config.conf", "error.txt", $sd_pid_file, $error, $config->Requires ( "watchdog" ) ); $stop["config.conf"] = $sd_pid_file; if ( $searchd_ret == 1 ) { if ( !HandleFailure ( $config, $report, $error, $nfailed ) ) $log .= "\tsubtest $subtest: error starting searchd; see $report_file\n"; $searchd_error = TRUE; } else if ( $searchd_ret==2 ) { fwrite ( $report, "$error\n" ); } } else for ( $i = $config->NumAgents () - 1; $i >= 0 && !$searchd_error; $i-- ) { static $agent_id = 0; $agent_id++; $config_file = "config_".$agent_id.".conf"; $pid_file = "searchd_".$agent_id.".pid"; $stop[$config_file] = $pid_file; $msg = ''; $config->SetAgent ( $agents [$i] ); $config->SetPIDFile ( $pid_file ); if ( !$config->WriteConfig ( $config_file, $i, $msg ) ) continue; if ( $sd_managed_searchd ) $searchd_ret = 0; else $searchd_ret = StartSearchd ( $config_file, "error_".$agent_id.".txt", $pid_file, $error, $config->Requires ( "watchdog" ), $config->AddressAPI(), $config->Port() ); if ( $searchd_ret == 1 ) { if ( !HandleFailure ( $config, $report, $error, $nfailed ) ) $log .= "\tsubtest $subtest: error starting searchd; see $report_file\n"; $searchd_error = TRUE; } else if ( $searchd_ret==2 ) { fwrite ( $report, "$error\n" ); } } if ( $searchd_error ) continue; // in case of RT index - run "insert into" instead of indexer if ( $config->IsRt () ) $config->InsertIntoIndexer ( $error ); if ( $config->IsQueryTest () ) { $error = ""; if ( ! $config->RunQuery ( "*", $error ) ) { if ( !HandleFailure ( $config, $report, "$error\n", $nfailed ) ) $log .= "\tsubtest $subtest: query error: $error\n"; continue; } } else { if ( ! $config->RunCustomTest ( $error ) ) { if ( !HandleFailure ( $config, $report, "$error\n", $nfailed ) ) $log .= "\tsubtest $subtest: query error: $error\n"; continue; } } if ( $config->IsSphinxqlTest () ) { $error = ""; if ( ! 
$config->RunQuerySphinxQL ( $error ) ) { if ( !HandleFailure ( $config, $report, "$error\n", $nfailed ) ) $log .= "\tsubtest $subtest: query error: $error\n"; continue; } } $allmatch = $isdemo || IsModelGenMode() || $config->CompareToModel(); if ( !$allmatch ) { $log .= "\tsubtest $subtest: query results mismatch; see $report_file\n"; $nfailed++; } if ( $isdemo ) $log .= "\tdemo/bugreport $subtest done; see $report_file\n"; $config->WriteResults ( $report ); if ( !$allmatch ) { fwrite ( $report, "SUBTEST FAILED, RESULTS ARE DIFFERENT FROM THE REFERENCE:\n\n" ); $config->WriteReferenceResultsDiff ( $report ); } $config->SubtestFinished (); } while ( $config->CreateNextConfig () ); if ( $isdemo ) { $sd_log = $oldlog; $sd_query_log = $oldquerylog; } if ( !$sd_managed_searchd ) foreach ( $stop as $conf=>$pid ) StopSearchd ( $conf, $pid ); $total = $config->SubtestNo()+1; if ( IsModelGenMode () ) { $variant = $config->CheckVariants ( $report_path."_variant.txt" ); if ($variant===false) { $config->WriteModel ( $model_file ); printf ( "$prefix done; %d/%d subtests run\n", $config->SubtestNo(), $nsubtests ); } else { printf ( "$prefix done; %d/%d subtests: VARIANT CHECK FAILED: %s\n", $config->SubtestNo(), $nsubtests, $variant ); $nfailed = $total; } } else if ( $nfailed==0 ) printf ( "$prefix done; %d/%d subtests OK\n", $config->SubtestNo(), $nsubtests ); else printf ( "$prefix done; %d/%d subtests FAILED:\n%s", $nfailed, $nsubtests, $log ); fclose ( $report ); // cleanup DB after ourselves if ( !array_key_exists ('no_drop_db', $g_locals) && isset($connection) ) foreach ( $config->DB_Drop() as $q ) mysql_wr ( $q, $connection ); return array ( "tests_total"=>$total, "tests_failed"=>$nfailed, "tests_skipped"=>0 ); } // // $Id: helpers.inc 3122 2012-02-28 08:57:13Z klirichek $ // sphinx-2.0.4-release/test/test_103/0000755000176700017710000000000011724063141016263 5ustar deogardeogarsphinx-2.0.4-release/test/test_103/test.xml0000644000176700017710000000604111503513132017760 0ustar deogardeogar fullscan filtering on virtual attributes indexer { mem_limit = 16M } searchd { } source srclj { type = mysql sql_query = SELECT id, adtext, heading, postcode, lng, lat, section, make_id, transmission_id FROM test_table sql_attr_float = lng sql_attr_float = lat sql_attr_uint = section sql_attr_uint = make_id sql_attr_uint = transmission_id } index lj { source = srclj path = /lj docinfo = extern charset_type = utf-8 min_word_len = 1 charset_type = sbcs } lng lat @geodist $client->SetGeoAnchor ('lng','lat',-0.0798578,0.937717 ); $client->SetFilterFloatRange ('@geodist', 0.0, 200000.0 ); $results = $client->Query ( $query, $index ); CREATE TABLE `test_table` ( `id` int(11) NOT NULL auto_increment, `section` int(11) NOT NULL, `system_id` tinyint(4) NOT NULL, `adtext` varchar(255) NOT NULL, `heading` varchar(500) NOT NULL, `price` int(11) NOT NULL default '0', `postcode` varchar(10) NOT NULL, `gre` int(11) NOT NULL, `grn` int(11) NOT NULL, `str_at1` varchar(255) NOT NULL, `str_at2` varchar(255) NOT NULL, `str_at3` varchar(255) NOT NULL, `str_at4` varchar(255) NOT NULL, `str_at5` varchar(255) NOT NULL, `int_at1` int(11) default NULL, `int_at2` int(11) default NULL, `int_at3` int(11) default NULL, `int_at4` int(11) default NULL, `int_at5` int(11) default NULL, `float_at1` float default NULL, `float_at2` float default NULL, `float_at3` float default NULL, `lng` float NOT NULL default '0', `lat` float NOT NULL default '0', `make_id` int(11) NOT NULL, `transmission_id` tinyint(4) NOT NULL, PRIMARY KEY (`id`) ) DROP TABLE 
IF EXISTS `test_table` INSERT INTO `test_table` (`id`, `section`, `system_id`, `adtext`, `heading`, `price`, `postcode`, `gre`, `grn`, `str_at1`, `str_at2`, `str_at3`, `str_at4`, `str_at5`, `int_at1`, `int_at2`, `int_at3`, `int_at4`, `int_at5`, `float_at1`, `float_at2`, `float_at3`, `lng`, `lat`, `make_id`, `transmission_id`) VALUES (1, 1, 2, 'FORD', 'Ford KA', 2790, 'EN3 5BT', 535000, 197400, 'Ford', 'KA', 'Grey', 'Diesel', '', 18662, NULL, NULL, NULL, NULL, NULL, NULL, NULL, -0.0798578, 0.937717, 8, 1), (2, 1, 0, 'until', 'Vauxhall Corsa', 5800, 'BN42 4RN', 524000, 106100, 'Vauxhall', 'Corsa', 'Red', 'Petrol', '', 19296, NULL, NULL, NULL, NULL, NULL, NULL, NULL, -0.0799989, 0.891975, 5, 0), (211250, 0, 1, 'Quattro Roadster', 'Audi TT', 13995, 'E9 7DG', 535600, 184200, '', '', '', '', '', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, -0.0721455, 0.926761, 29, 1); sphinx-2.0.4-release/test/test_103/model.bin0000644000176700017710000000153411455516446020074 0ustar deogardeogara:1:{i:0;a:1:{i:0;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:6:"adtext";i:1;s:7:"heading";i:2;s:8:"postcode";}s:5:"attrs";a:6:{s:3:"lng";i:5;s:3:"lat";i:5;s:7:"section";i:1;s:7:"make_id";i:1;s:15:"transmission_id";i:1;s:8:"@geodist";i:5;}s:7:"matches";a:2:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:6:{s:3:"lng";d:-0.0798577964305877685546875;s:3:"lat";d:0.937717020511627197265625;s:7:"section";s:1:"1";s:7:"make_id";s:1:"8";s:15:"transmission_id";s:1:"1";s:8:"@geodist";d:0;}}i:211250;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:6:{s:3:"lng";d:-0.072145499289035797119140625;s:3:"lat";d:0.926760971546173095703125;s:7:"section";s:1:"0";s:7:"make_id";s:2:"29";s:15:"transmission_id";s:1:"1";s:8:"@geodist";d:85369.6015625;}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.000";s:5:"query";s:0:"";}}}sphinx-2.0.4-release/test/test_099/0000755000176700017710000000000011724063141016301 5ustar deogardeogarsphinx-2.0.4-release/test/test_099/test.xml0000644000176700017710000001104011605620330017773 0ustar deogardeogar block index vs MVA indexer { mem_limit = 16M } searchd { } source src_base { type = mysql } source src1 : src_base { sql_query = SELECT document_id, text, mva1 FROM main_table sql_attr_multi = uint mva2 from query; SELECT word_document_id, word_tag_id FROM tag_table; sql_attr_multi = uint mva1 from field mva1 } source src2 : src_base { sql_query = SELECT document_id, text, attr1, mva1 FROM main_table2 sql_attr_multi = uint mva1 from field mva1 sql_attr_uint = attr1 } source src3 : src_base { sql_query = SELECT document_id, text, mva1 FROM main_table3 sql_attr_multi = uint mva2 from query; SELECT word_document_id, word_tag_id FROM tag_table; sql_attr_multi = uint mva1 from field mva1 } source src4 : src_base { sql_query = SELECT document_id, text, mva1 FROM main_table2 sql_attr_multi = uint mva2 from query; SELECT word_document_id, word_tag_id FROM tag_table; sql_attr_multi = uint mva1 from field mva1 sql_attr_str2ordinal = text } source src1 : src_base { sql_query = SELECT document_id, text, mva1 FROM main_table sql_attr_multi = bigint mva2 from query; SELECT word_document_id, word_tag_id FROM tag_table; sql_attr_multi = bigint mva1 from field mva1 } source src2 : src_base { sql_query = SELECT document_id, text, attr1, mva1 FROM main_table2 sql_attr_multi = bigint mva1 from field mva1 sql_attr_uint = attr1 } source src3 : src_base { sql_query = SELECT document_id, text, mva1 FROM main_table3 sql_attr_multi = bigint mva2 from query; SELECT word_document_id, 
word_tag_id FROM tag_table; sql_attr_multi = bigint mva1 from field mva1 } source src4 : src_base { sql_query = SELECT document_id, text, mva1 FROM main_table2 sql_attr_multi = bigint mva2 from query; SELECT word_document_id, word_tag_id FROM tag_table; sql_attr_multi = bigint mva1 from field mva1 sql_attr_str2ordinal = text } index main { source = src1 source = src2 source = src3 path = /main1 docinfo = extern charset_type = utf-8 min_word_len = 1 } test1 test511 test2048 test4000 CREATE TABLE `main_table` ( `document_id` int(11), `text` varchar(255) NOT NULL, `mva1` varchar(255) NOT NULL ); CREATE TABLE `main_table2` ( `document_id` int(11) DEFAULT NULL, `text` varchar(255) NOT NULL, `attr1` int(11) DEFAULT NULL, `mva1` varchar(255) NOT NULL ); CREATE TABLE `main_table3` ( `document_id` int(11), `text` varchar(255) NOT NULL, `mva1` varchar(255) NOT NULL ); CREATE TABLE `tag_table` ( `word_document_id` int(11) NOT NULL, `word_tag_id` int(11) NOT NULL ); DROP TABLE IF EXISTS `main_table` DROP TABLE IF EXISTS `main_table2` DROP TABLE IF EXISTS `main_table3` DROP TABLE IF EXISTS `tag_table` INSERT INTO `main_table` VALUES ( 1, 'test1', 'mva1' ), ( 511, 'test511', 'mva511' ), ( 2048, 'test2048', 'mva2048' ), ( 4000, 'test4000', 'mva4000' ); sphinx-2.0.4-release/test/test_099/model.bin0000644000176700017710000002162011605620330020071 0ustar deogardeogara:6:{i:0;a:1:{i:0;s:6:"failed";}i:1;a:1:{i:0;s:6:"failed";}i:2;a:11:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:2:{s:5:"attr1";i:1;s:4:"mva1";i:1073741825;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:5:"attr1";s:3:"101";s:4:"mva1";a:1:{i:0;s:4:"1001";}}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"test1";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"test1";}i:1;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:2:{s:5:"attr1";i:1;s:4:"mva1";i:1073741825;}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"test511";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"test511";}i:2;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:2:{s:5:"attr1";i:1;s:4:"mva1";i:1073741825;}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:8:"test2048";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:8:"test2048";}i:3;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:2:{s:5:"attr1";i:1;s:4:"mva1";i:1073741825;}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:8:"test4000";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:8:"test4000";}i:4;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:2:{s:5:"attr1";i:1;s:4:"mva1";i:1073741825;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:5:"attr1";s:3:"101";s:4:"mva1";a:1:{i:0;s:4:"1001";}}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:5;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s
:5:"attrs";a:2:{s:5:"attr1";i:1;s:4:"mva1";i:1073741825;}s:7:"matches";a:1:{i:64;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:5:"attr1";s:3:"164";s:4:"mva1";a:1:{i:0;s:4:"1064";}}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:6;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:2:{s:5:"attr1";i:1;s:4:"mva1";i:1073741825;}s:7:"matches";a:1:{i:65;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:5:"attr1";s:3:"165";s:4:"mva1";a:1:{i:0;s:4:"1065";}}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:7;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:2:{s:5:"attr1";i:1;s:4:"mva1";i:1073741825;}s:7:"matches";a:1:{i:127;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:5:"attr1";s:3:"227";s:4:"mva1";a:1:{i:0;s:4:"1127";}}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:8;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:2:{s:5:"attr1";i:1;s:4:"mva1";i:1073741825;}s:7:"matches";a:1:{i:128;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:5:"attr1";s:3:"228";s:4:"mva1";a:1:{i:0;s:4:"1128";}}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:9;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:2:{s:5:"attr1";i:1;s:4:"mva1";i:1073741825;}s:7:"matches";a:1:{i:129;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:5:"attr1";s:3:"229";s:4:"mva1";a:1:{i:0;s:4:"1129";}}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:10;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:2:{s:5:"attr1";i:1;s:4:"mva1";i:1073741825;}s:7:"matches";a:1:{i:130;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:5:"attr1";s:3:"230";s:4:"mva1";a:1:{i:0;s:4:"1130";}}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}}i:3;a:11:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:2:{s:5:"attr1";i:1;s:4:"mva1";i:1073741825;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:5:"attr1";s:3:"101";s:4:"mva1";a:1:{i:0;s:4:"1001";}}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"test1";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"test1";}i:1;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:2:{s:5:"attr1";i:1;s:4:"mva1";i:1073741825;}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"test511";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"test511";}i:2;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:2:{s:5:"attr1";i:1;s:4:"mva1";i:1073741825;}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:8:"test2048";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resar
ray";i:0;s:8:"roundoff";i:0;s:5:"query";s:8:"test2048";}i:3;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:2:{s:5:"attr1";i:1;s:4:"mva1";i:1073741825;}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:8:"test4000";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:8:"test4000";}i:4;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:2:{s:5:"attr1";i:1;s:4:"mva1";i:1073741825;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:5:"attr1";s:3:"101";s:4:"mva1";a:1:{i:0;s:4:"1001";}}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:5;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:2:{s:5:"attr1";i:1;s:4:"mva1";i:1073741825;}s:7:"matches";a:1:{i:64;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:5:"attr1";s:3:"164";s:4:"mva1";a:1:{i:0;s:4:"1064";}}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:6;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:2:{s:5:"attr1";i:1;s:4:"mva1";i:1073741825;}s:7:"matches";a:1:{i:65;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:5:"attr1";s:3:"165";s:4:"mva1";a:1:{i:0;s:4:"1065";}}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:7;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:2:{s:5:"attr1";i:1;s:4:"mva1";i:1073741825;}s:7:"matches";a:1:{i:127;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:5:"attr1";s:3:"227";s:4:"mva1";a:1:{i:0;s:4:"1127";}}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:8;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:2:{s:5:"attr1";i:1;s:4:"mva1";i:1073741825;}s:7:"matches";a:1:{i:128;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:5:"attr1";s:3:"228";s:4:"mva1";a:1:{i:0;s:4:"1128";}}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:9;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:2:{s:5:"attr1";i:1;s:4:"mva1";i:1073741825;}s:7:"matches";a:1:{i:129;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:5:"attr1";s:3:"229";s:4:"mva1";a:1:{i:0;s:4:"1129";}}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:10;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:2:{s:5:"attr1";i:1;s:4:"mva1";i:1073741825;}s:7:"matches";a:1:{i:130;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:5:"attr1";s:3:"230";s:4:"mva1";a:1:{i:0;s:4:"1130";}}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}}i:4;a:1:{i:0;s:6:"failed";}i:5;a:1:{i:0;s:6:"failed";}}sphinx-2.0.4-release/test/stopwords_121.txt0000644000176700017710000000000311401367030020076 0ustar 
deogardeogaraddsphinx-2.0.4-release/test/test_135/0000755000176700017710000000000011724063141016270 5ustar deogardeogarsphinx-2.0.4-release/test/test_135/test.xml0000644000176700017710000000421111634664540020001 0ustar deogardeogar persistent MVA updates surviving after hard killing part 1 - *nix only indexer { mem_limit = 16M } searchd { binlog_flush = 2 binlog_path = workers = threads } source src { type = mysql sql_query = SELECT id, text, section, mva1 FROM test_table sql_attr_uint = section sql_attr_multi = uint mva1 from field mva1 sql_attr_multi = bigint mva1 from field mva1 } index idx_130 { source = src path = /idx_130 charset_type = utf-8 docinfo = extern } UpdateAttributes ( "idx_130", array("mva1"), array(1=>array(array(2,3,4)), 3=>array(array(6,7,8))),true); if ( $up >= 0 ) $results[] = sprintf("up.ok=%d", $up); else $results[] = sprintf("up.err=%s", $client->GetLastError()); KillSearchd ( 'config.conf', 'searchd.pid', 9, false ); usleep ( 50000 ); $error = ""; $startSta = StartSearchd ( 'config.conf', 'error.txt', 'searchd.pid', $error ); if ( $startSta == 0 || $startSta == 2 ) { $results[] = "started=ok"; } else $results[] = sprintf("start.err=%d local=%s client=%s", $startSta, $error, $client->GetLastError()); } // find the updated match $results[] = $client->Query ( "test3"); // final fixup; we don't want to compare times for ( $i=0; $i CREATE TABLE `test_table` ( `id` int(11) DEFAULT NULL, `text` varchar (255) NOT NULL, `section` int(11) DEFAULT NULL, `mva1` varchar(255) NOT NULL ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` (`id`, `text`, `section`, `mva1`) VALUES (1, 'test1', 101, '1001'), (2, 'test2', 102, '1002 1023 4456'), (3, 'test3', 103, '1003 1008 1010'), (4, 'test4', 104, '1004 1005 1006'); sphinx-2.0.4-release/test/test_135/model.bin0000644000176700017710000000232611626241641020071 0ustar deogardeogara:2:{i:0;a:1:{i:0;a:13:{i:0;s:11:"iteration=0";i:1;s:7:"up.ok=2";i:2;s:10:"started=ok";i:3;s:11:"iteration=1";i:4;s:7:"up.ok=2";i:5;s:10:"started=ok";i:6;s:11:"iteration=2";i:7;s:7:"up.ok=2";i:8;s:10:"started=ok";i:9;s:11:"iteration=3";i:10;s:7:"up.ok=2";i:11;s:10:"started=ok";i:12;a:6:{s:5:"error";s:0:"";s:5:"attrs";a:2:{s:7:"section";i:1;s:4:"mva1";i:1073741825;}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:7:"section";i:103;s:4:"mva1";a:3:{i:0;i:6;i:1;i:7;i:2;i:8;}}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:1:{s:5:"test3";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}}}i:1;a:1:{i:0;a:13:{i:0;s:11:"iteration=0";i:1;s:7:"up.ok=2";i:2;s:10:"started=ok";i:3;s:11:"iteration=1";i:4;s:7:"up.ok=2";i:5;s:10:"started=ok";i:6;s:11:"iteration=2";i:7;s:7:"up.ok=2";i:8;s:10:"started=ok";i:9;s:11:"iteration=3";i:10;s:7:"up.ok=2";i:11;s:10:"started=ok";i:12;a:6:{s:5:"error";s:0:"";s:5:"attrs";a:2:{s:7:"section";i:1;s:4:"mva1";i:1073741825;}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:7:"section";i:103;s:4:"mva1";a:3:{i:0;i:6;i:1;i:7;i:2;i:8;}}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:1:{s:5:"test3";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}}}}sphinx-2.0.4-release/test/test-templates.xml0000644000176700017710000000216711560056631020430 0ustar deogardeogar regular index test template indexer { mem_limit = 16M } searchd { } source test { type = mysql sql_query = select * from test_table sql_attr_uint = gid } index test { source = test path = /test } CREATE TABLE test_table ( id INTEGER PRIMARY KEY NOT NULL, gid INTEGER NOT NULL, title VARCHAR(255) NOT NULL ); DROP TABLE IF 
EXISTS test_table; INSERT INTO test_table VALUES ( 1, 123, 'hello world' ); hello RT: test template indexer { mem_limit = 16M } searchd { workers = threads } index test { type = rt path = data/test rt_attr_uint = gid rt_field = title } insert into test ( id, gid, title ) values ( 1, 123, 'hello world' ) select * from test sphinx-2.0.4-release/test/test_106/0000755000176700017710000000000011724063141016266 5ustar deogardeogarsphinx-2.0.4-release/test/test_106/test.xml0000644000176700017710000000266411324645270020004 0ustar deogardeogar extended sort with more than 1 sorter indexer { mem_limit = 16M } searchd { } source srctest { type = mysql sql_query_pre = SET time_zone='+0:00' sql_query = SELECT id, UNIX_TIMESTAMP(date_added) as date_added, text FROM test_table sql_attr_timestamp = date_added } index test { source = srctest path = /test docinfo = extern } CREATE TABLE `test_table` ( id int(11) NOT NULL default '0', date_added date not null, text varchar(255) not null default 'text' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 1, '2008-10-01', 'one' ), ( 2, '2008-10-02', 'one two' ), ( 3, '2008-10-03', 'one two three' ), ( 4, '2008-10-01', 'one' ), ( 5, '2008-10-02', 'one two' ), ( 6, '2008-10-03', 'one two three' ), ( 7, '2008-10-07', 'one' ), ( 8, '2008-10-08', 'one two' ), ( 9, '2008-10-09', 'one two three' ); one|two|three one|two|three one|two|three sphinx-2.0.4-release/test/test_106/model.bin0000644000176700017710000000720011324645270020064 0ustar deogardeogara:1:{i:0;a:3:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:1:{s:10:"date_added";i:2;}s:7:"matches";a:9:{i:9;a:2:{s:6:"weight";s:4:"3442";s:5:"attrs";a:1:{s:10:"date_added";i:1223510400;}}i:8;a:2:{s:6:"weight";s:4:"2414";s:5:"attrs";a:1:{s:10:"date_added";i:1223424000;}}i:7;a:2:{s:6:"weight";s:4:"1427";s:5:"attrs";a:1:{s:10:"date_added";i:1223337600;}}i:3;a:2:{s:6:"weight";s:4:"3442";s:5:"attrs";a:1:{s:10:"date_added";i:1222992000;}}i:6;a:2:{s:6:"weight";s:4:"3442";s:5:"attrs";a:1:{s:10:"date_added";i:1222992000;}}i:2;a:2:{s:6:"weight";s:4:"2414";s:5:"attrs";a:1:{s:10:"date_added";i:1222905600;}}i:5;a:2:{s:6:"weight";s:4:"2414";s:5:"attrs";a:1:{s:10:"date_added";i:1222905600;}}i:1;a:2:{s:6:"weight";s:4:"1427";s:5:"attrs";a:1:{s:10:"date_added";i:1222819200;}}i:4;a:2:{s:6:"weight";s:4:"1427";s:5:"attrs";a:1:{s:10:"date_added";i:1222819200;}}}s:5:"total";s:1:"9";s:11:"total_found";s:1:"9";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:3:"one";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:3:"two";a:2:{s:4:"docs";s:1:"6";s:4:"hits";s:1:"6";}s:5:"three";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:13:"one|two|three";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:1:{s:10:"date_added";i:2;}s:7:"matches";a:9:{i:9;a:2:{s:6:"weight";s:4:"3442";s:5:"attrs";a:1:{s:10:"date_added";i:1223510400;}}i:6;a:2:{s:6:"weight";s:4:"3442";s:5:"attrs";a:1:{s:10:"date_added";i:1222992000;}}i:3;a:2:{s:6:"weight";s:4:"3442";s:5:"attrs";a:1:{s:10:"date_added";i:1222992000;}}i:8;a:2:{s:6:"weight";s:4:"2414";s:5:"attrs";a:1:{s:10:"date_added";i:1223424000;}}i:5;a:2:{s:6:"weight";s:4:"2414";s:5:"attrs";a:1:{s:10:"date_added";i:1222905600;}}i:2;a:2:{s:6:"weight";s:4:"2414";s:5:"attrs";a:1:{s:10:"date_added";i:1222905600;}}i:7;a:2:{s:6:"weight";s:4:"1427";s:5:"attrs";a:1:{s:10:"date_added";i:1223337600;}}i:4;a:2:{s:6:"weight";s:4:"1427";s:5:"attrs";a:1:{s:1
0:"date_added";i:1222819200;}}i:1;a:2:{s:6:"weight";s:4:"1427";s:5:"attrs";a:1:{s:10:"date_added";i:1222819200;}}}s:5:"total";s:1:"9";s:11:"total_found";s:1:"9";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:3:"one";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:3:"two";a:2:{s:4:"docs";s:1:"6";s:4:"hits";s:1:"6";}s:5:"three";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:13:"one|two|three";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:1:{s:10:"date_added";i:2;}s:7:"matches";a:9:{i:9;a:2:{s:6:"weight";s:4:"3442";s:5:"attrs";a:1:{s:10:"date_added";i:1223510400;}}i:6;a:2:{s:6:"weight";s:4:"3442";s:5:"attrs";a:1:{s:10:"date_added";i:1222992000;}}i:3;a:2:{s:6:"weight";s:4:"3442";s:5:"attrs";a:1:{s:10:"date_added";i:1222992000;}}i:8;a:2:{s:6:"weight";s:4:"2414";s:5:"attrs";a:1:{s:10:"date_added";i:1223424000;}}i:5;a:2:{s:6:"weight";s:4:"2414";s:5:"attrs";a:1:{s:10:"date_added";i:1222905600;}}i:2;a:2:{s:6:"weight";s:4:"2414";s:5:"attrs";a:1:{s:10:"date_added";i:1222905600;}}i:7;a:2:{s:6:"weight";s:4:"1427";s:5:"attrs";a:1:{s:10:"date_added";i:1223337600;}}i:4;a:2:{s:6:"weight";s:4:"1427";s:5:"attrs";a:1:{s:10:"date_added";i:1222819200;}}i:1;a:2:{s:6:"weight";s:4:"1427";s:5:"attrs";a:1:{s:10:"date_added";i:1222819200;}}}s:5:"total";s:1:"9";s:11:"total_found";s:1:"9";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:3:"one";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:3:"two";a:2:{s:4:"docs";s:1:"6";s:4:"hits";s:1:"6";}s:5:"three";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:13:"one|two|three";}}}sphinx-2.0.4-release/test/test_008/0000755000176700017710000000000011724063141016267 5ustar deogardeogarsphinx-2.0.4-release/test/test_008/test.xml0000644000176700017710000000601311503513132017763 0ustar deogardeogar @geodist sorting indexer { mem_limit = 16M } searchd { } source srclj { type = mysql sql_query = SELECT id, adtext, heading, postcode, lng, lat, section, make_id, transmission_id FROM test_table sql_attr_float = lng sql_attr_float = lat sql_attr_uint = section sql_attr_uint = make_id sql_attr_uint = transmission_id } index lj { source = srclj path = /lj docinfo = extern charset_type = utf-8 min_word_len = 1 charset_type = sbcs } lng lat @geodist $client->SetGeoAnchor ('lng','lat',-0.0798578,0.937717 ); $client->SetSortMode ( SPH_SORT_EXTENDED, '@geodist DESC' ); $results = $client->Query ( $query, $index ); CREATE TABLE `test_table` ( `id` int(11) NOT NULL auto_increment, `section` int(11) NOT NULL, `system_id` tinyint(4) NOT NULL, `adtext` varchar(255) NOT NULL, `heading` varchar(500) NOT NULL, `price` int(11) NOT NULL default '0', `postcode` varchar(10) NOT NULL, `gre` int(11) NOT NULL, `grn` int(11) NOT NULL, `str_at1` varchar(255) NOT NULL, `str_at2` varchar(255) NOT NULL, `str_at3` varchar(255) NOT NULL, `str_at4` varchar(255) NOT NULL, `str_at5` varchar(255) NOT NULL, `int_at1` int(11) default NULL, `int_at2` int(11) default NULL, `int_at3` int(11) default NULL, `int_at4` int(11) default NULL, `int_at5` int(11) default NULL, `float_at1` float default NULL, `float_at2` float default NULL, `float_at3` float default NULL, `lng` float NOT NULL default '0', `lat` float NOT NULL default '0', `make_id` int(11) NOT NULL, `transmission_id` tinyint(4) NOT NULL, PRIMARY KEY (`id`) ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` (`id`, `section`, `system_id`, `adtext`, `heading`, `price`, `postcode`, `gre`, `grn`, `str_at1`, 
`str_at2`, `str_at3`, `str_at4`, `str_at5`, `int_at1`, `int_at2`, `int_at3`, `int_at4`, `int_at5`, `float_at1`, `float_at2`, `float_at3`, `lng`, `lat`, `make_id`, `transmission_id`) VALUES (1, 1, 2, 'FORD', 'Ford KA', 2790, 'EN3 5BT', 535000, 197400, 'Ford', 'KA', 'Grey', 'Diesel', '', 18662, NULL, NULL, NULL, NULL, NULL, NULL, NULL, -0.0798578, 0.937717, 8, 1), (2, 1, 0, 'until', 'Vauxhall Corsa', 5800, 'BN42 4RN', 524000, 106100, 'Vauxhall', 'Corsa', 'Red', 'Petrol', '', 19296, NULL, NULL, NULL, NULL, NULL, NULL, NULL, -0.0799989, 0.891975, 5, 0), (211250, 0, 1, 'Quattro Roadster', 'Audi TT', 13995, 'E9 7DG', 535600, 184200, '', '', '', '', '', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, -0.0721455, 0.926761, 29, 1); sphinx-2.0.4-release/test/test_008/model.bin0000644000176700017710000000211011455516446020067 0ustar deogardeogara:1:{i:0;a:1:{i:0;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:6:"adtext";i:1;s:7:"heading";i:2;s:8:"postcode";}s:5:"attrs";a:6:{s:3:"lng";i:5;s:3:"lat";i:5;s:7:"section";i:1;s:7:"make_id";i:1;s:15:"transmission_id";i:1;s:8:"@geodist";i:5;}s:7:"matches";a:3:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:6:{s:3:"lng";d:-0.079998902976512908935546875;s:3:"lat";d:0.891974985599517822265625;s:7:"section";s:1:"1";s:7:"make_id";s:1:"5";s:15:"transmission_id";s:1:"0";s:8:"@geodist";d:291086.09375;}}i:211250;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:6:{s:3:"lng";d:-0.072145499289035797119140625;s:3:"lat";d:0.926760971546173095703125;s:7:"section";s:1:"0";s:7:"make_id";s:2:"29";s:15:"transmission_id";s:1:"1";s:8:"@geodist";d:85369.6015625;}}i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:6:{s:3:"lng";d:-0.0798577964305877685546875;s:3:"lat";d:0.937717020511627197265625;s:7:"section";s:1:"1";s:7:"make_id";s:1:"8";s:15:"transmission_id";s:1:"1";s:8:"@geodist";d:0;}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"query";s:0:"";}}}sphinx-2.0.4-release/test/test_110/0000755000176700017710000000000011724063141016261 5ustar deogardeogarsphinx-2.0.4-release/test/test_110/test.xml0000644000176700017710000000374111420613070017762 0ustar deogardeogar naive guess about the block index indexer { mem_limit = 16M } searchd { } source src { type = mysql sql_query = SELECT * FROM test_table sql_attr_uint = tag } index idx { source = src path = /main charset_type = utf-8 } one one CREATE TABLE `test_table` ( `document_id` int(11) NOT NULL default '0', `tag` int(11) NOT NULL default '0', `body` varchar(255) NOT NULL default '' ) INSERT INTO test_table VALUES ( 1,1,'' ), ( 2,2,'' ), ( 3,3,'' ), ( 4,4,'' ) INSERT INTO test_table SELECT document_id+4, tag+4, body FROM test_table INSERT INTO test_table SELECT document_id+8, tag+8, body FROM test_table INSERT INTO test_table SELECT document_id+16, tag+16, body FROM test_table INSERT INTO test_table SELECT document_id+32, tag+32, body FROM test_table INSERT INTO test_table SELECT document_id+64, tag+64, body FROM test_table INSERT INTO test_table VALUES ( 200,200,'one' ), ( 201,201,'one two' ), ( 202,202,'one three' ), ( 203,203,'one four' ) DROP TABLE IF EXISTS test_table sphinx-2.0.4-release/test/test_110/model.bin0000644000176700017710000000206611330561733020062 0ustar 
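The test_008 and test_103 scripts above both use the geo-anchor pattern (SetGeoAnchor plus the computed @geodist attribute). A minimal sketch of that pattern, assuming the bundled api/sphinxapi.php client and a searchd on localhost:9312 (both assumptions, not taken from the tests):

<?php
// Hypothetical sketch of the @geodist pattern: set an anchor point, filter by
// distance, and sort on the computed @geodist attribute (coordinates in radians).
require_once ( "api/sphinxapi.php" );                      // assumed client path
$cl = new SphinxClient ();
$cl->SetServer ( "localhost", 9312 );                      // assumed searchd address
$cl->SetGeoAnchor ( 'lng', 'lat', -0.0798578, 0.937717 );  // same call as in the tests above
$cl->SetFilterFloatRange ( '@geodist', 0.0, 200000.0 );    // keep matches within ~200 km
$cl->SetSortMode ( SPH_SORT_EXTENDED, '@geodist ASC' );    // nearest first
$res = $cl->Query ( '', 'lj' );                            // empty query = fullscan, as in test_103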
deogardeogara:1:{i:0;a:2:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:1:{s:3:"tag";i:1;}s:7:"matches";a:4:{i:200;a:2:{s:6:"weight";s:4:"1661";s:5:"attrs";a:1:{s:3:"tag";i:200;}}i:201;a:2:{s:6:"weight";s:4:"1661";s:5:"attrs";a:1:{s:3:"tag";i:201;}}i:202;a:2:{s:6:"weight";s:4:"1661";s:5:"attrs";a:1:{s:3:"tag";i:202;}}i:203;a:2:{s:6:"weight";s:4:"1661";s:5:"attrs";a:1:{s:3:"tag";i:203;}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:3:"one";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:3:"one";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:1:{s:3:"tag";i:1;}s:7:"matches";a:1:{i:200;a:2:{s:6:"weight";s:4:"1661";s:5:"attrs";a:1:{s:3:"tag";i:200;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:3:"one";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:3:"one";}}}sphinx-2.0.4-release/test/test_088/0000755000176700017710000000000011724063141016277 5ustar deogardeogarsphinx-2.0.4-release/test/test_088/test.xml0000644000176700017710000000144611455516446020021 0ustar deogardeogar SphinxQL: batch INSERT indexer { mem_limit = 16M } searchd { workers = threads binlog_path = } index test { type = rt path = data/test rt_field = title rt_field = content } insert into test values (1,'title1','content1'),(2,'title2','content2'),(3,'title3','content3') insert into test (id,title) values (4,'title1'),(5,'title5'),(6,'title6') insert into test (id,content) values (7,'content7'),(8,'content8'),(9,'content9') insert into test (title,content) values ('title','failure'),('title1','failure') select * from test sphinx-2.0.4-release/test/test_088/model.bin0000644000176700017710000000220311455516446020102 0ustar deogardeogara:1:{i:0;a:5:{i:0;a:2:{s:8:"sphinxql";s:95:"insert into test values (1,'title1','content1'),(2,'title2','content2'),(3,'title3','content3')";s:14:"total_affected";i:3;}i:1;a:2:{s:8:"sphinxql";s:73:"insert into test (id,title) values (4,'title1'),(5,'title5'),(6,'title6')";s:14:"total_affected";i:3;}i:2;a:2:{s:8:"sphinxql";s:81:"insert into test (id,content) values (7,'content7'),(8,'content8'),(9,'content9')";s:14:"total_affected";i:3;}i:3;a:3:{s:8:"sphinxql";s:80:"insert into test (title,content) values ('title','failure'),('title1','failure')";s:5:"error";s:39:"column list must contain an 'id' column";s:5:"errno";i:1064;}i:4;a:3:{s:8:"sphinxql";s:18:"select * from test";s:10:"total_rows";i:9;s:4:"rows";a:9:{i:0;a:2:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";}i:1;a:2:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";}i:2;a:2:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";}i:3;a:2:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";}i:4;a:2:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";}i:5;a:2:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";}i:6;a:2:{s:2:"id";s:1:"7";s:6:"weight";s:1:"1";}i:7;a:2:{s:2:"id";s:1:"8";s:6:"weight";s:1:"1";}i:8;a:2:{s:2:"id";s:1:"9";s:6:"weight";s:1:"1";}}}}}sphinx-2.0.4-release/test/test_139/0000755000176700017710000000000011724063141016274 5ustar deogardeogarsphinx-2.0.4-release/test/test_139/test.xml0000644000176700017710000000202511432713622017776 0ustar deogardeogar string attributes vs buffer ovverun indexer { mem_limit = 16M } searchd { } source test { type = mysql sql_query = SELECT * FROM test_table sql_attr_uint = idd sql_attr_string = attr1 sql_attr_string = attr2 } index test { source = test 
path = /test docinfo = extern } CREATE TABLE test_table ( id INT NOT NULL, text VARCHAR(255) NOT NULL, idd INT NOT NULL, attr1 VARCHAR(5000) NOT NULL, attr2 VARCHAR(5000) NOT NULL ); DROP TABLE IF EXISTS test_table; select * from test sphinx-2.0.4-release/test/test_139/model.bin0000644000176700017710000002111211455516446020077 0ustar deogardeogara:1:{i:0;a:1:{i:0;a:3:{s:8:"sphinxql";s:18:"select * from test";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:5:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"idd";s:1:"1";s:5:"attr1";s:4281:" overrun1 overrun2 overrun3 overrun4 overrun5 overrun6 overrun7 overrun8 overrun9 overrun10 overrun11 overrun12 overrun13 overrun14 overrun15 overrun16 overrun17 overrun18 overrun19 overrun20 overrun21 overrun22 overrun23 overrun24 overrun25 overrun26 overrun27 overrun28 overrun29 overrun30 overrun31 overrun32 overrun33 overrun34 overrun35 overrun36 overrun37 overrun38 overrun39 overrun40 overrun41 overrun42 overrun43 overrun44 overrun45 overrun46 overrun47 overrun48 overrun49 overrun50 overrun51 overrun52 overrun53 overrun54 overrun55 overrun56 overrun57 overrun58 overrun59 overrun60 overrun61 overrun62 overrun63 overrun64 overrun65 overrun66 overrun67 overrun68 overrun69 overrun70 overrun71 overrun72 overrun73 overrun74 overrun75 overrun76 overrun77 overrun78 overrun79 overrun80 overrun81 overrun82 overrun83 overrun84 overrun85 overrun86 overrun87 overrun88 overrun89 overrun90 overrun91 overrun92 overrun93 overrun94 overrun95 overrun96 overrun97 overrun98 overrun99 overrun100 overrun101 overrun102 overrun103 overrun104 overrun105 overrun106 overrun107 overrun108 overrun109 overrun110 overrun111 overrun112 overrun113 overrun114 overrun115 overrun116 overrun117 overrun118 overrun119 overrun120 overrun121 overrun122 overrun123 overrun124 overrun125 overrun126 overrun127 overrun128 overrun129 overrun130 overrun131 overrun132 overrun133 overrun134 overrun135 overrun136 overrun137 overrun138 overrun139 overrun140 overrun141 overrun142 overrun143 overrun144 overrun145 overrun146 overrun147 overrun148 overrun149 overrun150 overrun151 overrun152 overrun153 overrun154 overrun155 overrun156 overrun157 overrun158 overrun159 overrun160 overrun161 overrun162 overrun163 overrun164 overrun165 overrun166 overrun167 overrun168 overrun169 overrun170 overrun171 overrun172 overrun173 overrun174 overrun175 overrun176 overrun177 overrun178 overrun179 overrun180 overrun181 overrun182 overrun183 overrun184 overrun185 overrun186 overrun187 overrun188 overrun189 overrun190 overrun191 overrun192 overrun193 overrun194 overrun195 overrun196 overrun197 overrun198 overrun199 overrun200 overrun201 overrun202 overrun203 overrun204 overrun205 overrun206 overrun207 overrun208 overrun209 overrun210 overrun211 overrun212 overrun213 overrun214 overrun215 overrun216 overrun217 overrun218 overrun219 overrun220 overrun221 overrun222 overrun223 overrun224 overrun225 overrun226 overrun227 overrun228 overrun229 overrun230 overrun231 overrun232 overrun233 overrun234 overrun235 overrun236 overrun237 overrun238 overrun239 overrun240 overrun241 overrun242 overrun243 overrun244 overrun245 overrun246 overrun247 overrun248 overrun249 overrun250 overrun251 overrun252 overrun253 overrun254 overrun255 overrun256 overrun257 overrun258 overrun259 overrun260 overrun261 overrun262 overrun263 overrun264 overrun265 overrun266 overrun267 overrun268 overrun269 overrun270 overrun271 overrun272 overrun273 overrun274 overrun275 overrun276 overrun277 overrun278 overrun279 overrun280 overrun281 overrun282 overrun283 overrun284 
overrun285 overrun286 overrun287 overrun288 overrun289 overrun290 overrun291 overrun292 overrun293 overrun294 overrun295 overrun296 overrun297 overrun298 overrun299 overrun300 overrun301 overrun302 overrun303 overrun304 overrun305 overrun306 overrun307 overrun308 overrun309 overrun310 overrun311 overrun312 overrun313 overrun314 overrun315 overrun316 overrun317 overrun318 overrun319 overrun320 overrun321 overrun322 overrun323 overrun324 overrun325 overrun326 overrun327 overrun328 overrun329 overrun330 overrun331 overrun332 overrun333 overrun334 overrun335 overrun336 overrun337 overrun338 overrun339 overrun340 overrun341 overrun342 overrun343 overrun344 overrun345 overrun346 overrun347 overrun348 overrun349 overrun350 overrun351 overrun352 overrun353 overrun354 overrun355 overrun356 overrun357 overrun358 overrun359 overrun360 overrun361 overrun362 overrun363 overrun364 overrun365 overrun366 overrun367 overrun368 overrun369 overrun370 overrun371 overrun372 overrun373 overrun374 overrun375 overrun376 overrun377 overrun378 overrun379 overrun380 overrun381 overrun382 overrun383 overrun384 overrun385 overrun386 overrun387 overrun388 overrun389 overrun390 overrun391 overrun392 overrun393 overrun394 overrun395 overrun396 overrun397 overrun398 overrun399";s:5:"attr2";s:4281:" overrun1 overrun2 overrun3 overrun4 overrun5 overrun6 overrun7 overrun8 overrun9 overrun10 overrun11 overrun12 overrun13 overrun14 overrun15 overrun16 overrun17 overrun18 overrun19 overrun20 overrun21 overrun22 overrun23 overrun24 overrun25 overrun26 overrun27 overrun28 overrun29 overrun30 overrun31 overrun32 overrun33 overrun34 overrun35 overrun36 overrun37 overrun38 overrun39 overrun40 overrun41 overrun42 overrun43 overrun44 overrun45 overrun46 overrun47 overrun48 overrun49 overrun50 overrun51 overrun52 overrun53 overrun54 overrun55 overrun56 overrun57 overrun58 overrun59 overrun60 overrun61 overrun62 overrun63 overrun64 overrun65 overrun66 overrun67 overrun68 overrun69 overrun70 overrun71 overrun72 overrun73 overrun74 overrun75 overrun76 overrun77 overrun78 overrun79 overrun80 overrun81 overrun82 overrun83 overrun84 overrun85 overrun86 overrun87 overrun88 overrun89 overrun90 overrun91 overrun92 overrun93 overrun94 overrun95 overrun96 overrun97 overrun98 overrun99 overrun100 overrun101 overrun102 overrun103 overrun104 overrun105 overrun106 overrun107 overrun108 overrun109 overrun110 overrun111 overrun112 overrun113 overrun114 overrun115 overrun116 overrun117 overrun118 overrun119 overrun120 overrun121 overrun122 overrun123 overrun124 overrun125 overrun126 overrun127 overrun128 overrun129 overrun130 overrun131 overrun132 overrun133 overrun134 overrun135 overrun136 overrun137 overrun138 overrun139 overrun140 overrun141 overrun142 overrun143 overrun144 overrun145 overrun146 overrun147 overrun148 overrun149 overrun150 overrun151 overrun152 overrun153 overrun154 overrun155 overrun156 overrun157 overrun158 overrun159 overrun160 overrun161 overrun162 overrun163 overrun164 overrun165 overrun166 overrun167 overrun168 overrun169 overrun170 overrun171 overrun172 overrun173 overrun174 overrun175 overrun176 overrun177 overrun178 overrun179 overrun180 overrun181 overrun182 overrun183 overrun184 overrun185 overrun186 overrun187 overrun188 overrun189 overrun190 overrun191 overrun192 overrun193 overrun194 overrun195 overrun196 overrun197 overrun198 overrun199 overrun200 overrun201 overrun202 overrun203 overrun204 overrun205 overrun206 overrun207 overrun208 overrun209 overrun210 overrun211 overrun212 overrun213 overrun214 overrun215 overrun216 
overrun217 overrun218 overrun219 overrun220 overrun221 overrun222 overrun223 overrun224 overrun225 overrun226 overrun227 overrun228 overrun229 overrun230 overrun231 overrun232 overrun233 overrun234 overrun235 overrun236 overrun237 overrun238 overrun239 overrun240 overrun241 overrun242 overrun243 overrun244 overrun245 overrun246 overrun247 overrun248 overrun249 overrun250 overrun251 overrun252 overrun253 overrun254 overrun255 overrun256 overrun257 overrun258 overrun259 overrun260 overrun261 overrun262 overrun263 overrun264 overrun265 overrun266 overrun267 overrun268 overrun269 overrun270 overrun271 overrun272 overrun273 overrun274 overrun275 overrun276 overrun277 overrun278 overrun279 overrun280 overrun281 overrun282 overrun283 overrun284 overrun285 overrun286 overrun287 overrun288 overrun289 overrun290 overrun291 overrun292 overrun293 overrun294 overrun295 overrun296 overrun297 overrun298 overrun299 overrun300 overrun301 overrun302 overrun303 overrun304 overrun305 overrun306 overrun307 overrun308 overrun309 overrun310 overrun311 overrun312 overrun313 overrun314 overrun315 overrun316 overrun317 overrun318 overrun319 overrun320 overrun321 overrun322 overrun323 overrun324 overrun325 overrun326 overrun327 overrun328 overrun329 overrun330 overrun331 overrun332 overrun333 overrun334 overrun335 overrun336 overrun337 overrun338 overrun339 overrun340 overrun341 overrun342 overrun343 overrun344 overrun345 overrun346 overrun347 overrun348 overrun349 overrun350 overrun351 overrun352 overrun353 overrun354 overrun355 overrun356 overrun357 overrun358 overrun359 overrun360 overrun361 overrun362 overrun363 overrun364 overrun365 overrun366 overrun367 overrun368 overrun369 overrun370 overrun371 overrun372 overrun373 overrun374 overrun375 overrun376 overrun377 overrun378 overrun379 overrun380 overrun381 overrun382 overrun383 overrun384 overrun385 overrun386 overrun387 overrun388 overrun389 overrun390 overrun391 overrun392 overrun393 overrun394 overrun395 overrun396 overrun397 overrun398 overrun399";}}}}}sphinx-2.0.4-release/test/test_104/0000755000176700017710000000000011724063141016264 5ustar deogardeogarsphinx-2.0.4-release/test/test_104/test.xml0000644000176700017710000000231411327565006017773 0ustar deogardeogar persistent connections - *nix only indexer { mem_limit = 16M } searchd { client_timeout = 5 client_timeout = 1 workers = fork workers = prefork } source srctest { type = mysql sql_query = SELECT id, body FROM test_table } index test_idx { source = srctest path = /test } CREATE TABLE `test_table` ( `id` int(11) NOT NULL default '0', `body` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 1, 'test' ) Open(); $results [] = $client->Query ( 'test' ); $results [] = $client->GetLastError(); sleep ( 2 ); $results [] = $client->Query ( 'test' ); $results [] = $client->GetLastError(); $results [] = $client->Close(); foreach ( $results as &$r ) if ( is_array($r) ) unset ( $r['time'] ); ]]> sphinx-2.0.4-release/test/test_104/model.bin0000644000176700017710000000521611327565006020070 0ustar 
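The test_104 script above exercises persistent connections (Open/Query/Close) against searchd's client_timeout setting. A minimal sketch of that client-side pattern, assuming the bundled api/sphinxapi.php client and a searchd on localhost:9312:

<?php
// Hypothetical sketch: Open() pins a single TCP connection, subsequent queries
// reuse it, Close() releases it; an idle connection is dropped by searchd once
// client_timeout expires, which is exactly what test_104 checks with sleep().
require_once ( "api/sphinxapi.php" );         // assumed client path
$cl = new SphinxClient ();
$cl->SetServer ( "localhost", 9312 );         // assumed searchd address
if ( !$cl->Open() )
	die ( "open failed: " . $cl->GetLastError() . "\n" );
$r1 = $cl->Query ( "test", "test_idx" );      // first query on the persistent connection
$r2 = $cl->Query ( "test", "test_idx" );      // reuses the same connection
$cl->Close();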
deogardeogara:4:{i:0;a:1:{i:0;a:6:{i:0;b:1;i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:1:{s:4:"test";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:2;s:0:"";i:3;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:1:{s:4:"test";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:4;s:0:"";i:5;b:1;}}i:1;a:1:{i:0;a:6:{i:0;b:1;i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:1:{s:4:"test";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:2;s:0:"";i:3;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:1:{s:4:"test";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:4;s:0:"";i:5;b:0;}}i:2;a:1:{i:0;a:6:{i:0;b:1;i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:1:{s:4:"test";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:2;s:0:"";i:3;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:1:{s:4:"test";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:4;s:0:"";i:5;b:1;}}i:3;a:1:{i:0;a:6:{i:0;b:1;i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:1:{s:4:"test";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:2;s:0:"";i:3;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:1:{s:4:"test";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:4;s:0:"";i:5;b:0;}}}sphinx-2.0.4-release/test/test_079/0000755000176700017710000000000011724063141016277 5ustar deogardeogarsphinx-2.0.4-release/test/test_079/test.xml0000644000176700017710000000231511503513132017774 0ustar deogardeogar merge vs out-of-bounds dictionary reads indexer { mem_limit = 16M } searchd { } source delta { type = mysql sql_query = select id, text from sph_test; } index delta { source = delta path = /delta charset_table = W,0..9 } source main { type = mysql sql_query = select 2, 'W0000' as text; } index main { source = main path = /main charset_table = W,0..9 } --merge main delta create table sph_test ( id int not null, text text(262144) not null ) ENGINE=MYISAM drop table if exists sph_test; W0000 W1023 W1024 W1025 W1188 W1498 W1499 sphinx-2.0.4-release/test/test_079/model.bin0000644000176700017710000000541611236030270020072 
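The test_079 definitions above drive "indexer --merge main delta" to merge a delta index into the main one. A hedged sketch of invoking that step from PHP; the config path is an assumption, and the indexer flags shown (--config, --merge DST SRC, --rotate) are the standard ones used by the test harness:

<?php
// Hypothetical sketch: merge index 'delta' into 'main' and rotate the result,
// mirroring the "--merge main delta" step exercised by test_079 above.
$config = "/etc/sphinx/sphinx.conf";           // assumed config location
$cmd = "indexer --config " . escapeshellarg ( $config )
	. " --merge main delta --rotate 2>&1";
exec ( $cmd, $output, $ret );
if ( $ret!=0 )
	print "merge failed:\n" . implode ( "\n", $output ) . "\n";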
0ustar deogardeogara:1:{i:0;a:7:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"W0000";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"W0000";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"W1023";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"W1023";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"W1024";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"W1024";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"W1025";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"W1025";}i:4;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"W1188";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"W1188";}i:5;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"W1498";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"W1498";}i:6;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"W1499";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"W1499";}}}sphinx-2.0.4-release/test/test_013/0000755000176700017710000000000011724063141016263 5ustar deogardeogarsphinx-2.0.4-release/test/test_013/test.xml0000644000176700017710000000341711502165706017775 0ustar deogardeogar prefixes/infixes + UTF-8 indexer { mem_limit = 16M } searchd { } source srclj { type = mysql sql_query = SELECT id, subject, body, author FROM test_table sql_query_pre = SET NAMES utf8 } index lj { source = srclj path = /lj charset_type = utf-8 min_infix_len = 3 enable_star = 1 } чиÑловым *фры умол* *пиÑ* *hod* CREATE TABLE `test_table` ( `id` int(11) NOT NULL default '0', `document_id` int(5) NOT NULL default '0', `subject` varchar(255) NOT NULL default '', `body` varchar(255) NOT 
NULL default '', `author` varchar(255) NOT NULL default '' ) CHARACTER SET utf8; DROP TABLE IF EXISTS `test_table` SET NAMES utf8 INSERT INTO `test_table` VALUES (1,1,'ПоиÑк по чиÑловым значениÑм','У Ð¼ÐµÐ½Ñ Ð²Ð¾Ð¿Ñ€Ð¾Ñ Ð½Ð°Ñчет поиÑка по чиÑловым значениÑм.','CAJAX'), (2,2,'ПоиÑк по чиÑловым значениÑм','Ð’ базе данных вÑтречаютÑÑ Ð·Ð°Ð¿Ð¸Ñи Ñ Ñ‡Ð¸Ñлами, но поиÑк по ним не дает никаких результатов.','CAJAX'), (3,3,'ПоиÑк по чиÑловым значениÑм','Да, по умолчанию цифры ÑчитаютÑÑ ÐºÐ¾Ñ€Ñ€ÐµÐºÑ‚Ð½Ñ‹Ð¼Ð¸ Ñимволами.','shodan') sphinx-2.0.4-release/test/test_013/model.bin0000644000176700017710000000410410730077402020055 0ustar deogardeogara:1:{i:0;a:5:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:16:"чиÑловым";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"4";}}s:5:"query";s:16:"чиÑловым";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"*фры";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"*фры";}i:2;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:9:"умол*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:9:"умол*";}i:3;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:8:"*пиÑ*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:8:"*пиÑ*";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*hod*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"*hod*";}}}sphinx-2.0.4-release/test/test_184/0000755000176700017710000000000011724063141016274 5ustar deogardeogarsphinx-2.0.4-release/test/test_184/test.xml0000644000176700017710000000717311705100515020001 0ustar deogardeogar expression based ranker indexer { mem_limit = 16M } searchd { compat_sphinxql_magics = 0 workers = threads binlog_path = # } source test { type = mysql sql_query = select * from test_table sql_attr_uint = gid sql_field_string = title } index test { source = test path = /test } index dist { type = distributed agent = :test agent_connect_timeout = 1000 agent_query_timeout = 3000 } CREATE TABLE test_table ( id INTEGER PRIMARY KEY NOT NULL, gid INTEGER NOT NULL, title VARCHAR(255) NOT NULL, content VARCHAR(255) NOT NULL ); DROP TABLE IF EXISTS test_table; INSERT INTO test_table VALUES ( 100, 1, 'Seven lies 
multiplied by seven', '' ), ( 101, 1, 'Multiplied by seven again', '' ), ( 102, 1, 'Seven angels with seven trumpets', '' ), ( 103, 1, 'Send them home on a morning train', '' ), ( 104, 1, 'Well who is that shouting?', '' ), ( 105, 1, 'John the Revelator', '' ), ( 106, 1, 'All he ever gives us is pain', '' ), ( 107, 1, 'Well who is that shouting?', '' ), ( 108, 1, 'John the Revelator', '' ), ( 109, 1, 'He should bow his head in shame', '' ), ( 110, 2, 'Mary vs Lamb', 'Mary had a little lamb little lamb little lamb' ), ( 111, 2, 'Mary vs Lamb 2: Return of The Lamb', '...whose fleece was white as snow' ), ( 112, 2, 'Mary vs Lamb 3: The Resurrection', 'Snow! Bloody snow!' ) select id, gid, weight(), title from test where match('seven') and gid=1 select id, gid, weight(), title from test where match('seven') and gid=1 option ranker=expr('sum(lcs)*1000+bm25') select id, gid, weight(), title from test where match('seven') and gid=1 option ranker=expr('sum(300*lcs+700*lcs)+bm25') select id, gid, weight(), title from test where match('seven|lies') and gid=1 option ranker=expr('query_word_count*100+sum(word_count)*10+sum(hit_count)') select id, gid, weight(), title from test where match('seven seven seven seven') and gid=1 option ranker=expr('query_word_count*100+sum(word_count)*10+sum(hit_count)') select id, gid, weight(), title from test where match('seven !se7en') and gid=1 option ranker=expr('query_word_count*100+sum(word_count)*10+sum(hit_count)') select id, gid, weight(), title from test where match('seven !(angels !by)') and gid=1 option ranker=expr('query_word_count*100+sum(word_count)*10+sum(hit_count)') select id, gid, weight(), title from test where match('lamb') and gid=2 option ranker=expr('doc_word_count*1000+sum(word_count)') select id, gid, weight(), title from dist where match('seven|lies') and gid=1 option ranker=expr('query_word_count*100+sum(word_count)*10+sum(hit_count)') select id, gid, weight(), title from dist where match('seven !(angels !by)') and gid=1 option ranker=expr('query_word_count*100+sum(word_count)*10+sum(hit_count)') select id, gid, weight(), title from dist where match('lamb') and gid=2 option ranker=expr('doc_word_count*1000+sum(word_count)') sphinx-2.0.4-release/test/test_184/model.bin0000644000176700017710000001370211705100515020065 0ustar deogardeogara:1:{i:0;a:11:{i:0;a:3:{s:8:"sphinxql";s:72:"select id, gid, weight(), title from test where match('seven') and gid=1";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:4:{s:2:"id";s:3:"100";s:3:"gid";s:1:"1";s:8:"weight()";s:4:"1653";s:5:"title";s:30:"Seven lies multiplied by seven";}i:1;a:4:{s:2:"id";s:3:"102";s:3:"gid";s:1:"1";s:8:"weight()";s:4:"1653";s:5:"title";s:32:"Seven angels with seven trumpets";}i:2;a:4:{s:2:"id";s:3:"101";s:3:"gid";s:1:"1";s:8:"weight()";s:4:"1611";s:5:"title";s:25:"Multiplied by seven again";}}}i:1;a:3:{s:8:"sphinxql";s:113:"select id, gid, weight(), title from test where match('seven') and gid=1 option ranker=expr('sum(lcs)*1000+bm25')";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:4:{s:2:"id";s:3:"100";s:3:"gid";s:1:"1";s:8:"weight()";s:4:"1653";s:5:"title";s:30:"Seven lies multiplied by seven";}i:1;a:4:{s:2:"id";s:3:"102";s:3:"gid";s:1:"1";s:8:"weight()";s:4:"1653";s:5:"title";s:32:"Seven angels with seven trumpets";}i:2;a:4:{s:2:"id";s:3:"101";s:3:"gid";s:1:"1";s:8:"weight()";s:4:"1611";s:5:"title";s:25:"Multiplied by seven again";}}}i:2;a:3:{s:8:"sphinxql";s:120:"select id, gid, weight(), title from test where match('seven') and gid=1 option 
ranker=expr('sum(300*lcs+700*lcs)+bm25')";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:4:{s:2:"id";s:3:"100";s:3:"gid";s:1:"1";s:8:"weight()";s:4:"1653";s:5:"title";s:30:"Seven lies multiplied by seven";}i:1;a:4:{s:2:"id";s:3:"102";s:3:"gid";s:1:"1";s:8:"weight()";s:4:"1653";s:5:"title";s:32:"Seven angels with seven trumpets";}i:2;a:4:{s:2:"id";s:3:"101";s:3:"gid";s:1:"1";s:8:"weight()";s:4:"1611";s:5:"title";s:25:"Multiplied by seven again";}}}i:3;a:3:{s:8:"sphinxql";s:154:"select id, gid, weight(), title from test where match('seven|lies') and gid=1 option ranker=expr('query_word_count*100+sum(word_count)*10+sum(hit_count)')";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:4:{s:2:"id";s:3:"100";s:3:"gid";s:1:"1";s:8:"weight()";s:3:"223";s:5:"title";s:30:"Seven lies multiplied by seven";}i:1;a:4:{s:2:"id";s:3:"102";s:3:"gid";s:1:"1";s:8:"weight()";s:3:"212";s:5:"title";s:32:"Seven angels with seven trumpets";}i:2;a:4:{s:2:"id";s:3:"101";s:3:"gid";s:1:"1";s:8:"weight()";s:3:"211";s:5:"title";s:25:"Multiplied by seven again";}}}i:4;a:3:{s:8:"sphinxql";s:167:"select id, gid, weight(), title from test where match('seven seven seven seven') and gid=1 option ranker=expr('query_word_count*100+sum(word_count)*10+sum(hit_count)')";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:4:{s:2:"id";s:3:"100";s:3:"gid";s:1:"1";s:8:"weight()";s:3:"112";s:5:"title";s:30:"Seven lies multiplied by seven";}i:1;a:4:{s:2:"id";s:3:"102";s:3:"gid";s:1:"1";s:8:"weight()";s:3:"112";s:5:"title";s:32:"Seven angels with seven trumpets";}i:2;a:4:{s:2:"id";s:3:"101";s:3:"gid";s:1:"1";s:8:"weight()";s:3:"111";s:5:"title";s:25:"Multiplied by seven again";}}}i:5;a:3:{s:8:"sphinxql";s:156:"select id, gid, weight(), title from test where match('seven !se7en') and gid=1 option ranker=expr('query_word_count*100+sum(word_count)*10+sum(hit_count)')";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:4:{s:2:"id";s:3:"100";s:3:"gid";s:1:"1";s:8:"weight()";s:3:"112";s:5:"title";s:30:"Seven lies multiplied by seven";}i:1;a:4:{s:2:"id";s:3:"102";s:3:"gid";s:1:"1";s:8:"weight()";s:3:"112";s:5:"title";s:32:"Seven angels with seven trumpets";}i:2;a:4:{s:2:"id";s:3:"101";s:3:"gid";s:1:"1";s:8:"weight()";s:3:"111";s:5:"title";s:25:"Multiplied by seven again";}}}i:6;a:3:{s:8:"sphinxql";s:163:"select id, gid, weight(), title from test where match('seven !(angels !by)') and gid=1 option ranker=expr('query_word_count*100+sum(word_count)*10+sum(hit_count)')";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:4:{s:2:"id";s:3:"100";s:3:"gid";s:1:"1";s:8:"weight()";s:3:"212";s:5:"title";s:30:"Seven lies multiplied by seven";}i:1;a:4:{s:2:"id";s:3:"101";s:3:"gid";s:1:"1";s:8:"weight()";s:3:"211";s:5:"title";s:25:"Multiplied by seven again";}}}i:7;a:3:{s:8:"sphinxql";s:129:"select id, gid, weight(), title from test where match('lamb') and gid=2 option ranker=expr('doc_word_count*1000+sum(word_count)')";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:4:{s:2:"id";s:3:"110";s:3:"gid";s:1:"2";s:8:"weight()";s:4:"1002";s:5:"title";s:12:"Mary vs Lamb";}i:1;a:4:{s:2:"id";s:3:"111";s:3:"gid";s:1:"2";s:8:"weight()";s:4:"1001";s:5:"title";s:34:"Mary vs Lamb 2: Return of The Lamb";}i:2;a:4:{s:2:"id";s:3:"112";s:3:"gid";s:1:"2";s:8:"weight()";s:4:"1001";s:5:"title";s:32:"Mary vs Lamb 3: The Resurrection";}}}i:8;a:3:{s:8:"sphinxql";s:154:"select id, gid, weight(), title from dist where match('seven|lies') and gid=1 option 
ranker=expr('query_word_count*100+sum(word_count)*10+sum(hit_count)')";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:4:{s:2:"id";s:3:"100";s:3:"gid";s:1:"1";s:8:"weight()";s:3:"223";s:5:"title";s:30:"Seven lies multiplied by seven";}i:1;a:4:{s:2:"id";s:3:"102";s:3:"gid";s:1:"1";s:8:"weight()";s:3:"212";s:5:"title";s:32:"Seven angels with seven trumpets";}i:2;a:4:{s:2:"id";s:3:"101";s:3:"gid";s:1:"1";s:8:"weight()";s:3:"211";s:5:"title";s:25:"Multiplied by seven again";}}}i:9;a:3:{s:8:"sphinxql";s:163:"select id, gid, weight(), title from dist where match('seven !(angels !by)') and gid=1 option ranker=expr('query_word_count*100+sum(word_count)*10+sum(hit_count)')";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:4:{s:2:"id";s:3:"100";s:3:"gid";s:1:"1";s:8:"weight()";s:3:"212";s:5:"title";s:30:"Seven lies multiplied by seven";}i:1;a:4:{s:2:"id";s:3:"101";s:3:"gid";s:1:"1";s:8:"weight()";s:3:"211";s:5:"title";s:25:"Multiplied by seven again";}}}i:10;a:3:{s:8:"sphinxql";s:129:"select id, gid, weight(), title from dist where match('lamb') and gid=2 option ranker=expr('doc_word_count*1000+sum(word_count)')";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:4:{s:2:"id";s:3:"110";s:3:"gid";s:1:"2";s:8:"weight()";s:4:"1002";s:5:"title";s:12:"Mary vs Lamb";}i:1;a:4:{s:2:"id";s:3:"111";s:3:"gid";s:1:"2";s:8:"weight()";s:4:"1001";s:5:"title";s:34:"Mary vs Lamb 2: Return of The Lamb";}i:2;a:4:{s:2:"id";s:3:"112";s:3:"gid";s:1:"2";s:8:"weight()";s:4:"1001";s:5:"title";s:32:"Mary vs Lamb 3: The Resurrection";}}}}}sphinx-2.0.4-release/test/test_045/0000755000176700017710000000000011724063141016270 5ustar deogardeogarsphinx-2.0.4-release/test/test_045/test.xml0000644000176700017710000000415111323711623017772 0ustar deogardeogar kill-list (part 2) indexer { mem_limit = 16M } searchd { } source src_main { type = mysql sql_query = SELECT * FROM test_table } source src_delta1 { type = mysql sql_query = SELECT * FROM test_table WHERE document_id=6 sql_query_killlist = SELECT 1 } source src_delta2 { type = mysql sql_query = SELECT * FROM test_table WHERE document_id=7 sql_query_killlist = SELECT 2 } source src_delta3 { type = mysql sql_query = SELECT * FROM test_table WHERE document_id=8 sql_query_killlist = SELECT 3 } index idx_main { source = src_main path = /test_main } index idx_delta1 { source = src_delta1 path = /test_delta1 } index idx_delta2 { source = src_delta2 path = /test_delta2 } index idx_delta3 { source = src_delta3 path = /test_delta3 } CREATE TABLE test_table ( `document_id` int(11) NOT NULL default '0', `body` varchar(255) NOT NULL default '' ); DROP TABLE IF EXISTS test_table; INSERT INTO test_table VALUES ( 1, 'test one' ), ( 2, 'test two' ), ( 3, 'test three' ), ( 4, 'test four' ), ( 5, 'test five' ), ( 6, 'test delta1' ), ( 7, 'test delta2' ), ( 8, 'test delta3' ) SetFilter ( "@id", array(1,2,3,4) ); $client->AddQuery ( "test", "idx_main idx_delta1 idx_delta2 idx_delta3" ); $client->ResetFilters (); $client->SetFilter ( "@id", array(5,6,7,8) ); $client->AddQuery ( "test", "idx_main idx_delta1 idx_delta2 idx_delta3" ); $client->ResetFilters (); $client->SetFilter ( "@id", array(1,2,3,4,5,6,7,8), "idx_main idx_delta1 idx_delta2 idx_delta3" ); $client->SetSortMode ( SPH_SORT_EXTENDED, "@id DESC" ); $client->AddQuery ( "test" ); $results = $client->RunQueries (); if ( is_array($results) ) for ( $i=0; $i sphinx-2.0.4-release/test/test_045/model.bin0000644000176700017710000000242311070750004020056 0ustar 
deogardeogara:1:{i:0;a:1:{i:0;a:3:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:1:{s:4:"test";a:2:{s:4:"docs";s:2:"11";s:4:"hits";s:2:"11";}}}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:4:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:8;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:5:"words";a:1:{s:4:"test";a:2:{s:4:"docs";s:2:"11";s:4:"hits";s:2:"11";}}}i:2;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:5:{i:8;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:5:"words";a:1:{s:4:"test";a:2:{s:4:"docs";s:2:"11";s:4:"hits";s:2:"11";}}}}}}sphinx-2.0.4-release/test/test_115/0000755000176700017710000000000011724063141016266 5ustar deogardeogarsphinx-2.0.4-release/test/test_115/test.xml0000644000176700017710000001164011531476750020003 0ustar deogardeogar NEAR syntax indexer { mem_limit = 16M } searchd { } source src { type = mysql sql_query = SELECT * FROM test_table } index idx { source = src path = /main charset_type = utf-8 blend_chars = U+002D # '-' } "a b" NEAR/2 "c d" "c d" NEAR/2 "a b" a b NEAR/2 c d a NEAR/2 b NEAR/5 c NEAR/2 d a NEAR/3 b NEAR/3 c NEAR/3 d a NEAR/3 d NEAR/3 b NEAR/3 c (a b c) NEAR/3 d burden NEAR/2 (financial share) burden NEAR/2 (share financial) (share financial) NEAR/2 burden (financial share) NEAR/2 burden "a b" NEAR/2 "c d" NEAR/2 "f g" a NEAR/3 b NEAR/3 c NEAR/3 d "ARE NEAR" "ARE NEAR" "ARE NEAR" "ARE NEAR" "ARE NEAR" five NEAR/3 one six NEAR/3 one (five|six) NEAR/3 one aleph NEAR/2 gimel bet NEAR/3 he aleph-bet-gimel NEAR/3 he select * from idx where match ('aleph NEAR/2 gimel') select * from idx where match ('bet NEAR/3 he') select * from idx where match ('bet NEAR/2 he') select * from idx where match ('aleph-bet-gimel NEAR/4 he') select * from idx where match ('aleph-bet-gimel NEAR/3 he') call snippets ('aleph-bet-gimel dalet he wav zajin het', 'idx', 'gimel NEAR/2 he', 1 as query_mode) call snippets ('aleph-bet-gimel dalet he wav zajin het', 'idx', 'bet NEAR/3 he', 1 as query_mode) call snippets ('aleph-bet-gimel dalet he wav zajin het', 'idx', 'aleph NEAR/2 gimel', 1 as query_mode) CALL SNIPPETS ('U.S.S.S.B s s s s today s s s s s tomorrow', 'idx', 'B NEAR/7 today', 1 as query_mode) CALL SNIPPETS ('U.S.S.S.A s s s s today s s s s s tomorrow', 'idx', 'A NEAR/7 today', 1 as query_mode) select * from idx where match ('oy NEAR/1 vey') and @id>=22 CREATE TABLE test_table ( id INTEGER AUTO_INCREMENT PRIMARY KEY NOT NULL, title VARCHAR(16384) NOT NULL ); INSERT INTO `test_table` VALUES ( 1, 'a b c d' ), ( 2, 'a x b c d' ), ( 3, 'a x x b c d' ), ( 4, 'a b x c d'), ( 5, 'a b x x c d' ), ( 6, 'a b x x x c d' ), ( 7, 'a b x x x x c d' ), ( 8, 'a x b x x x x c x d' ), ( 9, 'a x x b x x x c x x d' ), ( 10, 'c d x x x x a b' ), ( 11, 'c d x x x a b' ), ( 12, 'c d x x a b' ), ( 13, 'c d x a b' ), ( 14, 'c d a b' ), ( 15, 
'... is the clearinghouse associated with such exchange. In general, clearinghouses are backed by the corporate members of the clearinghouse who are required to share any financial burden resulting from the non-performance by one of their members and, as such, should significantly reduce this credit risk. In cases where the clearinghouse... is the clearinghouse associated with such exchange. In general, clearinghouses are backed by the corporate members of the clearinghouse who are required to share any financial burden resulting from the non-performance by one of their members and, as such, should significantly reduce this credit risk. In cases where the clearinghouse... be able to meet its obligations to a Trading Company. The counterparty for futures contracts traded in the United States and on most foreign exchanges is the clearinghouse associated with such exchange. In general, clearinghouses are backed by the corporate members of the clearinghouse who are required to share any financial...' ), ( 16, 'one two three four five six seven eight nine ten eleven twelve thirteen fourteen fifteena' ), ( 17, 'aleph-bet-gimel dalet he wav zajin het' ), ( 20, 'ein oy vey' ), ( 21, CONCAT('zwei ',REPEAT('oy vey ho ho ho ',1024)) ), ( 22, 'oy vey drei' ) DROP TABLE IF EXISTS test_table sphinx-2.0.4-release/test/test_115/model.bin0000644000176700017710000004051511531476750020077 0ustar deogardeogara:1:{i:0;a:35:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:0:{}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.042";s:5:"words";a:4:{s:1:"a";a:2:{s:4:"docs";s:2:"15";s:4:"hits";s:2:"15";}s:1:"b";a:2:{s:4:"docs";s:2:"14";s:4:"hits";s:2:"14";}s:1:"c";a:2:{s:4:"docs";s:2:"14";s:4:"hits";s:2:"14";}s:1:"d";a:2:{s:4:"docs";s:2:"14";s:4:"hits";s:2:"14";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:18:""a b" NEAR/2 "c d"";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:0:{}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.001";s:5:"words";a:4:{s:1:"c";a:2:{s:4:"docs";s:2:"14";s:4:"hits";s:2:"14";}s:1:"d";a:2:{s:4:"docs";s:2:"14";s:4:"hits";s:2:"14";}s:1:"a";a:2:{s:4:"docs";s:2:"15";s:4:"hits";s:2:"15";}s:1:"b";a:2:{s:4:"docs";s:2:"14";s:4:"hits";s:2:"14";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:18:""c d" NEAR/2 "a 
b"";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:0:{}s:7:"matches";a:4:{i:4;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}i:1;a:2:{s:6:"weight";s:4:"3444";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"2444";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"2444";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.001";s:5:"words";a:4:{s:1:"a";a:2:{s:4:"docs";s:2:"15";s:4:"hits";s:2:"15";}s:1:"b";a:2:{s:4:"docs";s:2:"14";s:4:"hits";s:2:"14";}s:1:"c";a:2:{s:4:"docs";s:2:"14";s:4:"hits";s:2:"14";}s:1:"d";a:2:{s:4:"docs";s:2:"14";s:4:"hits";s:2:"14";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:14:"a b NEAR/2 c d";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:0:{}s:7:"matches";a:11:{i:1;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}i:6;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}i:7;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}i:8;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}}s:5:"total";s:2:"11";s:11:"total_found";s:2:"11";s:4:"time";s:5:"0.001";s:5:"words";a:4:{s:1:"a";a:2:{s:4:"docs";s:2:"15";s:4:"hits";s:2:"15";}s:1:"b";a:2:{s:4:"docs";s:2:"14";s:4:"hits";s:2:"14";}s:1:"c";a:2:{s:4:"docs";s:2:"14";s:4:"hits";s:2:"14";}s:1:"d";a:2:{s:4:"docs";s:2:"14";s:4:"hits";s:2:"14";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:28:"a NEAR/2 b NEAR/5 c NEAR/2 d";}i:4;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:1;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.001";s:5:"words";a:4:{s:1:"a";a:2:{s:4:"docs";s:2:"15";s:4:"hits";s:2:"15";}s:1:"b";a:2:{s:4:"docs";s:2:"14";s:4:"hits";s:2:"14";}s:1:"c";a:2:{s:4:"docs";s:2:"14";s:4:"hits";s:2:"14";}s:1:"d";a:2:{s:4:"docs";s:2:"14";s:4:"hits";s:2:"14";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:28:"a NEAR/3 b NEAR/3 c NEAR/3 
d";}i:5;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:1;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.001";s:5:"words";a:4:{s:1:"a";a:2:{s:4:"docs";s:2:"15";s:4:"hits";s:2:"15";}s:1:"d";a:2:{s:4:"docs";s:2:"14";s:4:"hits";s:2:"14";}s:1:"b";a:2:{s:4:"docs";s:2:"14";s:4:"hits";s:2:"14";}s:1:"c";a:2:{s:4:"docs";s:2:"14";s:4:"hits";s:2:"14";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:28:"a NEAR/3 d NEAR/3 b NEAR/3 c";}i:6;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:1;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.001";s:5:"words";a:4:{s:1:"a";a:2:{s:4:"docs";s:2:"15";s:4:"hits";s:2:"15";}s:1:"b";a:2:{s:4:"docs";s:2:"14";s:4:"hits";s:2:"14";}s:1:"c";a:2:{s:4:"docs";s:2:"14";s:4:"hits";s:2:"14";}s:1:"d";a:2:{s:4:"docs";s:2:"14";s:4:"hits";s:2:"14";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:16:"(a b c) NEAR/3 d";}i:7;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:15;a:2:{s:6:"weight";s:4:"3836";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:6:"burden";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"2";}s:9:"financial";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"3";}s:5:"share";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:31:"burden NEAR/2 (financial share)";}i:8;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:15;a:2:{s:6:"weight";s:4:"3836";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:6:"burden";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"2";}s:5:"share";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"3";}s:9:"financial";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:32:"burden NEAR/2 (share financial) ";}i:9;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:15;a:2:{s:6:"weight";s:4:"3836";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:5:"share";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"3";}s:9:"financial";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"3";}s:6:"burden";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:31:"(share financial) NEAR/2 
burden";}i:10;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:15;a:2:{s:6:"weight";s:4:"3836";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:9:"financial";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"3";}s:5:"share";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"3";}s:6:"burden";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:31:"(financial share) NEAR/2 burden";}i:11;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:5:"words";a:6:{s:1:"a";a:2:{s:4:"docs";s:2:"15";s:4:"hits";s:2:"15";}s:1:"b";a:2:{s:4:"docs";s:2:"14";s:4:"hits";s:2:"14";}s:1:"c";a:2:{s:4:"docs";s:2:"14";s:4:"hits";s:2:"14";}s:1:"d";a:2:{s:4:"docs";s:2:"14";s:4:"hits";s:2:"14";}s:1:"f";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:1:"g";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:31:""a b" NEAR/2 "c d" NEAR/2 "f g"";}i:12;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:1;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:4:"4444";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.001";s:5:"words";a:4:{s:1:"a";a:2:{s:4:"docs";s:2:"15";s:4:"hits";s:2:"15";}s:1:"b";a:2:{s:4:"docs";s:2:"14";s:4:"hits";s:2:"14";}s:1:"c";a:2:{s:4:"docs";s:2:"14";s:4:"hits";s:2:"14";}s:1:"d";a:2:{s:4:"docs";s:2:"14";s:4:"hits";s:2:"14";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:28:"a NEAR/3 b NEAR/3 c NEAR/3 d";}i:13;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:2:{s:3:"are";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"6";}s:4:"near";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:10:""ARE NEAR"";}i:14;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:15;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:2:{s:3:"are";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"6";}s:4:"near";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:10:""ARE NEAR"";}i:15;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:2:{s:3:"are";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"6";}s:4:"near";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:10:""ARE 
NEAR"";}i:16;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:2:{s:3:"are";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"6";}s:4:"near";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:10:""ARE NEAR"";}i:17;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:2:{s:3:"are";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"6";}s:4:"near";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:10:""ARE NEAR"";}i:18;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:2:{s:4:"five";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"one";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:15:"five NEAR/3 one";}i:19;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:2:{s:3:"six";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"one";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:14:"six NEAR/3 one";}i:20;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:4:"five";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"six";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"one";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:21:"(five|six) NEAR/3 one";}i:21;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:17;a:2:{s:6:"weight";s:4:"2723";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:2:{s:5:"aleph";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"gimel";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:18:"aleph NEAR/2 gimel";}i:22;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:17;a:2:{s:6:"weight";s:4:"2723";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:2:{s:3:"bet";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:2:"he";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:13:"bet NEAR/3 he";}i:23;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:2:{s:15:"aleph-bet-gimel";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:2:"he";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:25:"aleph-bet-gimel NEAR/3 he";}i:24;a:3:{s:8:"sphinxql";s:52:"select * from idx where match ('aleph NEAR/2 
gimel')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:2:{s:2:"id";s:2:"17";s:6:"weight";s:4:"2723";}}}i:25;a:3:{s:8:"sphinxql";s:47:"select * from idx where match ('bet NEAR/3 he')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:2:{s:2:"id";s:2:"17";s:6:"weight";s:4:"2723";}}}i:26;a:2:{s:8:"sphinxql";s:47:"select * from idx where match ('bet NEAR/2 he')";s:10:"total_rows";i:0;}i:27;a:3:{s:8:"sphinxql";s:59:"select * from idx where match ('aleph-bet-gimel NEAR/4 he')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:2:{s:2:"id";s:2:"17";s:6:"weight";s:4:"2723";}}}i:28;a:2:{s:8:"sphinxql";s:59:"select * from idx where match ('aleph-bet-gimel NEAR/3 he')";s:10:"total_rows";i:0;}i:29;a:3:{s:8:"sphinxql";s:99:"call snippets ('aleph-bet-gimel dalet he wav zajin het', 'idx', 'gimel NEAR/2 he', 1 as query_mode)";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:52:"aleph-bet-gimel dalet he wav zajin het";}}}i:30;a:3:{s:8:"sphinxql";s:97:"call snippets ('aleph-bet-gimel dalet he wav zajin het', 'idx', 'bet NEAR/3 he', 1 as query_mode)";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:52:"aleph-bet-gimel dalet he wav zajin het";}}}i:31;a:3:{s:8:"sphinxql";s:102:"call snippets ('aleph-bet-gimel dalet he wav zajin het', 'idx', 'aleph NEAR/2 gimel', 1 as query_mode)";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:52:"aleph-bet-gimel dalet he wav zajin het";}}}i:32;a:3:{s:8:"sphinxql";s:102:"CALL SNIPPETS ('U.S.S.S.B s s s s today s s s s s tomorrow', 'idx', 'B NEAR/7 today', 1 as query_mode)";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:56:"U.S.S.S.B s s s s today s s s s s tomorrow";}}}i:33;a:3:{s:8:"sphinxql";s:102:"CALL SNIPPETS ('U.S.S.S.A s s s s today s s s s s tomorrow', 'idx', 'A NEAR/7 today', 1 as query_mode)";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:56:"U.S.S.S.A s s s s today s s s s s tomorrow";}}}i:34;a:3:{s:8:"sphinxql";s:59:"select * from idx where match ('oy NEAR/1 vey') and @id>=22";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:2:{s:2:"id";s:2:"22";s:6:"weight";s:4:"2633";}}}}}sphinx-2.0.4-release/test/test_165/0000755000176700017710000000000011724063141016273 5ustar deogardeogarsphinx-2.0.4-release/test/test_165/synonyms2.txt0000644000176700017710000000003611515644273021025 0ustar deogardeogaraction*packed => action*packedsphinx-2.0.4-release/test/test_165/synonyms3.txt0000644000176700017710000000000711660222732021015 0ustar deogardeogar; => ;)sphinx-2.0.4-release/test/test_165/synonyms.txt0000644000176700017710000000017411515523374020744 0ustar deogardeogarthe (dummy) => the (dummy) the (dummy) => dummic silver screen => projection screen X files => X files X file => X filessphinx-2.0.4-release/test/test_165/test.xml0000644000176700017710000000622411660222732020002 0ustar deogardeogar exceptions vs ext2 query grouping indexer { mem_limit = 16M } searchd { } source srctest { type = mysql sql_query = SELECT *, 1 as idd FROM test_table } index test { source = srctest path = /test charset_type = utf-8 exceptions = test_165/synonyms.txt } source srcstar { type = mysql sql_query = SELECT *, 1 as idd FROM star_table } index star { source = srcstar path = /star charset_type = utf-8 exceptions = test_165/synonyms2.txt enable_star = 1 min_prefix_len = 1 } index snip { source = srctest path = /snip charset_type = utf-8 exceptions = test_165/synonyms3.txt } CREATE TABLE `test_table` ( `document_id` int(11) NOT NULL default '0', `body` varchar(255) NOT NULL default '' ) CREATE TABLE `star_table` ( `document_id` int(11) NOT 
NULL default '0', `body` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` DROP TABLE IF EXISTS `star_table` INSERT INTO `test_table` VALUES ( 1, 'the X files is not X rated movie' ), ( 2, 'the X files is silver sitcom' ), ( 3, 'the x files is good on silver screen' ) INSERT INTO `star_table` VALUES ( 1, 'action*packed movies are not for family' ), ( 2, 'action movies are mostly blockbasters' ), ( 3, 'action packed movies published recently' ), ( 4, 'recent DVD pack as collection' ) select * from test where match ('X files') select * from test where match ('(X files)') select * from test where match (' ( X files ) ') select * from test where match ('(x files) silver screen') select * from test where match (' X files silver screen') select * from test where match ('(x file) (silver screen)') select * from test where match ('(x file) | silver screen') select * from test where match ('(X files) | silver screen') select * from test where match ('(x files) | silver | screen') select * from test where match ('x file') select * from test where match ('the X file') select * from star where match ('action*packed') select * from star where match ('mov*') select * from star where match ('pack*') select * from star where match ('action') CALL SNIPPETS('; the test', 'snip', ';doggy the', 1 as force_all_words ) CALL SNIPPETS('; the test', 'snip', 'doggy the', 1 as force_all_words ) CALL SNIPPETS('; the test', 'snip', 'doggy the', 0 as force_all_words ) sphinx-2.0.4-release/test/test_165/model.bin0000644000176700017710000000674211660222732020100 0ustar deogardeogara:1:{i:0;a:18:{i:0;a:3:{s:8:"sphinxql";s:42:"select * from test where match ('X files')";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:2:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1500";}i:1;a:2:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1500";}}}i:1;a:3:{s:8:"sphinxql";s:44:"select * from test where match ('(X files)')";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:2:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1500";}i:1;a:2:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1500";}}}i:2;a:3:{s:8:"sphinxql";s:48:"select * from test where match (' ( X files ) ')";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:2:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1500";}i:1;a:2:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1500";}}}i:3;a:3:{s:8:"sphinxql";s:58:"select * from test where match ('(x files) silver screen')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:2:{s:2:"id";s:1:"3";s:6:"weight";s:4:"2620";}}}i:4;a:2:{s:8:"sphinxql";s:57:"select * from test where match (' X files silver screen')";s:10:"total_rows";i:0;}i:5;a:2:{s:8:"sphinxql";s:59:"select * from test where match ('(x file) (silver screen)')";s:10:"total_rows";i:0;}i:6;a:3:{s:8:"sphinxql";s:59:"select * from test where match ('(x file) | silver screen')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:2:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1560";}}}i:7;a:3:{s:8:"sphinxql";s:60:"select * from test where match ('(X files) | silver screen')";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:2:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1590";}i:1;a:2:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1500";}i:2;a:2:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1500";}}}i:8;a:3:{s:8:"sphinxql";s:62:"select * from test where match ('(x files) | silver | screen')";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:2:{s:2:"id";s:1:"3";s:6:"weight";s:4:"2545";}i:1;a:2:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1545";}}}i:9;a:2:{s:8:"sphinxql";s:41:"select * from test where match ('x file')";s:10:"total_rows";i:0;}i:10;a:3:{s:8:"sphinxql";s:45:"select * from test where match ('the X 
file')";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:2:{s:2:"id";s:1:"1";s:6:"weight";s:4:"2409";}i:1;a:2:{s:2:"id";s:1:"2";s:6:"weight";s:4:"2409";}}}i:11;a:3:{s:8:"sphinxql";s:48:"select * from star where match ('action*packed')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:2:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1695";}}}i:12;a:3:{s:8:"sphinxql";s:39:"select * from star where match ('mov*')";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:2:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1442";}i:1;a:2:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1442";}i:2;a:2:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1442";}}}i:13;a:3:{s:8:"sphinxql";s:40:"select * from star where match ('pack*')";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:2:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1557";}i:1;a:2:{s:2:"id";s:1:"4";s:6:"weight";s:4:"1557";}}}i:14;a:3:{s:8:"sphinxql";s:41:"select * from star where match ('action')";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:2:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1557";}i:1;a:2:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1557";}}}i:15;a:3:{s:8:"sphinxql";s:72:"CALL SNIPPETS('; the test', 'snip', ';doggy the', 1 as force_all_words )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:24:"; the test";}}}i:16;a:3:{s:8:"sphinxql";s:71:"CALL SNIPPETS('; the test', 'snip', 'doggy the', 1 as force_all_words )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:17:"; the test";}}}i:17;a:3:{s:8:"sphinxql";s:71:"CALL SNIPPETS('; the test', 'snip', 'doggy the', 0 as force_all_words )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:17:"; the test";}}}}}sphinx-2.0.4-release/test/test_181/0000755000176700017710000000000011724063141016271 5ustar deogardeogarsphinx-2.0.4-release/test/test_181/test.xml0000644000176700017710000000322611573751441020006 0ustar deogardeogar index weights + sort by expression test (fullscan too) indexer { mem_limit = 32M } searchd { workers = threads } source test1 { type = mysql sql_query = select * from test_table where id%2=0 sql_attr_uint = attr } index test1 { source = test1 path = /test1 } source test2 { type = mysql sql_query = select * from test_table where id%2=1 } index test2 { source = test2 path = /test2 } index test3 { type = rt path = /test3 rt_field = title } index test4 { type = rt path = /test4 rt_field = title } create table test_table ( id int not null, title varchar(255) not null, attr int not null ); drop table if exists test_table; insert into test_table values ( 1, 'asdf asdf asdf', 11 ); insert into test_table values ( 2, 'asdf', 22 ); asdf insert into test3 ( id, title ) values ( 1, 'asdf asdf asdf' ) insert into test4 ( id, title ) values ( 2, 'asdf' ) select *, @weight as skey from test3, test4 where match('asdf') option ranker=wordcount, index_weights=(test3=1, test4=10) select * from test3 option index_weights=(test3=10) sphinx-2.0.4-release/test/test_181/model.bin0000644000176700017710000000307511573751441020101 0ustar 
deogardeogara:1:{i:0;a:6:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:5:"@expr";i:5;}s:7:"matches";a:2:{i:2;a:2:{s:6:"weight";s:2:"10";s:5:"attrs";a:1:{s:5:"@expr";d:10;}}i:1;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:1:{s:5:"@expr";d:3;}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"asdf";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"4";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"asdf";}i:1;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:4:"attr";i:1;}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:2:"11";s:5:"attrs";a:1:{s:4:"attr";s:2:"22";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:2;a:2:{s:8:"sphinxql";s:62:"insert into test3 ( id, title ) values ( 1, 'asdf asdf asdf' )";s:14:"total_affected";i:1;}i:3;a:2:{s:8:"sphinxql";s:52:"insert into test4 ( id, title ) values ( 2, 'asdf' )";s:14:"total_affected";i:1;}i:4;a:3:{s:8:"sphinxql";s:122:"select *, @weight as skey from test3, test4 where match('asdf') option ranker=wordcount, index_weights=(test3=1, test4=10)";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:2:"10";s:4:"skey";s:2:"10";}i:1;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"3";s:4:"skey";s:1:"3";}}}i:5;a:3:{s:8:"sphinxql";s:51:"select * from test3 option index_weights=(test3=10)";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:2:{s:2:"id";s:1:"1";s:6:"weight";s:2:"10";}}}}}sphinx-2.0.4-release/test/test_012/0000755000176700017710000000000011724063141016262 5ustar deogardeogarsphinx-2.0.4-release/test/test_012/test.xml0000644000176700017710000000463311503636205017773 0ustar deogardeogar stemmers + infixes (throttled) indexer { mem_limit = 16M max_iops = 20 max_iosize = 131072 } searchd { } source srclj { type = mysql sql_query = SELECT id, subject, body, author FROM test_table sql_query_pre = SET NAMES utf8 } index lj { source = srclj path = /lj charset_type = utf-8 min_word_len = 0 morphology = stem_en morphology = stem_ru morphology = soundex morphology = metaphone min_infix_len = 0 min_infix_len = 3 enable_star = 0 enable_star = 1 } admin *earc* up* dmin rep pda алго* *лиз *бот* Ñравнительный CREATE TABLE `test_table` ( `id` int(11) NOT NULL default '0', `document_id` int(5) NOT NULL default '0', `subject` varchar(255) NOT NULL default '', `body` varchar(255) NOT NULL default '', `author` varchar(255) NOT NULL default '' ) CHARACTER SET utf8 DROP TABLE IF EXISTS `test_table` SET NAMES utf8 INSERT INTO `test_table` VALUES (1,1,'Problem with enable_star searches','Having star searches is great! 
Just what we needed','Maurice Makaay'), (2,2,'Problem with enable_star searches',' But the thing is, that I cannot search for authors anymore','admin'), (3,3,'Problem with enable_star searches','I will try to repro it here and update you','shodan'), (4,4,'Ðлгоритмы', 'Ðедавно захотел провеÑти Ñравнительный анализ алгоритмов работы ÑредÑтв полнотекÑтового поиÑка.', 'Yuriy' ) sphinx-2.0.4-release/test/test_012/model.bin0000644000176700017710000012417310733773353020100 0ustar deogardeogara:16:{i:0;a:10:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"admin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"admin";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"earc";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*earc*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"up";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"up*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dmin";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"dmin";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"rep";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"rep";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"pda";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"pda";}i:6;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:8:"алго";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:9:"алго*";}i:7;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"лиз";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:7:"*лиз";}i:8;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"бот";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:8:"*бот*";}i:9;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"
subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:26:"Ñравнительный";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:26:"Ñравнительный";}}i:1;a:10:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"admin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"admin";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"earc";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*earc*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"up";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"up*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dmin";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"dmin";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"rep";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"rep";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"pda";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"pda";}i:6;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"алг";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:9:"алго*";}i:7;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"лиз";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:7:"*лиз";}i:8;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"бот";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:8:"*бот*";}i:9;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:
"time";s:5:"0.000";s:5:"words";a:1:{s:22:"Ñравнительн";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:26:"Ñравнительный";}}i:2;a:10:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"a350";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"admin";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"e620";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*earc*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"u1";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"up*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"d500";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"dmin";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"r10";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"rep";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"p30";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"pda";}i:6;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:8:"алго";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:9:"алго*";}i:7;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"лиз";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:7:"*лиз";}i:8;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"бот";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:8:"*бот*";}i:9;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:26:"Ñравнительный";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:26:"Ñравнительный";}}i:3;a:10:{i:0;a:11:{s:5:"error";s:0:"";s:7:"w
arning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"ATMN";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"admin";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"ARK";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*earc*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"AP";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"up*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"TMN";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"dmin";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"RP";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"rep";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"PT";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"pda";}i:6;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:8:"алго";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:9:"алго*";}i:7;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"лиз";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:7:"*лиз";}i:8;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"бот";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:8:"*бот*";}i:9;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:26:"Ñравнительный";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:26:"Ñравнительный";}}i:4;a:1:{i:0;s:6:"failed";}i:5;a:1:{i:0;s:6:"failed";}i:6;a:1:{i:0;s:6:"failed";}i:7;a:1:{i:0;s:6:"failed";}i:8;a:10:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning"
;s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"admin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"admin";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"earc";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*earc*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"up";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"up*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dmin";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"dmin";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"rep";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"rep";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"pda";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"pda";}i:6;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:8:"алго";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:9:"алго*";}i:7;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"лиз";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:7:"*лиз";}i:8;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"бот";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:8:"*бот*";}i:9;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:26:"Ñравнительный";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:26:"Ñравнительный";}}i:9;a:10:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}
}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"admin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"admin";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"earc";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*earc*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"up";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"up*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dmin";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"dmin";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"rep";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"rep";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"pda";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"pda";}i:6;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"алг";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:9:"алго*";}i:7;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"лиз";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:7:"*лиз";}i:8;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"бот";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:8:"*бот*";}i:9;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:22:"Ñравнительн";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:26:"Ñравнительный";}}i:10;a:10:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"a350";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"admin";}i:1;a:10:{s
:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"e620";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*earc*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"u1";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"up*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"d500";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"dmin";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"r10";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"rep";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"p30";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"pda";}i:6;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:8:"алго";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:9:"алго*";}i:7;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"лиз";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:7:"*лиз";}i:8;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"бот";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:8:"*бот*";}i:9;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:26:"Ñравнительный";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:26:"Ñравнительный";}}i:11;a:10:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"ATMN";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"admin";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_foun
d";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"ARK";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*earc*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"AP";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"up*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"TMN";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"dmin";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"RP";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"rep";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"PT";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"pda";}i:6;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:8:"алго";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:9:"алго*";}i:7;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"лиз";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:7:"*лиз";}i:8;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"бот";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:8:"*бот*";}i:9;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:26:"Ñравнительный";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:26:"Ñравнительный";}}i:12;a:10:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"admin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"admin";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:
5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"*earc*";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"5";}}s:5:"query";s:6:"*earc*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min infix length. word: 'up*' ";s:6:"status";i:3;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"up*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"up*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dmin";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"dmin";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"rep";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"rep";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"pda";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"pda";}i:6;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:9:"алго*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"2";}}s:5:"query";s:9:"алго*";}i:7;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"*лиз";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"*лиз";}i:8;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:8:"*бот*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:8:"*бот*";}i:9;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:26:"Ñравнительный";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:26:"Ñравнительный";}}i:13;a:10:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"admin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"admin";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warn
ing";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"*earc*";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"5";}}s:5:"query";s:6:"*earc*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min infix length. word: 'up*' ";s:6:"status";i:3;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"up*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"up*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dmin";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"dmin";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"rep";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"rep";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"pda";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"pda";}i:6;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:9:"алго*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"2";}}s:5:"query";s:9:"алго*";}i:7;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"*лиз";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"*лиз";}i:8;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:8:"*бот*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:8:"*бот*";}i:9;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:22:"Ñравнительн";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:26:"Ñравнительный";}}i:14;a:10:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches"
;a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"a350";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"admin";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"*earc*";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"5";}}s:5:"query";s:6:"*earc*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min infix length. word: 'up*' ";s:6:"status";i:3;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"up*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"up*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"d500";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"dmin";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"r10";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"rep";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"p30";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"pda";}i:6;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:9:"алго*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"2";}}s:5:"query";s:9:"алго*";}i:7;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"*лиз";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"*лиз";}i:8;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:8:"*бот*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:8:"*бот*";}i:9;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:26:"Ñравнител
ьный";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:26:"Ñравнительный";}}i:15;a:10:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"ATMN";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"admin";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"*earc*";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"5";}}s:5:"query";s:6:"*earc*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min infix length. word: 'up*' ";s:6:"status";i:3;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"up*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"up*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"TMN";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"dmin";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"RP";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"rep";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"PT";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"pda";}i:6;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:9:"алго*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"2";}}s:5:"query";s:9:"алго*";}i:7;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"*лиз";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"*лиз";}i:8;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:8:"*бот*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:8:"*бо
т*";}i:9;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:26:"Ñравнительный";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:26:"Ñравнительный";}}}sphinx-2.0.4-release/test/test_022/0000755000176700017710000000000011724063141016263 5ustar deogardeogarsphinx-2.0.4-release/test/test_022/test.xml0000644000176700017710000000173111140654322017765 0ustar deogardeogar wordforms vs case folding indexer { mem_limit = 16M } searchd { } source srctest { type = mysql sql_query = SELECT * FROM test_table } index test { source = srctest path = /test charset_type = utf-8 wordforms = wordforms.txt } FOLDED folded shortform sOMEwHATmORElONGfORMtHATwOULDoVERFLOWsHORTfORMbUFFER CREATE TABLE `test_table` ( `document_id` int(11) NOT NULL default '0', `body` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 1, 'test FoLdEd' ), ( 2, 'folded' ), ( 3, 'shortform' ), ( 4, 'SomeWhatMoreLongFormThatWouldOverflowShortFormBuffer' ) sphinx-2.0.4-release/test/test_022/model.bin0000644000176700017710000000360611454725303020067 0ustar deogardeogara:1:{i:0;a:4:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"wrapped";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:6:"FOLDED";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"wrapped";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:6:"folded";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:42:"somewhatmorelongformthatwouldoverflowshort";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:9:"shortform";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:42:"somewhatmorelongformthatwouldoverflowshort";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:52:"sOMEwHATmORElONGfORMtHATwOULDoVERFLOWsHORTfORMbUFFER";}}}sphinx-2.0.4-release/test/test_145/0000755000176700017710000000000011724063141016271 5ustar deogardeogarsphinx-2.0.4-release/test/test_145/stopwords.txt0000644000176700017710000000003511444070775021106 0ustar deogardeogarTO WITH ANY AND ALL TO 
sphinx-2.0.4-release/test/test_145/test.xml0000644000176700017710000000524111662472433020005 0ustar deogardeogar snippets vs stopwords searchd { } source test { type = mysql sql_query = SELECT 1, 'text'; } index test { source = test path = /test charset_type = utf-8 stopwords = test_145/stopwords.txt morphology = stem_en phrase_boundary = U+002C phrase_boundary_step = 4 } select 1; CALL SNIPPETS('click edit', 'test', '"click TO edit"', 1 AS query_mode ) CALL SNIPPETS('click word1 WITH edit', 'test', '"click TO word1 edit"', 1 AS query_mode ) CALL SNIPPETS('click WITH word1 edit', 'test', '"click TO word1 edit"', 1 AS query_mode ) CALL SNIPPETS('clicking ANY edit', 'test', '"click TO edit"', 1 AS query_mode ) CALL SNIPPETS('click ANY edit', 'test', '"click TO edit"', 1 AS query_mode ) CALL SNIPPETS('edit ANY click', 'test', '"click TO edit"', 1 AS query_mode ) CALL SNIPPETS('clicking WITH ANY AND ALL TO edit', 'test', '"click TO edit"', 1 AS query_mode ) CALL SNIPPETS('click WITH ANY AND ALL TO edit', 'test', '"click TO edit"', 1 AS query_mode ) CALL SNIPPETS('either click, edit', 'test', '"click TO edit"', 1 AS query_mode ) CALL SNIPPETS('either click, edit', 'test', '"click TO ANY AND ALL edit"', 1 AS query_mode ) CALL SNIPPETS('click edit', 'test', 'click TO edit', 1 AS exact_phrase ) CALL SNIPPETS('click word1 WITH edit', 'test', 'click TO word1 edit', 1 AS exact_phrase ) CALL SNIPPETS('click WITH word1 edit', 'test', 'click TO word1 edit', 1 AS exact_phrase ) CALL SNIPPETS('clicking ANY edit', 'test', 'click TO edit', 1 AS exact_phrase ) CALL SNIPPETS('click ANY edit', 'test', 'click TO edit', 1 AS exact_phrase ) CALL SNIPPETS('edit ANY click', 'test', 'click TO edit', 1 AS exact_phrase ) CALL SNIPPETS('clicking WITH ANY AND ALL TO edit', 'test', 'click TO edit', 1 AS exact_phrase ) CALL SNIPPETS('click WITH ANY AND ALL TO edit', 'test', 'click TO edit', 1 AS exact_phrase ) CALL SNIPPETS('either click, edit', 'test', 'click TO edit', 1 AS exact_phrase ) CALL SNIPPETS('either click, edit', 'test', 'click TO ANY AND ALL edit', 1 AS exact_phrase ) sphinx-2.0.4-release/test/test_145/model.bin0000644000176700017710000001024711662472433020100 0ustar deogardeogara:1:{i:0;a:20:{i:0;a:3:{s:8:"sphinxql";s:72:"CALL SNIPPETS('click edit', 'test', '"click TO edit"', 1 AS query_mode )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:10:"click edit";}}}i:1;a:3:{s:8:"sphinxql";s:89:"CALL SNIPPETS('click word1 WITH edit', 'test', '"click TO word1 edit"', 1 AS query_mode )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:21:"click word1 WITH edit";}}}i:2;a:3:{s:8:"sphinxql";s:89:"CALL SNIPPETS('click WITH word1 edit', 'test', '"click TO word1 edit"', 1 AS query_mode )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:35:"click WITH word1 edit";}}}i:3;a:3:{s:8:"sphinxql";s:79:"CALL SNIPPETS('clicking ANY edit', 'test', '"click TO edit"', 1 AS query_mode )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:31:"clicking ANY edit";}}}i:4;a:3:{s:8:"sphinxql";s:76:"CALL SNIPPETS('click ANY edit', 'test', '"click TO edit"', 1 AS query_mode )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:28:"click ANY edit";}}}i:5;a:3:{s:8:"sphinxql";s:76:"CALL SNIPPETS('edit ANY click', 'test', '"click TO edit"', 1 AS query_mode )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:14:"edit ANY click";}}}i:6;a:3:{s:8:"sphinxql";s:95:"CALL SNIPPETS('clicking WITH ANY AND ALL TO edit', 'test', '"click TO edit"', 1 AS query_mode 
)";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:33:"clicking WITH ANY AND ALL TO edit";}}}i:7;a:3:{s:8:"sphinxql";s:92:"CALL SNIPPETS('click WITH ANY AND ALL TO edit', 'test', '"click TO edit"', 1 AS query_mode )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:30:"click WITH ANY AND ALL TO edit";}}}i:8;a:3:{s:8:"sphinxql";s:80:"CALL SNIPPETS('either click, edit', 'test', '"click TO edit"', 1 AS query_mode )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:18:"either click, edit";}}}i:9;a:3:{s:8:"sphinxql";s:92:"CALL SNIPPETS('either click, edit', 'test', '"click TO ANY AND ALL edit"', 1 AS query_mode )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:32:"either click, edit";}}}i:10;a:3:{s:8:"sphinxql";s:72:"CALL SNIPPETS('click edit', 'test', 'click TO edit', 1 AS exact_phrase )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:10:"click edit";}}}i:11;a:3:{s:8:"sphinxql";s:89:"CALL SNIPPETS('click word1 WITH edit', 'test', 'click TO word1 edit', 1 AS exact_phrase )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:21:"click word1 WITH edit";}}}i:12;a:3:{s:8:"sphinxql";s:89:"CALL SNIPPETS('click WITH word1 edit', 'test', 'click TO word1 edit', 1 AS exact_phrase )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:28:"click WITH word1 edit";}}}i:13;a:3:{s:8:"sphinxql";s:79:"CALL SNIPPETS('clicking ANY edit', 'test', 'click TO edit', 1 AS exact_phrase )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:24:"clicking ANY edit";}}}i:14;a:3:{s:8:"sphinxql";s:76:"CALL SNIPPETS('click ANY edit', 'test', 'click TO edit', 1 AS exact_phrase )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:21:"click ANY edit";}}}i:15;a:3:{s:8:"sphinxql";s:76:"CALL SNIPPETS('edit ANY click', 'test', 'click TO edit', 1 AS exact_phrase )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:14:"edit ANY click";}}}i:16;a:3:{s:8:"sphinxql";s:95:"CALL SNIPPETS('clicking WITH ANY AND ALL TO edit', 'test', 'click TO edit', 1 AS exact_phrase )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:33:"clicking WITH ANY AND ALL TO edit";}}}i:17;a:3:{s:8:"sphinxql";s:92:"CALL SNIPPETS('click WITH ANY AND ALL TO edit', 'test', 'click TO edit', 1 AS exact_phrase )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:30:"click WITH ANY AND ALL TO edit";}}}i:18;a:3:{s:8:"sphinxql";s:80:"CALL SNIPPETS('either click, edit', 'test', 'click TO edit', 1 AS exact_phrase )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:18:"either click, edit";}}}i:19;a:3:{s:8:"sphinxql";s:92:"CALL SNIPPETS('either click, edit', 'test', 'click TO ANY AND ALL edit', 1 AS exact_phrase )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:25:"either click, edit";}}}}}sphinx-2.0.4-release/test/test_171/0000755000176700017710000000000011724063141016270 5ustar deogardeogarsphinx-2.0.4-release/test/test_171/test.xml0000644000176700017710000002260411571611211017772 0ustar deogardeogar new and old syntax for id, count, weight, count distinct indexer { mem_limit = 28M } searchd { compat_sphinxql_magics = 0 workers = threads } source auth { type = mysql } source src : auth { sql_query = SELECT id, tag, gr, text FROM test_table WHERE id<4 sql_query_pre = SET NAMES UTF8 sql_attr_uint = tag sql_attr_uint = gr } source src1 : src { sql_query = SELECT id, tag, gr, text FROM test_table WHERE id<3 } source src2 : src { sql_query = SELECT id, tag, gr, text FROM test_table WHERE id=3 } 
index dist_no { source = src path = /idx docinfo = extern charset_type = utf-8 min_word_len = 1 } index idx1 : dist_no { source = src1 path = /idx1 } index idx2 : dist_no { source = src2 path = /idx2 } index dist0 { type = distributed local = dist_no agent_connect_timeout = 1000 agent_query_timeout = 3000 } index dist1 { type = distributed local = idx1 local = idx2 agent_connect_timeout = 1000 agent_query_timeout = 3000 } index dist2 { type = distributed agent = :idx1 local = idx2 agent_connect_timeout = 1000 agent_query_timeout = 3000 } index dist3 { type = distributed local = idx1 agent = :idx2 agent_connect_timeout = 1000 agent_query_timeout = 3000 } select gr, count(*) cnt from dist_no group by gr select gr, count(*) cnt from dist0 group by gr select gr, count(*) cnt from dist1 group by gr select gr, count(*) cnt from dist2 group by gr select gr, count(*) cnt from dist3 group by gr select id idd, weight() cnt from dist_no group by gr select id idd, weight() cnt from dist0 group by gr select id idd, weight() cnt from dist1 group by gr select id idd, weight() cnt from dist2 group by gr select id idd, weight() cnt from dist3 group by gr select count(*) cnt, count (distinct tag) tg from dist_no group by gr select count(*) cnt, count (distinct tag) tg from dist0 group by gr select count(*) cnt, count (distinct tag) tg from dist1 group by gr select count(*) cnt, count (distinct tag) tg from dist2 group by gr select count(*) cnt, count (distinct tag) tg from dist3 group by gr select id, tag from dist_no where match ('test') select id, tag from dist0 where match ('test') select id, tag from dist1 where match ('test') select id, tag from dist2 where match ('test') select id, tag from dist3 where match ('test') select id idd, tag from dist_no where match ('test') select id idd, tag from dist0 where match ('test') select id idd, tag from dist1 where match ('test') select id idd, tag from dist2 where match ('test') select id idd, tag from dist3 where match ('test') select gr, count(*) cnt from dist_no select gr, count(*) cnt from dist0 select gr, count(*) cnt from dist1 select gr from dist_no where count(*)=1 select gr from dist_no where @count=1 select gr from dist_no where weight()=1 select gr from dist_no where @weight=1 select gr from dist0 where count(*)=1 select gr from dist0 where @count=1 select gr from dist0 where weight()=1 select gr from dist0 where @weight=1 select gr from dist1 where count(*)=1 select gr from dist1 where @count=1 select gr from dist1 where weight()=1 select gr from dist1 where @weight=1 select gr from dist2 where count(*)=1 select gr from dist2 where @count=1 select gr from dist2 where weight()=1 select gr from dist2 where @weight=1 select gr from dist3 where count(*)=1 select gr from dist3 where @count=1 select gr from dist3 where weight()=1 select gr from dist3 where @weight=1 select @id idd from dist_no where match('test') show warnings select @count cnt from dist_no group by gr show warnings select @weight cnt from dist_no group by gr show warnings select @id idd from dist0 where match('test') show warnings select @count cnt from dist0 group by gr show warnings select @weight cnt from dist0 group by gr show warnings select @id idd from dist1 where match('test') show warnings select @count cnt from dist1 group by gr show warnings select @weight cnt from dist1 group by gr show warnings select @id idd from dist2 where match('test') show warnings select @count cnt from dist2 group by gr show warnings select @weight cnt from dist2 group by gr show warnings select @id 
idd from dist3 where match('test') show warnings select @count cnt from dist3 group by gr show warnings select @weight cnt from dist3 group by gr select @id idd, count(*) cnt from dist_no group by gr select id idd, @count cnt from dist_no group by gr select id idd from dist_no where @count=1 group by gr select @id idd from dist_no where count(*)=1 group by gr select id idd from dist_no group by @count select @id idd from dist_no group by count(*) select @id idd, count(*) cnt from dist0 group by gr select id idd, @count cnt from dist0 group by gr select id idd from dist0 where @count=1 group by gr select @id idd from dist0 where count(*)=1 group by gr select id idd from dist0 group by @count select @id idd from dist0 group by count(*) select @id idd, count(*) cnt from dist1 group by gr select id idd, @count cnt from dist1 group by gr select id idd from dist1 where @count=1 group by gr select @id idd from dist1 where count(*)=1 group by gr select id idd from dist1 group by @count select @id idd from dist1 group by count(*) select @id idd, count(*) cnt from dist2 group by gr select id idd, @count cnt from dist2 group by gr select id idd from dist2 where @count=1 group by gr select @id idd from dist2 where count(*)=1 group by gr select id idd from dist2 group by @count select @id idd from dist2 group by count(*) select @id idd, count(*) cnt from dist3 group by gr select id idd, @count cnt from dist3 group by gr select id idd from dist3 where @count=1 group by gr select @id idd from dist3 where count(*)=1 group by gr select id idd from dist3 group by @count select @id idd from dist3 group by count(*) CREATE TABLE `test_table` ( `id` int(11), `tag` int(11), `gr` int(11), `text` varchar(255) NOT NULL ) DROP TABLE IF EXISTS `test_table` SET NAMES utf8 INSERT INTO `test_table` VALUES ( 1, 2, 3, 'test'), ( 2, 2, 2, 'test'), ( 3, 2, 3, 'test'), ( 4, 2, 10, 'test'), ( 5, 2, 9, 'test'), ( 6, 2, 8, 'test'), ( 7, 2, 7, 'test'), ( 8, 2, 6, 'test'), ( 9, 2, 5, 'test'), ( 10, 2, 4, 'test') sphinx-2.0.4-release/test/test_171/model.bin0000644000176700017710000005202211632423371020066 0ustar deogardeogara:1:{i:0;a:107:{i:0;a:3:{s:8:"sphinxql";s:48:"select gr, count(*) cnt from dist_no group by gr";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:2:{s:2:"gr";s:1:"3";s:3:"cnt";s:1:"2";}i:1;a:2:{s:2:"gr";s:1:"2";s:3:"cnt";s:1:"1";}}}i:1;a:3:{s:8:"sphinxql";s:46:"select gr, count(*) cnt from dist0 group by gr";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:2:{s:2:"gr";s:1:"3";s:3:"cnt";s:1:"2";}i:1;a:2:{s:2:"gr";s:1:"2";s:3:"cnt";s:1:"1";}}}i:2;a:3:{s:8:"sphinxql";s:46:"select gr, count(*) cnt from dist1 group by gr";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:2:{s:2:"gr";s:1:"3";s:3:"cnt";s:1:"2";}i:1;a:2:{s:2:"gr";s:1:"2";s:3:"cnt";s:1:"1";}}}i:3;a:3:{s:8:"sphinxql";s:46:"select gr, count(*) cnt from dist2 group by gr";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:2:{s:2:"gr";s:1:"3";s:3:"cnt";s:1:"2";}i:1;a:2:{s:2:"gr";s:1:"2";s:3:"cnt";s:1:"1";}}}i:4;a:3:{s:8:"sphinxql";s:46:"select gr, count(*) cnt from dist3 group by gr";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:2:{s:2:"gr";s:1:"3";s:3:"cnt";s:1:"2";}i:1;a:2:{s:2:"gr";s:1:"2";s:3:"cnt";s:1:"1";}}}i:5;a:3:{s:8:"sphinxql";s:52:"select id idd, weight() cnt from dist_no group by gr";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:2:{s:3:"idd";s:1:"1";s:3:"cnt";s:1:"1";}i:1;a:2:{s:3:"idd";s:1:"2";s:3:"cnt";s:1:"1";}}}i:6;a:3:{s:8:"sphinxql";s:50:"select id idd, weight() cnt from dist0 group by 
gr";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:2:{s:3:"idd";s:1:"1";s:3:"cnt";s:1:"1";}i:1;a:2:{s:3:"idd";s:1:"2";s:3:"cnt";s:1:"1";}}}i:7;a:3:{s:8:"sphinxql";s:50:"select id idd, weight() cnt from dist1 group by gr";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:2:{s:3:"idd";s:1:"1";s:3:"cnt";s:1:"1";}i:1;a:2:{s:3:"idd";s:1:"2";s:3:"cnt";s:1:"1";}}}i:8;a:3:{s:8:"sphinxql";s:50:"select id idd, weight() cnt from dist2 group by gr";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:2:{s:3:"idd";s:1:"1";s:3:"cnt";s:1:"1";}i:1;a:2:{s:3:"idd";s:1:"2";s:3:"cnt";s:1:"1";}}}i:9;a:3:{s:8:"sphinxql";s:50:"select id idd, weight() cnt from dist3 group by gr";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:2:{s:3:"idd";s:1:"1";s:3:"cnt";s:1:"1";}i:1;a:2:{s:3:"idd";s:1:"2";s:3:"cnt";s:1:"1";}}}i:10;a:3:{s:8:"sphinxql";s:69:"select count(*) cnt, count (distinct tag) tg from dist_no group by gr";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:2:{s:3:"cnt";s:1:"2";s:2:"tg";s:1:"1";}i:1;a:2:{s:3:"cnt";s:1:"1";s:2:"tg";s:1:"1";}}}i:11;a:3:{s:8:"sphinxql";s:67:"select count(*) cnt, count (distinct tag) tg from dist0 group by gr";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:2:{s:3:"cnt";s:1:"2";s:2:"tg";s:1:"1";}i:1;a:2:{s:3:"cnt";s:1:"1";s:2:"tg";s:1:"1";}}}i:12;a:3:{s:8:"sphinxql";s:67:"select count(*) cnt, count (distinct tag) tg from dist1 group by gr";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:2:{s:3:"cnt";s:1:"2";s:2:"tg";s:1:"2";}i:1;a:2:{s:3:"cnt";s:1:"1";s:2:"tg";s:1:"1";}}}i:13;a:2:{s:8:"sphinxql";s:67:"select count(*) cnt, count (distinct tag) tg from dist2 group by gr";s:10:"total_rows";i:0;}i:14;a:2:{s:8:"sphinxql";s:67:"select count(*) cnt, count (distinct tag) tg from dist3 group by gr";s:10:"total_rows";i:0;}i:15;a:3:{s:8:"sphinxql";s:48:"select id, tag from dist_no where match ('test')";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:2:{s:2:"id";s:1:"1";s:3:"tag";s:1:"2";}i:1;a:2:{s:2:"id";s:1:"2";s:3:"tag";s:1:"2";}i:2;a:2:{s:2:"id";s:1:"3";s:3:"tag";s:1:"2";}}}i:16;a:3:{s:8:"sphinxql";s:46:"select id, tag from dist0 where match ('test')";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:2:{s:2:"id";s:1:"1";s:3:"tag";s:1:"2";}i:1;a:2:{s:2:"id";s:1:"2";s:3:"tag";s:1:"2";}i:2;a:2:{s:2:"id";s:1:"3";s:3:"tag";s:1:"2";}}}i:17;a:3:{s:8:"sphinxql";s:46:"select id, tag from dist1 where match ('test')";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:2:{s:2:"id";s:1:"3";s:3:"tag";s:1:"2";}i:1;a:2:{s:2:"id";s:1:"1";s:3:"tag";s:1:"2";}i:2;a:2:{s:2:"id";s:1:"2";s:3:"tag";s:1:"2";}}}i:18;a:3:{s:8:"sphinxql";s:46:"select id, tag from dist2 where match ('test')";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:2:{s:2:"id";s:1:"3";s:3:"tag";s:1:"2";}i:1;a:2:{s:2:"id";s:1:"1";s:3:"tag";s:1:"2";}i:2;a:2:{s:2:"id";s:1:"2";s:3:"tag";s:1:"2";}}}i:19;a:3:{s:8:"sphinxql";s:46:"select id, tag from dist3 where match ('test')";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:2:{s:2:"id";s:1:"3";s:3:"tag";s:1:"2";}i:1;a:2:{s:2:"id";s:1:"1";s:3:"tag";s:1:"2";}i:2;a:2:{s:2:"id";s:1:"2";s:3:"tag";s:1:"2";}}}i:20;a:3:{s:8:"sphinxql";s:52:"select id idd, tag from dist_no where match ('test')";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:2:{s:3:"idd";s:1:"1";s:3:"tag";s:1:"2";}i:1;a:2:{s:3:"idd";s:1:"2";s:3:"tag";s:1:"2";}i:2;a:2:{s:3:"idd";s:1:"3";s:3:"tag";s:1:"2";}}}i:21;a:3:{s:8:"sphinxql";s:50:"select id idd, tag from dist0 where match ('test')";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:2:{s:3:"idd";s:1:"1";s:3:"tag";s:1:"2";}i:1;a:2:{s:3:"idd";s:1:"2";s:3:"tag";s:1:"2";}i:2;a:2:{s:3:"idd";s:1:"3";s:3:"tag";s:1:"2";}}}i:22;a:3:{s:8:"sphinxql";s:50:"select id 
idd, tag from dist1 where match ('test')";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:2:{s:3:"idd";s:1:"3";s:3:"tag";s:1:"2";}i:1;a:2:{s:3:"idd";s:1:"1";s:3:"tag";s:1:"2";}i:2;a:2:{s:3:"idd";s:1:"2";s:3:"tag";s:1:"2";}}}i:23;a:3:{s:8:"sphinxql";s:50:"select id idd, tag from dist2 where match ('test')";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:2:{s:3:"idd";s:1:"3";s:3:"tag";s:1:"2";}i:1;a:2:{s:3:"idd";s:1:"1";s:3:"tag";s:1:"2";}i:2;a:2:{s:3:"idd";s:1:"2";s:3:"tag";s:1:"2";}}}i:24;a:3:{s:8:"sphinxql";s:50:"select id idd, tag from dist3 where match ('test')";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:2:{s:3:"idd";s:1:"3";s:3:"tag";s:1:"2";}i:1;a:2:{s:3:"idd";s:1:"1";s:3:"tag";s:1:"2";}i:2;a:2:{s:3:"idd";s:1:"2";s:3:"tag";s:1:"2";}}}i:25;a:3:{s:8:"sphinxql";s:36:"select gr, count(*) cnt from dist_no";s:5:"error";s:93:"index dist_no: invalid schema: Count(*) or @count is queried, but not available in the schema";s:5:"errno";i:1064;}i:26;a:3:{s:8:"sphinxql";s:34:"select gr, count(*) cnt from dist0";s:5:"error";s:93:"index dist_no: invalid schema: Count(*) or @count is queried, but not available in the schema";s:5:"errno";i:1064;}i:27;a:3:{s:8:"sphinxql";s:34:"select gr, count(*) cnt from dist1";s:5:"error";s:95:"index idx1,idx2: invalid schema: Count(*) or @count is queried, but not available in the schema";s:5:"errno";i:1064;}i:28;a:3:{s:8:"sphinxql";s:39:"select gr from dist_no where count(*)=1";s:5:"error";s:58:"sphinxql: Aggregates in 'where' clause prohibited near '1'";s:5:"errno";i:1064;}i:29;a:3:{s:8:"sphinxql";s:37:"select gr from dist_no where @count=1";s:5:"error";s:58:"sphinxql: Aggregates in 'where' clause prohibited near '1'";s:5:"errno";i:1064;}i:30;a:3:{s:8:"sphinxql";s:39:"select gr from dist_no where weight()=1";s:5:"error";s:58:"sphinxql: Aggregates in 'where' clause prohibited near '1'";s:5:"errno";i:1064;}i:31;a:3:{s:8:"sphinxql";s:38:"select gr from dist_no where @weight=1";s:5:"error";s:58:"sphinxql: Aggregates in 'where' clause prohibited near '1'";s:5:"errno";i:1064;}i:32;a:3:{s:8:"sphinxql";s:37:"select gr from dist0 where count(*)=1";s:5:"error";s:58:"sphinxql: Aggregates in 'where' clause prohibited near '1'";s:5:"errno";i:1064;}i:33;a:3:{s:8:"sphinxql";s:35:"select gr from dist0 where @count=1";s:5:"error";s:58:"sphinxql: Aggregates in 'where' clause prohibited near '1'";s:5:"errno";i:1064;}i:34;a:3:{s:8:"sphinxql";s:37:"select gr from dist0 where weight()=1";s:5:"error";s:58:"sphinxql: Aggregates in 'where' clause prohibited near '1'";s:5:"errno";i:1064;}i:35;a:3:{s:8:"sphinxql";s:36:"select gr from dist0 where @weight=1";s:5:"error";s:58:"sphinxql: Aggregates in 'where' clause prohibited near '1'";s:5:"errno";i:1064;}i:36;a:3:{s:8:"sphinxql";s:37:"select gr from dist1 where count(*)=1";s:5:"error";s:58:"sphinxql: Aggregates in 'where' clause prohibited near '1'";s:5:"errno";i:1064;}i:37;a:3:{s:8:"sphinxql";s:35:"select gr from dist1 where @count=1";s:5:"error";s:58:"sphinxql: Aggregates in 'where' clause prohibited near '1'";s:5:"errno";i:1064;}i:38;a:3:{s:8:"sphinxql";s:37:"select gr from dist1 where weight()=1";s:5:"error";s:58:"sphinxql: Aggregates in 'where' clause prohibited near '1'";s:5:"errno";i:1064;}i:39;a:3:{s:8:"sphinxql";s:36:"select gr from dist1 where @weight=1";s:5:"error";s:58:"sphinxql: Aggregates in 'where' clause prohibited near '1'";s:5:"errno";i:1064;}i:40;a:3:{s:8:"sphinxql";s:37:"select gr from dist2 where count(*)=1";s:5:"error";s:58:"sphinxql: Aggregates in 'where' clause prohibited near 
'1'";s:5:"errno";i:1064;}i:41;a:3:{s:8:"sphinxql";s:35:"select gr from dist2 where @count=1";s:5:"error";s:58:"sphinxql: Aggregates in 'where' clause prohibited near '1'";s:5:"errno";i:1064;}i:42;a:3:{s:8:"sphinxql";s:37:"select gr from dist2 where weight()=1";s:5:"error";s:58:"sphinxql: Aggregates in 'where' clause prohibited near '1'";s:5:"errno";i:1064;}i:43;a:3:{s:8:"sphinxql";s:36:"select gr from dist2 where @weight=1";s:5:"error";s:58:"sphinxql: Aggregates in 'where' clause prohibited near '1'";s:5:"errno";i:1064;}i:44;a:3:{s:8:"sphinxql";s:37:"select gr from dist3 where count(*)=1";s:5:"error";s:58:"sphinxql: Aggregates in 'where' clause prohibited near '1'";s:5:"errno";i:1064;}i:45;a:3:{s:8:"sphinxql";s:35:"select gr from dist3 where @count=1";s:5:"error";s:58:"sphinxql: Aggregates in 'where' clause prohibited near '1'";s:5:"errno";i:1064;}i:46;a:3:{s:8:"sphinxql";s:37:"select gr from dist3 where weight()=1";s:5:"error";s:58:"sphinxql: Aggregates in 'where' clause prohibited near '1'";s:5:"errno";i:1064;}i:47;a:3:{s:8:"sphinxql";s:36:"select gr from dist3 where @weight=1";s:5:"error";s:58:"sphinxql: Aggregates in 'where' clause prohibited near '1'";s:5:"errno";i:1064;}i:48;a:3:{s:8:"sphinxql";s:47:"select @id idd from dist_no where match('test')";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:1:{s:3:"idd";s:1:"1";}i:1;a:1:{s:3:"idd";s:1:"2";}i:2;a:1:{s:3:"idd";s:1:"3";}}}i:49;a:2:{s:8:"sphinxql";s:13:"show warnings";s:14:"total_affected";i:0;}i:50;a:3:{s:8:"sphinxql";s:42:"select @count cnt from dist_no group by gr";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:1:{s:3:"cnt";s:1:"2";}i:1;a:1:{s:3:"cnt";s:1:"1";}}}i:51;a:2:{s:8:"sphinxql";s:13:"show warnings";s:14:"total_affected";i:0;}i:52;a:3:{s:8:"sphinxql";s:43:"select @weight cnt from dist_no group by gr";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:1:{s:3:"cnt";s:1:"1";}i:1;a:1:{s:3:"cnt";s:1:"1";}}}i:53;a:2:{s:8:"sphinxql";s:13:"show warnings";s:14:"total_affected";i:0;}i:54;a:3:{s:8:"sphinxql";s:45:"select @id idd from dist0 where match('test')";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:1:{s:3:"idd";s:1:"1";}i:1;a:1:{s:3:"idd";s:1:"2";}i:2;a:1:{s:3:"idd";s:1:"3";}}}i:55;a:2:{s:8:"sphinxql";s:13:"show warnings";s:14:"total_affected";i:0;}i:56;a:3:{s:8:"sphinxql";s:40:"select @count cnt from dist0 group by gr";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:1:{s:3:"cnt";s:1:"2";}i:1;a:1:{s:3:"cnt";s:1:"1";}}}i:57;a:2:{s:8:"sphinxql";s:13:"show warnings";s:14:"total_affected";i:0;}i:58;a:3:{s:8:"sphinxql";s:41:"select @weight cnt from dist0 group by gr";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:1:{s:3:"cnt";s:1:"1";}i:1;a:1:{s:3:"cnt";s:1:"1";}}}i:59;a:2:{s:8:"sphinxql";s:13:"show warnings";s:14:"total_affected";i:0;}i:60;a:3:{s:8:"sphinxql";s:45:"select @id idd from dist1 where match('test')";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:1:{s:3:"idd";s:1:"3";}i:1;a:1:{s:3:"idd";s:1:"1";}i:2;a:1:{s:3:"idd";s:1:"2";}}}i:61;a:2:{s:8:"sphinxql";s:13:"show warnings";s:14:"total_affected";i:0;}i:62;a:3:{s:8:"sphinxql";s:40:"select @count cnt from dist1 group by gr";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:1:{s:3:"cnt";s:1:"2";}i:1;a:1:{s:3:"cnt";s:1:"1";}}}i:63;a:2:{s:8:"sphinxql";s:13:"show warnings";s:14:"total_affected";i:0;}i:64;a:3:{s:8:"sphinxql";s:41:"select @weight cnt from dist1 group by gr";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:1:{s:3:"cnt";s:1:"1";}i:1;a:1:{s:3:"cnt";s:1:"1";}}}i:65;a:2:{s:8:"sphinxql";s:13:"show warnings";s:14:"total_affected";i:0;}i:66;a:3:{s:8:"sphinxql";s:45:"select @id idd from dist2 where 
match('test')";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:1:{s:3:"idd";s:1:"3";}i:1;a:1:{s:3:"idd";s:1:"1";}i:2;a:1:{s:3:"idd";s:1:"2";}}}i:67;a:2:{s:8:"sphinxql";s:13:"show warnings";s:14:"total_affected";i:0;}i:68;a:3:{s:8:"sphinxql";s:40:"select @count cnt from dist2 group by gr";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:1:{s:3:"cnt";s:1:"2";}i:1;a:1:{s:3:"cnt";s:1:"1";}}}i:69;a:2:{s:8:"sphinxql";s:13:"show warnings";s:14:"total_affected";i:0;}i:70;a:3:{s:8:"sphinxql";s:41:"select @weight cnt from dist2 group by gr";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:1:{s:3:"cnt";s:1:"1";}i:1;a:1:{s:3:"cnt";s:1:"1";}}}i:71;a:2:{s:8:"sphinxql";s:13:"show warnings";s:14:"total_affected";i:0;}i:72;a:3:{s:8:"sphinxql";s:45:"select @id idd from dist3 where match('test')";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:1:{s:3:"idd";s:1:"3";}i:1;a:1:{s:3:"idd";s:1:"1";}i:2;a:1:{s:3:"idd";s:1:"2";}}}i:73;a:2:{s:8:"sphinxql";s:13:"show warnings";s:14:"total_affected";i:0;}i:74;a:3:{s:8:"sphinxql";s:40:"select @count cnt from dist3 group by gr";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:1:{s:3:"cnt";s:1:"2";}i:1;a:1:{s:3:"cnt";s:1:"1";}}}i:75;a:2:{s:8:"sphinxql";s:13:"show warnings";s:14:"total_affected";i:0;}i:76;a:3:{s:8:"sphinxql";s:41:"select @weight cnt from dist3 group by gr";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:1:{s:3:"cnt";s:1:"1";}i:1;a:1:{s:3:"cnt";s:1:"1";}}}i:77;a:3:{s:8:"sphinxql";s:53:"select @id idd, count(*) cnt from dist_no group by gr";s:5:"error";s:163:"sphinxql: Mixing the old-fashion internal vars (@id, @count, @weight) with new acronyms like count(*), weight() is prohibited near ') cnt from dist_no group by gr'";s:5:"errno";i:1064;}i:78;a:3:{s:8:"sphinxql";s:50:"select id idd, @count cnt from dist_no group by gr";s:5:"error";s:168:"sphinxql: Mixing the old-fashion internal vars (@id, @count, @weight) with new acronyms like count(*), weight() is prohibited near '@count cnt from dist_no group by gr'";s:5:"errno";i:1064;}i:79;a:3:{s:8:"sphinxql";s:53:"select id idd from dist_no where @count=1 group by gr";s:5:"error";s:153:"sphinxql: Mixing the old-fashion internal vars (@id, @count, @weight) with new acronyms like count(*), weight() is prohibited near '@count=1 group by gr'";s:5:"errno";i:1064;}i:80;a:3:{s:8:"sphinxql";s:56:"select @id idd from dist_no where count(*)=1 group by gr";s:5:"error";s:148:"sphinxql: Mixing the old-fashion internal vars (@id, @count, @weight) with new acronyms like count(*), weight() is prohibited near ')=1 group by gr'";s:5:"errno";i:1064;}i:81;a:3:{s:8:"sphinxql";s:42:"select id idd from dist_no group by @count";s:5:"error";s:139:"sphinxql: Mixing the old-fashion internal vars (@id, @count, @weight) with new acronyms like count(*), weight() is prohibited near '@count'";s:5:"errno";i:1064;}i:82;a:3:{s:8:"sphinxql";s:45:"select @id idd from dist_no group by count(*)";s:5:"error";s:134:"sphinxql: Mixing the old-fashion internal vars (@id, @count, @weight) with new acronyms like count(*), weight() is prohibited near ')'";s:5:"errno";i:1064;}i:83;a:3:{s:8:"sphinxql";s:51:"select @id idd, count(*) cnt from dist0 group by gr";s:5:"error";s:161:"sphinxql: Mixing the old-fashion internal vars (@id, @count, @weight) with new acronyms like count(*), weight() is prohibited near ') cnt from dist0 group by gr'";s:5:"errno";i:1064;}i:84;a:3:{s:8:"sphinxql";s:48:"select id idd, @count cnt from dist0 group by gr";s:5:"error";s:166:"sphinxql: Mixing the old-fashion internal vars (@id, @count, @weight) with new acronyms like count(*), weight() is prohibited near 
'@count cnt from dist0 group by gr'";s:5:"errno";i:1064;}i:85;a:3:{s:8:"sphinxql";s:51:"select id idd from dist0 where @count=1 group by gr";s:5:"error";s:153:"sphinxql: Mixing the old-fashion internal vars (@id, @count, @weight) with new acronyms like count(*), weight() is prohibited near '@count=1 group by gr'";s:5:"errno";i:1064;}i:86;a:3:{s:8:"sphinxql";s:54:"select @id idd from dist0 where count(*)=1 group by gr";s:5:"error";s:148:"sphinxql: Mixing the old-fashion internal vars (@id, @count, @weight) with new acronyms like count(*), weight() is prohibited near ')=1 group by gr'";s:5:"errno";i:1064;}i:87;a:3:{s:8:"sphinxql";s:40:"select id idd from dist0 group by @count";s:5:"error";s:139:"sphinxql: Mixing the old-fashion internal vars (@id, @count, @weight) with new acronyms like count(*), weight() is prohibited near '@count'";s:5:"errno";i:1064;}i:88;a:3:{s:8:"sphinxql";s:43:"select @id idd from dist0 group by count(*)";s:5:"error";s:134:"sphinxql: Mixing the old-fashion internal vars (@id, @count, @weight) with new acronyms like count(*), weight() is prohibited near ')'";s:5:"errno";i:1064;}i:89;a:3:{s:8:"sphinxql";s:51:"select @id idd, count(*) cnt from dist1 group by gr";s:5:"error";s:161:"sphinxql: Mixing the old-fashion internal vars (@id, @count, @weight) with new acronyms like count(*), weight() is prohibited near ') cnt from dist1 group by gr'";s:5:"errno";i:1064;}i:90;a:3:{s:8:"sphinxql";s:48:"select id idd, @count cnt from dist1 group by gr";s:5:"error";s:166:"sphinxql: Mixing the old-fashion internal vars (@id, @count, @weight) with new acronyms like count(*), weight() is prohibited near '@count cnt from dist1 group by gr'";s:5:"errno";i:1064;}i:91;a:3:{s:8:"sphinxql";s:51:"select id idd from dist1 where @count=1 group by gr";s:5:"error";s:153:"sphinxql: Mixing the old-fashion internal vars (@id, @count, @weight) with new acronyms like count(*), weight() is prohibited near '@count=1 group by gr'";s:5:"errno";i:1064;}i:92;a:3:{s:8:"sphinxql";s:54:"select @id idd from dist1 where count(*)=1 group by gr";s:5:"error";s:148:"sphinxql: Mixing the old-fashion internal vars (@id, @count, @weight) with new acronyms like count(*), weight() is prohibited near ')=1 group by gr'";s:5:"errno";i:1064;}i:93;a:3:{s:8:"sphinxql";s:40:"select id idd from dist1 group by @count";s:5:"error";s:139:"sphinxql: Mixing the old-fashion internal vars (@id, @count, @weight) with new acronyms like count(*), weight() is prohibited near '@count'";s:5:"errno";i:1064;}i:94;a:3:{s:8:"sphinxql";s:43:"select @id idd from dist1 group by count(*)";s:5:"error";s:134:"sphinxql: Mixing the old-fashion internal vars (@id, @count, @weight) with new acronyms like count(*), weight() is prohibited near ')'";s:5:"errno";i:1064;}i:95;a:3:{s:8:"sphinxql";s:51:"select @id idd, count(*) cnt from dist2 group by gr";s:5:"error";s:161:"sphinxql: Mixing the old-fashion internal vars (@id, @count, @weight) with new acronyms like count(*), weight() is prohibited near ') cnt from dist2 group by gr'";s:5:"errno";i:1064;}i:96;a:3:{s:8:"sphinxql";s:48:"select id idd, @count cnt from dist2 group by gr";s:5:"error";s:166:"sphinxql: Mixing the old-fashion internal vars (@id, @count, @weight) with new acronyms like count(*), weight() is prohibited near '@count cnt from dist2 group by gr'";s:5:"errno";i:1064;}i:97;a:3:{s:8:"sphinxql";s:51:"select id idd from dist2 where @count=1 group by gr";s:5:"error";s:153:"sphinxql: Mixing the old-fashion internal vars (@id, @count, @weight) with new acronyms like count(*), weight() is prohibited near 
'@count=1 group by gr'";s:5:"errno";i:1064;}i:98;a:3:{s:8:"sphinxql";s:54:"select @id idd from dist2 where count(*)=1 group by gr";s:5:"error";s:148:"sphinxql: Mixing the old-fashion internal vars (@id, @count, @weight) with new acronyms like count(*), weight() is prohibited near ')=1 group by gr'";s:5:"errno";i:1064;}i:99;a:3:{s:8:"sphinxql";s:40:"select id idd from dist2 group by @count";s:5:"error";s:139:"sphinxql: Mixing the old-fashion internal vars (@id, @count, @weight) with new acronyms like count(*), weight() is prohibited near '@count'";s:5:"errno";i:1064;}i:100;a:3:{s:8:"sphinxql";s:43:"select @id idd from dist2 group by count(*)";s:5:"error";s:134:"sphinxql: Mixing the old-fashion internal vars (@id, @count, @weight) with new acronyms like count(*), weight() is prohibited near ')'";s:5:"errno";i:1064;}i:101;a:3:{s:8:"sphinxql";s:51:"select @id idd, count(*) cnt from dist3 group by gr";s:5:"error";s:161:"sphinxql: Mixing the old-fashion internal vars (@id, @count, @weight) with new acronyms like count(*), weight() is prohibited near ') cnt from dist3 group by gr'";s:5:"errno";i:1064;}i:102;a:3:{s:8:"sphinxql";s:48:"select id idd, @count cnt from dist3 group by gr";s:5:"error";s:166:"sphinxql: Mixing the old-fashion internal vars (@id, @count, @weight) with new acronyms like count(*), weight() is prohibited near '@count cnt from dist3 group by gr'";s:5:"errno";i:1064;}i:103;a:3:{s:8:"sphinxql";s:51:"select id idd from dist3 where @count=1 group by gr";s:5:"error";s:153:"sphinxql: Mixing the old-fashion internal vars (@id, @count, @weight) with new acronyms like count(*), weight() is prohibited near '@count=1 group by gr'";s:5:"errno";i:1064;}i:104;a:3:{s:8:"sphinxql";s:54:"select @id idd from dist3 where count(*)=1 group by gr";s:5:"error";s:148:"sphinxql: Mixing the old-fashion internal vars (@id, @count, @weight) with new acronyms like count(*), weight() is prohibited near ')=1 group by gr'";s:5:"errno";i:1064;}i:105;a:3:{s:8:"sphinxql";s:40:"select id idd from dist3 group by @count";s:5:"error";s:139:"sphinxql: Mixing the old-fashion internal vars (@id, @count, @weight) with new acronyms like count(*), weight() is prohibited near '@count'";s:5:"errno";i:1064;}i:106;a:3:{s:8:"sphinxql";s:43:"select @id idd from dist3 group by count(*)";s:5:"error";s:134:"sphinxql: Mixing the old-fashion internal vars (@id, @count, @weight) with new acronyms like count(*), weight() is prohibited near ')'";s:5:"errno";i:1064;}}}sphinx-2.0.4-release/test/test_060/0000755000176700017710000000000011724063141016265 5ustar deogardeogarsphinx-2.0.4-release/test/test_060/test.xml0000644000176700017710000000223111677266637020012 0ustar deogardeogar select expressions parser vs IN() with 10K args indexer { mem_limit = 16M } searchd { } source src { type = mysql sql_query = SELECT id, id AS tag, body FROM test_table sql_attr_uint = tag } index idx { source = src path = /main charset_type = utf-8 docinfo = extern } SetSelect ( "*, IN(@id,1," . $idlist . 
"3) AS q, crc32('the') as crc" ); $client->SetFilter ( "q", array(1) ); $r = $client->Query ( "" ); if ( $r ) { unset ( $r["time"] ); $results[] = $r; } else $results[] = $client->GetLastError(); ]]> CREATE TABLE test_table ( id int(11) NOT NULL default '0', body varchar(255) NOT NULL default '' ) INSERT INTO test_table VALUES ( 1,'one' ), ( 2,'two' ), ( 3,'three' ), ( 4,'four' ) DROP TABLE IF EXISTS test_table sphinx-2.0.4-release/test/test_060/model.bin0000644000176700017710000000073211677266637020107 0ustar deogardeogara:1:{i:0;a:1:{i:0;a:1:{i:0;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:3:{s:3:"tag";i:1;s:1:"q";i:1;s:3:"crc";i:1;}s:7:"matches";a:2:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:1:"1";s:1:"q";s:1:"1";s:3:"crc";s:10:"1011183078";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"tag";s:1:"3";s:1:"q";s:1:"1";s:3:"crc";s:10:"1011183078";}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";}}}}sphinx-2.0.4-release/test/test_178/0000755000176700017710000000000011724063141016277 5ustar deogardeogarsphinx-2.0.4-release/test/test_178/test.xml0000644000176700017710000000653611662472433020023 0ustar deogardeogar snippets vs index_exact_words searchd { } source test { type = mysql sql_query = SELECT 1, 'text'; } index test { source = test path = /test charset_type = utf-8 morphology = stem_en index_exact_words = 1 } index tstar : test { path = /tstar min_prefix_len = 1 enable_star = 1 morphology = stem_enru } index stars : test { path = /tstar min_infix_len = 1 enable_star = 1 morphology = stem_en } select 1; CALL SNIPPETS('Check vs Checking', 'test', 'check', 1 AS query_mode ) CALL SNIPPETS('Check vs Checking', 'test', '=check', 1 AS query_mode ) CALL SNIPPETS('Check vs Checking', 'test', 'checking', 1 AS query_mode ) CALL SNIPPETS('Check vs Checking', 'test', '=checking', 1 AS query_mode ) CALL SNIPPETS('the time has come, the walrus said, to handle many things, including Check vs Checking, but not quite limited to', 'test', 'check', 1 AS query_mode, 2 AS around, 50 AS limit ) CALL SNIPPETS('the time has come, the walrus said, to handle many things, including Check vs Checking, but not quite limited to', 'test', '=check', 1 AS query_mode, 2 AS around, 50 AS limit ) CALL SNIPPETS('the time has come, the walrus said, to handle many things, including Check vs Checking, but not quite limited to', 'test', 'checking', 1 AS query_mode, 2 AS around, 50 AS limit ) CALL SNIPPETS('the time has come, the walrus said, to handle many things, including Check vs Checking, but not quite limited to', 'test', '=checking', 1 AS query_mode, 2 AS around, 50 AS limit ) CALL SNIPPETS('check vs checking fast path', 'tstar', 'checki*', 1 AS query_mode, 0 AS limit ) CALL SNIPPETS('check vs checking regular path', 'tstar', 'checki*', 1 AS query_mode, 0 AS limit, 1 AS limit_passages ) CALL SNIPPETS('time is dying out like everething else here', 'stars', 'dying*', 1 AS query_mode, 20 AS limit ) CALL SNIPPETS('time is dying out like everething else here', 'stars', '*dying', 1 AS query_mode, 20 AS limit ) CALL SNIPPETS('time is dying out like everething else here', 'stars', '*dying*', 1 AS query_mode, 20 AS limit ) CALL SNIPPETS('time is dying out like everething else here', 'test', 'nothing', 1 AS query_mode, 40 AS limit, 1 as allow_empty ) CALL SNIPPETS('time is dying out like everething', 'test', 'nothing', 1 AS query_mode, 40 AS limit, 1 as allow_empty ) CALL SNIPPETS('time is dying out like everething else here', 'test', 
'nothing', 40 AS limit, 1 as allow_empty ) CALL SNIPPETS('time is dying out like everething', 'test', 'nothing', 40 AS limit, 1 as allow_empty ) sphinx-2.0.4-release/test/test_178/model.bin0000644000176700017710000001040211662472433020077 0ustar deogardeogara:1:{i:0;a:17:{i:0;a:3:{s:8:"sphinxql";s:69:"CALL SNIPPETS('Check vs Checking', 'test', 'check', 1 AS query_mode )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:31:"Check vs Checking";}}}i:1;a:3:{s:8:"sphinxql";s:70:"CALL SNIPPETS('Check vs Checking', 'test', '=check', 1 AS query_mode )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:24:"Check vs Checking";}}}i:2;a:3:{s:8:"sphinxql";s:72:"CALL SNIPPETS('Check vs Checking', 'test', 'checking', 1 AS query_mode )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:31:"Check vs Checking";}}}i:3;a:3:{s:8:"sphinxql";s:73:"CALL SNIPPETS('Check vs Checking', 'test', '=checking', 1 AS query_mode )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:24:"Check vs Checking";}}}i:4;a:3:{s:8:"sphinxql";s:190:"CALL SNIPPETS('the time has come, the walrus said, to handle many things, including Check vs Checking, but not quite limited to', 'test', 'check', 1 AS query_mode, 2 AS around, 50 AS limit )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:65:" ... things, including Check vs Checking, but ... ";}}}i:5;a:3:{s:8:"sphinxql";s:191:"CALL SNIPPETS('the time has come, the walrus said, to handle many things, including Check vs Checking, but not quite limited to', 'test', '=check', 1 AS query_mode, 2 AS around, 50 AS limit )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:53:" ... things, including Check vs Checking ... ";}}}i:6;a:3:{s:8:"sphinxql";s:193:"CALL SNIPPETS('the time has come, the walrus said, to handle many things, including Check vs Checking, but not quite limited to', 'test', 'checking', 1 AS query_mode, 2 AS around, 50 AS limit )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:65:" ... things, including Check vs Checking, but ... ";}}}i:7;a:3:{s:8:"sphinxql";s:194:"CALL SNIPPETS('the time has come, the walrus said, to handle many things, including Check vs Checking, but not quite limited to', 'test', '=checking', 1 AS query_mode, 2 AS around, 50 AS limit )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:44:" ... Check vs Checking, but not ... ";}}}i:8;a:3:{s:8:"sphinxql";s:94:"CALL SNIPPETS('check vs checking fast path', 'tstar', 'checki*', 1 AS query_mode, 0 AS limit )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:34:"check vs checking fast path";}}}i:9;a:3:{s:8:"sphinxql";s:118:"CALL SNIPPETS('check vs checking regular path', 'tstar', 'checki*', 1 AS query_mode, 0 AS limit, 1 AS limit_passages )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:37:"check vs checking regular path";}}}i:10;a:3:{s:8:"sphinxql";s:110:"CALL SNIPPETS('time is dying out like everething else here', 'stars', 'dying*', 1 AS query_mode, 20 AS limit )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:30:"time is dying out ... ";}}}i:11;a:3:{s:8:"sphinxql";s:110:"CALL SNIPPETS('time is dying out like everething else here', 'stars', '*dying', 1 AS query_mode, 20 AS limit )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:30:"time is dying out ... 
";}}}i:12;a:3:{s:8:"sphinxql";s:111:"CALL SNIPPETS('time is dying out like everething else here', 'stars', '*dying*', 1 AS query_mode, 20 AS limit )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:30:"time is dying out ... ";}}}i:13;a:3:{s:8:"sphinxql";s:128:"CALL SNIPPETS('time is dying out like everething else here', 'test', 'nothing', 1 AS query_mode, 40 AS limit, 1 as allow_empty )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:0:"";}}}i:14;a:3:{s:8:"sphinxql";s:119:"CALL SNIPPETS('time is dying out like everething', 'test', 'nothing', 1 AS query_mode, 40 AS limit, 1 as allow_empty )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:0:"";}}}i:15;a:3:{s:8:"sphinxql";s:112:"CALL SNIPPETS('time is dying out like everething else here', 'test', 'nothing', 40 AS limit, 1 as allow_empty )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:0:"";}}}i:16;a:3:{s:8:"sphinxql";s:102:"CALL SNIPPETS('time is dying out like everething', 'test', 'nothing', 40 AS limit, 1 as allow_empty )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:0:"";}}}}}sphinx-2.0.4-release/test/settings.inc0000644000176700017710000000627311550630667017276 0ustar deogardeogar $sd_address, "port" => $sd_port, "sqlport" => $sd_sphinxql_port ), array ( "address" => $agent_address, "port" => $agent_port, "sqlport" => $agent_port_sql ), array ( "address" => $agent_address, "port" => $agent_port+1, "sqlport" => $agent_port_sql+1 ) ); $index_data_path = "data"; $g_model = false; $g_id64 = false; $g_strict = false; $g_skipdemo = false; $g_usemarks = true; // that we mark the output of every test in the searchd.log and query.log $g_strictverbose = false; $windows = isset($_SERVER["WINDIR"]) || isset($_SERVER["windir"]) || isset($_SERVER["HOMEDRIVE"]); $action_retries = 20; $action_wait_timeout = 50000; $g_locals = null; $g_site_defaults = array ( 'queries' => 'queries.txt', 'qlimit' => null, 'api' => '../api/sphinxapi.php', 'mode' => 'aggregate', 'skip-searchd' => false, 'force-reindex' => false, 'malloc-scribble' => false, 'db-host' => 'localhost', 'db-user' => 'root', 'db-password' => '', 'db-name' => 'test', 'db-port' => 3306 ); // localsettings could include dev's own parameters // which is not to be commited into the public repo (in opposite to this settings.inc) if ( file_exists ('localsettings.inc') ) require_once ( 'localsettings.inc' ); function GetBanner () { global $g_locals; exec ( $g_locals['indexer'], $output, $result ); if ( count($output) == 0 ) { print "ERROR: failed to run the indexer\n"; exit ( 1 ); } else return $output; } function GuessIdSize () { global $g_id64; $banner = GetBanner(); $g_id64 = strstr ( $banner[0], 'id64' ) !== false; } function GetVersion () { $banner = GetBanner(); return $banner[0]; } function PublishLocals ( $locals, $benchmark ) { global $g_locals, $g_site_defaults, $windows; $sources = array(); if ( @$locals['root'] && !@$locals['bin'] && !@$locals['api'] ) { $locals['bin'] = $locals['root'] . '/src/'; $locals['api'] = $locals['root'] . '/api/sphinxapi.php'; } $text = @file_get_contents ( getenv('HOME') . '/.sphinx' ); if ( $text ) { eval('$init = array ' . $text . ';'); $sources[] = $init; } $sources[] = $g_site_defaults; foreach ( $sources as $source ) { foreach ( $source as $key => $value ) { if ( !array_key_exists ( $key, $locals ) ) $locals[$key] = $value; } } if ( !@$locals['bin'] ) { if ( $windows ) $locals['bin'] = $benchmark ? 
'..\\bin\\release\\' : '..\\bin\\debug\\'; else $locals['bin'] = '../src/'; } $ext = $windows ? ".exe" : ""; foreach ( array ( 'searchd', 'indexer' ) as $key ) { if ( !array_key_exists ( $key, $locals ) ) $locals[$key] = $locals['bin'] . $key . $ext; } $g_locals = $locals; } ?> sphinx-2.0.4-release/test/bench-results/0000755000176700017710000000000011724063141017477 5ustar deogardeogarsphinx-2.0.4-release/test/bench-results/stub.txt0000644000176700017710000000011411220101253021175 0ustar deogardeogarThis is stub file to force Mercurial to keep this folder in it's repository.sphinx-2.0.4-release/test/test_170/0000755000176700017710000000000011724063141016267 5ustar deogardeogarsphinx-2.0.4-release/test/test_170/test.xml0000644000176700017710000000537411534137360020005 0ustar deogardeogar duplicate documents vs rt insert batches indexer { mem_limit = 16M } searchd { workers = threads } index rt { type = rt docinfo = extern charset_type = utf-8 path = /rt rt_attr_uint = gid rt_field = body rt_mem_limit = 8M } replace into rt (id, gid, body) values ( 1, 10, 'the' ), (2, 20, 'good'), (1, 11, 'a'), (3, 30, 'dog') select * from rt select * from rt where match ('a') select * from rt where match ('the') replace into rt (id, gid, body) values ( 1, 10, 'the' ), (1, 11, 'a'), (2, 20, 'good'), (3, 30, 'dog') select * from rt select * from rt where match ('a') select * from rt where match ('the') replace into rt (id, gid, body) values (2, 20, 'good'), (3, 30, 'dog'), ( 1, 10, 'the' ), (1, 11, 'a') select * from rt select * from rt where match ('a') select * from rt where match ('the') replace into rt (id, gid, body) values ( 1, 10, 'the' ), (2, 20, 'good'), (1, 11, 'b'), (3, 30, 'dog'), (1, 12, 'a') select * from rt select * from rt where match ('a') select * from rt where match ('the | b') replace into rt (id, gid, body) values ( 1, 10, 'the' ), (1, 11, 'b') , (1, 12, 'a'), (2, 20, 'good'), (3, 30, 'dog') select * from rt select * from rt where match ('a') select * from rt where match ('the | b') replace into rt (id, gid, body) values (2, 20, 'good'), (3, 30, 'dog'), ( 1, 10, 'the' ), (1, 11, 'b'), (1, 12, 'a') select * from rt select * from rt where match ('a') select * from rt where match ('the | b') replace into rt (id, gid, body) values ( 1, 10, 'the' ), (1, 11, 'a'), (2, 20, 'good'), (3, 30, 'dog'), (2, 21, 'nice'), (2, 22, 'well'), (4, 40, 'as'), (3, 31, 'cat'), (3, 32, 'bird') select * from rt select * from rt where match ('a') select * from rt where match ('the') select * from rt where match ('well') select * from rt where match ('good | nice') select * from rt where match ('bird') select * from rt where match ('dog | cat') sphinx-2.0.4-release/test/test_170/model.bin0000644000176700017710000001327711534137360020077 0ustar deogardeogara:1:{i:0;a:32:{i:0;a:2:{s:8:"sphinxql";s:102:"replace into rt (id, gid, body) values ( 1, 10, 'the' ), (2, 20, 'good'), (1, 11, 'a'), (3, 30, 'dog')";s:14:"total_affected";i:4;}i:1;a:3:{s:8:"sphinxql";s:16:"select * from rt";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"gid";s:2:"11";}i:1;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:3:"gid";s:2:"20";}i:2;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"gid";s:2:"30";}}}i:2;a:3:{s:8:"sphinxql";s:34:"select * from rt where match ('a')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1680";s:3:"gid";s:2:"11";}}}i:3;a:2:{s:8:"sphinxql";s:36:"select * from rt where match ('the')";s:10:"total_rows";i:0;}i:4;a:2:{s:8:"sphinxql";s:102:"replace 
into rt (id, gid, body) values ( 1, 10, 'the' ), (1, 11, 'a'), (2, 20, 'good'), (3, 30, 'dog')";s:14:"total_affected";i:4;}i:5;a:3:{s:8:"sphinxql";s:16:"select * from rt";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"gid";s:2:"11";}i:1;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:3:"gid";s:2:"20";}i:2;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"gid";s:2:"30";}}}i:6;a:3:{s:8:"sphinxql";s:34:"select * from rt where match ('a')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1680";s:3:"gid";s:2:"11";}}}i:7;a:2:{s:8:"sphinxql";s:36:"select * from rt where match ('the')";s:10:"total_rows";i:0;}i:8;a:2:{s:8:"sphinxql";s:102:"replace into rt (id, gid, body) values (2, 20, 'good'), (3, 30, 'dog'), ( 1, 10, 'the' ), (1, 11, 'a')";s:14:"total_affected";i:4;}i:9;a:3:{s:8:"sphinxql";s:16:"select * from rt";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"gid";s:2:"11";}i:1;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:3:"gid";s:2:"20";}i:2;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"gid";s:2:"30";}}}i:10;a:3:{s:8:"sphinxql";s:34:"select * from rt where match ('a')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1680";s:3:"gid";s:2:"11";}}}i:11;a:2:{s:8:"sphinxql";s:36:"select * from rt where match ('the')";s:10:"total_rows";i:0;}i:12;a:2:{s:8:"sphinxql";s:116:"replace into rt (id, gid, body) values ( 1, 10, 'the' ), (2, 20, 'good'), (1, 11, 'b'), (3, 30, 'dog'), (1, 12, 'a')";s:14:"total_affected";i:5;}i:13;a:3:{s:8:"sphinxql";s:16:"select * from rt";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"gid";s:2:"12";}i:1;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:3:"gid";s:2:"20";}i:2;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"gid";s:2:"30";}}}i:14;a:3:{s:8:"sphinxql";s:34:"select * from rt where match ('a')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1680";s:3:"gid";s:2:"12";}}}i:15;a:2:{s:8:"sphinxql";s:40:"select * from rt where match ('the | b')";s:10:"total_rows";i:0;}i:16;a:2:{s:8:"sphinxql";s:117:"replace into rt (id, gid, body) values ( 1, 10, 'the' ), (1, 11, 'b') , (1, 12, 'a'), (2, 20, 'good'), (3, 30, 'dog')";s:14:"total_affected";i:5;}i:17;a:3:{s:8:"sphinxql";s:16:"select * from rt";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"gid";s:2:"12";}i:1;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:3:"gid";s:2:"20";}i:2;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"gid";s:2:"30";}}}i:18;a:3:{s:8:"sphinxql";s:34:"select * from rt where match ('a')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1680";s:3:"gid";s:2:"12";}}}i:19;a:2:{s:8:"sphinxql";s:40:"select * from rt where match ('the | b')";s:10:"total_rows";i:0;}i:20;a:2:{s:8:"sphinxql";s:116:"replace into rt (id, gid, body) values (2, 20, 'good'), (3, 30, 'dog'), ( 1, 10, 'the' ), (1, 11, 'b'), (1, 12, 'a')";s:14:"total_affected";i:5;}i:21;a:3:{s:8:"sphinxql";s:16:"select * from rt";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"gid";s:2:"12";}i:1;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:3:"gid";s:2:"20";}i:2;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"gid";s:2:"30";}}}i:22;a:3:{s:8:"sphinxql";s:34:"select * from rt where match 
('a')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1680";s:3:"gid";s:2:"12";}}}i:23;a:2:{s:8:"sphinxql";s:40:"select * from rt where match ('the | b')";s:10:"total_rows";i:0;}i:24;a:2:{s:8:"sphinxql";s:184:"replace into rt (id, gid, body) values ( 1, 10, 'the' ), (1, 11, 'a'), (2, 20, 'good'), (3, 30, 'dog'), (2, 21, 'nice'), (2, 22, 'well'), (4, 40, 'as'), (3, 31, 'cat'), (3, 32, 'bird')";s:14:"total_affected";i:9;}i:25;a:3:{s:8:"sphinxql";s:16:"select * from rt";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"gid";s:2:"11";}i:1;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:3:"gid";s:2:"22";}i:2;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"gid";s:2:"32";}i:3;a:3:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:3:"gid";s:2:"40";}}}i:26;a:3:{s:8:"sphinxql";s:34:"select * from rt where match ('a')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1695";s:3:"gid";s:2:"11";}}}i:27;a:2:{s:8:"sphinxql";s:36:"select * from rt where match ('the')";s:10:"total_rows";i:0;}i:28;a:3:{s:8:"sphinxql";s:37:"select * from rt where match ('well')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1695";s:3:"gid";s:2:"22";}}}i:29;a:2:{s:8:"sphinxql";s:44:"select * from rt where match ('good | nice')";s:10:"total_rows";i:0;}i:30;a:3:{s:8:"sphinxql";s:37:"select * from rt where match ('bird')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1695";s:3:"gid";s:2:"32";}}}i:31;a:2:{s:8:"sphinxql";s:42:"select * from rt where match ('dog | cat')";s:10:"total_rows";i:0;}}}sphinx-2.0.4-release/test/test_143/0000755000176700017710000000000011724063141016267 5ustar deogardeogarsphinx-2.0.4-release/test/test_143/wordforms.txt0000644000176700017710000000001511436720627021057 0ustar deogardeogar0 es > none sphinx-2.0.4-release/test/test_143/test.xml0000644000176700017710000000162011436720627020000 0ustar deogardeogar snippets vs multiwordforms searchd { } source test { type = mysql sql_query = SELECT 1, 'text'; } index test { source = test path = /test charset_type = utf-8 wordforms = test_143/wordforms.txt blend_chars = + } select 1; CALL SNIPPETS('Kph on Europe road 1110', 'test', 'kph', 1 AS query_mode ) CALL SNIPPETS('when you on+time do it', 'test', 'you on', 1 AS query_mode ) CALL SNIPPETS('when you on+time do it', 'test', 'you time', 1 AS query_mode ) CALL SNIPPETS('you on+time 0 es you 0 at', 'test', 'none', 1 AS query_mode ) CALL SNIPPETS('you on+time 0 es you 0 at', 'test', '0', 1 AS query_mode ) sphinx-2.0.4-release/test/test_143/model.bin0000644000176700017710000000205511560651166020073 0ustar deogardeogara:1:{i:0;a:5:{i:0;a:3:{s:8:"sphinxql";s:73:"CALL SNIPPETS('Kph on Europe road 1110', 'test', 'kph', 1 AS query_mode )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:30:"Kph on Europe road 1110";}}}i:1;a:3:{s:8:"sphinxql";s:75:"CALL SNIPPETS('when you on+time do it', 'test', 'you on', 1 AS query_mode )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:29:"when you on+time do it";}}}i:2;a:3:{s:8:"sphinxql";s:77:"CALL SNIPPETS('when you on+time do it', 'test', 'you time', 1 AS query_mode )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:36:"when you on+time do it";}}}i:3;a:3:{s:8:"sphinxql";s:76:"CALL SNIPPETS('you on+time 0 es you 0 at', 'test', 'none', 1 AS query_mode )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:32:"you on+time 0 es you 0 
at";}}}i:4;a:3:{s:8:"sphinxql";s:73:"CALL SNIPPETS('you on+time 0 es you 0 at', 'test', '0', 1 AS query_mode )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:32:"you on+time 0 es you 0 at";}}}}}sphinx-2.0.4-release/test/test_112/0000755000176700017710000000000011724063141016263 5ustar deogardeogarsphinx-2.0.4-release/test/test_112/test.xml0000644000176700017710000000317211421075337017773 0ustar deogardeogar fullscan on empty multiquery with @geodist indexer { mem_limit = 16M } searchd { workers = threads binlog_path = } source srctest { type = mysql sql_query = SELECT id, best_seller, attributes_id, lng, lat, text FROM test_table sql_attr_uint = best_seller sql_attr_uint = attributes_id sql_attr_float = lng sql_attr_float = lat } index products { source = srctest path = /test } CREATE TABLE test_table ( id integer primary key not null auto_increment, best_seller int not null default 0, attributes_id int not null default 0, `lng` float NOT NULL default '0', `lat` float NOT NULL default '0', text varchar(256) ); DROP TABLE IF EXISTS test_table; INSERT INTO test_table (best_seller, attributes_id, lng, lat, text) VALUES ( 1, 1, -0.0798578, 0.937717,'text1' ), ( 2, 1, -0.0799989, 0.891975, 'text2' ), ( 3, 2, -0.0721455, 0.926761, 'text3' ), ( 4, 2, -0.0721455, 0.926761, 'text4' ); SetArrayResult (true); $index = 'products'; $query = ""; $client->SetMatchMode (SPH_MATCH_EXTENDED2); // First query $client->SetSortMode (SPH_SORT_EXTENDED, 'best_seller ASC'); $client->AddQuery ($query, $index); //Second query $client->SetGeoAnchor ('lng','lat',-0.0798578,0.937717 ); $client->SetSortMode (SPH_SORT_EXTENDED, '@geodist DESC' ); $client->AddQuery ($query, $index); //Run queries $results = $client->RunQueries(); ]]> sphinx-2.0.4-release/test/test_112/model.bin0000644000176700017710000000436011455516446020074 0ustar 
deogardeogara:1:{i:0;a:2:{i:0;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:4:{s:11:"best_seller";i:1;s:13:"attributes_id";i:1;s:3:"lng";i:5;s:3:"lat";i:5;}s:7:"matches";a:4:{i:0;a:3:{s:2:"id";i:1;s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:11:"best_seller";i:1;s:13:"attributes_id";i:1;s:3:"lng";d:-0.0798577964305877685546875;s:3:"lat";d:0.937717020511627197265625;}}i:1;a:3:{s:2:"id";i:2;s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:11:"best_seller";i:2;s:13:"attributes_id";i:1;s:3:"lng";d:-0.079998902976512908935546875;s:3:"lat";d:0.891974985599517822265625;}}i:2;a:3:{s:2:"id";i:3;s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:11:"best_seller";i:3;s:13:"attributes_id";i:2;s:3:"lng";d:-0.072145499289035797119140625;s:3:"lat";d:0.926760971546173095703125;}}i:3;a:3:{s:2:"id";i:4;s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:11:"best_seller";i:4;s:13:"attributes_id";i:2;s:3:"lng";d:-0.072145499289035797119140625;s:3:"lat";d:0.926760971546173095703125;}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.000";s:5:"query";s:0:"";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:5:{s:11:"best_seller";i:1;s:13:"attributes_id";i:1;s:3:"lng";i:5;s:3:"lat";i:5;s:8:"@geodist";i:5;}s:7:"matches";a:4:{i:0;a:3:{s:2:"id";i:2;s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:11:"best_seller";i:2;s:13:"attributes_id";i:1;s:3:"lng";d:-0.079998902976512908935546875;s:3:"lat";d:0.891974985599517822265625;s:8:"@geodist";d:291086.09375;}}i:1;a:3:{s:2:"id";i:3;s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:11:"best_seller";i:3;s:13:"attributes_id";i:2;s:3:"lng";d:-0.072145499289035797119140625;s:3:"lat";d:0.926760971546173095703125;s:8:"@geodist";d:85369.6015625;}}i:2;a:3:{s:2:"id";i:4;s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:11:"best_seller";i:4;s:13:"attributes_id";i:2;s:3:"lng";d:-0.072145499289035797119140625;s:3:"lat";d:0.926760971546173095703125;s:8:"@geodist";d:85369.6015625;}}i:3;a:3:{s:2:"id";i:1;s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:11:"best_seller";i:1;s:13:"attributes_id";i:1;s:3:"lng";d:-0.0798577964305877685546875;s:3:"lat";d:0.937717020511627197265625;s:8:"@geodist";d:0;}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.000";s:5:"query";s:0:"";}}}sphinx-2.0.4-release/test/test_072/0000755000176700017710000000000011724063141016270 5ustar deogardeogarsphinx-2.0.4-release/test/test_072/test.xml0000644000176700017710000000220111323636205017766 0ustar deogardeogar multi-queries indexer { mem_limit = 16M } searchd { subtree_docs_cache = 0 subtree_hits_cache = 0 } source srctest { type = mysql sql_query = SELECT * FROM test_table } index test_idx { source = srctest path = /test charset_type = utf-8 } CREATE TABLE `test_table` ( `id` int(11) NOT NULL default '0', `body` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 1, 'one' ), ( 2, 'one two' ), ( 3, 'one two three' ), ( 4, 'one two three four' ), ( 5, 'one two three four five' ) AddQuery ('one'); $client->AddQuery ('two'); $client->AddQuery ('three'); $client->AddQuery ('four'); $client->AddQuery ('five'); $results = $client->RunQueries (); for ( $i=0; $i<=4; $i++ ) if ( is_array($results) && is_array($results[$i]) ) unset ( $results[$i]["time"] ); ]]> sphinx-2.0.4-release/test/test_072/model.bin0000644000176700017710000000400711227074350020065 0ustar 
deogardeogara:1:{i:0;a:1:{i:0;a:5:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:5:"words";a:1:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}}}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:4:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:5:"words";a:1:{s:3:"two";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}}}i:2;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:5:"words";a:1:{s:5:"three";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}}i:3;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:5:"words";a:1:{s:4:"four";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}}i:4;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:1:{s:4:"five";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}}}}sphinx-2.0.4-release/test/test_031/0000755000176700017710000000000011724063141016263 5ustar deogardeogarsphinx-2.0.4-release/test/test_031/test.xml0000644000176700017710000000252411036770122017767 0ustar deogardeogar geodist/expressions vs multiquery indexer { mem_limit = 16M } searchd { } source srctest { type = mysql sql_query = SELECT * FROM test_table sql_attr_float = lat sql_attr_float = long } index test_idx { source = srctest path = /test charset_type = utf-8 } CREATE TABLE `test_table` ( `id` int(11) NOT NULL default '0', `lat` float, `long` float, `body` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 1, 0.5, 0.5, 'one' ), ( 2, 0.51, 0.5, 'one two' ), ( 3, 0.52, 0.5, 'one two three' ), ( 4, 0.5, 0.51, 'one two three four' ), ( 5, 0.5, 0.52, 'one two three four five' ) AddQuery ('one'); $client->SetGeoAnchor( 'lat','long',0.5, 0.5 ); $client->SetSortMode ( SPH_SORT_EXTENDED, '@geodist ASC' ); $client->AddQuery ('one'); $client->SetSortMode ( SPH_SORT_EXPR, '@id' ); $client->AddQuery ('one'); $results = $client->RunQueries (); for ( $i=0; $i<=2; $i++ ) if ( is_array($results) && is_array($results[$i]) ) unset ( $results[$i]["time"] ); ]]> sphinx-2.0.4-release/test/test_031/model.bin0000644000176700017710000000532511455516446020076 0ustar 
deogardeogara:1:{i:0;a:1:{i:0;a:3:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:2:{s:3:"lat";i:5;s:4:"long";i:5;}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"lat";d:0.5;s:4:"long";d:0.5;}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"lat";d:0.5099999904632568359375;s:4:"long";d:0.5;}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"lat";d:0.519999980926513671875;s:4:"long";d:0.5;}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"lat";d:0.5;s:4:"long";d:0.5099999904632568359375;}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"lat";d:0.5;s:4:"long";d:0.519999980926513671875;}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:5:"words";a:1:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}}}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:3:{s:3:"lat";i:5;s:4:"long";i:5;s:8:"@geodist";i:5;}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"lat";d:0.5;s:4:"long";d:0.5;s:8:"@geodist";d:0;}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"lat";d:0.5;s:4:"long";d:0.5099999904632568359375;s:8:"@geodist";d:56024.76171875;}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"lat";d:0.5099999904632568359375;s:4:"long";d:0.5;s:8:"@geodist";d:63839.9375;}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"lat";d:0.519999980926513671875;s:4:"long";d:0.5;s:8:"@geodist";d:127679.875;}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:3:"lat";d:0.5;s:4:"long";d:0.519999980926513671875;s:8:"@geodist";d:112049.203125;}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:5:"words";a:1:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}}}i:2;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:4:{s:3:"lat";i:5;s:4:"long";i:5;s:8:"@geodist";i:5;s:5:"@expr";i:5;}s:7:"matches";a:5:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:3:"lat";d:0.5;s:4:"long";d:0.519999980926513671875;s:8:"@geodist";d:112049.203125;s:5:"@expr";d:5;}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:3:"lat";d:0.5;s:4:"long";d:0.5099999904632568359375;s:8:"@geodist";d:56024.76171875;s:5:"@expr";d:4;}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:3:"lat";d:0.519999980926513671875;s:4:"long";d:0.5;s:8:"@geodist";d:127679.875;s:5:"@expr";d:3;}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:3:"lat";d:0.5099999904632568359375;s:4:"long";d:0.5;s:8:"@geodist";d:63839.9375;s:5:"@expr";d:2;}}i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:3:"lat";d:0.5;s:4:"long";d:0.5;s:8:"@geodist";d:0;s:5:"@expr";d:1;}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:5:"words";a:1:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}}}}}}sphinx-2.0.4-release/test/test_167/0000755000176700017710000000000011724063141016275 5ustar deogardeogarsphinx-2.0.4-release/test/test_167/test.xml0000644000176700017710000000352511526217641020011 0ustar deogardeogar min_word_len vs tail of the query indexer { mem_limit = 16M } searchd { } source srctest { type = mysql sql_query = SELECT * FROM test_table } index test { source = srctest path = /test charset_type = sbcs charset_type = utf-8 charset_type = utf-8 exceptions = synonyms.txt enable_star = 1 min_word_len = 3 min_word_len = 4 min_word_len = 5 min_prefix_len = 3 min_prefix_len = 4 min_prefix_len = 5 } select * from test where match('@(author) luther @(title) ccc') CREATE TABLE `test_table` ( `document_id` int(11) NOT NULL default '0', `author` 
varchar(255) NOT NULL default '', `title` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 1, 'luther', 'a' ), ( 2, 'luther', 'bb' ), ( 3, 'luther', 'ccc' ), ( 4, 'luther', 'dddd' ), ( 5, 'luther', 'eeeee' ), ( 6, 'luther', 'ffffff' ), ( 7, 'luther', 'ggggggg' ), ( 8, 'luther', 'hello world' ), ( 9, 'luther', 'hello a world' ), ( 10, 'luther', 'hello aa world' ), ( 11, 'luther', 'hello aaa world' ), ( 12, 'luther', 'hello aaaa world' ), ( 13, 'luther', 'hello aaaaa world' ), ( 14, 'luther', 'hello me world' ), ( 15, 'luther', 'hello two world' ), ( 16, 'luther', 'hello four world' ) sphinx-2.0.4-release/test/test_167/model.bin0000644000176700017710000004563411526217641020111 0ustar deogardeogara:27:{i:0;a:1:{i:0;a:3:{s:8:"sphinxql";s:63:"select * from test where match('@(author) luther @(title) ccc')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:2:{s:2:"id";s:1:"3";s:6:"weight";s:4:"2500";}}}}i:1;a:1:{i:0;a:3:{s:8:"sphinxql";s:63:"select * from test where match('@(author) luther @(title) ccc')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:2:{s:2:"id";s:1:"3";s:6:"weight";s:4:"2500";}}}}i:2;a:1:{i:0;a:3:{s:8:"sphinxql";s:63:"select * from test where match('@(author) luther @(title) ccc')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:2:{s:2:"id";s:1:"3";s:6:"weight";s:4:"2500";}}}}i:3;a:1:{i:0;a:3:{s:8:"sphinxql";s:63:"select * from test where match('@(author) luther @(title) ccc')";s:10:"total_rows";i:16;s:4:"rows";a:16:{i:0;a:2:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1277";}i:1;a:2:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1277";}i:2;a:2:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1277";}i:3;a:2:{s:2:"id";s:1:"4";s:6:"weight";s:4:"1277";}i:4;a:2:{s:2:"id";s:1:"5";s:6:"weight";s:4:"1277";}i:5;a:2:{s:2:"id";s:1:"6";s:6:"weight";s:4:"1277";}i:6;a:2:{s:2:"id";s:1:"7";s:6:"weight";s:4:"1277";}i:7;a:2:{s:2:"id";s:1:"8";s:6:"weight";s:4:"1277";}i:8;a:2:{s:2:"id";s:1:"9";s:6:"weight";s:4:"1277";}i:9;a:2:{s:2:"id";s:2:"10";s:6:"weight";s:4:"1277";}i:10;a:2:{s:2:"id";s:2:"11";s:6:"weight";s:4:"1277";}i:11;a:2:{s:2:"id";s:2:"12";s:6:"weight";s:4:"1277";}i:12;a:2:{s:2:"id";s:2:"13";s:6:"weight";s:4:"1277";}i:13;a:2:{s:2:"id";s:2:"14";s:6:"weight";s:4:"1277";}i:14;a:2:{s:2:"id";s:2:"15";s:6:"weight";s:4:"1277";}i:15;a:2:{s:2:"id";s:2:"16";s:6:"weight";s:4:"1277";}}}}i:4;a:1:{i:0;a:3:{s:8:"sphinxql";s:63:"select * from test where match('@(author) luther @(title) ccc')";s:10:"total_rows";i:16;s:4:"rows";a:16:{i:0;a:2:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1277";}i:1;a:2:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1277";}i:2;a:2:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1277";}i:3;a:2:{s:2:"id";s:1:"4";s:6:"weight";s:4:"1277";}i:4;a:2:{s:2:"id";s:1:"5";s:6:"weight";s:4:"1277";}i:5;a:2:{s:2:"id";s:1:"6";s:6:"weight";s:4:"1277";}i:6;a:2:{s:2:"id";s:1:"7";s:6:"weight";s:4:"1277";}i:7;a:2:{s:2:"id";s:1:"8";s:6:"weight";s:4:"1277";}i:8;a:2:{s:2:"id";s:1:"9";s:6:"weight";s:4:"1277";}i:9;a:2:{s:2:"id";s:2:"10";s:6:"weight";s:4:"1277";}i:10;a:2:{s:2:"id";s:2:"11";s:6:"weight";s:4:"1277";}i:11;a:2:{s:2:"id";s:2:"12";s:6:"weight";s:4:"1277";}i:12;a:2:{s:2:"id";s:2:"13";s:6:"weight";s:4:"1277";}i:13;a:2:{s:2:"id";s:2:"14";s:6:"weight";s:4:"1277";}i:14;a:2:{s:2:"id";s:2:"15";s:6:"weight";s:4:"1277";}i:15;a:2:{s:2:"id";s:2:"16";s:6:"weight";s:4:"1277";}}}}i:5;a:1:{i:0;a:3:{s:8:"sphinxql";s:63:"select * from test where match('@(author) luther @(title) 
ccc')";s:10:"total_rows";i:16;s:4:"rows";a:16:{i:0;a:2:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1277";}i:1;a:2:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1277";}i:2;a:2:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1277";}i:3;a:2:{s:2:"id";s:1:"4";s:6:"weight";s:4:"1277";}i:4;a:2:{s:2:"id";s:1:"5";s:6:"weight";s:4:"1277";}i:5;a:2:{s:2:"id";s:1:"6";s:6:"weight";s:4:"1277";}i:6;a:2:{s:2:"id";s:1:"7";s:6:"weight";s:4:"1277";}i:7;a:2:{s:2:"id";s:1:"8";s:6:"weight";s:4:"1277";}i:8;a:2:{s:2:"id";s:1:"9";s:6:"weight";s:4:"1277";}i:9;a:2:{s:2:"id";s:2:"10";s:6:"weight";s:4:"1277";}i:10;a:2:{s:2:"id";s:2:"11";s:6:"weight";s:4:"1277";}i:11;a:2:{s:2:"id";s:2:"12";s:6:"weight";s:4:"1277";}i:12;a:2:{s:2:"id";s:2:"13";s:6:"weight";s:4:"1277";}i:13;a:2:{s:2:"id";s:2:"14";s:6:"weight";s:4:"1277";}i:14;a:2:{s:2:"id";s:2:"15";s:6:"weight";s:4:"1277";}i:15;a:2:{s:2:"id";s:2:"16";s:6:"weight";s:4:"1277";}}}}i:6;a:1:{i:0;a:3:{s:8:"sphinxql";s:63:"select * from test where match('@(author) luther @(title) ccc')";s:10:"total_rows";i:16;s:4:"rows";a:16:{i:0;a:2:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1277";}i:1;a:2:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1277";}i:2;a:2:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1277";}i:3;a:2:{s:2:"id";s:1:"4";s:6:"weight";s:4:"1277";}i:4;a:2:{s:2:"id";s:1:"5";s:6:"weight";s:4:"1277";}i:5;a:2:{s:2:"id";s:1:"6";s:6:"weight";s:4:"1277";}i:6;a:2:{s:2:"id";s:1:"7";s:6:"weight";s:4:"1277";}i:7;a:2:{s:2:"id";s:1:"8";s:6:"weight";s:4:"1277";}i:8;a:2:{s:2:"id";s:1:"9";s:6:"weight";s:4:"1277";}i:9;a:2:{s:2:"id";s:2:"10";s:6:"weight";s:4:"1277";}i:10;a:2:{s:2:"id";s:2:"11";s:6:"weight";s:4:"1277";}i:11;a:2:{s:2:"id";s:2:"12";s:6:"weight";s:4:"1277";}i:12;a:2:{s:2:"id";s:2:"13";s:6:"weight";s:4:"1277";}i:13;a:2:{s:2:"id";s:2:"14";s:6:"weight";s:4:"1277";}i:14;a:2:{s:2:"id";s:2:"15";s:6:"weight";s:4:"1277";}i:15;a:2:{s:2:"id";s:2:"16";s:6:"weight";s:4:"1277";}}}}i:7;a:1:{i:0;a:3:{s:8:"sphinxql";s:63:"select * from test where match('@(author) luther @(title) ccc')";s:10:"total_rows";i:16;s:4:"rows";a:16:{i:0;a:2:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1277";}i:1;a:2:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1277";}i:2;a:2:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1277";}i:3;a:2:{s:2:"id";s:1:"4";s:6:"weight";s:4:"1277";}i:4;a:2:{s:2:"id";s:1:"5";s:6:"weight";s:4:"1277";}i:5;a:2:{s:2:"id";s:1:"6";s:6:"weight";s:4:"1277";}i:6;a:2:{s:2:"id";s:1:"7";s:6:"weight";s:4:"1277";}i:7;a:2:{s:2:"id";s:1:"8";s:6:"weight";s:4:"1277";}i:8;a:2:{s:2:"id";s:1:"9";s:6:"weight";s:4:"1277";}i:9;a:2:{s:2:"id";s:2:"10";s:6:"weight";s:4:"1277";}i:10;a:2:{s:2:"id";s:2:"11";s:6:"weight";s:4:"1277";}i:11;a:2:{s:2:"id";s:2:"12";s:6:"weight";s:4:"1277";}i:12;a:2:{s:2:"id";s:2:"13";s:6:"weight";s:4:"1277";}i:13;a:2:{s:2:"id";s:2:"14";s:6:"weight";s:4:"1277";}i:14;a:2:{s:2:"id";s:2:"15";s:6:"weight";s:4:"1277";}i:15;a:2:{s:2:"id";s:2:"16";s:6:"weight";s:4:"1277";}}}}i:8;a:1:{i:0;a:3:{s:8:"sphinxql";s:63:"select * from test where match('@(author) luther @(title) 
ccc')";s:10:"total_rows";i:16;s:4:"rows";a:16:{i:0;a:2:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1277";}i:1;a:2:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1277";}i:2;a:2:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1277";}i:3;a:2:{s:2:"id";s:1:"4";s:6:"weight";s:4:"1277";}i:4;a:2:{s:2:"id";s:1:"5";s:6:"weight";s:4:"1277";}i:5;a:2:{s:2:"id";s:1:"6";s:6:"weight";s:4:"1277";}i:6;a:2:{s:2:"id";s:1:"7";s:6:"weight";s:4:"1277";}i:7;a:2:{s:2:"id";s:1:"8";s:6:"weight";s:4:"1277";}i:8;a:2:{s:2:"id";s:1:"9";s:6:"weight";s:4:"1277";}i:9;a:2:{s:2:"id";s:2:"10";s:6:"weight";s:4:"1277";}i:10;a:2:{s:2:"id";s:2:"11";s:6:"weight";s:4:"1277";}i:11;a:2:{s:2:"id";s:2:"12";s:6:"weight";s:4:"1277";}i:12;a:2:{s:2:"id";s:2:"13";s:6:"weight";s:4:"1277";}i:13;a:2:{s:2:"id";s:2:"14";s:6:"weight";s:4:"1277";}i:14;a:2:{s:2:"id";s:2:"15";s:6:"weight";s:4:"1277";}i:15;a:2:{s:2:"id";s:2:"16";s:6:"weight";s:4:"1277";}}}}i:9;a:1:{i:0;a:3:{s:8:"sphinxql";s:63:"select * from test where match('@(author) luther @(title) ccc')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:2:{s:2:"id";s:1:"3";s:6:"weight";s:4:"2500";}}}}i:10;a:1:{i:0;a:3:{s:8:"sphinxql";s:63:"select * from test where match('@(author) luther @(title) ccc')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:2:{s:2:"id";s:1:"3";s:6:"weight";s:4:"2500";}}}}i:11;a:1:{i:0;a:3:{s:8:"sphinxql";s:63:"select * from test where match('@(author) luther @(title) ccc')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:2:{s:2:"id";s:1:"3";s:6:"weight";s:4:"2500";}}}}i:12;a:1:{i:0;a:3:{s:8:"sphinxql";s:63:"select * from test where match('@(author) luther @(title) ccc')";s:10:"total_rows";i:16;s:4:"rows";a:16:{i:0;a:2:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1277";}i:1;a:2:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1277";}i:2;a:2:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1277";}i:3;a:2:{s:2:"id";s:1:"4";s:6:"weight";s:4:"1277";}i:4;a:2:{s:2:"id";s:1:"5";s:6:"weight";s:4:"1277";}i:5;a:2:{s:2:"id";s:1:"6";s:6:"weight";s:4:"1277";}i:6;a:2:{s:2:"id";s:1:"7";s:6:"weight";s:4:"1277";}i:7;a:2:{s:2:"id";s:1:"8";s:6:"weight";s:4:"1277";}i:8;a:2:{s:2:"id";s:1:"9";s:6:"weight";s:4:"1277";}i:9;a:2:{s:2:"id";s:2:"10";s:6:"weight";s:4:"1277";}i:10;a:2:{s:2:"id";s:2:"11";s:6:"weight";s:4:"1277";}i:11;a:2:{s:2:"id";s:2:"12";s:6:"weight";s:4:"1277";}i:12;a:2:{s:2:"id";s:2:"13";s:6:"weight";s:4:"1277";}i:13;a:2:{s:2:"id";s:2:"14";s:6:"weight";s:4:"1277";}i:14;a:2:{s:2:"id";s:2:"15";s:6:"weight";s:4:"1277";}i:15;a:2:{s:2:"id";s:2:"16";s:6:"weight";s:4:"1277";}}}}i:13;a:1:{i:0;a:3:{s:8:"sphinxql";s:63:"select * from test where match('@(author) luther @(title) ccc')";s:10:"total_rows";i:16;s:4:"rows";a:16:{i:0;a:2:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1277";}i:1;a:2:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1277";}i:2;a:2:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1277";}i:3;a:2:{s:2:"id";s:1:"4";s:6:"weight";s:4:"1277";}i:4;a:2:{s:2:"id";s:1:"5";s:6:"weight";s:4:"1277";}i:5;a:2:{s:2:"id";s:1:"6";s:6:"weight";s:4:"1277";}i:6;a:2:{s:2:"id";s:1:"7";s:6:"weight";s:4:"1277";}i:7;a:2:{s:2:"id";s:1:"8";s:6:"weight";s:4:"1277";}i:8;a:2:{s:2:"id";s:1:"9";s:6:"weight";s:4:"1277";}i:9;a:2:{s:2:"id";s:2:"10";s:6:"weight";s:4:"1277";}i:10;a:2:{s:2:"id";s:2:"11";s:6:"weight";s:4:"1277";}i:11;a:2:{s:2:"id";s:2:"12";s:6:"weight";s:4:"1277";}i:12;a:2:{s:2:"id";s:2:"13";s:6:"weight";s:4:"1277";}i:13;a:2:{s:2:"id";s:2:"14";s:6:"weight";s:4:"1277";}i:14;a:2:{s:2:"id";s:2:"15";s:6:"weight";s:4:"1277";}i:15;a:2:{s:2:"id";s:2:"16";s:6:"weight";s:4:"1277";}}}}i:14;a:1:{i:0;a:3:{s:8:"sphinxql";s:63:"select * from test where match('@(author) luther @(title) 
ccc')";s:10:"total_rows";i:16;s:4:"rows";a:16:{i:0;a:2:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1277";}i:1;a:2:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1277";}i:2;a:2:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1277";}i:3;a:2:{s:2:"id";s:1:"4";s:6:"weight";s:4:"1277";}i:4;a:2:{s:2:"id";s:1:"5";s:6:"weight";s:4:"1277";}i:5;a:2:{s:2:"id";s:1:"6";s:6:"weight";s:4:"1277";}i:6;a:2:{s:2:"id";s:1:"7";s:6:"weight";s:4:"1277";}i:7;a:2:{s:2:"id";s:1:"8";s:6:"weight";s:4:"1277";}i:8;a:2:{s:2:"id";s:1:"9";s:6:"weight";s:4:"1277";}i:9;a:2:{s:2:"id";s:2:"10";s:6:"weight";s:4:"1277";}i:10;a:2:{s:2:"id";s:2:"11";s:6:"weight";s:4:"1277";}i:11;a:2:{s:2:"id";s:2:"12";s:6:"weight";s:4:"1277";}i:12;a:2:{s:2:"id";s:2:"13";s:6:"weight";s:4:"1277";}i:13;a:2:{s:2:"id";s:2:"14";s:6:"weight";s:4:"1277";}i:14;a:2:{s:2:"id";s:2:"15";s:6:"weight";s:4:"1277";}i:15;a:2:{s:2:"id";s:2:"16";s:6:"weight";s:4:"1277";}}}}i:15;a:1:{i:0;a:3:{s:8:"sphinxql";s:63:"select * from test where match('@(author) luther @(title) ccc')";s:10:"total_rows";i:16;s:4:"rows";a:16:{i:0;a:2:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1277";}i:1;a:2:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1277";}i:2;a:2:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1277";}i:3;a:2:{s:2:"id";s:1:"4";s:6:"weight";s:4:"1277";}i:4;a:2:{s:2:"id";s:1:"5";s:6:"weight";s:4:"1277";}i:5;a:2:{s:2:"id";s:1:"6";s:6:"weight";s:4:"1277";}i:6;a:2:{s:2:"id";s:1:"7";s:6:"weight";s:4:"1277";}i:7;a:2:{s:2:"id";s:1:"8";s:6:"weight";s:4:"1277";}i:8;a:2:{s:2:"id";s:1:"9";s:6:"weight";s:4:"1277";}i:9;a:2:{s:2:"id";s:2:"10";s:6:"weight";s:4:"1277";}i:10;a:2:{s:2:"id";s:2:"11";s:6:"weight";s:4:"1277";}i:11;a:2:{s:2:"id";s:2:"12";s:6:"weight";s:4:"1277";}i:12;a:2:{s:2:"id";s:2:"13";s:6:"weight";s:4:"1277";}i:13;a:2:{s:2:"id";s:2:"14";s:6:"weight";s:4:"1277";}i:14;a:2:{s:2:"id";s:2:"15";s:6:"weight";s:4:"1277";}i:15;a:2:{s:2:"id";s:2:"16";s:6:"weight";s:4:"1277";}}}}i:16;a:1:{i:0;a:3:{s:8:"sphinxql";s:63:"select * from test where match('@(author) luther @(title) ccc')";s:10:"total_rows";i:16;s:4:"rows";a:16:{i:0;a:2:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1277";}i:1;a:2:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1277";}i:2;a:2:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1277";}i:3;a:2:{s:2:"id";s:1:"4";s:6:"weight";s:4:"1277";}i:4;a:2:{s:2:"id";s:1:"5";s:6:"weight";s:4:"1277";}i:5;a:2:{s:2:"id";s:1:"6";s:6:"weight";s:4:"1277";}i:6;a:2:{s:2:"id";s:1:"7";s:6:"weight";s:4:"1277";}i:7;a:2:{s:2:"id";s:1:"8";s:6:"weight";s:4:"1277";}i:8;a:2:{s:2:"id";s:1:"9";s:6:"weight";s:4:"1277";}i:9;a:2:{s:2:"id";s:2:"10";s:6:"weight";s:4:"1277";}i:10;a:2:{s:2:"id";s:2:"11";s:6:"weight";s:4:"1277";}i:11;a:2:{s:2:"id";s:2:"12";s:6:"weight";s:4:"1277";}i:12;a:2:{s:2:"id";s:2:"13";s:6:"weight";s:4:"1277";}i:13;a:2:{s:2:"id";s:2:"14";s:6:"weight";s:4:"1277";}i:14;a:2:{s:2:"id";s:2:"15";s:6:"weight";s:4:"1277";}i:15;a:2:{s:2:"id";s:2:"16";s:6:"weight";s:4:"1277";}}}}i:17;a:1:{i:0;a:3:{s:8:"sphinxql";s:63:"select * from test where match('@(author) luther @(title) 
ccc')";s:10:"total_rows";i:16;s:4:"rows";a:16:{i:0;a:2:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1277";}i:1;a:2:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1277";}i:2;a:2:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1277";}i:3;a:2:{s:2:"id";s:1:"4";s:6:"weight";s:4:"1277";}i:4;a:2:{s:2:"id";s:1:"5";s:6:"weight";s:4:"1277";}i:5;a:2:{s:2:"id";s:1:"6";s:6:"weight";s:4:"1277";}i:6;a:2:{s:2:"id";s:1:"7";s:6:"weight";s:4:"1277";}i:7;a:2:{s:2:"id";s:1:"8";s:6:"weight";s:4:"1277";}i:8;a:2:{s:2:"id";s:1:"9";s:6:"weight";s:4:"1277";}i:9;a:2:{s:2:"id";s:2:"10";s:6:"weight";s:4:"1277";}i:10;a:2:{s:2:"id";s:2:"11";s:6:"weight";s:4:"1277";}i:11;a:2:{s:2:"id";s:2:"12";s:6:"weight";s:4:"1277";}i:12;a:2:{s:2:"id";s:2:"13";s:6:"weight";s:4:"1277";}i:13;a:2:{s:2:"id";s:2:"14";s:6:"weight";s:4:"1277";}i:14;a:2:{s:2:"id";s:2:"15";s:6:"weight";s:4:"1277";}i:15;a:2:{s:2:"id";s:2:"16";s:6:"weight";s:4:"1277";}}}}i:18;a:1:{i:0;a:3:{s:8:"sphinxql";s:63:"select * from test where match('@(author) luther @(title) ccc')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:2:{s:2:"id";s:1:"3";s:6:"weight";s:4:"2500";}}}}i:19;a:1:{i:0;a:3:{s:8:"sphinxql";s:63:"select * from test where match('@(author) luther @(title) ccc')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:2:{s:2:"id";s:1:"3";s:6:"weight";s:4:"2500";}}}}i:20;a:1:{i:0;a:3:{s:8:"sphinxql";s:63:"select * from test where match('@(author) luther @(title) ccc')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:2:{s:2:"id";s:1:"3";s:6:"weight";s:4:"2500";}}}}i:21;a:1:{i:0;a:3:{s:8:"sphinxql";s:63:"select * from test where match('@(author) luther @(title) ccc')";s:10:"total_rows";i:16;s:4:"rows";a:16:{i:0;a:2:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1277";}i:1;a:2:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1277";}i:2;a:2:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1277";}i:3;a:2:{s:2:"id";s:1:"4";s:6:"weight";s:4:"1277";}i:4;a:2:{s:2:"id";s:1:"5";s:6:"weight";s:4:"1277";}i:5;a:2:{s:2:"id";s:1:"6";s:6:"weight";s:4:"1277";}i:6;a:2:{s:2:"id";s:1:"7";s:6:"weight";s:4:"1277";}i:7;a:2:{s:2:"id";s:1:"8";s:6:"weight";s:4:"1277";}i:8;a:2:{s:2:"id";s:1:"9";s:6:"weight";s:4:"1277";}i:9;a:2:{s:2:"id";s:2:"10";s:6:"weight";s:4:"1277";}i:10;a:2:{s:2:"id";s:2:"11";s:6:"weight";s:4:"1277";}i:11;a:2:{s:2:"id";s:2:"12";s:6:"weight";s:4:"1277";}i:12;a:2:{s:2:"id";s:2:"13";s:6:"weight";s:4:"1277";}i:13;a:2:{s:2:"id";s:2:"14";s:6:"weight";s:4:"1277";}i:14;a:2:{s:2:"id";s:2:"15";s:6:"weight";s:4:"1277";}i:15;a:2:{s:2:"id";s:2:"16";s:6:"weight";s:4:"1277";}}}}i:22;a:1:{i:0;a:3:{s:8:"sphinxql";s:63:"select * from test where match('@(author) luther @(title) ccc')";s:10:"total_rows";i:16;s:4:"rows";a:16:{i:0;a:2:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1277";}i:1;a:2:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1277";}i:2;a:2:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1277";}i:3;a:2:{s:2:"id";s:1:"4";s:6:"weight";s:4:"1277";}i:4;a:2:{s:2:"id";s:1:"5";s:6:"weight";s:4:"1277";}i:5;a:2:{s:2:"id";s:1:"6";s:6:"weight";s:4:"1277";}i:6;a:2:{s:2:"id";s:1:"7";s:6:"weight";s:4:"1277";}i:7;a:2:{s:2:"id";s:1:"8";s:6:"weight";s:4:"1277";}i:8;a:2:{s:2:"id";s:1:"9";s:6:"weight";s:4:"1277";}i:9;a:2:{s:2:"id";s:2:"10";s:6:"weight";s:4:"1277";}i:10;a:2:{s:2:"id";s:2:"11";s:6:"weight";s:4:"1277";}i:11;a:2:{s:2:"id";s:2:"12";s:6:"weight";s:4:"1277";}i:12;a:2:{s:2:"id";s:2:"13";s:6:"weight";s:4:"1277";}i:13;a:2:{s:2:"id";s:2:"14";s:6:"weight";s:4:"1277";}i:14;a:2:{s:2:"id";s:2:"15";s:6:"weight";s:4:"1277";}i:15;a:2:{s:2:"id";s:2:"16";s:6:"weight";s:4:"1277";}}}}i:23;a:1:{i:0;a:3:{s:8:"sphinxql";s:63:"select * from test where match('@(author) luther @(title) 
ccc')";s:10:"total_rows";i:16;s:4:"rows";a:16:{i:0;a:2:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1277";}i:1;a:2:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1277";}i:2;a:2:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1277";}i:3;a:2:{s:2:"id";s:1:"4";s:6:"weight";s:4:"1277";}i:4;a:2:{s:2:"id";s:1:"5";s:6:"weight";s:4:"1277";}i:5;a:2:{s:2:"id";s:1:"6";s:6:"weight";s:4:"1277";}i:6;a:2:{s:2:"id";s:1:"7";s:6:"weight";s:4:"1277";}i:7;a:2:{s:2:"id";s:1:"8";s:6:"weight";s:4:"1277";}i:8;a:2:{s:2:"id";s:1:"9";s:6:"weight";s:4:"1277";}i:9;a:2:{s:2:"id";s:2:"10";s:6:"weight";s:4:"1277";}i:10;a:2:{s:2:"id";s:2:"11";s:6:"weight";s:4:"1277";}i:11;a:2:{s:2:"id";s:2:"12";s:6:"weight";s:4:"1277";}i:12;a:2:{s:2:"id";s:2:"13";s:6:"weight";s:4:"1277";}i:13;a:2:{s:2:"id";s:2:"14";s:6:"weight";s:4:"1277";}i:14;a:2:{s:2:"id";s:2:"15";s:6:"weight";s:4:"1277";}i:15;a:2:{s:2:"id";s:2:"16";s:6:"weight";s:4:"1277";}}}}i:24;a:1:{i:0;a:3:{s:8:"sphinxql";s:63:"select * from test where match('@(author) luther @(title) ccc')";s:10:"total_rows";i:16;s:4:"rows";a:16:{i:0;a:2:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1277";}i:1;a:2:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1277";}i:2;a:2:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1277";}i:3;a:2:{s:2:"id";s:1:"4";s:6:"weight";s:4:"1277";}i:4;a:2:{s:2:"id";s:1:"5";s:6:"weight";s:4:"1277";}i:5;a:2:{s:2:"id";s:1:"6";s:6:"weight";s:4:"1277";}i:6;a:2:{s:2:"id";s:1:"7";s:6:"weight";s:4:"1277";}i:7;a:2:{s:2:"id";s:1:"8";s:6:"weight";s:4:"1277";}i:8;a:2:{s:2:"id";s:1:"9";s:6:"weight";s:4:"1277";}i:9;a:2:{s:2:"id";s:2:"10";s:6:"weight";s:4:"1277";}i:10;a:2:{s:2:"id";s:2:"11";s:6:"weight";s:4:"1277";}i:11;a:2:{s:2:"id";s:2:"12";s:6:"weight";s:4:"1277";}i:12;a:2:{s:2:"id";s:2:"13";s:6:"weight";s:4:"1277";}i:13;a:2:{s:2:"id";s:2:"14";s:6:"weight";s:4:"1277";}i:14;a:2:{s:2:"id";s:2:"15";s:6:"weight";s:4:"1277";}i:15;a:2:{s:2:"id";s:2:"16";s:6:"weight";s:4:"1277";}}}}i:25;a:1:{i:0;a:3:{s:8:"sphinxql";s:63:"select * from test where match('@(author) luther @(title) ccc')";s:10:"total_rows";i:16;s:4:"rows";a:16:{i:0;a:2:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1277";}i:1;a:2:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1277";}i:2;a:2:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1277";}i:3;a:2:{s:2:"id";s:1:"4";s:6:"weight";s:4:"1277";}i:4;a:2:{s:2:"id";s:1:"5";s:6:"weight";s:4:"1277";}i:5;a:2:{s:2:"id";s:1:"6";s:6:"weight";s:4:"1277";}i:6;a:2:{s:2:"id";s:1:"7";s:6:"weight";s:4:"1277";}i:7;a:2:{s:2:"id";s:1:"8";s:6:"weight";s:4:"1277";}i:8;a:2:{s:2:"id";s:1:"9";s:6:"weight";s:4:"1277";}i:9;a:2:{s:2:"id";s:2:"10";s:6:"weight";s:4:"1277";}i:10;a:2:{s:2:"id";s:2:"11";s:6:"weight";s:4:"1277";}i:11;a:2:{s:2:"id";s:2:"12";s:6:"weight";s:4:"1277";}i:12;a:2:{s:2:"id";s:2:"13";s:6:"weight";s:4:"1277";}i:13;a:2:{s:2:"id";s:2:"14";s:6:"weight";s:4:"1277";}i:14;a:2:{s:2:"id";s:2:"15";s:6:"weight";s:4:"1277";}i:15;a:2:{s:2:"id";s:2:"16";s:6:"weight";s:4:"1277";}}}}i:26;a:1:{i:0;a:3:{s:8:"sphinxql";s:63:"select * from test where match('@(author) luther @(title) 
ccc')";s:10:"total_rows";i:16;s:4:"rows";a:16:{i:0;a:2:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1277";}i:1;a:2:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1277";}i:2;a:2:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1277";}i:3;a:2:{s:2:"id";s:1:"4";s:6:"weight";s:4:"1277";}i:4;a:2:{s:2:"id";s:1:"5";s:6:"weight";s:4:"1277";}i:5;a:2:{s:2:"id";s:1:"6";s:6:"weight";s:4:"1277";}i:6;a:2:{s:2:"id";s:1:"7";s:6:"weight";s:4:"1277";}i:7;a:2:{s:2:"id";s:1:"8";s:6:"weight";s:4:"1277";}i:8;a:2:{s:2:"id";s:1:"9";s:6:"weight";s:4:"1277";}i:9;a:2:{s:2:"id";s:2:"10";s:6:"weight";s:4:"1277";}i:10;a:2:{s:2:"id";s:2:"11";s:6:"weight";s:4:"1277";}i:11;a:2:{s:2:"id";s:2:"12";s:6:"weight";s:4:"1277";}i:12;a:2:{s:2:"id";s:2:"13";s:6:"weight";s:4:"1277";}i:13;a:2:{s:2:"id";s:2:"14";s:6:"weight";s:4:"1277";}i:14;a:2:{s:2:"id";s:2:"15";s:6:"weight";s:4:"1277";}i:15;a:2:{s:2:"id";s:2:"16";s:6:"weight";s:4:"1277";}}}}}sphinx-2.0.4-release/test/test_166/0000755000176700017710000000000011724063141016274 5ustar deogardeogarsphinx-2.0.4-release/test/test_166/test.xml0000644000176700017710000000573611534034117020010 0ustar deogardeogar string attibutes vs agent groupings 3 indexer { mem_limit = 16M } searchd { workers = threads collation_libc_locale = C collation_server = utf8_general_ci collation_server = libc_ci collation_server = libc_cs } source src_master { type = mysql sql_query = SELECT id, idd1, str1, body FROM test_table where id in ( 1, 2, 3 ) sql_attr_uint = idd1 sql_attr_string = str1 } index master { source = src_master docinfo = extern charset_type = utf-8 path = /master } index all { type = distributed local = master agent = :a1 agent = :a2 } source src_a1 { type = mysql sql_query = SELECT id, idd1, str1, body FROM test_table where id in ( 4, 5, 6 ) sql_attr_uint = idd1 sql_attr_string = str1 } index a1 { source = src_a1 docinfo = extern charset_type = utf-8 path = /a1 } source src_a2 { type = mysql sql_query = SELECT id, idd1, str1, body FROM test_table where id in ( 7, 8, 9 ) sql_attr_uint = idd1 sql_attr_string = str1 } index a2 { source = src_a2 docinfo = extern charset_type = utf-8 path = /a2 } CREATE TABLE `test_table` ( `id` int(11) NOT NULL default '0', `idd1` int(11) NOT NULL default '0', `str1` varchar(1024) NOT NULL default '', `body` varchar(1024) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 1, 1, 'a aa', 'the dog' ), ( 2, 10, 'c cC', 'the cat' ), ( 3, 2, 'a aA', 'the bird' ), ( 4, 11, 'c cC', 'cat eats bird' ), ( 5, 3, 'a Aa', 'dog eats cat' ), ( 6, 12, 'c cc', 'bird' ), ( 7, 4, 'a Aa', 'the cat' ), ( 8, 5, 'a aA', 'eats' ), ( 9, 13, 'c cc', 'the' ) select * from all group by str1 order by @id asc select * from all where match ('the | cat | dog | bird') group by str1 within group order by idd1 desc order by @weight asc set collation_connection=libc_cs select * from all group by str1 order by idd1 desc select * from all where match ('the | cat | dog | bird') group by str1 within group order by idd1 asc order by @weight asc the | cat | dog | bird sphinx-2.0.4-release/test/test_166/model.bin0000644000176700017710000003520111526162354020075 0ustar deogardeogara:3:{i:0;a:7:{i:0;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:4:{s:4:"idd1";i:1;s:4:"str1";i:7;s:8:"@groupby";i:6;s:6:"@count";i:1;}s:7:"matches";a:2:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:4:"idd1";i:1;s:4:"str1";s:4:"a 
aa";s:8:"@groupby";i:1286444629639636820;s:6:"@count";i:5;}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:4:"idd1";i:10;s:4:"str1";s:4:"c cC";s:8:"@groupby";i:195691322532063478;s:6:"@count";i:4;}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.002";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:4:{s:4:"idd1";i:1;s:4:"str1";i:7;s:8:"@groupby";i:6;s:6:"@count";i:1;}s:7:"matches";a:2:{i:9;a:2:{s:6:"weight";s:4:"1500";s:5:"attrs";a:4:{s:4:"idd1";i:13;s:4:"str1";s:4:"c cc";s:8:"@groupby";i:195691322532063478;s:6:"@count";i:4;}}i:7;a:2:{s:6:"weight";s:4:"2545";s:5:"attrs";a:4:{s:4:"idd1";i:4;s:4:"str1";s:4:"a Aa";s:8:"@groupby";i:1286444629639636820;s:6:"@count";i:4;}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.003";s:5:"words";a:4:{s:3:"the";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}s:3:"cat";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:3:"dog";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:4:"bird";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:22:"the | cat | dog | bird";}i:2;a:3:{s:8:"sphinxql";s:48:"select * from all group by str1 order by @id asc";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:6:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"1";s:4:"str1";s:4:"a aa";s:8:"@groupby";s:19:"1286444629639636820";s:6:"@count";s:1:"5";}i:1;a:6:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"10";s:4:"str1";s:4:"c cC";s:8:"@groupby";s:18:"195691322532063478";s:6:"@count";s:1:"4";}}}i:3;a:3:{s:8:"sphinxql";s:123:"select * from all where match ('the | cat | dog | bird') group by str1 within group order by idd1 desc order by @weight asc";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:6:{s:2:"id";s:1:"9";s:6:"weight";s:4:"1500";s:4:"idd1";s:2:"13";s:4:"str1";s:4:"c cc";s:8:"@groupby";s:18:"195691322532063478";s:6:"@count";s:1:"4";}i:1;a:6:{s:2:"id";s:1:"7";s:6:"weight";s:4:"2545";s:4:"idd1";s:1:"4";s:4:"str1";s:4:"a Aa";s:8:"@groupby";s:19:"1286444629639636820";s:6:"@count";s:1:"4";}}}i:4;a:2:{s:8:"sphinxql";s:32:"set collation_connection=libc_cs";s:14:"total_affected";i:0;}i:5;a:3:{s:8:"sphinxql";s:50:"select * from all group by str1 order by idd1 desc";s:10:"total_rows";i:5;s:4:"rows";a:5:{i:0;a:6:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"12";s:4:"str1";s:4:"c cc";s:8:"@groupby";s:20:"-2966048536600452348";s:6:"@count";s:1:"2";}i:1;a:6:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"10";s:4:"str1";s:4:"c cC";s:8:"@groupby";s:20:"-2966083720972555100";s:6:"@count";s:1:"2";}i:2;a:6:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"3";s:4:"str1";s:4:"a Aa";s:8:"@groupby";s:20:"-4550552610715127046";s:6:"@count";s:1:"2";}i:3;a:6:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"2";s:4:"str1";s:4:"a aA";s:8:"@groupby";s:20:"-4519907022619685254";s:6:"@count";s:1:"2";}i:4;a:6:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"1";s:4:"str1";s:4:"a aa";s:8:"@groupby";s:20:"-4519871838247582502";s:6:"@count";s:1:"1";}}}i:6;a:3:{s:8:"sphinxql";s:122:"select * from all where match ('the | cat | dog | bird') group by str1 within group order by idd1 asc order by @weight asc";s:10:"total_rows";i:5;s:4:"rows";a:5:{i:0;a:6:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1500";s:4:"idd1";s:1:"1";s:4:"str1";s:4:"a aa";s:8:"@groupby";s:20:"-4519871838247582502";s:6:"@count";s:1:"1";}i:1;a:6:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1500";s:4:"idd1";s:1:"2";s:4:"str1";s:4:"a 
aA";s:8:"@groupby";s:20:"-4519907022619685254";s:6:"@count";s:1:"1";}i:2;a:6:{s:2:"id";s:1:"6";s:6:"weight";s:4:"1500";s:4:"idd1";s:2:"12";s:4:"str1";s:4:"c cc";s:8:"@groupby";s:20:"-2966048536600452348";s:6:"@count";s:1:"2";}i:3;a:6:{s:2:"id";s:1:"5";s:6:"weight";s:4:"1545";s:4:"idd1";s:1:"3";s:4:"str1";s:4:"a Aa";s:8:"@groupby";s:20:"-4550552610715127046";s:6:"@count";s:1:"2";}i:4;a:6:{s:2:"id";s:1:"2";s:6:"weight";s:4:"2500";s:4:"idd1";s:2:"10";s:4:"str1";s:4:"c cC";s:8:"@groupby";s:20:"-2966083720972555100";s:6:"@count";s:1:"2";}}}}i:1;a:7:{i:0;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:4:{s:4:"idd1";i:1;s:4:"str1";i:7;s:8:"@groupby";i:6;s:6:"@count";i:1;}s:7:"matches";a:2:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:4:"idd1";i:1;s:4:"str1";s:4:"a aa";s:8:"@groupby";i:-6704402370037900940;s:6:"@count";i:5;}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:4:"idd1";i:10;s:4:"str1";s:4:"c cC";s:8:"@groupby";i:-1004237807469493610;s:6:"@count";i:4;}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.002";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:4:{s:4:"idd1";i:1;s:4:"str1";i:7;s:8:"@groupby";i:6;s:6:"@count";i:1;}s:7:"matches";a:2:{i:9;a:2:{s:6:"weight";s:4:"1500";s:5:"attrs";a:4:{s:4:"idd1";i:13;s:4:"str1";s:4:"c cc";s:8:"@groupby";i:-1004237807469493610;s:6:"@count";i:4;}}i:7;a:2:{s:6:"weight";s:4:"2545";s:5:"attrs";a:4:{s:4:"idd1";i:4;s:4:"str1";s:4:"a Aa";s:8:"@groupby";i:-6704402370037900940;s:6:"@count";i:4;}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.003";s:5:"words";a:4:{s:3:"the";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}s:3:"cat";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:3:"dog";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:4:"bird";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:22:"the | cat | dog | bird";}i:2;a:3:{s:8:"sphinxql";s:48:"select * from all group by str1 order by @id asc";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:6:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"1";s:4:"str1";s:4:"a aa";s:8:"@groupby";s:20:"-6704402370037900940";s:6:"@count";s:1:"5";}i:1;a:6:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"10";s:4:"str1";s:4:"c cC";s:8:"@groupby";s:20:"-1004237807469493610";s:6:"@count";s:1:"4";}}}i:3;a:3:{s:8:"sphinxql";s:123:"select * from all where match ('the | cat | dog | bird') group by str1 within group order by idd1 desc order by @weight asc";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:6:{s:2:"id";s:1:"9";s:6:"weight";s:4:"1500";s:4:"idd1";s:2:"13";s:4:"str1";s:4:"c cc";s:8:"@groupby";s:20:"-1004237807469493610";s:6:"@count";s:1:"4";}i:1;a:6:{s:2:"id";s:1:"7";s:6:"weight";s:4:"2545";s:4:"idd1";s:1:"4";s:4:"str1";s:4:"a Aa";s:8:"@groupby";s:20:"-6704402370037900940";s:6:"@count";s:1:"4";}}}i:4;a:2:{s:8:"sphinxql";s:32:"set collation_connection=libc_cs";s:14:"total_affected";i:0;}i:5;a:3:{s:8:"sphinxql";s:50:"select * from all group by str1 order by idd1 desc";s:10:"total_rows";i:5;s:4:"rows";a:5:{i:0;a:6:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"12";s:4:"str1";s:4:"c cc";s:8:"@groupby";s:20:"-2966048536600452348";s:6:"@count";s:1:"2";}i:1;a:6:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"10";s:4:"str1";s:4:"c 
cC";s:8:"@groupby";s:20:"-2966083720972555100";s:6:"@count";s:1:"2";}i:2;a:6:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"3";s:4:"str1";s:4:"a Aa";s:8:"@groupby";s:20:"-4550552610715127046";s:6:"@count";s:1:"2";}i:3;a:6:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"2";s:4:"str1";s:4:"a aA";s:8:"@groupby";s:20:"-4519907022619685254";s:6:"@count";s:1:"2";}i:4;a:6:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"1";s:4:"str1";s:4:"a aa";s:8:"@groupby";s:20:"-4519871838247582502";s:6:"@count";s:1:"1";}}}i:6;a:3:{s:8:"sphinxql";s:122:"select * from all where match ('the | cat | dog | bird') group by str1 within group order by idd1 asc order by @weight asc";s:10:"total_rows";i:5;s:4:"rows";a:5:{i:0;a:6:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1500";s:4:"idd1";s:1:"1";s:4:"str1";s:4:"a aa";s:8:"@groupby";s:20:"-4519871838247582502";s:6:"@count";s:1:"1";}i:1;a:6:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1500";s:4:"idd1";s:1:"2";s:4:"str1";s:4:"a aA";s:8:"@groupby";s:20:"-4519907022619685254";s:6:"@count";s:1:"1";}i:2;a:6:{s:2:"id";s:1:"6";s:6:"weight";s:4:"1500";s:4:"idd1";s:2:"12";s:4:"str1";s:4:"c cc";s:8:"@groupby";s:20:"-2966048536600452348";s:6:"@count";s:1:"2";}i:3;a:6:{s:2:"id";s:1:"5";s:6:"weight";s:4:"1545";s:4:"idd1";s:1:"3";s:4:"str1";s:4:"a Aa";s:8:"@groupby";s:20:"-4550552610715127046";s:6:"@count";s:1:"2";}i:4;a:6:{s:2:"id";s:1:"2";s:6:"weight";s:4:"2500";s:4:"idd1";s:2:"10";s:4:"str1";s:4:"c cC";s:8:"@groupby";s:20:"-2966083720972555100";s:6:"@count";s:1:"2";}}}}i:2;a:7:{i:0;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:4:{s:4:"idd1";i:1;s:4:"str1";i:7;s:8:"@groupby";i:6;s:6:"@count";i:1;}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:4:"idd1";i:1;s:4:"str1";s:4:"a aa";s:8:"@groupby";i:-4519871838247582502;s:6:"@count";i:1;}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:4:"idd1";i:10;s:4:"str1";s:4:"c cC";s:8:"@groupby";i:-2966083720972555100;s:6:"@count";i:2;}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:4:"idd1";i:2;s:4:"str1";s:4:"a aA";s:8:"@groupby";i:-4519907022619685254;s:6:"@count";i:2;}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:4:"idd1";i:3;s:4:"str1";s:4:"a Aa";s:8:"@groupby";i:-4550552610715127046;s:6:"@count";i:2;}}i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:4:"idd1";i:12;s:4:"str1";s:4:"c cc";s:8:"@groupby";i:-2966048536600452348;s:6:"@count";i:2;}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:4:"time";s:5:"0.002";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:4:{s:4:"idd1";i:1;s:4:"str1";i:7;s:8:"@groupby";i:6;s:6:"@count";i:1;}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:4:"1500";s:5:"attrs";a:4:{s:4:"idd1";i:1;s:4:"str1";s:4:"a aa";s:8:"@groupby";i:-4519871838247582502;s:6:"@count";i:1;}}i:3;a:2:{s:6:"weight";s:4:"1500";s:5:"attrs";a:4:{s:4:"idd1";i:2;s:4:"str1";s:4:"a aA";s:8:"@groupby";i:-4519907022619685254;s:6:"@count";i:1;}}i:9;a:2:{s:6:"weight";s:4:"1500";s:5:"attrs";a:4:{s:4:"idd1";i:13;s:4:"str1";s:4:"c cc";s:8:"@groupby";i:-2966048536600452348;s:6:"@count";i:2;}}i:4;a:2:{s:6:"weight";s:4:"2500";s:5:"attrs";a:4:{s:4:"idd1";i:11;s:4:"str1";s:4:"c cC";s:8:"@groupby";i:-2966083720972555100;s:6:"@count";i:2;}}i:7;a:2:{s:6:"weight";s:4:"2545";s:5:"attrs";a:4:{s:4:"idd1";i:4;s:4:"str1";s:4:"a 
Aa";s:8:"@groupby";i:-4550552610715127046;s:6:"@count";i:2;}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:4:"time";s:5:"0.003";s:5:"words";a:4:{s:3:"the";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}s:3:"cat";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:3:"dog";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:4:"bird";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:22:"the | cat | dog | bird";}i:2;a:3:{s:8:"sphinxql";s:48:"select * from all group by str1 order by @id asc";s:10:"total_rows";i:5;s:4:"rows";a:5:{i:0;a:6:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"1";s:4:"str1";s:4:"a aa";s:8:"@groupby";s:20:"-4519871838247582502";s:6:"@count";s:1:"1";}i:1;a:6:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"10";s:4:"str1";s:4:"c cC";s:8:"@groupby";s:20:"-2966083720972555100";s:6:"@count";s:1:"2";}i:2;a:6:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"2";s:4:"str1";s:4:"a aA";s:8:"@groupby";s:20:"-4519907022619685254";s:6:"@count";s:1:"2";}i:3;a:6:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"3";s:4:"str1";s:4:"a Aa";s:8:"@groupby";s:20:"-4550552610715127046";s:6:"@count";s:1:"2";}i:4;a:6:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"12";s:4:"str1";s:4:"c cc";s:8:"@groupby";s:20:"-2966048536600452348";s:6:"@count";s:1:"2";}}}i:3;a:3:{s:8:"sphinxql";s:123:"select * from all where match ('the | cat | dog | bird') group by str1 within group order by idd1 desc order by @weight asc";s:10:"total_rows";i:5;s:4:"rows";a:5:{i:0;a:6:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1500";s:4:"idd1";s:1:"1";s:4:"str1";s:4:"a aa";s:8:"@groupby";s:20:"-4519871838247582502";s:6:"@count";s:1:"1";}i:1;a:6:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1500";s:4:"idd1";s:1:"2";s:4:"str1";s:4:"a aA";s:8:"@groupby";s:20:"-4519907022619685254";s:6:"@count";s:1:"1";}i:2;a:6:{s:2:"id";s:1:"9";s:6:"weight";s:4:"1500";s:4:"idd1";s:2:"13";s:4:"str1";s:4:"c cc";s:8:"@groupby";s:20:"-2966048536600452348";s:6:"@count";s:1:"2";}i:3;a:6:{s:2:"id";s:1:"4";s:6:"weight";s:4:"2500";s:4:"idd1";s:2:"11";s:4:"str1";s:4:"c cC";s:8:"@groupby";s:20:"-2966083720972555100";s:6:"@count";s:1:"2";}i:4;a:6:{s:2:"id";s:1:"7";s:6:"weight";s:4:"2545";s:4:"idd1";s:1:"4";s:4:"str1";s:4:"a Aa";s:8:"@groupby";s:20:"-4550552610715127046";s:6:"@count";s:1:"2";}}}i:4;a:2:{s:8:"sphinxql";s:32:"set collation_connection=libc_cs";s:14:"total_affected";i:0;}i:5;a:3:{s:8:"sphinxql";s:50:"select * from all group by str1 order by idd1 desc";s:10:"total_rows";i:5;s:4:"rows";a:5:{i:0;a:6:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"12";s:4:"str1";s:4:"c cc";s:8:"@groupby";s:20:"-2966048536600452348";s:6:"@count";s:1:"2";}i:1;a:6:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"10";s:4:"str1";s:4:"c cC";s:8:"@groupby";s:20:"-2966083720972555100";s:6:"@count";s:1:"2";}i:2;a:6:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"3";s:4:"str1";s:4:"a Aa";s:8:"@groupby";s:20:"-4550552610715127046";s:6:"@count";s:1:"2";}i:3;a:6:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"2";s:4:"str1";s:4:"a aA";s:8:"@groupby";s:20:"-4519907022619685254";s:6:"@count";s:1:"2";}i:4;a:6:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"1";s:4:"str1";s:4:"a aa";s:8:"@groupby";s:20:"-4519871838247582502";s:6:"@count";s:1:"1";}}}i:6;a:3:{s:8:"sphinxql";s:122:"select * from all where match ('the | cat | dog | bird') group by str1 within group order by idd1 asc order by @weight 
asc";s:10:"total_rows";i:5;s:4:"rows";a:5:{i:0;a:6:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1500";s:4:"idd1";s:1:"1";s:4:"str1";s:4:"a aa";s:8:"@groupby";s:20:"-4519871838247582502";s:6:"@count";s:1:"1";}i:1;a:6:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1500";s:4:"idd1";s:1:"2";s:4:"str1";s:4:"a aA";s:8:"@groupby";s:20:"-4519907022619685254";s:6:"@count";s:1:"1";}i:2;a:6:{s:2:"id";s:1:"6";s:6:"weight";s:4:"1500";s:4:"idd1";s:2:"12";s:4:"str1";s:4:"c cc";s:8:"@groupby";s:20:"-2966048536600452348";s:6:"@count";s:1:"2";}i:3;a:6:{s:2:"id";s:1:"5";s:6:"weight";s:4:"1545";s:4:"idd1";s:1:"3";s:4:"str1";s:4:"a Aa";s:8:"@groupby";s:20:"-4550552610715127046";s:6:"@count";s:1:"2";}i:4;a:6:{s:2:"id";s:1:"2";s:6:"weight";s:4:"2500";s:4:"idd1";s:2:"10";s:4:"str1";s:4:"c cC";s:8:"@groupby";s:20:"-2966083720972555100";s:6:"@count";s:1:"2";}}}}}sphinx-2.0.4-release/test/test_004/0000755000176700017710000000000011724063141016263 5ustar deogardeogarsphinx-2.0.4-release/test/test_004/test.xml0000644000176700017710000000400310744717627020001 0ustar deogardeogar prefix_fields/infix_fields (part 1) indexer { mem_limit = 16M } searchd { } source srclj { type = mysql sql_query = SELECT id, subject, body, author FROM test_table } index lj { source = srclj path = /lj charset_type = utf-8 min_word_len = 0 enable_star = 1 min_infix_len = 0 min_prefix_len = 3 prefix_fields = prefix_fields = subject prefix_fields = body prefix_fields = body, author infix_fields = infix_fields = SUBJECT infix_fields = body infix_fields = subject, AUTHOR } enab* grea* Mak* *ble* *thor* *oda* CREATE TABLE `test_table` ( `id` int(11) NOT NULL default '0', `document_id` int(5) NOT NULL default '0', `subject` varchar(255) NOT NULL default '', `body` varchar(255) NOT NULL default '', `author` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES (1,1,'Problem with enable_star searches','Having star searches is great! 
Just what we needed','Maurice Makaay'), (2,2,'Problem with enable_star searches',' But the thing is, that I cannot search for authors anymore','admin'), (3,3,'Problem with enable_star searches','I will try to repro it here and update you','shodan') sphinx-2.0.4-release/test/test_004/model.bin0000644000176700017710000010215110723664234020065 0ustar deogardeogara:16:{i:0;a:6:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"enab*";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:5:"query";s:5:"enab*";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"grea*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"grea*";}i:2;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"mak*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"Mak*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*ble*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*ble*";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"*thor*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*thor*";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*oda*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*oda*";}}i:1;a:6:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"enab*";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:5:"query";s:5:"enab*";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"grea*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"grea*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"
body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"mak*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"Mak*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*ble*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*ble*";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"*thor*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*thor*";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*oda*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*oda*";}}i:2;a:6:{i:0;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"enab*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"enab*";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"grea*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"grea*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"mak*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"Mak*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*ble*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*ble*";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"*thor*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*thor*";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*oda*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*oda*";}}i:3;a:6:{i:0;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"enab*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"enab*";}i:1;a:11:{s:5:"error";s:0
:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"grea*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"grea*";}i:2;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"mak*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"Mak*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*ble*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*ble*";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"*thor*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*thor*";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*oda*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*oda*";}}i:4;a:6:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"enab*";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:5:"query";s:5:"enab*";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"grea*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"grea*";}i:2;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"mak*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"Mak*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*ble*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*ble*";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000
";s:5:"words";a:1:{s:6:"*thor*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*thor*";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*oda*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*oda*";}}i:5;a:6:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"enab*";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:5:"query";s:5:"enab*";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"grea*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"grea*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"mak*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"Mak*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*ble*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*ble*";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"*thor*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*thor*";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*oda*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*oda*";}}i:6;a:6:{i:0;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"enab*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"enab*";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"grea*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"grea*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"mak*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:
1:"0";}}s:5:"query";s:4:"Mak*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*ble*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*ble*";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"*thor*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*thor*";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*oda*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*oda*";}}i:7;a:6:{i:0;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"enab*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"enab*";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"grea*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"grea*";}i:2;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"mak*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"Mak*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*ble*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*ble*";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"*thor*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*thor*";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*oda*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*oda*";}}i:8;a:6:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"enab*";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s
:1:"3";}}s:5:"query";s:5:"enab*";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"grea*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"grea*";}i:2;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"mak*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"Mak*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*ble*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*ble*";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"*thor*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*thor*";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*oda*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*oda*";}}i:9;a:6:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"enab*";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:5:"query";s:5:"enab*";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"grea*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"grea*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"mak*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"Mak*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*ble*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*ble*";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"*thor*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:
5:"query";s:6:"*thor*";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*oda*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*oda*";}}i:10;a:6:{i:0;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"enab*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"enab*";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"grea*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"grea*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"mak*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"Mak*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*ble*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*ble*";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"*thor*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*thor*";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*oda*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*oda*";}}i:11;a:6:{i:0;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"enab*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"enab*";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"grea*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"grea*";}i:2;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"mak*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"Mak*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fi
elds";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*ble*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*ble*";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"*thor*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*thor*";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*oda*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*oda*";}}i:12;a:6:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"enab*";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:5:"query";s:5:"enab*";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"grea*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"grea*";}i:2;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"mak*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"Mak*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*ble*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*ble*";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"*thor*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*thor*";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*oda*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*oda*";}}i:13;a:6:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.001";s:
5:"words";a:1:{s:5:"enab*";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:5:"query";s:5:"enab*";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"grea*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"grea*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"mak*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"Mak*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*ble*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*ble*";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"*thor*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*thor*";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*oda*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*oda*";}}i:14;a:6:{i:0;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"enab*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"enab*";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"grea*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"grea*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"mak*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"Mak*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*ble*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*ble*";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"*thor*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*thor*";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";
a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*oda*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*oda*";}}i:15;a:6:{i:0;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"enab*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"enab*";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"grea*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"grea*";}i:2;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"mak*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"Mak*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*ble*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*ble*";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"*thor*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*thor*";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*oda*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*oda*";}}}sphinx-2.0.4-release/test/test_111/0000755000176700017710000000000011724063141016262 5ustar deogardeogarsphinx-2.0.4-release/test/test_111/test.xml0000644000176700017710000000274011421075337017772 0ustar deogardeogar fullscan on different result's schemas indexer { mem_limit = 16M } searchd { workers = threads binlog_path = } source srctest { type = mysql sql_query = SELECT id, best_seller, attributes_id, text FROM test_table sql_attr_uint = best_seller sql_attr_uint = attributes_id } index products { source = srctest path = /test } CREATE TABLE test_table ( id integer primary key not null auto_increment, best_seller int not null default 0, attributes_id int not null default 0, text varchar(256) ); DROP TABLE IF EXISTS test_table; INSERT INTO test_table (best_seller, attributes_id, text) VALUES ( 1, 1, 'text1' ), ( 2, 1, 'text2' ), ( 3, 2, 'text3' ), ( 4, 2, 'text4' ); SetArrayResult (true); $client->SetMatchMode (SPH_MATCH_EXTENDED2); $client->SetSortMode (SPH_SORT_EXTENDED, 'best_seller ASC'); $index = 'products'; $query = ""; // First Query - contains only usual attrs in the schema $client->AddQuery ($query, $index); //Second query - contains usual attrs, and also @group_by and @count attrs in the schema $client->SetGroupBy ('attributes_id', SPH_GROUPBY_ATTR, 
'attributes_id desc'); $client->AddQuery ($query, $index); //Run queries $results = $client->RunQueries(); ]]> sphinx-2.0.4-release/test/test_111/model.bin0000644000176700017710000000247211455516446020075 0ustar deogardeogara:1:{i:0;a:2:{i:0;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:2:{s:11:"best_seller";i:1;s:13:"attributes_id";i:1;}s:7:"matches";a:4:{i:0;a:3:{s:2:"id";i:1;s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:11:"best_seller";i:1;s:13:"attributes_id";i:1;}}i:1;a:3:{s:2:"id";i:2;s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:11:"best_seller";i:2;s:13:"attributes_id";i:1;}}i:2;a:3:{s:2:"id";i:3;s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:11:"best_seller";i:3;s:13:"attributes_id";i:2;}}i:3;a:3:{s:2:"id";i:4;s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:11:"best_seller";i:4;s:13:"attributes_id";i:2;}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"3.689";s:5:"query";s:0:"";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:4:{s:11:"best_seller";i:1;s:13:"attributes_id";i:1;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:2:{i:0;a:3:{s:2:"id";i:3;s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:11:"best_seller";i:3;s:13:"attributes_id";i:2;s:8:"@groupby";i:2;s:6:"@count";i:2;}}i:1;a:3:{s:2:"id";i:1;s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:11:"best_seller";i:1;s:13:"attributes_id";i:1;s:8:"@groupby";i:1;s:6:"@count";i:2;}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"3.689";s:5:"query";s:0:"";}}}sphinx-2.0.4-release/test/test_040/0000755000176700017710000000000011724063141016263 5ustar deogardeogarsphinx-2.0.4-release/test/test_040/test.xml0000644000176700017710000000316111323636205017767 0ustar deogardeogar snippets vs wildcard matches indexer { mem_limit = 16M } searchd { } source srctest { type = mysql sql_query = SELECT id, body FROM test_table } index test_idx { source = srctest path = /test charset_type = utf-8 min_word_len = 1 } CREATE TABLE `test_table` ( `id` int(11) NOT NULL default '0', `body` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 1, 'test' ) $results = array (); $opts = array ( 'before_match' => '[B]', 'after_match' => '[A]', 'chunk_separator' => ' ... 
', 'limit' => 100, 'around' => 2, ); $tests = array ( array ( 'docs' => array('He caught fish in deep pools with invisible fingers and ate them raw.'), 'q' => array ( 'he* fin*', '*gers *raw', 'fin* *gers', '*augh* *is*', '*pools*', ), ), // utf-8 array ( 'docs' => array('Ðевидимыми пальцами ловил он рыбу в глубоких омутах и ел её Ñырой.'), 'q' => array ( 'невидимыми* пальц*', '*цами *Ñырой', 'пальц* *цами', '*ови* *Ñ‹*', '*омутах*', ), ), ); foreach ( $tests as $test ) { foreach ( $test['q'] as $words ) { $results [] = $words; $res = $client->BuildExcerpts ( $test['docs'], 'test_idx', $words, $opts ); if ( !$res ) { $results = false; return; } $results [] = $res; } } sphinx-2.0.4-release/test/test_040/model.bin0000644000176700017710000000306311053325336020061 0ustar deogardeogara:1:{i:0;a:1:{i:0;a:20:{i:0;s:8:"he* fin*";i:1;a:1:{i:0;s:81:"[B]He[A] caught fish in deep pools with invisible [B]fingers[A] and ate them raw.";}i:2;s:10:"*gers *raw";i:3;a:1:{i:0;s:81:"He caught fish in deep pools with invisible [B]fingers[A] and ate them [B]raw[A].";}i:4;s:10:"fin* *gers";i:5;a:1:{i:0;s:75:"He caught fish in deep pools with invisible [B]fingers[A] and ate them raw.";}i:6;s:11:"*augh* *is*";i:7;a:1:{i:0;s:87:"He [B]caught[A] [B]fish[A] in deep pools with [B]invisible[A] fingers and ate them raw.";}i:8;s:7:"*pools*";i:9;a:1:{i:0;s:75:"He caught fish in deep [B]pools[A] with invisible fingers and ate them raw.";}i:10;s:33:"невидимыми* пальц*";i:11;a:1:{i:0;s:132:"[B]Ðевидимыми[A] [B]пальцами[A] ловил он рыбу в глубоких омутах и ел её Ñырой.";}i:12;s:21:"*цами *Ñырой";i:13;a:1:{i:0;s:132:"Ðевидимыми [B]пальцами[A] ловил он рыбу в глубоких омутах и ел её [B]Ñырой[A].";}i:14;s:21:"пальц* *цами";i:15;a:1:{i:0;s:126:"Ðевидимыми [B]пальцами[A] ловил он рыбу в глубоких омутах и ел её Ñырой.";}i:16;s:13:"*ови* *Ñ‹*";i:17;a:1:{i:0;s:144:"[B]Ðевидимыми[A] пальцами [B]ловил[A] он [B]рыбу[A] в глубоких омутах и ел её [B]Ñырой[A].";}i:18;s:14:"*омутах*";i:19;a:1:{i:0;s:126:"Ðевидимыми пальцами ловил он рыбу в глубоких [B]омутах[A] и ел её Ñырой.";}}}}sphinx-2.0.4-release/test/bench/0000755000176700017710000000000011724063141016000 5ustar deogardeogarsphinx-2.0.4-release/test/bench/fullscan.php0000644000176700017710000000156611222711734020331 0ustar deogardeogar RT:Ranker reset vs NEAR indexer { mem_limit = 32M } searchd { } index rt { type = rt path = data/rt rt_attr_uint = id1 rt_field = title } insert into rt (id, id1, title) values (1, 1, 'test text') insert into rt (id, id1, title) values (2, 2, 'testy texts') select * from rt select * from rt where match('\"the\" NEAR/15 \"home\" | \"one\" NEAR/15 perfume | \"one million\" NEAR/15 perfumes | \"one million\" NEAR/15 fragrance | \"one million\" NEAR/15 fragrances | \"one million\" NEAR/15 cologne | \"one million\" NEAR/15 essence | \"one million\" NEAR/15 aroma | \"one million\" NEAR/15 scent | \"one million\" NEAR/15 scents | \"one million\" NEAR/15 balm | \"one million\" NEAR/15 balms | \"one million\" NEAR/15 odor | \"one million\" NEAR/15 odour | \"one million\" NEAR/15 smell | \"one million\" NEAR/15 fragrant | \"one million\" NEAR/15 flavour | \"one million\" NEAR/15 flavor | \"one million\" NEAR/15 flavours | \"one million\" NEAR/15 flavors | \"one million\" NEAR/15 \"eau de toilette\" | \"one million\" NEAR/15 \"shower\" \"one million\" NEAR/15 fragrances | \"one million\" NEAR/15 cologne | \"one million\" NEAR/15 essence | \"one million\" NEAR/15 aroma | \"one million\" NEAR/15 scent | \"one million\" NEAR/15 scents | \"one million\" NEAR/15 balm | 
\"one million\" NEAR/15 balms | \"one million\" NEAR/15 odor | \"one million\" NEAR/15 odour | \"one million\" NEAR/15 smell | \"one million\" NEAR/15 fragrant | \"one million\" NEAR/15 flavour | \"one million\" NEAR/15 flavor \"the\" NEAR/15 \"home\" | \"one\" NEAR/15 perfume | \"one million\" NEAR/15 perfumes | \"one million\" NEAR/15 fragrance | \"one million\" NEAR/15 fragrances | \"one million\" NEAR/15 cologne | \"one million\" NEAR/15 essence | \"one million\" NEAR/15 aroma | \"one million\" NEAR/15 scent | \"one million\" NEAR/15 scents | \"one million\" NEAR/15 balm | \"one million\" NEAR/15 balms | \"one million\" NEAR/15 odor | \"one million\" NEAR/15 odour | \"one million\" NEAR/15 smell | \"one million\" NEAR/15 fragrant | \"one million\" NEAR/15 flavour | \"one million\" NEAR/15 flavor | \"one million\" NEAR/15 flavours | \"one million\" NEAR/15 flavors | \"one million\" NEAR/15 \"eau de toilette\" | \"one million\" NEAR/15 \"shower\" \"one million\" NEAR/15 fragrances | \"one million\" NEAR/15 cologne | \"one million\" NEAR/15 essence | \"one million\" NEAR/15 aroma | \"one million\" NEAR/15 scent | \"one million\" NEAR/15 scents | \"one million\" NEAR/15 balm | \"one million\" NEAR/15 balms | \"one million\" NEAR/15 odor | \"one million\" NEAR/15 odour | \"one million\" NEAR/15 smell | \"one million\" NEAR/15 fragrant | \"one million\" NEAR/15 flavour | \"one million\" NEAR/15 flavor') sphinx-2.0.4-release/test/bench/quorum-or.xml0000644000176700017710000000171611327277566020516 0ustar deogardeogar quorum vs or indexer { mem_limit = 128M } searchd { } source lj { type = mysql sql_query = select id, \ title, uncompress(content) content, \ unix_timestamp(published) published, channel_id \ from posting \ where id <= 1000000; sql_attr_uint = channel_id sql_attr_timestamp = published } index lj { source = lj path = data/lj } one|two|three|four nine|ten five|six|seven|eight "one two three four"/1 "nine ten"/1 "five six seven eight"/1 sphinx-2.0.4-release/test/bench/fullscan.xml0000644000176700017710000000172711222711734020341 0ustar deogardeogar fullscan indexer { mem_limit = 128M } searchd { } source fullscan { type = mysql sql_query = select id, value, text from fullscan sql_attr_uint = value } index fullscan { source = fullscan path = data/fullscan docinfo = extern } dummy dummy dummy sphinx-2.0.4-release/test/bench/match-modes.xml0000644000176700017710000000136311476235176020743 0ustar deogardeogar match modes indexer { mem_limit = 128M } searchd { } source lj { type = mysql sql_query = select id, \ title, uncompress(content) content, \ unix_timestamp(published) published, channel_id \ from posting \ where id <= 1000000; sql_attr_uint = channel_id sql_attr_timestamp = published } index lj { source = lj path = data/lj } sphinx-2.0.4-release/test/test_177/0000755000176700017710000000000011724063141016276 5ustar deogardeogarsphinx-2.0.4-release/test/test_177/test.xml0000644000176700017710000000614711560064322020007 0ustar deogardeogar dict=keywords vs crc collisions indexer { mem_limit = 24M } searchd { } source test { type = mysql sql_query = select * from test_table sql_attr_uint = gid } index test { source = test path = /test dict = keywords } CREATE TABLE test_table ( id INTEGER PRIMARY KEY NOT NULL, gid INTEGER NOT NULL, title VARCHAR(255) NOT NULL ); DROP TABLE IF EXISTS test_table; INSERT INTO test_table VALUES ( 1, 1, 'hello eisenhower' ), ( 2, 1, 'got petfood' ), ( 11, 2, '70_129_148_165' ), ( 12, 2, 'c27e6' ), ( 13, 2, '70_129_148_165' ), ( 14, 2, '2242647928' ), ( 15, 
2, 'c27e6' ), ( 16, 2, 'c27e6' ), ( 21, 3, 'r57rm9qgtx4e' ), ( 22, 3, 'i59oqce31j1i' ), ( 23, 3, 'wf68itlphvim' ), ( 31, 4, '7mx9432vwcql' ), ( 32, 4, '7voy0zfvaxva' ), ( 33, 4, 'h9ws5m4gfju6' ), ( 34, 4, 'kbnbi9uxnxun' ), ( 41, 5, 'bx7ysl617uih' ), ( 42, 5, 'veqihmlt2u94' ), ( 43, 5, 'rbuslwua0rjw' ), ( 44, 5, 'iil5o9qal9zp' ), ( 51, 6, 'b4ps2tkcartn' ), ( 52, 6, 'ec3bedwxkjap' ), ( 53, 6, 'hcy80ejxaxy5' ), ( 54, 6, 'zoplbj4l38ar' ), ( 61, 7, '47m6max3ldms' ), ( 62, 7, 'btdgsp6fahbo' ), ( 63, 7, 'jx9t6gshfdb3' ), ( 64, 7, '0nqiwe8ap0nt' ), ( 71, 8, 'a4qrs2snufyd' ), ( 72, 8, 'dnvaj4d6w64n' ), ( 73, 8, 'r9g3s3ry1snj' ), ( 74, 8, 'w6w9m94wxwja' ) eisenhower petfood 70_129_148_165 c27e6 2242647928 r57rm9qgtx4e i59oqce31j1i wf68itlphvim 7mx9432vwcql 7voy0zfvaxva h9ws5m4gfju6 kbnbi9uxnxun bx7ysl617uih veqihmlt2u94 rbuslwua0rjw iil5o9qal9zp b4ps2tkcartn ec3bedwxkjap hcy80ejxaxy5 zoplbj4l38ar 47m6max3ldms btdgsp6fahbo jx9t6gshfdb3 0nqiwe8ap0nt a4qrs2snufyd dnvaj4d6w64n r9g3s3ry1snj w6w9m94wxwja sphinx-2.0.4-release/test/test_177/model.bin0000644000176700017710000003072611560064322020100 0ustar deogardeogara:1:{i:0;a:28:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1725";s:5:"attrs";a:1:{s:3:"gid";s:1:"1";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:10:"eisenhower";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:10:"eisenhower";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:4:"1725";s:5:"attrs";a:1:{s:3:"gid";s:1:"1";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"petfood";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"petfood";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:2:{i:11;a:2:{s:6:"weight";s:4:"1677";s:5:"attrs";a:1:{s:3:"gid";s:1:"2";}}i:13;a:2:{s:6:"weight";s:4:"1677";s:5:"attrs";a:1:{s:3:"gid";s:1:"2";}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:14:"70_129_148_165";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:14:"70_129_148_165";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:3:{i:12;a:2:{s:6:"weight";s:4:"1648";s:5:"attrs";a:1:{s:3:"gid";s:1:"2";}}i:15;a:2:{s:6:"weight";s:4:"1648";s:5:"attrs";a:1:{s:3:"gid";s:1:"2";}}i:16;a:2:{s:6:"weight";s:4:"1648";s:5:"attrs";a:1:{s:3:"gid";s:1:"2";}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"c27e6";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"c27e6";}i:4;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:14;a:2:{s:6:"weight";s:4:"1725";s:5:"attrs";a:1:{s:3:"gid";s:1:"2";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:10:"2242647928";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:10:"2242647928";}i:5;
a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:21;a:2:{s:6:"weight";s:4:"1725";s:5:"attrs";a:1:{s:3:"gid";s:1:"3";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:12:"r57rm9qgtx4e";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:12:"r57rm9qgtx4e";}i:6;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:22;a:2:{s:6:"weight";s:4:"1725";s:5:"attrs";a:1:{s:3:"gid";s:1:"3";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:12:"i59oqce31j1i";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:12:"i59oqce31j1i";}i:7;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:23;a:2:{s:6:"weight";s:4:"1725";s:5:"attrs";a:1:{s:3:"gid";s:1:"3";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:12:"wf68itlphvim";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:12:"wf68itlphvim";}i:8;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:31;a:2:{s:6:"weight";s:4:"1725";s:5:"attrs";a:1:{s:3:"gid";s:1:"4";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:12:"7mx9432vwcql";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:12:"7mx9432vwcql";}i:9;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:32;a:2:{s:6:"weight";s:4:"1725";s:5:"attrs";a:1:{s:3:"gid";s:1:"4";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:12:"7voy0zfvaxva";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:12:"7voy0zfvaxva";}i:10;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:33;a:2:{s:6:"weight";s:4:"1725";s:5:"attrs";a:1:{s:3:"gid";s:1:"4";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:12:"h9ws5m4gfju6";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:12:"h9ws5m4gfju6";}i:11;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:34;a:2:{s:6:"weight";s:4:"1725";s:5:"attrs";a:1:{s:3:"gid";s:1:"4";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:12:"kbnbi9uxnxun";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:12:"kbnbi9uxnxun";}i:12;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:41;a:2:{s:6:"weight";s:4:"1725";s:5:"attrs";a:1:{s:3:"gid";s:1:"5";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:12:"bx7ysl617uih";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"
query";s:12:"bx7ysl617uih";}i:13;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:42;a:2:{s:6:"weight";s:4:"1725";s:5:"attrs";a:1:{s:3:"gid";s:1:"5";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:12:"veqihmlt2u94";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:12:"veqihmlt2u94";}i:14;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:43;a:2:{s:6:"weight";s:4:"1725";s:5:"attrs";a:1:{s:3:"gid";s:1:"5";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:12:"rbuslwua0rjw";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:12:"rbuslwua0rjw";}i:15;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:44;a:2:{s:6:"weight";s:4:"1725";s:5:"attrs";a:1:{s:3:"gid";s:1:"5";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:12:"iil5o9qal9zp";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:12:"iil5o9qal9zp";}i:16;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:51;a:2:{s:6:"weight";s:4:"1725";s:5:"attrs";a:1:{s:3:"gid";s:1:"6";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:12:"b4ps2tkcartn";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:12:"b4ps2tkcartn";}i:17;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:52;a:2:{s:6:"weight";s:4:"1725";s:5:"attrs";a:1:{s:3:"gid";s:1:"6";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:12:"ec3bedwxkjap";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:12:"ec3bedwxkjap";}i:18;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:53;a:2:{s:6:"weight";s:4:"1725";s:5:"attrs";a:1:{s:3:"gid";s:1:"6";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:12:"hcy80ejxaxy5";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:12:"hcy80ejxaxy5";}i:19;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:54;a:2:{s:6:"weight";s:4:"1725";s:5:"attrs";a:1:{s:3:"gid";s:1:"6";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:12:"zoplbj4l38ar";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:12:"zoplbj4l38ar";}i:20;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:61;a:2:{s:6:"weight";s:4:"1725";s:5:"attrs";a:1:{s:3:"gid";s:1:"7";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:12:"47m6max3ldms";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"r
esarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:12:"47m6max3ldms";}i:21;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:62;a:2:{s:6:"weight";s:4:"1725";s:5:"attrs";a:1:{s:3:"gid";s:1:"7";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:12:"btdgsp6fahbo";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:12:"btdgsp6fahbo";}i:22;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:63;a:2:{s:6:"weight";s:4:"1725";s:5:"attrs";a:1:{s:3:"gid";s:1:"7";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:12:"jx9t6gshfdb3";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:12:"jx9t6gshfdb3";}i:23;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:64;a:2:{s:6:"weight";s:4:"1725";s:5:"attrs";a:1:{s:3:"gid";s:1:"7";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:12:"0nqiwe8ap0nt";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:12:"0nqiwe8ap0nt";}i:24;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:71;a:2:{s:6:"weight";s:4:"1725";s:5:"attrs";a:1:{s:3:"gid";s:1:"8";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:12:"a4qrs2snufyd";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:12:"a4qrs2snufyd";}i:25;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:72;a:2:{s:6:"weight";s:4:"1725";s:5:"attrs";a:1:{s:3:"gid";s:1:"8";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:12:"dnvaj4d6w64n";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:12:"dnvaj4d6w64n";}i:26;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:73;a:2:{s:6:"weight";s:4:"1725";s:5:"attrs";a:1:{s:3:"gid";s:1:"8";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:12:"r9g3s3ry1snj";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:12:"r9g3s3ry1snj";}i:27;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:74;a:2:{s:6:"weight";s:4:"1725";s:5:"attrs";a:1:{s:3:"gid";s:1:"8";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:12:"w6w9m94wxwja";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:12:"w6w9m94wxwja";}}}sphinx-2.0.4-release/test/test_093/0000755000176700017710000000000011724063141016273 5ustar deogardeogarsphinx-2.0.4-release/test/test_093/test.xml0000644000176700017710000000252111300513725017772 0ustar deogardeogar merge vs different min docids indexer { mem_limit = 16M } searchd { } source srcmain { type = mysql sql_query = SELECT * FROM 
test_table WHERE document_id in (101,102,103,104) } source srcdelta : srcmain { sql_query = SELECT * FROM test_table WHERE document_id in (1001,1002,1003,1004) } index main { source = srcmain path = /main charset_type = utf-8 } index delta { source = srcdelta path = /delta charset_type = utf-8 } --merge main delta one two three four five CREATE TABLE `test_table` ( `document_id` int(11) NOT NULL default '0', `body` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 101, 'one' ), ( 102, 'two' ), ( 103, 'three crazy mice live happly' ), ( 104, 'four' ), ( 1001, 'seven' ), ( 1002, 'six two' ), ( 1003, 'five but now two' ), ( 1004, 'four' ) sphinx-2.0.4-release/test/test_093/model.bin0000644000176700017710000000414211300513725020064 0ustar deogardeogara:1:{i:0;a:5:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:101;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:1:{s:3:"one";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:3:"one";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:102;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:1002;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:1003;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.003";s:5:"words";a:1:{s:3:"two";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:3:"two";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:103;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.004";s:5:"words";a:1:{s:5:"three";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"three";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:104;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:1004;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.003";s:5:"words";a:1:{s:4:"four";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"four";}i:4;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1003;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:1:{s:4:"five";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"five";}}}sphinx-2.0.4-release/test/test_156/0000755000176700017710000000000011724063141016273 5ustar deogardeogarsphinx-2.0.4-release/test/test_156/test.xml0000644000176700017710000000555211605620330020000 0ustar deogardeogar ranged-query vs joined field and mva attribute indexer { mem_limit = 16M } searchd { } source test { type = mysql sql_range_step = 2 sql_query = SELECT id, gid, title FROM test sql_joined_field = text from ranged-query; select id, text from test where id between $start and $end; select 1, 5 sql_attr_uint = gid sql_attr_multi = uint mva from ranged-query; select id, mva from mva 
where id between $start and $end; select 2, 5 sql_attr_multi = bigint mva from ranged-query; select id, mva from mva where id between $start and $end; select 2, 5 } index test { source = test path = /test dict = keywords min_prefix_len = 1 enable_star = 1 } create table test ( id int not null, gid int not null, title varchar(255) not null, text varchar(255) not null ); create table mva ( id int not null, mva int not null ); drop table if exists test; drop table if exists mva; insert into test values ( 1, 101, 'not found', 'notice found' ); insert into test values ( 2, 102, 'not anyone', 'notice goods' ); insert into test values ( 3, 103, 'not grant', 'notice goods' ); insert into test values ( 4, 104, 'not grant', 'everyone' ); insert into test values ( 5, 105, 'not anyone', 'grant' ); insert into test values ( 6, 106, 'not fun', 'natural' ); insert into mva values (1, 1001), (1, 2001); insert into mva values (2, 1002), (2, 2002); insert into mva values (3, 1003), (3, 2003); insert into mva values (4, 1004), (4, 2004); insert into mva values (5, 1005), (5, 2005); insert into mva values (6, 1006), (6, 2006); select * from test select * from test where mva >= 2004 and mva < 2007 select * from test where match ('goo*') select * from test where match ('g*') select * from test where match ('g* !(@title g*)') select * from test where match ('everyone') select * from test where match ('anyone | everyone') select * from test where match ('@text g*') select * from test where match ('natural') select * from test where mva=1001 sphinx-2.0.4-release/test/test_156/model.bin0000644000176700017710000001470211605620330020066 0ustar deogardeogara:2:{i:0;a:10:{i:0;a:3:{s:8:"sphinxql";s:18:"select * from test";s:10:"total_rows";i:6;s:4:"rows";a:6:{i:0;a:4:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"gid";s:3:"101";s:3:"mva";s:0:"";}i:1;a:4:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:3:"gid";s:3:"102";s:3:"mva";s:9:"1002,2002";}i:2;a:4:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"gid";s:3:"103";s:3:"mva";s:9:"1003,2003";}i:3;a:4:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:3:"gid";s:3:"104";s:3:"mva";s:9:"1004,2004";}i:4;a:4:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:3:"gid";s:3:"105";s:3:"mva";s:9:"1005,2005";}i:5;a:4:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:3:"gid";s:3:"106";s:3:"mva";s:0:"";}}}i:1;a:3:{s:8:"sphinxql";s:51:"select * from test where mva >= 2004 and mva < 2007";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:4:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:3:"gid";s:3:"104";s:3:"mva";s:9:"1004,2004";}i:1;a:4:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:3:"gid";s:3:"105";s:3:"mva";s:9:"1005,2005";}}}i:2;a:3:{s:8:"sphinxql";s:39:"select * from test where match ('goo*')";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:4:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1607";s:3:"gid";s:3:"102";s:3:"mva";s:9:"1002,2002";}i:1;a:4:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1607";s:3:"gid";s:3:"103";s:3:"mva";s:9:"1003,2003";}}}i:3;a:3:{s:8:"sphinxql";s:37:"select * from test where match ('g*')";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:4:{s:2:"id";s:1:"3";s:6:"weight";s:4:"2570";s:3:"gid";s:3:"103";s:3:"mva";s:9:"1003,2003";}i:1;a:4:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1553";s:3:"gid";s:3:"102";s:3:"mva";s:9:"1002,2002";}i:2;a:4:{s:2:"id";s:1:"4";s:6:"weight";s:4:"1516";s:3:"gid";s:3:"104";s:3:"mva";s:9:"1004,2004";}i:3;a:4:{s:2:"id";s:1:"5";s:6:"weight";s:4:"1516";s:3:"gid";s:3:"105";s:3:"mva";s:9:"1005,2005";}}}i:4;a:3:{s:8:"sphinxql";s:50:"select * from test where match ('g* !(@title 
g*)')";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:4:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1553";s:3:"gid";s:3:"102";s:3:"mva";s:9:"1002,2002";}i:1;a:4:{s:2:"id";s:1:"5";s:6:"weight";s:4:"1516";s:3:"gid";s:3:"105";s:3:"mva";s:9:"1005,2005";}}}i:5;a:3:{s:8:"sphinxql";s:43:"select * from test where match ('everyone')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:4:{s:2:"id";s:1:"4";s:6:"weight";s:4:"1709";s:3:"gid";s:3:"104";s:3:"mva";s:9:"1004,2004";}}}i:6;a:3:{s:8:"sphinxql";s:52:"select * from test where match ('anyone | everyone')";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:4:{s:2:"id";s:1:"4";s:6:"weight";s:4:"1604";s:3:"gid";s:3:"104";s:3:"mva";s:9:"1004,2004";}i:1;a:4:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1553";s:3:"gid";s:3:"102";s:3:"mva";s:9:"1002,2002";}i:2;a:4:{s:2:"id";s:1:"5";s:6:"weight";s:4:"1553";s:3:"gid";s:3:"105";s:3:"mva";s:9:"1005,2005";}}}i:7;a:3:{s:8:"sphinxql";s:43:"select * from test where match ('@text g*')";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:4:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1553";s:3:"gid";s:3:"102";s:3:"mva";s:9:"1002,2002";}i:1;a:4:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1553";s:3:"gid";s:3:"103";s:3:"mva";s:9:"1003,2003";}i:2;a:4:{s:2:"id";s:1:"5";s:6:"weight";s:4:"1516";s:3:"gid";s:3:"105";s:3:"mva";s:9:"1005,2005";}}}i:8;a:2:{s:8:"sphinxql";s:42:"select * from test where match ('natural')";s:10:"total_rows";i:0;}i:9;a:2:{s:8:"sphinxql";s:33:"select * from test where mva=1001";s:10:"total_rows";i:0;}}i:1;a:10:{i:0;a:3:{s:8:"sphinxql";s:18:"select * from test";s:10:"total_rows";i:6;s:4:"rows";a:6:{i:0;a:4:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"gid";s:3:"101";s:3:"mva";s:0:"";}i:1;a:4:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:3:"gid";s:3:"102";s:3:"mva";s:9:"1002,2002";}i:2;a:4:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"gid";s:3:"103";s:3:"mva";s:9:"1003,2003";}i:3;a:4:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:3:"gid";s:3:"104";s:3:"mva";s:9:"1004,2004";}i:4;a:4:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:3:"gid";s:3:"105";s:3:"mva";s:9:"1005,2005";}i:5;a:4:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:3:"gid";s:3:"106";s:3:"mva";s:0:"";}}}i:1;a:3:{s:8:"sphinxql";s:51:"select * from test where mva >= 2004 and mva < 2007";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:4:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:3:"gid";s:3:"104";s:3:"mva";s:9:"1004,2004";}i:1;a:4:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:3:"gid";s:3:"105";s:3:"mva";s:9:"1005,2005";}}}i:2;a:3:{s:8:"sphinxql";s:39:"select * from test where match ('goo*')";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:4:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1607";s:3:"gid";s:3:"102";s:3:"mva";s:9:"1002,2002";}i:1;a:4:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1607";s:3:"gid";s:3:"103";s:3:"mva";s:9:"1003,2003";}}}i:3;a:3:{s:8:"sphinxql";s:37:"select * from test where match ('g*')";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:4:{s:2:"id";s:1:"3";s:6:"weight";s:4:"2570";s:3:"gid";s:3:"103";s:3:"mva";s:9:"1003,2003";}i:1;a:4:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1553";s:3:"gid";s:3:"102";s:3:"mva";s:9:"1002,2002";}i:2;a:4:{s:2:"id";s:1:"4";s:6:"weight";s:4:"1516";s:3:"gid";s:3:"104";s:3:"mva";s:9:"1004,2004";}i:3;a:4:{s:2:"id";s:1:"5";s:6:"weight";s:4:"1516";s:3:"gid";s:3:"105";s:3:"mva";s:9:"1005,2005";}}}i:4;a:3:{s:8:"sphinxql";s:50:"select * from test where match ('g* !(@title 
g*)')";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:4:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1553";s:3:"gid";s:3:"102";s:3:"mva";s:9:"1002,2002";}i:1;a:4:{s:2:"id";s:1:"5";s:6:"weight";s:4:"1516";s:3:"gid";s:3:"105";s:3:"mva";s:9:"1005,2005";}}}i:5;a:3:{s:8:"sphinxql";s:43:"select * from test where match ('everyone')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:4:{s:2:"id";s:1:"4";s:6:"weight";s:4:"1709";s:3:"gid";s:3:"104";s:3:"mva";s:9:"1004,2004";}}}i:6;a:3:{s:8:"sphinxql";s:52:"select * from test where match ('anyone | everyone')";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:4:{s:2:"id";s:1:"4";s:6:"weight";s:4:"1604";s:3:"gid";s:3:"104";s:3:"mva";s:9:"1004,2004";}i:1;a:4:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1553";s:3:"gid";s:3:"102";s:3:"mva";s:9:"1002,2002";}i:2;a:4:{s:2:"id";s:1:"5";s:6:"weight";s:4:"1553";s:3:"gid";s:3:"105";s:3:"mva";s:9:"1005,2005";}}}i:7;a:3:{s:8:"sphinxql";s:43:"select * from test where match ('@text g*')";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:4:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1553";s:3:"gid";s:3:"102";s:3:"mva";s:9:"1002,2002";}i:1;a:4:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1553";s:3:"gid";s:3:"103";s:3:"mva";s:9:"1003,2003";}i:2;a:4:{s:2:"id";s:1:"5";s:6:"weight";s:4:"1516";s:3:"gid";s:3:"105";s:3:"mva";s:9:"1005,2005";}}}i:8;a:2:{s:8:"sphinxql";s:42:"select * from test where match ('natural')";s:10:"total_rows";i:0;}i:9;a:2:{s:8:"sphinxql";s:33:"select * from test where mva=1001";s:10:"total_rows";i:0;}}}sphinx-2.0.4-release/test/test_059/0000755000176700017710000000000011724063141016275 5ustar deogardeogarsphinx-2.0.4-release/test/test_059/test.xml0000644000176700017710000000162411153743430020003 0ustar deogardeogar phrase boundaries indexer { mem_limit = 16M } searchd { } source srctest { type = mysql sql_query = SELECT * FROM test_table } index test { source = srctest path = /test phrase_boundary = . phrase_boundary_step = 10 } CREATE TABLE `test_table` ( `document_id` int(11) NOT NULL default '0', `text` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 1, 'first.' ), ( 2, ' second' ), ( 3, 'one. 
two three' ), ( 4, 'one two three' ); ^second "one two" "one two"~10 "one two"~11 sphinx-2.0.4-release/test/test_059/model.bin0000644000176700017710000000345711156360750020105 0ustar deogardeogara:1:{i:0;a:4:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:4:"1695";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"second";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"^second";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:4:"2557";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:3:"one";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:3:"two";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:9:""one two"";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:4:"2557";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:3:"one";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:3:"two";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:12:""one two"~10";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:4;a:2:{s:6:"weight";s:4:"2557";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"1557";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:3:"one";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:3:"two";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:12:""one two"~11";}}}sphinx-2.0.4-release/test/test_148/0000755000176700017710000000000011724063141016274 5ustar deogardeogarsphinx-2.0.4-release/test/test_148/doc2.txt0000644000176700017710000007176511451070445017706 0ustar deogardeogardummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy 
dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy text dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy 
dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy text dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy 
dummy sphinx-2.0.4-release/test/test_148/doc1.txt0000644000176700017710000000003111451070445017657 0ustar deogardeogardummy textsphinx-2.0.4-release/test/test_148/doc3.txt0000644000176700017710000002075711451070445017702 0ustar deogardeogardummy dummy dummy dummy dummy dummy dummy dummy dummy dummy text dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy 
dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy text dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy 
dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy text dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy dummy sphinx-2.0.4-release/test/test_148/test.xml0000644000176700017710000000423111540436502017776 0ustar deogardeogar field position limit vs many hits indexer { mem_limit = 16M } searchd { } source test { type = mysql sql_query = SELECT * FROM test_table sql_attr_uint = idd sql_file_field = body } index test { source = test path = /test docinfo = extern html_strip = 1 index_sp = 1 index_zones = zone* } source start_end { type = mysql sql_query = SELECT id, body, 1 as idd FROM start_end_table sql_attr_uint = idd sql_field_string = body } index start_end { source = start_end path = /start_end docinfo = extern } CREATE TABLE test_table ( id INT NOT NULL, body VARCHAR(2048) NOT NULL, idd INT NOT NULL ); CREATE TABLE start_end_table ( id INTEGER PRIMARY KEY NOT NULL AUTO_INCREMENT, body VARCHAR(64) NOT NULL ); drop table if exists test_table; drop table if exists start_end_table; insert into test_table values ( 1, 'test_148/doc1.txt', 1 ); insert into test_table values ( 2, 'test_148/doc2.txt', 2 ); insert into test_table values ( 3, 'test_148/doc3.txt', 3 ); select * from test where match ('ZONE:zoneA dummy') select * from test where match ('@body[1024] text') select * from test where match ('@body[1024] dummy') select * from start_end where match ('@body ABB') group by idd select * from start_end where match ('@body ^ABB') group by idd select * from start_end where match ('@body ABB$') group by idd select * from start_end where match ('@body ^ABB$') group by idd sphinx-2.0.4-release/test/test_148/model.bin0000644000176700017710000000364111537625505020104 0ustar deogardeogara:1:{i:0;a:7:{i:0;a:3:{s:8:"sphinxql";s:51:"select * from test where match ('ZONE:zoneA dummy')";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1319";s:3:"idd";s:1:"1";}i:1;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1104";s:3:"idd";s:1:"2";}}}i:1;a:3:{s:8:"sphinxql";s:51:"select * from test where match ('@body[1024] 
text')";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1319";s:3:"idd";s:1:"1";}i:1;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1216";s:3:"idd";s:1:"3";}}}i:2;a:3:{s:8:"sphinxql";s:52:"select * from test where match ('@body[1024] dummy')";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1319";s:3:"idd";s:1:"1";}i:1;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1104";s:3:"idd";s:1:"2";}i:2;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1104";s:3:"idd";s:1:"3";}}}i:3;a:3:{s:8:"sphinxql";s:62:"select * from start_end where match ('@body ABB') group by idd";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:6:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1272";s:4:"body";s:3:"ABB";s:3:"idd";s:1:"1";s:8:"@groupby";s:1:"1";s:6:"@count";s:3:"600";}}}i:4;a:3:{s:8:"sphinxql";s:63:"select * from start_end where match ('@body ^ABB') group by idd";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:6:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1272";s:4:"body";s:3:"ABB";s:3:"idd";s:1:"1";s:8:"@groupby";s:1:"1";s:6:"@count";s:3:"600";}}}i:5;a:3:{s:8:"sphinxql";s:63:"select * from start_end where match ('@body ABB$') group by idd";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:6:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1272";s:4:"body";s:3:"ABB";s:3:"idd";s:1:"1";s:8:"@groupby";s:1:"1";s:6:"@count";s:3:"600";}}}i:6;a:3:{s:8:"sphinxql";s:64:"select * from start_end where match ('@body ^ABB$') group by idd";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:6:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1272";s:4:"body";s:3:"ABB";s:3:"idd";s:1:"1";s:8:"@groupby";s:1:"1";s:6:"@count";s:3:"599";}}}}}sphinx-2.0.4-release/test/test_188/0000755000176700017710000000000011724063141016300 5ustar deogardeogarsphinx-2.0.4-release/test/test_188/refdata/0000755000176700017710000000000011724063141017706 5ustar deogardeogarsphinx-2.0.4-release/test/test_188/refdata/rt_v4.kill0000644000176700017710000000000411634640227021621 0ustar deogardeogarsphinx-2.0.4-release/test/test_188/refdata/rt_v2.meta0000644000176700017710000000004011634640227021612 0ustar deogardeogarSPRTsphinx-2.0.4-release/test/test_188/refdata/rt_v5.meta0000644000176700017710000000037511634640227021630 0ustar deogardeogarSPRTcontentÿÿÿÿÿÿÿÿÿÿÿÿidd 0sphinx-2.0.4-release/test/test_188/refdata/rt_v5.ram0000644000176700017710000002646211634640227021466 0ustar deogardeogarÑè娭ÏÓŸx䔑­Ÿg ½Ç ¸ðù  ±à‡ ÅÖq ©¬† ÅÉ‘ç‡ÉŽÇø¯ß—èñ®¼‚À Ñ߈ ½¹p ØŽš Í hŨŽ ´þ¿ ™ÍÉ’—÷Õçëïê½¶ ± xä« ½©ð Û¯†½¶î ¸úà ²žË Ó¯~ Ó߇ Ÿ­€ŧn ­ß˜¶ú ŸpÏßç½Èo Íà—Žï… ½×q±¬ö¢Œ‘iî­×À Š÷†™Ñ¯Ÿh –§‰ã‰À›¶ …¾9‰´ˆ ˜–õÅÙ‘ÑŸwÙùD »¨ñÝï»·ï îü‡ í¯ñ»·‘ ³ßg葚 æo ¯ ‡ ¶‚ ùqœ™Å²ÙR Õïe Ñ ˜ã÷Ž®¦Ó «’¿ Š·÷—´‰»Én ÏŸˆ Žðý »Öp§Ñþ˜×ŒÃØÓàh ÜËŽ ­àw–çøåÏ’ž¹¨¿Ê˜Ô¨ ¯àx »ÆŽ ᤀ  Ñàg§‚ÃÈœ”ﳟ˜ Ã×p ¶×™ Ó — »¸q “Ìö އ‰­ ˆèµ/ƒ»Ò ’׆ÔùÏßwé õ’Ä öȈÍ߇Ä‚ÀݯzûÁ4 ³ß˜ êèî »¦Ž ÷Ëù  ꘇÑ gé÷¶’·‡±ŸhÜ”‘ÏŸw ÃÉ ¥ä¿ Èñ嫘À޹‰ ²ÞL áÍ© »íº­­àˆ“¥ÃÆo Ë«ø  Ñà˜òøö›Ï}ÍŸxî¨ù‰¹Î¥ÒɳŸg ¯†æ —ÒÇ–Ùø Ü« ­ w–‡ù×°þ±ß—Š×ö—Ô‰»©n Ï߈ ™ÎÈ þ®òÓŸ‡ò¸‡­´ùÕÈ  ðþ`ë‘Ö §Õ¸±àx äËŽ Ïàgî舗ˆí¯ßh–Lj´þ¿•Õ¶–™‰ ø€ç ÐŽ½¨o ð‡Ø³í ›É¹ ‰â·³ ˆŠ—‡èÖ¦ص/ƒº2 ®æT ∦ŒúQöˆùÏ ˜éîý êØöÓßx‘Ôø ½§º§æ Íàh ÅÈŽ£¡À ªŠÉ’÷ö³àw¥‘~ ާù¯Ÿ—÷¶¯ ­Ý¿Žùø ²™Õ õ˜ÿ ÏàwöØö©Ô‰ŦŽ ±àh ·ÉÈ Èñ‰¹ÕÇêÖø ä¬o ÓŸ—êˆù‡ˆ‰³ ˜õòñ£¾ ‡·®‹ÕÉÍ ‡ Øîù íÊ. 
[test_188/refdata binary reference files — rt_v3.meta, rt_v3.ram, rt_v2.kill, rt_v4.ram, rt_v4.meta, rt_v5.kill, rt_v2.ram — binary RT index payloads not representable as text; omitted]
( ' ¼€€ '¼€€(, ;(,& '¼€€'¼€€; ,*"62 1 # 61  "  " 2 1 62 ": -+& &  :)-')- &&¼€€:/ )#"0 75 03 7#   #3 790 3 #,( ;&<'¼€€'  ,; (; ¼€€ '¼€€',(½€€½€€ #4 8 $7 3 073 0$ 4 6 #2 8 73 0$  730#4 8;( , ' '¼€€:;(,;(,¼€€' ¼€€ ; (, "12 61 2 6" 0"1 2 6 1 2 6   - ) : &*. *,& -):-):( . &*.  -)::  *.&*. :&*.* . &:   95!%2 64 6 !95 "95 " 2 8 %!½€€%2 6 "9 5 (, ¼€€ /+/+¼€€* ,  (. /+< </ +(, 4 8 0  $ 04 84 8$ 0 $4 8 - ++/  +/ /)1 84  $  $! 4 % 8 $ $84 .;* *   ..* 7 5 %! %!5 95 9% !%!$ 93 /+ /   +/:+% 82 $ $  4 84 8 $ $ !6 4  .(*.  *., *95 !%! %$ 9 5 !%!%095 )- . *.* )/ + - .*  .*)-5 9 1  %%!5 95 9%! 1  1 ! 5 9; + / ''+/ ;'+/ +/;  #8 4  $3 7 3 9 $84 #84 #5 7   $3 7  84&oooooooo o o o o ooooooosphinx-2.0.4-release/test/test_188/refdata/rt_v3.kill0000644000176700017710000000000411634640227021620 0ustar deogardeogarsphinx-2.0.4-release/test/test_188/test.xml0000644000176700017710000000755711634664540020031 0ustar deogardeogar RT: index format backwards compatibility searchd { workers = threads } index v2 { type = rt path = data/rt_v2 rt_field = content rt_attr_uint = idd } index v3 { type = rt path = data/rt_v3 rt_field = content rt_attr_uint = idd } index v4 { type = rt path = data/rt_v4 rt_field = content rt_attr_uint = idd } index v5 { type = rt path = data/rt_v5 rt_field = content rt_attr_uint = idd } SetMatchMode ( SPH_MATCH_EXTENDED2 ); } else { $results[] = sprintf("start.err=%d; local=%s; client=%s;", $startSta, $error, $client->GetLastError()); } return $results; '); $query = create_function ( '$q, $sock',' $results = array( $q ); $res = @mysql_query ( $q, $sock ); if ( $res===false ) { $results[] = mysql_errno( $sock ) . "; " . mysql_error ( $sock ); } else { while ($row = @mysql_fetch_array($res, MYSQL_ASSOC)) { if ( array_key_exists ( "Variable_name", $row ) && $row["Variable_name"]=="time" ) { continue; } if ( !array_key_exists ( "Variable_name", $row ) || !array_key_exists ( "Value", $row ) ) { $line = ""; foreach ( $row as $k=>$v ) { $line .= $k . " = " . $v . "\t"; } $results[] = $line; } else { $results[] = $row["Variable_name"] . " = " . 
$row["Value"]; } } @mysql_free_result ( $res ); } return $results; '); $insert = create_function ( '$idx, $from, $to, $query, $sock',' $insert = "REPLACE INTO $idx ( id, idd, content ) VALUES"; $lastword = 1; for ( $doc=$from; $doc<$to; $doc++ ) { $content = ""; for ( $word=$lastword; $word<$lastword+60; $word++ ) { $content .= "test_$word "; } $lastword += 40; if ( ( $doc%7 )==0 ) { $content .= "test700"; } else if ( ( $doc%19 )==0 ) { $content .= "test1800"; } $insert .= " ( $doc, 111, \'$content\' ),"; } $res = $query ( substr ( $insert, 0, -1 ), $sock ); // remove trailing , return array(); '); $open = create_function ( '', ' global $sd_address, $sd_sphinxql_port; $sockStr = "$sd_address:$sd_sphinxql_port"; if ($sd_address == "localhost") $sockStr = "127.0.0.1:$sd_sphinxql_port"; $sock = @mysql_connect ($sockStr,"","", true ); return $sock; '); $close = create_function ( '$sock', '@mysql_close ( $sock );' ); $results = array (); $sock = $open (); $results = array_merge ( $results, $query ( "select * from v2 where match ( 'test700 | test1800' )", $sock ) ); $results = array_merge ( $results, $query ( "show meta", $sock ) ); $results = array_merge ( $results, $query ( "select * from v3 where match ( 'test700 | test1800' )", $sock ) ); $results = array_merge ( $results, $query ( "show meta", $sock ) ); $results = array_merge ( $results, $query ( "select * from v4 where match ( 'test700 | test1800' )", $sock ) ); $results = array_merge ( $results, $query ( "show meta", $sock ) ); $results = array_merge ( $results, $query ( "select * from v5 where match ( 'test700 | test1800' )", $sock ) ); $results = array_merge ( $results, $query ( "show meta", $sock ) ); $close ( $sock ); $results = array_merge ( $results, $restartDaemon ( $client ) ); $sock = $open (); $results = array_merge ( $results, $insert ( "v5", 1, 20, $query, $sock ) ); $results = array_merge ( $results, $query ( "select * from v5 where match ( 'test700 | test1800')", $sock ) ); $results = array_merge ( $results, $query ( "show meta", $sock ) ); $close ( $sock ); ]]> sphinx-2.0.4-release/test/test_188/model.bin0000644000176700017710000000423411634640227020103 0ustar deogardeogara:1:{i:0;a:1:{i:0;a:66:{i:0;s:53:"select * from v2 where match ( 'test700 | test1800' )";i:1;s:32:"id = 19 weight = 1611 idd = 111 ";i:2;s:31:"id = 7 weight = 1583 idd = 111 ";i:3;s:32:"id = 14 weight = 1583 idd = 111 ";i:4;s:9:"show meta";i:5;s:9:"total = 3";i:6;s:15:"total_found = 3";i:7;s:20:"keyword[0] = test700";i:8;s:11:"docs[0] = 2";i:9;s:11:"hits[0] = 2";i:10;s:21:"keyword[1] = test1800";i:11;s:11:"docs[1] = 1";i:12;s:11:"hits[1] = 1";i:13;s:53:"select * from v3 where match ( 'test700 | test1800' )";i:14;s:32:"id = 19 weight = 1611 idd = 111 ";i:15;s:31:"id = 7 weight = 1583 idd = 111 ";i:16;s:32:"id = 14 weight = 1583 idd = 111 ";i:17;s:9:"show meta";i:18;s:9:"total = 3";i:19;s:15:"total_found = 3";i:20;s:20:"keyword[0] = test700";i:21;s:11:"docs[0] = 2";i:22;s:11:"hits[0] = 2";i:23;s:21:"keyword[1] = test1800";i:24;s:11:"docs[1] = 1";i:25;s:11:"hits[1] = 1";i:26;s:53:"select * from v4 where match ( 'test700 | test1800' )";i:27;s:32:"id = 19 weight = 1611 idd = 111 ";i:28;s:31:"id = 7 weight = 1583 idd = 111 ";i:29;s:32:"id = 14 weight = 1583 idd = 111 ";i:30;s:9:"show meta";i:31;s:9:"total = 3";i:32;s:15:"total_found = 3";i:33;s:20:"keyword[0] = test700";i:34;s:11:"docs[0] = 2";i:35;s:11:"hits[0] = 2";i:36;s:21:"keyword[1] = test1800";i:37;s:11:"docs[1] = 1";i:38;s:11:"hits[1] = 1";i:39;s:53:"select * from v5 where match ( 'test700 | test1800' 
)";i:40;s:32:"id = 19 weight = 1611 idd = 111 ";i:41;s:31:"id = 7 weight = 1583 idd = 111 ";i:42;s:32:"id = 14 weight = 1583 idd = 111 ";i:43;s:9:"show meta";i:44;s:9:"total = 3";i:45;s:15:"total_found = 3";i:46;s:20:"keyword[0] = test700";i:47;s:11:"docs[0] = 2";i:48;s:11:"hits[0] = 2";i:49;s:21:"keyword[1] = test1800";i:50;s:11:"docs[1] = 1";i:51;s:11:"hits[1] = 1";i:52;s:10:"started=ok";i:53;s:52:"select * from v5 where match ( 'test700 | test1800')";i:54;s:32:"id = 19 weight = 1611 idd = 111 ";i:55;s:31:"id = 7 weight = 1583 idd = 111 ";i:56;s:32:"id = 14 weight = 1583 idd = 111 ";i:57;s:9:"show meta";i:58;s:9:"total = 3";i:59;s:15:"total_found = 3";i:60;s:20:"keyword[0] = test700";i:61;s:11:"docs[0] = 2";i:62;s:11:"hits[0] = 2";i:63;s:21:"keyword[1] = test1800";i:64;s:11:"docs[1] = 1";i:65;s:11:"hits[1] = 1";}}}sphinx-2.0.4-release/test/test_042/0000755000176700017710000000000011724063141016265 5ustar deogardeogarsphinx-2.0.4-release/test/test_042/test.xml0000644000176700017710000000225311421075337017774 0ustar deogardeogar persistent connections indexer { mem_limit = 16M } searchd { client_timeout = 5 client_timeout = 1 workers = none workers = threads binlog_path = } source srctest { type = mysql sql_query = SELECT id, body FROM test_table } index test_idx { source = srctest path = /test } CREATE TABLE `test_table` ( `id` int(11) NOT NULL default '0', `body` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 1, 'test' ) Open(); $results [] = $client->Query ( 'test' ); $results [] = $client->GetLastError(); sleep ( 2 ); $results [] = $client->Query ( 'test' ); $results [] = $client->GetLastError(); $results [] = $client->Close(); foreach ( $results as &$r ) if ( is_array($r) ) unset ( $r['time'] ); ]]> sphinx-2.0.4-release/test/test_042/model.bin0000644000176700017710000000521611327565006020071 0ustar 
deogardeogara:4:{i:0;a:1:{i:0;a:6:{i:0;b:1;i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:1:{s:4:"test";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:2;s:0:"";i:3;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:1:{s:4:"test";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:4;s:0:"";i:5;b:1;}}i:1;a:1:{i:0;a:6:{i:0;b:1;i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:1:{s:4:"test";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:2;s:0:"";i:3;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:1:{s:4:"test";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:4;s:0:"";i:5;b:0;}}i:2;a:1:{i:0;a:6:{i:0;b:1;i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:1:{s:4:"test";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:2;s:0:"";i:3;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:1:{s:4:"test";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:4;s:0:"";i:5;b:1;}}i:3;a:1:{i:0;a:6:{i:0;b:1;i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:1:{s:4:"test";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:2;s:0:"";i:3;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:1:{s:4:"test";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:4;s:0:"";i:5;b:0;}}}sphinx-2.0.4-release/test/test_069/0000755000176700017710000000000011724063141016276 5ustar deogardeogarsphinx-2.0.4-release/test/test_069/test.xml0000644000176700017710000000213311224002033017762 0ustar deogardeogar overrides indexer { mem_limit = 16M } searchd { } source srctest { type = mysql sql_query = SELECT id, gid, body FROM test_table sql_attr_uint = gid } index test1 { source = srctest path = /test1 } CREATE TABLE `test_table` ( `id` int(11) NOT NULL default '0', `gid` int(11) NOT NULL default '0', `body` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 1, 10, 'test' ), ( 2, 20, 'test' ), ( 3, 30, 'test' ), ( 4, 40, 'test' ), ( 5, 50, 'test' ) $client->SetOverride ( "gid", SPH_ATTR_INTEGER, array ( 1=>123, 3=>15 ) ); $client->SetSortMode ( SPH_SORT_ATTR_DESC, "gid" 
); $res = $client->Query ( "test", "test1" ); if ( $res ) { unset ( $res["time"] ); $results = array ( $res ); } else $results = array ( $client->GetLastError() ); sphinx-2.0.4-release/test/test_069/model.bin0000644000176700017710000000120011224002033020045 0ustar deogardeogara:1:{i:0;a:1:{i:0;a:1:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"gid";s:3:"123";}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"gid";s:2:"50";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"gid";s:2:"40";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"gid";s:2:"20";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"gid";s:2:"15";}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:5:"words";a:1:{s:4:"test";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}}}}}}sphinx-2.0.4-release/test/test_161/0000755000176700017710000000000011724063141016267 5ustar deogardeogarsphinx-2.0.4-release/test/test_161/data1.xml0000644000176700017710000000073311504617436020016 0ustar deogardeogar test flowing one 1 test of flore two 2 another fast doc 33 doc number four 40 sphinx-2.0.4-release/test/test_161/test.xml0000644000176700017710000000170511504617436020003 0ustar deogardeogar xmlpipe2 indexing vs prefix search indexer { mem_limit = 16M } searchd { } source src { type = xmlpipe2 xmlpipe_command = cat test_161/data1.xml xmlpipe_field = title xmlpipe_attr_uint = paper_id } index test { source = src path = /test charset_type = utf-8 min_prefix_len = 1 enable_star = 1 dict = crc dict = keywords } flo* fa* | te* f* f* !t* sphinx-2.0.4-release/test/test_161/model.bin0000644000176700017710000001443111504617436020074 0ustar deogardeogara:2:{i:0;a:5:{i:0;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:8:"paper_id";i:1;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:8:"paper_id";s:1:"1";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:8:"paper_id";s:1:"2";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:8:"paper_id";s:2:"33";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:8:"paper_id";s:2:"40";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:8:"paper_id";i:1;}s:7:"matches";a:2:{i:1;a:2:{s:6:"weight";s:4:"1557";s:5:"attrs";a:1:{s:8:"paper_id";s:1:"1";}}i:2;a:2:{s:6:"weight";s:4:"1557";s:5:"attrs";a:1:{s:8:"paper_id";s:1:"2";}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"flo*";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"flo*";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:8:"paper_id";i:1;}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:4:"1528";s:5:"attrs";a:1:{s:8:"paper_id";s:1:"1";}}i:2;a:2:{s:6:"weight";s:4:"1528";s:5:"attrs";a:1:{s:8:"paper_id";s:1:"2";}}i:3;a:2:{s:6:"weight";s:4:"1597";s:5:"attrs";a:1:{s:8:"paper_id";s:2:"33";}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:3:"fa*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"te*";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:9:"fa* | 
te*";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:8:"paper_id";i:1;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:4:"1304";s:5:"attrs";a:1:{s:8:"paper_id";s:1:"1";}}i:2;a:2:{s:6:"weight";s:4:"1304";s:5:"attrs";a:1:{s:8:"paper_id";s:1:"2";}}i:3;a:2:{s:6:"weight";s:4:"1304";s:5:"attrs";a:1:{s:8:"paper_id";s:2:"33";}}i:4;a:2:{s:6:"weight";s:4:"1304";s:5:"attrs";a:1:{s:8:"paper_id";s:2:"40";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:2:"f*";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:2:"f*";}i:4;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:8:"paper_id";i:1;}s:7:"matches";a:2:{i:3;a:2:{s:6:"weight";s:4:"1402";s:5:"attrs";a:1:{s:8:"paper_id";s:2:"33";}}i:4;a:2:{s:6:"weight";s:4:"1402";s:5:"attrs";a:1:{s:8:"paper_id";s:2:"40";}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:2:"f*";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:2:"t*";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:6:"f* !t*";}}i:1;a:5:{i:0;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:8:"paper_id";i:1;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:8:"paper_id";s:1:"1";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:8:"paper_id";s:1:"2";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:8:"paper_id";s:2:"33";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:8:"paper_id";s:2:"40";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:8:"paper_id";i:1;}s:7:"matches";a:2:{i:1;a:2:{s:6:"weight";s:4:"1597";s:5:"attrs";a:1:{s:8:"paper_id";s:1:"1";}}i:2;a:2:{s:6:"weight";s:4:"1597";s:5:"attrs";a:1:{s:8:"paper_id";s:1:"2";}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:7:"flowing";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"flore";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"flo*";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:8:"paper_id";i:1;}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:4:"1528";s:5:"attrs";a:1:{s:8:"paper_id";s:1:"1";}}i:2;a:2:{s:6:"weight";s:4:"1528";s:5:"attrs";a:1:{s:8:"paper_id";s:1:"2";}}i:3;a:2:{s:6:"weight";s:4:"1597";s:5:"attrs";a:1:{s:8:"paper_id";s:2:"33";}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:2:{s:4:"fast";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:4:"test";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:9:"fa* | 
te*";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:8:"paper_id";i:1;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:4:"1548";s:5:"attrs";a:1:{s:8:"paper_id";s:1:"1";}}i:2;a:2:{s:6:"weight";s:4:"1548";s:5:"attrs";a:1:{s:8:"paper_id";s:1:"2";}}i:3;a:2:{s:6:"weight";s:4:"1548";s:5:"attrs";a:1:{s:8:"paper_id";s:2:"33";}}i:4;a:2:{s:6:"weight";s:4:"1548";s:5:"attrs";a:1:{s:8:"paper_id";s:2:"40";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.001";s:5:"words";a:4:{s:7:"flowing";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:4:"four";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"flore";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:4:"fast";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:2:"f*";}i:4;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:8:"paper_id";i:1;}s:7:"matches";a:2:{i:3;a:2:{s:6:"weight";s:4:"1532";s:5:"attrs";a:1:{s:8:"paper_id";s:2:"33";}}i:4;a:2:{s:6:"weight";s:4:"1532";s:5:"attrs";a:1:{s:8:"paper_id";s:2:"40";}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:6:{s:7:"flowing";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:4:"four";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"flore";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:4:"fast";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:4:"test";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:3:"two";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:6:"f* !t*";}}}sphinx-2.0.4-release/test/test_016/0000755000176700017710000000000011724063141016266 5ustar deogardeogarsphinx-2.0.4-release/test/test_016/test.xml0000644000176700017710000000220711323636205017772 0ustar deogardeogar expr sorting vs filters indexer { mem_limit = 16M } searchd { } source srctest { type = mysql sql_query = SELECT document_id, group_id, body FROM test_table sql_attr_uint = group_id } index test { source = srctest path = /test charset_type = utf-8 } group_id @expr test it test it CREATE TABLE `test_table` ( `document_id` int(11) NOT NULL default '0', `group_id` int(11) NOT NULL default '0', `body` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 111, 1, 'this is test' ), ( 222, 1, 'just a test' ), ( 333, 2, 'for test-ing purposes' ), ( 444, 1, 'lets test it' ) sphinx-2.0.4-release/test/test_016/model.bin0000644000176700017710000000264411455516446020102 0ustar deogardeogara:1:{i:0;a:2:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:2:{s:8:"group_id";i:1;s:5:"@expr";i:5;}s:7:"matches";a:3:{i:444;a:2:{s:6:"weight";s:1:"4";s:5:"attrs";a:2:{s:8:"group_id";s:1:"1";s:5:"@expr";d:4;}}i:111;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:8:"group_id";s:1:"1";s:5:"@expr";d:1;}}i:222;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:8:"group_id";s:1:"1";s:5:"@expr";d:1;}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:4:"test";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:2:"it";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"test 
it";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:2:{s:8:"group_id";i:1;s:5:"@expr";i:5;}s:7:"matches";a:4:{i:111;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:8:"group_id";s:1:"1";s:5:"@expr";d:-1;}}i:222;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:8:"group_id";s:1:"1";s:5:"@expr";d:-1;}}i:333;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:8:"group_id";s:1:"2";s:5:"@expr";d:-1;}}i:444;a:2:{s:6:"weight";s:1:"4";s:5:"attrs";a:2:{s:8:"group_id";s:1:"1";s:5:"@expr";d:-4;}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:4:"test";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:2:"it";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"test it";}}}sphinx-2.0.4-release/test/test_041/0000755000176700017710000000000011724063141016264 5ustar deogardeogarsphinx-2.0.4-release/test/test_041/test.xml0000644000176700017710000000542211615322601017766 0ustar deogardeogar index_exact_words indexer { mem_limit = 16M } searchd { workers = threads } source srctest { type = mysql sql_query = SELECT document_id, body FROM test_table } index test { source = srctest path = /test index_exact_words = 0 index_exact_words = 1 morphology = none morphology = stem_en } index rt { type = rt path = /rt rt_field = title rt_attr_uint = id1 index_exact_words = 1 morphology = stem_en } work worked worker =work =worked =worker run runs =run =runs REPLACE INTO rt (id, id1, title) VALUES ( 1, 1, 'work worked working workings worker works workers' ), ( 2, 1, 'run' ), ( 3, 1, 'runs' ), ( 4, 1, 'running' ) SELECT * FROM rt WHERE MATCH('work') show meta SELECT * FROM rt WHERE MATCH('worked') show meta SELECT * FROM rt WHERE MATCH('worker') show meta SELECT * FROM rt WHERE MATCH('=work') show meta SELECT * FROM rt WHERE MATCH('=worked') show meta SELECT * FROM rt WHERE MATCH('=worker') show meta SELECT * FROM rt WHERE MATCH('run') show meta SELECT * FROM rt WHERE MATCH('runs') show meta SELECT * FROM rt WHERE MATCH('=run') show meta SELECT * FROM rt WHERE MATCH('=runs') show meta CREATE TABLE `test_table` ( `document_id` int(11) NOT NULL default '0', `body` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 1, 'work worked working workings worker works workers' ), ( 2, 'run' ), ( 3, 'runs' ), ( 4, 'running' ) sphinx-2.0.4-release/test/test_041/model.bin0000644000176700017710000012263611615322601020066 0ustar 
deogardeogara:4:{i:0;a:31:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1695";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"work";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"work";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1695";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"worked";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:6:"worked";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1695";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"worker";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:6:"worker";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1695";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"work";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"=work";}i:4;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1695";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"worked";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"=worked";}i:5;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1695";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"worker";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"=worker";}i:6;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:4:"1695";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"run";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:3:"run";}i:7;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:4:"1695";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"runs";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"runs";}i:8;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:4:"1695";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"run";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resar
ray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"=run";}i:9;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:4:"1695";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"runs";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"=runs";}i:10;a:2:{s:8:"sphinxql";s:159:" REPLACE INTO rt (id, id1, title) VALUES ( 1, 1, 'work worked working workings worker works workers' ), ( 2, 1, 'run' ), ( 3, 1, 'runs' ), ( 4, 1, 'running' ) ";s:14:"total_affected";i:4;}i:11;a:3:{s:8:"sphinxql";s:36:"SELECT * FROM rt WHERE MATCH('work')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1847";s:3:"id1";s:1:"1";}}}i:12;a:3:{s:8:"sphinxql";s:9:"show meta";s:10:"total_rows";i:6;s:4:"rows";a:5:{i:0;a:2:{s:13:"Variable_name";s:5:"total";s:5:"Value";s:1:"1";}i:1;a:2:{s:13:"Variable_name";s:11:"total_found";s:5:"Value";s:1:"1";}i:2;a:2:{s:13:"Variable_name";s:10:"keyword[0]";s:5:"Value";s:4:"work";}i:3;a:2:{s:13:"Variable_name";s:7:"docs[0]";s:5:"Value";s:1:"1";}i:4;a:2:{s:13:"Variable_name";s:7:"hits[0]";s:5:"Value";s:1:"5";}}}i:13;a:3:{s:8:"sphinxql";s:38:"SELECT * FROM rt WHERE MATCH('worked')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1847";s:3:"id1";s:1:"1";}}}i:14;a:3:{s:8:"sphinxql";s:9:"show meta";s:10:"total_rows";i:6;s:4:"rows";a:5:{i:0;a:2:{s:13:"Variable_name";s:5:"total";s:5:"Value";s:1:"1";}i:1;a:2:{s:13:"Variable_name";s:11:"total_found";s:5:"Value";s:1:"1";}i:2;a:2:{s:13:"Variable_name";s:10:"keyword[0]";s:5:"Value";s:4:"work";}i:3;a:2:{s:13:"Variable_name";s:7:"docs[0]";s:5:"Value";s:1:"1";}i:4;a:2:{s:13:"Variable_name";s:7:"hits[0]";s:5:"Value";s:1:"5";}}}i:15;a:3:{s:8:"sphinxql";s:38:"SELECT * FROM rt WHERE MATCH('worker')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1769";s:3:"id1";s:1:"1";}}}i:16;a:3:{s:8:"sphinxql";s:9:"show meta";s:10:"total_rows";i:6;s:4:"rows";a:5:{i:0;a:2:{s:13:"Variable_name";s:5:"total";s:5:"Value";s:1:"1";}i:1;a:2:{s:13:"Variable_name";s:11:"total_found";s:5:"Value";s:1:"1";}i:2;a:2:{s:13:"Variable_name";s:10:"keyword[0]";s:5:"Value";s:6:"worker";}i:3;a:2:{s:13:"Variable_name";s:7:"docs[0]";s:5:"Value";s:1:"1";}i:4;a:2:{s:13:"Variable_name";s:7:"hits[0]";s:5:"Value";s:1:"2";}}}i:17;a:3:{s:8:"sphinxql";s:37:"SELECT * FROM rt WHERE MATCH('=work')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1695";s:3:"id1";s:1:"1";}}}i:18;a:3:{s:8:"sphinxql";s:9:"show meta";s:10:"total_rows";i:6;s:4:"rows";a:5:{i:0;a:2:{s:13:"Variable_name";s:5:"total";s:5:"Value";s:1:"1";}i:1;a:2:{s:13:"Variable_name";s:11:"total_found";s:5:"Value";s:1:"1";}i:2;a:2:{s:13:"Variable_name";s:10:"keyword[0]";s:5:"Value";s:5:"=work";}i:3;a:2:{s:13:"Variable_name";s:7:"docs[0]";s:5:"Value";s:1:"1";}i:4;a:2:{s:13:"Variable_name";s:7:"hits[0]";s:5:"Value";s:1:"1";}}}i:19;a:3:{s:8:"sphinxql";s:39:"SELECT * FROM rt WHERE MATCH('=worked')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1695";s:3:"id1";s:1:"1";}}}i:20;a:3:{s:8:"sphinxql";s:9:"show 
meta";s:10:"total_rows";i:6;s:4:"rows";a:5:{i:0;a:2:{s:13:"Variable_name";s:5:"total";s:5:"Value";s:1:"1";}i:1;a:2:{s:13:"Variable_name";s:11:"total_found";s:5:"Value";s:1:"1";}i:2;a:2:{s:13:"Variable_name";s:10:"keyword[0]";s:5:"Value";s:7:"=worked";}i:3;a:2:{s:13:"Variable_name";s:7:"docs[0]";s:5:"Value";s:1:"1";}i:4;a:2:{s:13:"Variable_name";s:7:"hits[0]";s:5:"Value";s:1:"1";}}}i:21;a:3:{s:8:"sphinxql";s:39:"SELECT * FROM rt WHERE MATCH('=worker')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1695";s:3:"id1";s:1:"1";}}}i:22;a:3:{s:8:"sphinxql";s:9:"show meta";s:10:"total_rows";i:6;s:4:"rows";a:5:{i:0;a:2:{s:13:"Variable_name";s:5:"total";s:5:"Value";s:1:"1";}i:1;a:2:{s:13:"Variable_name";s:11:"total_found";s:5:"Value";s:1:"1";}i:2;a:2:{s:13:"Variable_name";s:10:"keyword[0]";s:5:"Value";s:7:"=worker";}i:3;a:2:{s:13:"Variable_name";s:7:"docs[0]";s:5:"Value";s:1:"1";}i:4;a:2:{s:13:"Variable_name";s:7:"hits[0]";s:5:"Value";s:1:"1";}}}i:23;a:3:{s:8:"sphinxql";s:35:"SELECT * FROM rt WHERE MATCH('run')";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1442";s:3:"id1";s:1:"1";}i:1;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1442";s:3:"id1";s:1:"1";}i:2;a:3:{s:2:"id";s:1:"4";s:6:"weight";s:4:"1442";s:3:"id1";s:1:"1";}}}i:24;a:3:{s:8:"sphinxql";s:9:"show meta";s:10:"total_rows";i:6;s:4:"rows";a:5:{i:0;a:2:{s:13:"Variable_name";s:5:"total";s:5:"Value";s:1:"3";}i:1;a:2:{s:13:"Variable_name";s:11:"total_found";s:5:"Value";s:1:"3";}i:2;a:2:{s:13:"Variable_name";s:10:"keyword[0]";s:5:"Value";s:3:"run";}i:3;a:2:{s:13:"Variable_name";s:7:"docs[0]";s:5:"Value";s:1:"3";}i:4;a:2:{s:13:"Variable_name";s:7:"hits[0]";s:5:"Value";s:1:"3";}}}i:25;a:3:{s:8:"sphinxql";s:36:"SELECT * FROM rt WHERE MATCH('runs')";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1442";s:3:"id1";s:1:"1";}i:1;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1442";s:3:"id1";s:1:"1";}i:2;a:3:{s:2:"id";s:1:"4";s:6:"weight";s:4:"1442";s:3:"id1";s:1:"1";}}}i:26;a:3:{s:8:"sphinxql";s:9:"show meta";s:10:"total_rows";i:6;s:4:"rows";a:5:{i:0;a:2:{s:13:"Variable_name";s:5:"total";s:5:"Value";s:1:"3";}i:1;a:2:{s:13:"Variable_name";s:11:"total_found";s:5:"Value";s:1:"3";}i:2;a:2:{s:13:"Variable_name";s:10:"keyword[0]";s:5:"Value";s:3:"run";}i:3;a:2:{s:13:"Variable_name";s:7:"docs[0]";s:5:"Value";s:1:"3";}i:4;a:2:{s:13:"Variable_name";s:7:"hits[0]";s:5:"Value";s:1:"3";}}}i:27;a:3:{s:8:"sphinxql";s:36:"SELECT * FROM rt WHERE MATCH('=run')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1695";s:3:"id1";s:1:"1";}}}i:28;a:3:{s:8:"sphinxql";s:9:"show meta";s:10:"total_rows";i:6;s:4:"rows";a:5:{i:0;a:2:{s:13:"Variable_name";s:5:"total";s:5:"Value";s:1:"1";}i:1;a:2:{s:13:"Variable_name";s:11:"total_found";s:5:"Value";s:1:"1";}i:2;a:2:{s:13:"Variable_name";s:10:"keyword[0]";s:5:"Value";s:4:"=run";}i:3;a:2:{s:13:"Variable_name";s:7:"docs[0]";s:5:"Value";s:1:"1";}i:4;a:2:{s:13:"Variable_name";s:7:"hits[0]";s:5:"Value";s:1:"1";}}}i:29;a:3:{s:8:"sphinxql";s:37:"SELECT * FROM rt WHERE MATCH('=runs')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1695";s:3:"id1";s:1:"1";}}}i:30;a:3:{s:8:"sphinxql";s:9:"show 
meta";s:10:"total_rows";i:6;s:4:"rows";a:5:{i:0;a:2:{s:13:"Variable_name";s:5:"total";s:5:"Value";s:1:"1";}i:1;a:2:{s:13:"Variable_name";s:11:"total_found";s:5:"Value";s:1:"1";}i:2;a:2:{s:13:"Variable_name";s:10:"keyword[0]";s:5:"Value";s:5:"=runs";}i:3;a:2:{s:13:"Variable_name";s:7:"docs[0]";s:5:"Value";s:1:"1";}i:4;a:2:{s:13:"Variable_name";s:7:"hits[0]";s:5:"Value";s:1:"1";}}}}i:1;a:31:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1695";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"work";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"work";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1695";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"worked";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:6:"worked";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1695";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"worker";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:6:"worker";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1695";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"work";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"=work";}i:4;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1695";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"worked";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"=worked";}i:5;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1695";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"worker";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"=worker";}i:6;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:4:"1695";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"run";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:3:"run";}i:7;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:4:"1695";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"runs";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s
:5:"query";s:4:"runs";}i:8;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:4:"1695";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"run";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"=run";}i:9;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:4:"1695";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"runs";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"=runs";}i:10;a:2:{s:8:"sphinxql";s:159:" REPLACE INTO rt (id, id1, title) VALUES ( 1, 1, 'work worked working workings worker works workers' ), ( 2, 1, 'run' ), ( 3, 1, 'runs' ), ( 4, 1, 'running' ) ";s:14:"total_affected";i:4;}i:11;a:3:{s:8:"sphinxql";s:36:"SELECT * FROM rt WHERE MATCH('work')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1847";s:3:"id1";s:1:"1";}}}i:12;a:3:{s:8:"sphinxql";s:9:"show meta";s:10:"total_rows";i:6;s:4:"rows";a:5:{i:0;a:2:{s:13:"Variable_name";s:5:"total";s:5:"Value";s:1:"1";}i:1;a:2:{s:13:"Variable_name";s:11:"total_found";s:5:"Value";s:1:"1";}i:2;a:2:{s:13:"Variable_name";s:10:"keyword[0]";s:5:"Value";s:4:"work";}i:3;a:2:{s:13:"Variable_name";s:7:"docs[0]";s:5:"Value";s:1:"1";}i:4;a:2:{s:13:"Variable_name";s:7:"hits[0]";s:5:"Value";s:1:"5";}}}i:13;a:3:{s:8:"sphinxql";s:38:"SELECT * FROM rt WHERE MATCH('worked')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1847";s:3:"id1";s:1:"1";}}}i:14;a:3:{s:8:"sphinxql";s:9:"show meta";s:10:"total_rows";i:6;s:4:"rows";a:5:{i:0;a:2:{s:13:"Variable_name";s:5:"total";s:5:"Value";s:1:"1";}i:1;a:2:{s:13:"Variable_name";s:11:"total_found";s:5:"Value";s:1:"1";}i:2;a:2:{s:13:"Variable_name";s:10:"keyword[0]";s:5:"Value";s:4:"work";}i:3;a:2:{s:13:"Variable_name";s:7:"docs[0]";s:5:"Value";s:1:"1";}i:4;a:2:{s:13:"Variable_name";s:7:"hits[0]";s:5:"Value";s:1:"5";}}}i:15;a:3:{s:8:"sphinxql";s:38:"SELECT * FROM rt WHERE MATCH('worker')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1769";s:3:"id1";s:1:"1";}}}i:16;a:3:{s:8:"sphinxql";s:9:"show meta";s:10:"total_rows";i:6;s:4:"rows";a:5:{i:0;a:2:{s:13:"Variable_name";s:5:"total";s:5:"Value";s:1:"1";}i:1;a:2:{s:13:"Variable_name";s:11:"total_found";s:5:"Value";s:1:"1";}i:2;a:2:{s:13:"Variable_name";s:10:"keyword[0]";s:5:"Value";s:6:"worker";}i:3;a:2:{s:13:"Variable_name";s:7:"docs[0]";s:5:"Value";s:1:"1";}i:4;a:2:{s:13:"Variable_name";s:7:"hits[0]";s:5:"Value";s:1:"2";}}}i:17;a:3:{s:8:"sphinxql";s:37:"SELECT * FROM rt WHERE MATCH('=work')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1695";s:3:"id1";s:1:"1";}}}i:18;a:3:{s:8:"sphinxql";s:9:"show meta";s:10:"total_rows";i:6;s:4:"rows";a:5:{i:0;a:2:{s:13:"Variable_name";s:5:"total";s:5:"Value";s:1:"1";}i:1;a:2:{s:13:"Variable_name";s:11:"total_found";s:5:"Value";s:1:"1";}i:2;a:2:{s:13:"Variable_name";s:10:"keyword[0]";s:5:"Value";s:5:"=work";}i:3;a:2:{s:13:"Variable_name";s:7:"docs[0]";s:5:"Value";s:1:"1";}i:4;a:2:{s:13:"Variable_name";s:7:"hits[0]";s:5:"Value";s:1:"1";}}}i:19;a:3:{s:8:"sphinxql";s:39:"SELECT * FROM rt WHERE 
MATCH('=worked')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1695";s:3:"id1";s:1:"1";}}}i:20;a:3:{s:8:"sphinxql";s:9:"show meta";s:10:"total_rows";i:6;s:4:"rows";a:5:{i:0;a:2:{s:13:"Variable_name";s:5:"total";s:5:"Value";s:1:"1";}i:1;a:2:{s:13:"Variable_name";s:11:"total_found";s:5:"Value";s:1:"1";}i:2;a:2:{s:13:"Variable_name";s:10:"keyword[0]";s:5:"Value";s:7:"=worked";}i:3;a:2:{s:13:"Variable_name";s:7:"docs[0]";s:5:"Value";s:1:"1";}i:4;a:2:{s:13:"Variable_name";s:7:"hits[0]";s:5:"Value";s:1:"1";}}}i:21;a:3:{s:8:"sphinxql";s:39:"SELECT * FROM rt WHERE MATCH('=worker')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1695";s:3:"id1";s:1:"1";}}}i:22;a:3:{s:8:"sphinxql";s:9:"show meta";s:10:"total_rows";i:6;s:4:"rows";a:5:{i:0;a:2:{s:13:"Variable_name";s:5:"total";s:5:"Value";s:1:"1";}i:1;a:2:{s:13:"Variable_name";s:11:"total_found";s:5:"Value";s:1:"1";}i:2;a:2:{s:13:"Variable_name";s:10:"keyword[0]";s:5:"Value";s:7:"=worker";}i:3;a:2:{s:13:"Variable_name";s:7:"docs[0]";s:5:"Value";s:1:"1";}i:4;a:2:{s:13:"Variable_name";s:7:"hits[0]";s:5:"Value";s:1:"1";}}}i:23;a:3:{s:8:"sphinxql";s:35:"SELECT * FROM rt WHERE MATCH('run')";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1442";s:3:"id1";s:1:"1";}i:1;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1442";s:3:"id1";s:1:"1";}i:2;a:3:{s:2:"id";s:1:"4";s:6:"weight";s:4:"1442";s:3:"id1";s:1:"1";}}}i:24;a:3:{s:8:"sphinxql";s:9:"show meta";s:10:"total_rows";i:6;s:4:"rows";a:5:{i:0;a:2:{s:13:"Variable_name";s:5:"total";s:5:"Value";s:1:"3";}i:1;a:2:{s:13:"Variable_name";s:11:"total_found";s:5:"Value";s:1:"3";}i:2;a:2:{s:13:"Variable_name";s:10:"keyword[0]";s:5:"Value";s:3:"run";}i:3;a:2:{s:13:"Variable_name";s:7:"docs[0]";s:5:"Value";s:1:"3";}i:4;a:2:{s:13:"Variable_name";s:7:"hits[0]";s:5:"Value";s:1:"3";}}}i:25;a:3:{s:8:"sphinxql";s:36:"SELECT * FROM rt WHERE MATCH('runs')";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1442";s:3:"id1";s:1:"1";}i:1;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1442";s:3:"id1";s:1:"1";}i:2;a:3:{s:2:"id";s:1:"4";s:6:"weight";s:4:"1442";s:3:"id1";s:1:"1";}}}i:26;a:3:{s:8:"sphinxql";s:9:"show meta";s:10:"total_rows";i:6;s:4:"rows";a:5:{i:0;a:2:{s:13:"Variable_name";s:5:"total";s:5:"Value";s:1:"3";}i:1;a:2:{s:13:"Variable_name";s:11:"total_found";s:5:"Value";s:1:"3";}i:2;a:2:{s:13:"Variable_name";s:10:"keyword[0]";s:5:"Value";s:3:"run";}i:3;a:2:{s:13:"Variable_name";s:7:"docs[0]";s:5:"Value";s:1:"3";}i:4;a:2:{s:13:"Variable_name";s:7:"hits[0]";s:5:"Value";s:1:"3";}}}i:27;a:3:{s:8:"sphinxql";s:36:"SELECT * FROM rt WHERE MATCH('=run')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1695";s:3:"id1";s:1:"1";}}}i:28;a:3:{s:8:"sphinxql";s:9:"show meta";s:10:"total_rows";i:6;s:4:"rows";a:5:{i:0;a:2:{s:13:"Variable_name";s:5:"total";s:5:"Value";s:1:"1";}i:1;a:2:{s:13:"Variable_name";s:11:"total_found";s:5:"Value";s:1:"1";}i:2;a:2:{s:13:"Variable_name";s:10:"keyword[0]";s:5:"Value";s:4:"=run";}i:3;a:2:{s:13:"Variable_name";s:7:"docs[0]";s:5:"Value";s:1:"1";}i:4;a:2:{s:13:"Variable_name";s:7:"hits[0]";s:5:"Value";s:1:"1";}}}i:29;a:3:{s:8:"sphinxql";s:37:"SELECT * FROM rt WHERE MATCH('=runs')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1695";s:3:"id1";s:1:"1";}}}i:30;a:3:{s:8:"sphinxql";s:9:"show 
meta";s:10:"total_rows";i:6;s:4:"rows";a:5:{i:0;a:2:{s:13:"Variable_name";s:5:"total";s:5:"Value";s:1:"1";}i:1;a:2:{s:13:"Variable_name";s:11:"total_found";s:5:"Value";s:1:"1";}i:2;a:2:{s:13:"Variable_name";s:10:"keyword[0]";s:5:"Value";s:5:"=runs";}i:3;a:2:{s:13:"Variable_name";s:7:"docs[0]";s:5:"Value";s:1:"1";}i:4;a:2:{s:13:"Variable_name";s:7:"hits[0]";s:5:"Value";s:1:"1";}}}}i:2;a:31:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1847";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"work";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"5";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"work";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1847";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"work";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"5";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:6:"worked";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1769";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"worker";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:6:"worker";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1847";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"work";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"5";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"=work";}i:4;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1847";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"work";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"5";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"=worked";}i:5;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1769";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"worker";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"=worker";}i:6;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:2;a:2:{s:6:"weight";s:4:"1442";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"1442";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:4:"1442";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"run";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:3:"run";}i:7;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:2;a:2:{s:6:"weight";s:4:"1442";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"1442";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:4:"1442
";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"run";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"runs";}i:8;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:2;a:2:{s:6:"weight";s:4:"1442";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"1442";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:4:"1442";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"run";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"=run";}i:9;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:2;a:2:{s:6:"weight";s:4:"1442";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"1442";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:4:"1442";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"run";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"=runs";}i:10;a:2:{s:8:"sphinxql";s:159:" REPLACE INTO rt (id, id1, title) VALUES ( 1, 1, 'work worked working workings worker works workers' ), ( 2, 1, 'run' ), ( 3, 1, 'runs' ), ( 4, 1, 'running' ) ";s:14:"total_affected";i:4;}i:11;a:3:{s:8:"sphinxql";s:36:"SELECT * FROM rt WHERE MATCH('work')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1847";s:3:"id1";s:1:"1";}}}i:12;a:3:{s:8:"sphinxql";s:9:"show meta";s:10:"total_rows";i:6;s:4:"rows";a:5:{i:0;a:2:{s:13:"Variable_name";s:5:"total";s:5:"Value";s:1:"1";}i:1;a:2:{s:13:"Variable_name";s:11:"total_found";s:5:"Value";s:1:"1";}i:2;a:2:{s:13:"Variable_name";s:10:"keyword[0]";s:5:"Value";s:4:"work";}i:3;a:2:{s:13:"Variable_name";s:7:"docs[0]";s:5:"Value";s:1:"1";}i:4;a:2:{s:13:"Variable_name";s:7:"hits[0]";s:5:"Value";s:1:"5";}}}i:13;a:3:{s:8:"sphinxql";s:38:"SELECT * FROM rt WHERE MATCH('worked')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1847";s:3:"id1";s:1:"1";}}}i:14;a:3:{s:8:"sphinxql";s:9:"show meta";s:10:"total_rows";i:6;s:4:"rows";a:5:{i:0;a:2:{s:13:"Variable_name";s:5:"total";s:5:"Value";s:1:"1";}i:1;a:2:{s:13:"Variable_name";s:11:"total_found";s:5:"Value";s:1:"1";}i:2;a:2:{s:13:"Variable_name";s:10:"keyword[0]";s:5:"Value";s:4:"work";}i:3;a:2:{s:13:"Variable_name";s:7:"docs[0]";s:5:"Value";s:1:"1";}i:4;a:2:{s:13:"Variable_name";s:7:"hits[0]";s:5:"Value";s:1:"5";}}}i:15;a:3:{s:8:"sphinxql";s:38:"SELECT * FROM rt WHERE MATCH('worker')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1769";s:3:"id1";s:1:"1";}}}i:16;a:3:{s:8:"sphinxql";s:9:"show meta";s:10:"total_rows";i:6;s:4:"rows";a:5:{i:0;a:2:{s:13:"Variable_name";s:5:"total";s:5:"Value";s:1:"1";}i:1;a:2:{s:13:"Variable_name";s:11:"total_found";s:5:"Value";s:1:"1";}i:2;a:2:{s:13:"Variable_name";s:10:"keyword[0]";s:5:"Value";s:6:"worker";}i:3;a:2:{s:13:"Variable_name";s:7:"docs[0]";s:5:"Value";s:1:"1";}i:4;a:2:{s:13:"Variable_name";s:7:"hits[0]";s:5:"Value";s:1:"2";}}}i:17;a:3:{s:8:"sphinxql";s:37:"SELECT * FROM rt WHERE MATCH('=work')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1695";s:3:"id1";s:1:"1";}}}i:18;a:3:{s:8:"sphinxql";s:9:"show 
meta";s:10:"total_rows";i:6;s:4:"rows";a:5:{i:0;a:2:{s:13:"Variable_name";s:5:"total";s:5:"Value";s:1:"1";}i:1;a:2:{s:13:"Variable_name";s:11:"total_found";s:5:"Value";s:1:"1";}i:2;a:2:{s:13:"Variable_name";s:10:"keyword[0]";s:5:"Value";s:5:"=work";}i:3;a:2:{s:13:"Variable_name";s:7:"docs[0]";s:5:"Value";s:1:"1";}i:4;a:2:{s:13:"Variable_name";s:7:"hits[0]";s:5:"Value";s:1:"1";}}}i:19;a:3:{s:8:"sphinxql";s:39:"SELECT * FROM rt WHERE MATCH('=worked')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1695";s:3:"id1";s:1:"1";}}}i:20;a:3:{s:8:"sphinxql";s:9:"show meta";s:10:"total_rows";i:6;s:4:"rows";a:5:{i:0;a:2:{s:13:"Variable_name";s:5:"total";s:5:"Value";s:1:"1";}i:1;a:2:{s:13:"Variable_name";s:11:"total_found";s:5:"Value";s:1:"1";}i:2;a:2:{s:13:"Variable_name";s:10:"keyword[0]";s:5:"Value";s:7:"=worked";}i:3;a:2:{s:13:"Variable_name";s:7:"docs[0]";s:5:"Value";s:1:"1";}i:4;a:2:{s:13:"Variable_name";s:7:"hits[0]";s:5:"Value";s:1:"1";}}}i:21;a:3:{s:8:"sphinxql";s:39:"SELECT * FROM rt WHERE MATCH('=worker')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1695";s:3:"id1";s:1:"1";}}}i:22;a:3:{s:8:"sphinxql";s:9:"show meta";s:10:"total_rows";i:6;s:4:"rows";a:5:{i:0;a:2:{s:13:"Variable_name";s:5:"total";s:5:"Value";s:1:"1";}i:1;a:2:{s:13:"Variable_name";s:11:"total_found";s:5:"Value";s:1:"1";}i:2;a:2:{s:13:"Variable_name";s:10:"keyword[0]";s:5:"Value";s:7:"=worker";}i:3;a:2:{s:13:"Variable_name";s:7:"docs[0]";s:5:"Value";s:1:"1";}i:4;a:2:{s:13:"Variable_name";s:7:"hits[0]";s:5:"Value";s:1:"1";}}}i:23;a:3:{s:8:"sphinxql";s:35:"SELECT * FROM rt WHERE MATCH('run')";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1442";s:3:"id1";s:1:"1";}i:1;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1442";s:3:"id1";s:1:"1";}i:2;a:3:{s:2:"id";s:1:"4";s:6:"weight";s:4:"1442";s:3:"id1";s:1:"1";}}}i:24;a:3:{s:8:"sphinxql";s:9:"show meta";s:10:"total_rows";i:6;s:4:"rows";a:5:{i:0;a:2:{s:13:"Variable_name";s:5:"total";s:5:"Value";s:1:"3";}i:1;a:2:{s:13:"Variable_name";s:11:"total_found";s:5:"Value";s:1:"3";}i:2;a:2:{s:13:"Variable_name";s:10:"keyword[0]";s:5:"Value";s:3:"run";}i:3;a:2:{s:13:"Variable_name";s:7:"docs[0]";s:5:"Value";s:1:"3";}i:4;a:2:{s:13:"Variable_name";s:7:"hits[0]";s:5:"Value";s:1:"3";}}}i:25;a:3:{s:8:"sphinxql";s:36:"SELECT * FROM rt WHERE MATCH('runs')";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1442";s:3:"id1";s:1:"1";}i:1;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1442";s:3:"id1";s:1:"1";}i:2;a:3:{s:2:"id";s:1:"4";s:6:"weight";s:4:"1442";s:3:"id1";s:1:"1";}}}i:26;a:3:{s:8:"sphinxql";s:9:"show meta";s:10:"total_rows";i:6;s:4:"rows";a:5:{i:0;a:2:{s:13:"Variable_name";s:5:"total";s:5:"Value";s:1:"3";}i:1;a:2:{s:13:"Variable_name";s:11:"total_found";s:5:"Value";s:1:"3";}i:2;a:2:{s:13:"Variable_name";s:10:"keyword[0]";s:5:"Value";s:3:"run";}i:3;a:2:{s:13:"Variable_name";s:7:"docs[0]";s:5:"Value";s:1:"3";}i:4;a:2:{s:13:"Variable_name";s:7:"hits[0]";s:5:"Value";s:1:"3";}}}i:27;a:3:{s:8:"sphinxql";s:36:"SELECT * FROM rt WHERE MATCH('=run')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1695";s:3:"id1";s:1:"1";}}}i:28;a:3:{s:8:"sphinxql";s:9:"show 
meta";s:10:"total_rows";i:6;s:4:"rows";a:5:{i:0;a:2:{s:13:"Variable_name";s:5:"total";s:5:"Value";s:1:"1";}i:1;a:2:{s:13:"Variable_name";s:11:"total_found";s:5:"Value";s:1:"1";}i:2;a:2:{s:13:"Variable_name";s:10:"keyword[0]";s:5:"Value";s:4:"=run";}i:3;a:2:{s:13:"Variable_name";s:7:"docs[0]";s:5:"Value";s:1:"1";}i:4;a:2:{s:13:"Variable_name";s:7:"hits[0]";s:5:"Value";s:1:"1";}}}i:29;a:3:{s:8:"sphinxql";s:37:"SELECT * FROM rt WHERE MATCH('=runs')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1695";s:3:"id1";s:1:"1";}}}i:30;a:3:{s:8:"sphinxql";s:9:"show meta";s:10:"total_rows";i:6;s:4:"rows";a:5:{i:0;a:2:{s:13:"Variable_name";s:5:"total";s:5:"Value";s:1:"1";}i:1;a:2:{s:13:"Variable_name";s:11:"total_found";s:5:"Value";s:1:"1";}i:2;a:2:{s:13:"Variable_name";s:10:"keyword[0]";s:5:"Value";s:5:"=runs";}i:3;a:2:{s:13:"Variable_name";s:7:"docs[0]";s:5:"Value";s:1:"1";}i:4;a:2:{s:13:"Variable_name";s:7:"hits[0]";s:5:"Value";s:1:"1";}}}}i:3;a:31:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1847";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"work";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"5";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"work";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1847";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"work";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"5";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:6:"worked";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1769";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"worker";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:6:"worker";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1695";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"=work";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"=work";}i:4;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1695";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"=worked";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"=worked";}i:5;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1695";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"=worker";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"=worker";}i:6;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:2;a:2:{s:6:"weight";s:4:"1442";
s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"1442";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:4:"1442";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"run";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:3:"run";}i:7;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:2;a:2:{s:6:"weight";s:4:"1442";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"1442";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:4:"1442";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"run";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"runs";}i:8;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:4:"1695";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"=run";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"=run";}i:9;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:4:"1695";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"=runs";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"=runs";}i:10;a:2:{s:8:"sphinxql";s:159:" REPLACE INTO rt (id, id1, title) VALUES ( 1, 1, 'work worked working workings worker works workers' ), ( 2, 1, 'run' ), ( 3, 1, 'runs' ), ( 4, 1, 'running' ) ";s:14:"total_affected";i:4;}i:11;a:3:{s:8:"sphinxql";s:36:"SELECT * FROM rt WHERE MATCH('work')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1847";s:3:"id1";s:1:"1";}}}i:12;a:3:{s:8:"sphinxql";s:9:"show meta";s:10:"total_rows";i:6;s:4:"rows";a:5:{i:0;a:2:{s:13:"Variable_name";s:5:"total";s:5:"Value";s:1:"1";}i:1;a:2:{s:13:"Variable_name";s:11:"total_found";s:5:"Value";s:1:"1";}i:2;a:2:{s:13:"Variable_name";s:10:"keyword[0]";s:5:"Value";s:4:"work";}i:3;a:2:{s:13:"Variable_name";s:7:"docs[0]";s:5:"Value";s:1:"1";}i:4;a:2:{s:13:"Variable_name";s:7:"hits[0]";s:5:"Value";s:1:"5";}}}i:13;a:3:{s:8:"sphinxql";s:38:"SELECT * FROM rt WHERE MATCH('worked')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1847";s:3:"id1";s:1:"1";}}}i:14;a:3:{s:8:"sphinxql";s:9:"show meta";s:10:"total_rows";i:6;s:4:"rows";a:5:{i:0;a:2:{s:13:"Variable_name";s:5:"total";s:5:"Value";s:1:"1";}i:1;a:2:{s:13:"Variable_name";s:11:"total_found";s:5:"Value";s:1:"1";}i:2;a:2:{s:13:"Variable_name";s:10:"keyword[0]";s:5:"Value";s:4:"work";}i:3;a:2:{s:13:"Variable_name";s:7:"docs[0]";s:5:"Value";s:1:"1";}i:4;a:2:{s:13:"Variable_name";s:7:"hits[0]";s:5:"Value";s:1:"5";}}}i:15;a:3:{s:8:"sphinxql";s:38:"SELECT * FROM rt WHERE MATCH('worker')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1769";s:3:"id1";s:1:"1";}}}i:16;a:3:{s:8:"sphinxql";s:9:"show 
meta";s:10:"total_rows";i:6;s:4:"rows";a:5:{i:0;a:2:{s:13:"Variable_name";s:5:"total";s:5:"Value";s:1:"1";}i:1;a:2:{s:13:"Variable_name";s:11:"total_found";s:5:"Value";s:1:"1";}i:2;a:2:{s:13:"Variable_name";s:10:"keyword[0]";s:5:"Value";s:6:"worker";}i:3;a:2:{s:13:"Variable_name";s:7:"docs[0]";s:5:"Value";s:1:"1";}i:4;a:2:{s:13:"Variable_name";s:7:"hits[0]";s:5:"Value";s:1:"2";}}}i:17;a:3:{s:8:"sphinxql";s:37:"SELECT * FROM rt WHERE MATCH('=work')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1695";s:3:"id1";s:1:"1";}}}i:18;a:3:{s:8:"sphinxql";s:9:"show meta";s:10:"total_rows";i:6;s:4:"rows";a:5:{i:0;a:2:{s:13:"Variable_name";s:5:"total";s:5:"Value";s:1:"1";}i:1;a:2:{s:13:"Variable_name";s:11:"total_found";s:5:"Value";s:1:"1";}i:2;a:2:{s:13:"Variable_name";s:10:"keyword[0]";s:5:"Value";s:5:"=work";}i:3;a:2:{s:13:"Variable_name";s:7:"docs[0]";s:5:"Value";s:1:"1";}i:4;a:2:{s:13:"Variable_name";s:7:"hits[0]";s:5:"Value";s:1:"1";}}}i:19;a:3:{s:8:"sphinxql";s:39:"SELECT * FROM rt WHERE MATCH('=worked')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1695";s:3:"id1";s:1:"1";}}}i:20;a:3:{s:8:"sphinxql";s:9:"show meta";s:10:"total_rows";i:6;s:4:"rows";a:5:{i:0;a:2:{s:13:"Variable_name";s:5:"total";s:5:"Value";s:1:"1";}i:1;a:2:{s:13:"Variable_name";s:11:"total_found";s:5:"Value";s:1:"1";}i:2;a:2:{s:13:"Variable_name";s:10:"keyword[0]";s:5:"Value";s:7:"=worked";}i:3;a:2:{s:13:"Variable_name";s:7:"docs[0]";s:5:"Value";s:1:"1";}i:4;a:2:{s:13:"Variable_name";s:7:"hits[0]";s:5:"Value";s:1:"1";}}}i:21;a:3:{s:8:"sphinxql";s:39:"SELECT * FROM rt WHERE MATCH('=worker')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1695";s:3:"id1";s:1:"1";}}}i:22;a:3:{s:8:"sphinxql";s:9:"show meta";s:10:"total_rows";i:6;s:4:"rows";a:5:{i:0;a:2:{s:13:"Variable_name";s:5:"total";s:5:"Value";s:1:"1";}i:1;a:2:{s:13:"Variable_name";s:11:"total_found";s:5:"Value";s:1:"1";}i:2;a:2:{s:13:"Variable_name";s:10:"keyword[0]";s:5:"Value";s:7:"=worker";}i:3;a:2:{s:13:"Variable_name";s:7:"docs[0]";s:5:"Value";s:1:"1";}i:4;a:2:{s:13:"Variable_name";s:7:"hits[0]";s:5:"Value";s:1:"1";}}}i:23;a:3:{s:8:"sphinxql";s:35:"SELECT * FROM rt WHERE MATCH('run')";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1442";s:3:"id1";s:1:"1";}i:1;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1442";s:3:"id1";s:1:"1";}i:2;a:3:{s:2:"id";s:1:"4";s:6:"weight";s:4:"1442";s:3:"id1";s:1:"1";}}}i:24;a:3:{s:8:"sphinxql";s:9:"show meta";s:10:"total_rows";i:6;s:4:"rows";a:5:{i:0;a:2:{s:13:"Variable_name";s:5:"total";s:5:"Value";s:1:"3";}i:1;a:2:{s:13:"Variable_name";s:11:"total_found";s:5:"Value";s:1:"3";}i:2;a:2:{s:13:"Variable_name";s:10:"keyword[0]";s:5:"Value";s:3:"run";}i:3;a:2:{s:13:"Variable_name";s:7:"docs[0]";s:5:"Value";s:1:"3";}i:4;a:2:{s:13:"Variable_name";s:7:"hits[0]";s:5:"Value";s:1:"3";}}}i:25;a:3:{s:8:"sphinxql";s:36:"SELECT * FROM rt WHERE MATCH('runs')";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1442";s:3:"id1";s:1:"1";}i:1;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1442";s:3:"id1";s:1:"1";}i:2;a:3:{s:2:"id";s:1:"4";s:6:"weight";s:4:"1442";s:3:"id1";s:1:"1";}}}i:26;a:3:{s:8:"sphinxql";s:9:"show 
meta";s:10:"total_rows";i:6;s:4:"rows";a:5:{i:0;a:2:{s:13:"Variable_name";s:5:"total";s:5:"Value";s:1:"3";}i:1;a:2:{s:13:"Variable_name";s:11:"total_found";s:5:"Value";s:1:"3";}i:2;a:2:{s:13:"Variable_name";s:10:"keyword[0]";s:5:"Value";s:3:"run";}i:3;a:2:{s:13:"Variable_name";s:7:"docs[0]";s:5:"Value";s:1:"3";}i:4;a:2:{s:13:"Variable_name";s:7:"hits[0]";s:5:"Value";s:1:"3";}}}i:27;a:3:{s:8:"sphinxql";s:36:"SELECT * FROM rt WHERE MATCH('=run')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1695";s:3:"id1";s:1:"1";}}}i:28;a:3:{s:8:"sphinxql";s:9:"show meta";s:10:"total_rows";i:6;s:4:"rows";a:5:{i:0;a:2:{s:13:"Variable_name";s:5:"total";s:5:"Value";s:1:"1";}i:1;a:2:{s:13:"Variable_name";s:11:"total_found";s:5:"Value";s:1:"1";}i:2;a:2:{s:13:"Variable_name";s:10:"keyword[0]";s:5:"Value";s:4:"=run";}i:3;a:2:{s:13:"Variable_name";s:7:"docs[0]";s:5:"Value";s:1:"1";}i:4;a:2:{s:13:"Variable_name";s:7:"hits[0]";s:5:"Value";s:1:"1";}}}i:29;a:3:{s:8:"sphinxql";s:37:"SELECT * FROM rt WHERE MATCH('=runs')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1695";s:3:"id1";s:1:"1";}}}i:30;a:3:{s:8:"sphinxql";s:9:"show meta";s:10:"total_rows";i:6;s:4:"rows";a:5:{i:0;a:2:{s:13:"Variable_name";s:5:"total";s:5:"Value";s:1:"1";}i:1;a:2:{s:13:"Variable_name";s:11:"total_found";s:5:"Value";s:1:"1";}i:2;a:2:{s:13:"Variable_name";s:10:"keyword[0]";s:5:"Value";s:5:"=runs";}i:3;a:2:{s:13:"Variable_name";s:7:"docs[0]";s:5:"Value";s:1:"1";}i:4;a:2:{s:13:"Variable_name";s:7:"hits[0]";s:5:"Value";s:1:"1";}}}}}sphinx-2.0.4-release/test/test_123/0000755000176700017710000000000011724063141016265 5ustar deogardeogarsphinx-2.0.4-release/test/test_123/test.xml0000644000176700017710000000116711414310750017770 0ustar deogardeogar snippets vs blend_chars searchd { } source test { type = mysql sql_query = SELECT 1, 'text'; } index test { source = test path = /test charset_type = utf-8 blend_chars = +, -, ., U+23,U+40 } select 1; BuildExcerpts($docs, 'test', $query ); ]]> sphinx-2.0.4-release/test/test_123/model.bin0000644000176700017710000000023511414310750020054 0ustar deogardeogara:1:{i:0;a:1:{i:0;a:1:{i:0;a:2:{i:0;s:25:"+ben is bad";i:1;s:72:"who is ben. green-nice is... 
as no-body bad.";}}}}sphinx-2.0.4-release/test/test_169/0000755000176700017710000000000011724063141016277 5ustar deogardeogarsphinx-2.0.4-release/test/test_169/refdata/0000755000176700017710000000000011724063141017705 5ustar deogardeogarsphinx-2.0.4-release/test/test_169/refdata/small1.spa0000644000176700017710000000003011533041453021574 0ustar deogardeogar sphinx-2.0.4-release/test/test_169/refdata/small2.spk0000644000176700017710000000000011533041453021604 0ustar deogardeogarsphinx-2.0.4-release/test/test_169/refdata/small1.spp0000644000176700017710000000006711533041453021625 0ustar deogardeogarˆ€€„€€ˆ€€ˆ€€ˆ€€ˆ€€„€€„€€ˆ€€ˆ€€sphinx-2.0.4-release/test/test_169/refdata/small1.spd0000644000176700017710000000026311533041453021607 0ustar deogardeogar    "    +sphinx-2.0.4-release/test/test_169/refdata/small2.spd0000644000176700017710000000044611533041453021613 0ustar deogardeogar    "(    1=sphinx-2.0.4-release/test/test_169/refdata/small1.spm0000644000176700017710000000000011533041453021605 0ustar deogardeogarsphinx-2.0.4-release/test/test_169/refdata/small2.spi0000644000176700017710000000042711533041453021617 0ustar deogardeogarª¿¢Àݘ ‡äÖX‹…×_£¬«ÞÑl¤ø‹'Ž„ï'µåÎP Œá“] ä“u”Š€Q‚ðíǸÅV¬¢ ä±¹lÛ‹V“õ—¦Öd™‘î~¿ÐÈr ¥à»: ¼©ì#¢¬€Pɦõ ŠÖÜy®ºñWÝï8 ªùÕ†þ ãü—{›‹åfùЈ7 ‡†ï2×ç ÍåäI ч  ÑOsphinx-2.0.4-release/test/test_169/refdata/small1.spi0000644000176700017710000000031311533041453021610 0ustar deogardeogarª¿¢ÈÁîf ‹…×_£¬ÐÖÝŽ„ï'µåÎP Œá“] ä“u ”Š€Q‚ðíǸÅV ë¿çY “õ— ÿ¸ÈT¥à»: ¼©ì#ëÒõp ¹‘ÎPÝï8 ªùÕ†þ ãü—{›‹åf‚€Ö÷i ×ç  ÑOsphinx-2.0.4-release/test/test_169/refdata/small2.sph0000644000176700017710000000053711533041453021620 0ustar deogardeogarSPHXtitleÿÿÿÿÿÿÿÿÿÿÿÿcontentÿÿÿÿÿÿÿÿÿÿÿÿgroup_id title Ástyle, script, head-, &, @, !, +, U+23 stem_enrusphinx-2.0.4-release/test/test_169/refdata/small2.spa0000644000176700017710000000006011533041453021600 0ustar deogardeogar sphinx-2.0.4-release/test/test_169/refdata/small1.sps0000644000176700017710000000002311533041453021620 0ustar deogardeogartest onetest twosphinx-2.0.4-release/test/test_169/refdata/small1.spk0000644000176700017710000000000011533041453021603 0ustar deogardeogarsphinx-2.0.4-release/test/test_169/refdata/small2.spm0000644000176700017710000000000011533041453021606 0ustar deogardeogarsphinx-2.0.4-release/test/test_169/refdata/small2.sps0000644000176700017710000000005711533041453021630 0ustar deogardeogartest onetest two another docdoc number foursphinx-2.0.4-release/test/test_169/refdata/small2.spp0000644000176700017710000000010311533041453021615 0ustar deogardeogarˆ€€„€€ˆ€€ˆ€€ˆ€€ˆ€€ˆ€€„€€„€€ˆ€€ˆ€€ˆ€€sphinx-2.0.4-release/test/test_169/refdata/small1.sph0000644000176700017710000000053711533041453021617 0ustar deogardeogarSPHXtitleÿÿÿÿÿÿÿÿÿÿÿÿcontentÿÿÿÿÿÿÿÿÿÿÿÿgroup_id title »|style, script, head-, &, @, !, +, U+23 stem_enrusphinx-2.0.4-release/test/test_169/test.xml0000644000176700017710000000340711533041453020004 0ustar deogardeogar rotation vs old index format searchd { } indexer { mem_limit = 16M } source dummy { type = mysql sql_query = select * from test_table sql_attr_uint = group_id sql_field_string = title } index small { source = dummy path = /small docinfo = extern } drop table if exists test_table create table test_table ( id INTEGER PRIMARY KEY NOT NULL AUTO_INCREMENT, group_id INTEGER NOT NULL, title VARCHAR(255) NOT NULL, content VARCHAR(1024) NOT NULL ) insert into test_table values ( 1, 1, 'test one', 'this is my test document number one. also checking search within phrases.' 
); Query ( $words ); if ( $result ) { unset ( $result["time"] ); return $result; } else return $client->GetLastError(); '); $results = array(); $errors = ''; $results[] = $query ( $client, '' ); $results[] = $query ( $client, 'test' ); foreach (explode(" ", "a d h i k m p s") as $c) exec ( "cp $this_test/refdata/small1.sp$c $index_data_path/small.new.sp$c" ); exec ( "kill -HUP `cat $sd_pid_file`" ); sleep ( 1 ); $results[] = $query ( $client, '' ); $results[] = $query ( $client, 'test' ); foreach (explode(" ", "a d h i k m p s") as $c) exec ( "cp $this_test/refdata/small2.sp$c $index_data_path/small.new.sp$c" ); exec ( "kill -HUP `cat $sd_pid_file`" ); sleep ( 1 ); $results[] = $query ( $client, '' ); $results[] = $query ( $client, 'test' ); ]]> sphinx-2.0.4-release/test/test_169/model.bin0000644000176700017710000000557411533041453020104 0ustar deogardeogara:1:{i:0;a:1:{i:0;a:6:{i:0;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:7:"content";}s:5:"attrs";a:2:{s:8:"group_id";i:1;s:5:"title";i:7;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:8:"group_id";i:1;s:5:"title";s:8:"test one";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:7:"content";}s:5:"attrs";a:2:{s:8:"group_id";i:1;s:5:"title";i:7;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:2:{s:8:"group_id";i:1;s:5:"title";s:8:"test one";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:1:{s:4:"test";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"2";}}}i:2;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:7:"content";}s:5:"attrs";a:2:{s:8:"group_id";i:1;s:5:"title";i:7;}s:7:"matches";a:2:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:8:"group_id";i:1;s:5:"title";s:8:"test one";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:8:"group_id";i:1;s:5:"title";s:8:"test two";}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";}i:3;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:7:"content";}s:5:"attrs";a:2:{s:8:"group_id";i:1;s:5:"title";i:7;}s:7:"matches";a:2:{i:1;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:2:{s:8:"group_id";i:1;s:5:"title";s:8:"test one";}}i:2;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:2:{s:8:"group_id";i:1;s:5:"title";s:8:"test two";}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:5:"words";a:1:{s:4:"test";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"4";}}}i:4;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:7:"content";}s:5:"attrs";a:2:{s:8:"group_id";i:1;s:5:"title";i:7;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:8:"group_id";i:1;s:5:"title";s:8:"test one";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:8:"group_id";i:1;s:5:"title";s:8:"test two";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:8:"group_id";i:2;s:5:"title";s:11:"another doc";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:8:"group_id";i:2;s:5:"title";s:15:"doc number four";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";}i:5;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:7:"content";}s:5:"attrs";a:2:{s:8:"group_id";i:1;s:5:"title";i:7;}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:2:{s:8:"group_id";i:1;s:5:"title";s:8:"test 
one";}}i:2;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:2:{s:8:"group_id";i:1;s:5:"title";s:8:"test two";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:8:"group_id";i:2;s:5:"title";s:15:"doc number four";}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:5:"words";a:1:{s:4:"test";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"5";}}}}}}sphinx-2.0.4-release/test/test_086/0000755000176700017710000000000011724063141016275 5ustar deogardeogarsphinx-2.0.4-release/test/test_086/test.xml0000644000176700017710000000265411421075337020011 0ustar deogardeogar index rotation vs pconns indexer { mem_limit = 16M } searchd { workers = none workers = threads binlog_path = workers = fork workers = prefork } source test1 { type = mysql sql_query = SELECT * FROM test_table sql_query_post_index = UPDATE test_table SET id=id+100 } index test1 { source = test1 path = /test1 } CREATE TABLE test_table ( id INTEGER PRIMARY KEY NOT NULL AUTO_INCREMENT, title varchar(255) NOT NULL ); DROP TABLE IF EXISTS test_table; INSERT INTO test_table VALUES ( 1, 'hier kommt die sonne' ), ( 2, 'hier kommt die sonne' ), ( 3, 'sie est der hellste stern von allen' ), ( 4, 'hier kommt die sonne' ); Open (); $res1 = $client->Query ( "sonne", "test1" ); unset ( $res1["time"] ); $rv = 0; $err = ""; exec ( $g_locals["indexer"]." --config config.conf --rotate --all", $err, $rv ); usleep ( 1500000 ); $res2 = $client->Query ( "sonne", "test1" ); unset ( $res2["time"] ); $client->Close(); $results = array ( $res1, $res2 ); if ( !$res1 || !$res2 || $rv!=0 ) $results = false; ]]> sphinx-2.0.4-release/test/test_086/model.bin0000644000176700017710000000650011327327724020101 0ustar deogardeogara:4:{i:0;a:1:{i:0;a:2:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:5:"words";a:1:{s:5:"sonne";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:101;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:102;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:104;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:5:"words";a:1:{s:5:"sonne";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}}}}i:1;a:1:{i:0;a:2:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:201;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:202;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:204;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:5:"words";a:1:{s:5:"sonne";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:301;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:302;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:304;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:5:"words";a:1:{s:5:"sonne";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}}}}i:2;a:1:{i:0;a:2:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:401;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:
0:{}}i:402;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:404;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:5:"words";a:1:{s:5:"sonne";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:501;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:502;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:504;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:5:"words";a:1:{s:5:"sonne";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}}}}i:3;a:1:{i:0;a:2:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:601;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:602;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:604;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:5:"words";a:1:{s:5:"sonne";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:701;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:702;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:704;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:5:"words";a:1:{s:5:"sonne";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}}}}}sphinx-2.0.4-release/test/test_179/0000755000176700017710000000000011724063141016300 5ustar deogardeogarsphinx-2.0.4-release/test/test_179/test.xml0000644000176700017710000000767411567450025020025 0ustar deogardeogar Multi queries + (plain|rt|dist)indexes + group by indexer { mem_limit = 16M } searchd { workers = threads } source test1 { type = mysql sql_query = select * from test_table1 sql_attr_uint = gid } source test2 { type = mysql sql_query = select * from test_table2 sql_attr_uint = gid } index test_plain { source = test1 path = /test_plain } index test_plain_for_dist { source = test2 path = /test_plain_for_dist } index test_rt { type = rt path = data/test_rt rt_attr_uint = gid rt_field = title } index test_dist { type = distributed agent = :test_plain_for_dist } CREATE TABLE test_table1 ( id INTEGER PRIMARY KEY NOT NULL, gid INTEGER NOT NULL, title VARCHAR(255) NOT NULL ); DROP TABLE IF EXISTS test_table1; INSERT INTO test_table1 VALUES ( 1, 1, 'word | 1' ); INSERT INTO test_table1 VALUES ( 2, 2, 'word | 2' ); INSERT INTO test_table1 VALUES ( 3, 2, 'word | 3' ); CREATE TABLE test_table2 ( id INTEGER PRIMARY KEY NOT NULL, gid INTEGER NOT NULL, title VARCHAR(255) NOT NULL ); DROP TABLE IF EXISTS test_table2; INSERT INTO test_table2 VALUES ( 7, 1, 'word | 7' ); INSERT INTO test_table2 VALUES ( 8, 2, 'word | 8' ); INSERT INTO test_table2 VALUES ( 9, 2, 'word | 9' ); SetGroupBy ( "gid", SPH_GROUPBY_ATTR ); $client->AddQuery ( $query, $index1 ); $client->ResetGroupBy(); $client->AddQuery ( $query, $index2 ); $sphinxql = @mysql_connect ( sprintf ( "%s:%s", $sd_address, $sd_sphinxql_port ) ); @mysql_query ( "DELETE FROM test_rt" ); @mysql_query ( "INSERT INTO test_rt ( id, gid, title ) VALUES ( 4, 1, \'word | 4\' )" ); @mysql_query ( "INSERT INTO test_rt ( id, gid, title ) VALUES ( 5, 2, \'word | 5\' )" ); @mysql_query ( "INSERT INTO test_rt ( id, gid, title ) VALUES ( 6, 2, \'word | 6\' )" ); $res = $client->RunQueries(); $query_res = $res[0]; { $results[] = sprintf ( "query: \"%s\" index: %s", $query, $index1 ); foreach ( 
$query_res["matches"] as $match ) $results[] = sprintf ( "\tid: %d, weight: %d, gid: %d, @groupby: %d, @count: %d", $match["id"], $match["weight"], $match["attrs"]["gid"], $match["attrs"]["@groupby"], $match["attrs"]["@count"] ); } $query_res = $res[1]; { $results[] = sprintf ( "query: \"%s\" index: %s", $query, $index2 ); foreach ( $query_res["matches"] as $match ) $results[] = sprintf ( "\tid: %d, weight: %d, gid: %d", $match["id"], $match["weight"], $match["attrs"]["gid"] ); } $results[] = ""; } ' ); $client->SetArrayResult(true); global $sd_address, $sd_sphinxql_port; $run_subtest ( $client, "test_plain", "test_plain", $results, $sd_address, $sd_sphinxql_port ); $run_subtest ( $client, "test_rt", "test_rt", $results, $sd_address, $sd_sphinxql_port ); $run_subtest ( $client, "test_dist", "test_dist", $results, $sd_address, $sd_sphinxql_port ); $run_subtest ( $client, "test_plain", "test_rt", $results, $sd_address, $sd_sphinxql_port ); $run_subtest ( $client, "test_plain", "test_dist", $results, $sd_address, $sd_sphinxql_port ); $run_subtest ( $client, "test_rt", "test_dist", $results, $sd_address, $sd_sphinxql_port ); $run_subtest ( $client, "test_rt", "test_plain", $results, $sd_address, $sd_sphinxql_port ); $run_subtest ( $client, "test_dist", "test_plain", $results, $sd_address, $sd_sphinxql_port ); $run_subtest ( $client, "test_dist", "test_rt", $results, $sd_address, $sd_sphinxql_port ); ]]> sphinx-2.0.4-release/test/test_179/model.bin0000644000176700017710000000573311567450025020110 0ustar deogardeogara:1:{i:0;a:1:{i:0;a:72:{i:0;s:31:"query: "word" index: test_plain";i:1;s:49:" id: 2, weight: 1, gid: 2, @groupby: 2, @count: 2";i:2;s:49:" id: 1, weight: 1, gid: 1, @groupby: 1, @count: 1";i:3;s:31:"query: "word" index: test_plain";i:4;s:25:" id: 1, weight: 1, gid: 1";i:5;s:25:" id: 2, weight: 1, gid: 2";i:6;s:25:" id: 3, weight: 1, gid: 2";i:7;s:0:"";i:8;s:28:"query: "word" index: test_rt";i:9;s:49:" id: 5, weight: 1, gid: 2, @groupby: 2, @count: 2";i:10;s:49:" id: 4, weight: 1, gid: 1, @groupby: 1, @count: 1";i:11;s:28:"query: "word" index: test_rt";i:12;s:25:" id: 4, weight: 1, gid: 1";i:13;s:25:" id: 5, weight: 1, gid: 2";i:14;s:25:" id: 6, weight: 1, gid: 2";i:15;s:0:"";i:16;s:30:"query: "word" index: test_dist";i:17;s:49:" id: 8, weight: 1, gid: 2, @groupby: 2, @count: 2";i:18;s:49:" id: 7, weight: 1, gid: 1, @groupby: 1, @count: 1";i:19;s:30:"query: "word" index: test_dist";i:20;s:25:" id: 7, weight: 1, gid: 1";i:21;s:25:" id: 8, weight: 1, gid: 2";i:22;s:25:" id: 9, weight: 1, gid: 2";i:23;s:0:"";i:24;s:31:"query: "word" index: test_plain";i:25;s:49:" id: 2, weight: 1, gid: 2, @groupby: 2, @count: 2";i:26;s:49:" id: 1, weight: 1, gid: 1, @groupby: 1, @count: 1";i:27;s:28:"query: "word" index: test_rt";i:28;s:25:" id: 4, weight: 1, gid: 1";i:29;s:25:" id: 5, weight: 1, gid: 2";i:30;s:25:" id: 6, weight: 1, gid: 2";i:31;s:0:"";i:32;s:31:"query: "word" index: test_plain";i:33;s:49:" id: 2, weight: 1, gid: 2, @groupby: 2, @count: 2";i:34;s:49:" id: 1, weight: 1, gid: 1, @groupby: 1, @count: 1";i:35;s:30:"query: "word" index: test_dist";i:36;s:25:" id: 7, weight: 1, gid: 1";i:37;s:25:" id: 8, weight: 1, gid: 2";i:38;s:25:" id: 9, weight: 1, gid: 2";i:39;s:0:"";i:40;s:28:"query: "word" index: test_rt";i:41;s:49:" id: 5, weight: 1, gid: 2, @groupby: 2, @count: 2";i:42;s:49:" id: 4, weight: 1, gid: 1, @groupby: 1, @count: 1";i:43;s:30:"query: "word" index: test_dist";i:44;s:25:" id: 7, weight: 1, gid: 1";i:45;s:25:" id: 8, weight: 1, gid: 2";i:46;s:25:" id: 9, weight: 1, gid: 
2";i:47;s:0:"";i:48;s:28:"query: "word" index: test_rt";i:49;s:49:" id: 5, weight: 1, gid: 2, @groupby: 2, @count: 2";i:50;s:49:" id: 4, weight: 1, gid: 1, @groupby: 1, @count: 1";i:51;s:31:"query: "word" index: test_plain";i:52;s:25:" id: 1, weight: 1, gid: 1";i:53;s:25:" id: 2, weight: 1, gid: 2";i:54;s:25:" id: 3, weight: 1, gid: 2";i:55;s:0:"";i:56;s:30:"query: "word" index: test_dist";i:57;s:49:" id: 8, weight: 1, gid: 2, @groupby: 2, @count: 2";i:58;s:49:" id: 7, weight: 1, gid: 1, @groupby: 1, @count: 1";i:59;s:31:"query: "word" index: test_plain";i:60;s:25:" id: 1, weight: 1, gid: 1";i:61;s:25:" id: 2, weight: 1, gid: 2";i:62;s:25:" id: 3, weight: 1, gid: 2";i:63;s:0:"";i:64;s:30:"query: "word" index: test_dist";i:65;s:49:" id: 8, weight: 1, gid: 2, @groupby: 2, @count: 2";i:66;s:49:" id: 7, weight: 1, gid: 1, @groupby: 1, @count: 1";i:67;s:28:"query: "word" index: test_rt";i:68;s:25:" id: 4, weight: 1, gid: 1";i:69;s:25:" id: 5, weight: 1, gid: 2";i:70;s:25:" id: 6, weight: 1, gid: 2";i:71;s:0:"";}}}sphinx-2.0.4-release/test/test_182/0000755000176700017710000000000011724063141016272 5ustar deogardeogarsphinx-2.0.4-release/test/test_182/test.xml0000644000176700017710000000630511634664540020011 0ustar deogardeogar attributes update vs workers vs API and SphinxQL indexer { mem_limit = 16M } searchd { attr_flush_period = 10000 workers = threads workers = prefork workers = fork } source src { type = mysql sql_query = SELECT id, text, 1 as gid, mva1 FROM test_table sql_attr_uint = gid sql_attr_multi = uint mva1 from field mva1 sql_attr_multi = bigint mva1 from field mva1 } index idx { source = src path = /idx charset_type = utf-8 docinfo = extern } GetLastError()); global $sd_address, $sd_sphinxql_port; $sockStr = "$sd_address:$sd_sphinxql_port"; if ($sd_address == "localhost") $sockStr = "127.0.0.1:$sd_sphinxql_port"; $sock = @mysql_connect ($sockStr,"","", true ); if ( $sock === false ) { $results[] = "error: can not connect to searchd: " . @mysql_errno ( $sock ) . " : " . @mysql_error ( $sock ); return; } $res = @mysql_query ( "select * from idx" ); while ($row = @mysql_fetch_array($res, MYSQL_ASSOC)) { $line = ""; foreach ($row as $key => $value) $line .= $value . "; "; $results[] = $line; } return $results; '); global $sd_address, $sd_sphinxql_port; $sockStr = "$sd_address:$sd_sphinxql_port"; if ($sd_address == "localhost") $sockStr = "127.0.0.1:$sd_sphinxql_port"; $sock = @mysql_connect ($sockStr,'','', true ); if ( $sock === false ) { $results[] = "error: can't connect to searchd: " . @mysql_errno ( $sock ) . " : " . 
@mysql_error ( $sock ); return; } @mysql_query ( 'update idx set mva1=(3,2, 1, 2), mva1=(1, 2) where id=1' ); @mysql_close($sock); $results = array_merge ( $results, $restartD ( $client ) ); $up = $client->UpdateAttributes ( "idx", array("mva1"), array(1=>array(array(2,3,4)), 3=>array(array(6,7,8))),true); if ( $up >= 0 ) $results[] = sprintf("up.ok=%d", $up); else $results[] = sprintf("up.err=%s", $client->GetLastError()); $results = array_merge ( $results, $restartD ( $client ) ); // final fixup; we don't want to compare times for ( $i=0; $i CREATE TABLE `test_table` ( `id` int(11) DEFAULT NULL, `text` varchar (255) NOT NULL, `mva1` varchar(255) NOT NULL ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` (`id`, `text`, `mva1`) VALUES (1, 'test1', '1001'), (2, 'test2', '1002 1023 4456'), (3, 'test3', '1003 1008 1010'), (4, 'test4', '1004 1005 1006'); sphinx-2.0.4-release/test/test_182/model.bin0000644000176700017710000000425411605620330020066 0ustar deogardeogara:6:{i:0;a:1:{i:0;a:13:{i:0;i:0;i:1;s:10:"started=ok";i:2;s:14:"1; 1; 1; 1,2; ";i:3;s:25:"2; 1; 1; 1002,1023,4456; ";i:4;s:25:"3; 1; 1; 1003,1008,1010; ";i:5;s:25:"4; 1; 1; 1004,1005,1006; ";i:6;s:7:"up.ok=2";i:7;i:0;i:8;s:10:"started=ok";i:9;s:16:"1; 1; 1; 2,3,4; ";i:10;s:25:"2; 1; 1; 1002,1023,4456; ";i:11;s:16:"3; 1; 1; 6,7,8; ";i:12;s:25:"4; 1; 1; 1004,1005,1006; ";}}i:1;a:1:{i:0;a:13:{i:0;i:0;i:1;s:10:"started=ok";i:2;s:14:"1; 1; 1; 1,2; ";i:3;s:25:"2; 1; 1; 1002,1023,4456; ";i:4;s:25:"3; 1; 1; 1003,1008,1010; ";i:5;s:25:"4; 1; 1; 1004,1005,1006; ";i:6;s:7:"up.ok=2";i:7;i:0;i:8;s:10:"started=ok";i:9;s:16:"1; 1; 1; 2,3,4; ";i:10;s:25:"2; 1; 1; 1002,1023,4456; ";i:11;s:16:"3; 1; 1; 6,7,8; ";i:12;s:25:"4; 1; 1; 1004,1005,1006; ";}}i:2;a:1:{i:0;a:13:{i:0;i:0;i:1;s:10:"started=ok";i:2;s:14:"1; 1; 1; 1,2; ";i:3;s:25:"2; 1; 1; 1002,1023,4456; ";i:4;s:25:"3; 1; 1; 1003,1008,1010; ";i:5;s:25:"4; 1; 1; 1004,1005,1006; ";i:6;s:7:"up.ok=2";i:7;i:0;i:8;s:10:"started=ok";i:9;s:16:"1; 1; 1; 2,3,4; ";i:10;s:25:"2; 1; 1; 1002,1023,4456; ";i:11;s:16:"3; 1; 1; 6,7,8; ";i:12;s:25:"4; 1; 1; 1004,1005,1006; ";}}i:3;a:1:{i:0;a:13:{i:0;i:0;i:1;s:10:"started=ok";i:2;s:14:"1; 1; 1; 1,2; ";i:3;s:25:"2; 1; 1; 1002,1023,4456; ";i:4;s:25:"3; 1; 1; 1003,1008,1010; ";i:5;s:25:"4; 1; 1; 1004,1005,1006; ";i:6;s:7:"up.ok=2";i:7;i:0;i:8;s:10:"started=ok";i:9;s:16:"1; 1; 1; 2,3,4; ";i:10;s:25:"2; 1; 1; 1002,1023,4456; ";i:11;s:16:"3; 1; 1; 6,7,8; ";i:12;s:25:"4; 1; 1; 1004,1005,1006; ";}}i:4;a:1:{i:0;a:13:{i:0;i:0;i:1;s:10:"started=ok";i:2;s:14:"1; 1; 1; 1,2; ";i:3;s:25:"2; 1; 1; 1002,1023,4456; ";i:4;s:25:"3; 1; 1; 1003,1008,1010; ";i:5;s:25:"4; 1; 1; 1004,1005,1006; ";i:6;s:7:"up.ok=2";i:7;i:0;i:8;s:10:"started=ok";i:9;s:16:"1; 1; 1; 2,3,4; ";i:10;s:25:"2; 1; 1; 1002,1023,4456; ";i:11;s:16:"3; 1; 1; 6,7,8; ";i:12;s:25:"4; 1; 1; 1004,1005,1006; ";}}i:5;a:1:{i:0;a:13:{i:0;i:0;i:1;s:10:"started=ok";i:2;s:14:"1; 1; 1; 1,2; ";i:3;s:25:"2; 1; 1; 1002,1023,4456; ";i:4;s:25:"3; 1; 1; 1003,1008,1010; ";i:5;s:25:"4; 1; 1; 1004,1005,1006; ";i:6;s:7:"up.ok=2";i:7;i:0;i:8;s:10:"started=ok";i:9;s:16:"1; 1; 1; 2,3,4; ";i:10;s:25:"2; 1; 1; 1002,1023,4456; ";i:11;s:16:"3; 1; 1; 6,7,8; ";i:12;s:25:"4; 1; 1; 1004,1005,1006; ";}}}sphinx-2.0.4-release/test/test_054/0000755000176700017710000000000011724063141016270 5ustar deogardeogarsphinx-2.0.4-release/test/test_054/test.xml0000644000176700017710000000304711470740627020006 0ustar deogardeogar quorum indexer { mem_limit = 16M } searchd { } source test { type = mysql sql_query = SELECT * FROM test_table } index test { 
source = test path = /test } index test_crc { source = test path = /crc dict = crc min_prefix_len = 1 enable_star = 1 } index test_kw { source = test path = /kw dict = keywords min_prefix_len = 1 enable_star = 1 } CREATE TABLE test_table ( document_id INT NOT NULL, text VARCHAR(255) NOT NULL ); DROP TABLE IF EXISTS test_table; INSERT INTO test_table ( document_id, text ) VALUES ( 1, 'hello world' ), ( 2, 'one two three four five' ); "hello heaven"/1 "hello from above"/2 "one two foo bar"/3 "one two two bar"/3 "one two two bar"/2 "two two one three"/3 "two two one foo"/3 "o* t*"/2 "o* t*"/2 "h* w* f*"/2 "h* w* f*"/2 sphinx-2.0.4-release/test/test_054/model.bin0000644000176700017710000001274011470740627020077 0ustar deogardeogara:1:{i:0;a:11:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1571";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.151";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:6:"heaven";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:16:""hello heaven"/1";}i:1;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.004";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:4:"from";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"above";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:20:""hello from above"/2";}i:2;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.005";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"two";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"foo";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:3:"bar";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:19:""one two foo bar"/3";}i:3;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.006";s:5:"words";a:3:{s:3:"one";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"two";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"bar";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:19:""one two two bar"/3";}i:4;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:4:"2595";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.006";s:5:"words";a:3:{s:3:"one";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"two";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"bar";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:19:""one two two 
bar"/2";}i:5;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:4:"1643";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.007";s:5:"words";a:3:{s:3:"two";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"one";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"three";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:21:""two two one three"/3";}i:6;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.006";s:5:"words";a:3:{s:3:"two";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"one";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"foo";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:19:""two two one foo"/3";}i:7;a:13:{s:5:"error";s:0:"";s:7:"warning";s:90:"quorum threshold too high (words=2, thresh=2); replacing quorum operator with AND operator";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:4:"2670";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.012";s:5:"words";a:2:{s:2:"o*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:2:"t*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:9:""o* t*"/2";}i:8;a:13:{s:5:"error";s:0:"";s:7:"warning";s:90:"quorum threshold too high (words=2, thresh=2); replacing quorum operator with AND operator";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:4:"2643";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.063";s:5:"words";a:3:{s:3:"one";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"two";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"three";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:9:""o* t*"/2";}i:9;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"2595";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.053";s:5:"words";a:3:{s:2:"h*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:2:"w*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:2:"f*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:12:""h* w* f*"/2";}i:10;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"2571";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:4:{s:5:"hello";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:4:"four";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:4:"five";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:12:""h* w* f*"/2";}}}sphinx-2.0.4-release/test/test_141/0000755000176700017710000000000011724063141016265 5ustar deogardeogarsphinx-2.0.4-release/test/test_141/test.xml0000644000176700017710000000173711434476530020006 0ustar deogardeogar Crashing of indexer id64 with keywords dict indexer { mem_limit = 64M } searchd { } source srcmain { type = mysql sql_query = 
SELECT * FROM table141 sql_attr_uint = uint1 } index main { source = srcmain path = /main charset_type = utf-8 dict = keywords } select * from main where match('00001') CREATE TABLE `table141` ( `document_id` int(11) NOT NULL default '0', `uint1` int(11) NOT NULL default '0', `body` varchar(25) NOT NULL default '' ) DROP TABLE IF EXISTS `table141` INSERT INTO `table141` VALUES (1, 1, '00001'), (2, 2, '00002'), (3, 3, '00003') sphinx-2.0.4-release/test/test_141/model.bin0000644000176700017710000000030611455516446020072 0ustar deogardeogara:1:{i:0;a:1:{i:0;a:3:{s:8:"sphinxql";s:39:"select * from main where match('00001')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1680";s:5:"uint1";s:1:"1";}}}}}sphinx-2.0.4-release/test/test_138/0000755000176700017710000000000011724063141016273 5ustar deogardeogarsphinx-2.0.4-release/test/test_138/test.xml0000644000176700017710000000333211432313515017774 0ustar deogardeogar quorum vs decreased matched word indexer { mem_limit = 16M } searchd { } source test1 { type = mysql sql_query = SELECT * FROM test_table1 } index test1 { source = test1 path = /test1 docinfo = extern } source test2 { type = mysql sql_query = SELECT * FROM test_table2 } index test2 { source = test2 path = /test2 docinfo = extern } CREATE TABLE test_table1 ( document_id INT NOT NULL, text VARCHAR(255) NOT NULL ); DROP TABLE IF EXISTS test_table1; CREATE TABLE test_table2 ( document_id INT NOT NULL, text VARCHAR(255) NOT NULL ); DROP TABLE IF EXISTS test_table2; 'world space', 2=>'one', 3=>'two', 4=>'world', 5=>'space', 6=>'unused1', 7=>'unused2' ); foreach ( $sql_data as $key => $value ) { $text = sprintf ( "insert into test_table1 values ( %d, '%s' )", $key, $value ); mysql_query ( $text ); } $text = sprintf ( "insert into test_table2 values ( 1, '%s' )", $sql_data[1] ); for ( $i=1; $i < 513; $i++ ) { $text = ''; $text = sprintf ( "insert into test_table2 values ( %d, '%s' )", $i, $sql_data[1] ); mysql_query ( $text ); } foreach ( $sql_data as $key => $value ) { $text = sprintf ( "insert into test_table2 values ( %d, '%s' )", 600+$key, $value ); mysql_query ( $text ); } ]]> "one two unused1 unused2 space world"/2 "one two unused1 unused2 space world"/2 sphinx-2.0.4-release/test/test_138/model.bin0000644000176700017710000007050611432313515020074 0ustar deogardeogara:1:{i:0;a:2:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1540";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:6:{s:3:"one";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"two";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:7:"unused1";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:7:"unused2";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"space";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:5:"world";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:39:""one two unused1 unused2 space 
world"/2";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:513:{i:1;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:6;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:7;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:8;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:9;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:17;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:18;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:19;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:20;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:21;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:22;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:23;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:24;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:25;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:26;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:27;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:28;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:29;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:30;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:31;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:32;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:33;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:34;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:35;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:36;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:37;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:38;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:39;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:40;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:41;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:42;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:43;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:44;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:45;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:46;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:47;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:48;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:49;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:50;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:51;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:52;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:53;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:54;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:55;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:56;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:57;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:58;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:59;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:60;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:61;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:62;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:63;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:64;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:65;a:2:{s:6:"weight
";s:4:"1446";s:5:"attrs";a:0:{}}i:66;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:67;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:68;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:69;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:70;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:71;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:72;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:73;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:74;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:75;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:76;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:77;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:78;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:79;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:80;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:81;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:82;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:83;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:84;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:85;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:86;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:87;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:88;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:89;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:90;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:91;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:92;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:93;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:94;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:95;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:96;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:97;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:98;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:99;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:100;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:101;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:102;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:103;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:104;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:105;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:106;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:107;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:108;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:109;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:110;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:111;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:112;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:113;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:114;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:115;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:116;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:117;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:118;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:119;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:120;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:121;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:122;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:123;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:124;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:125;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:126;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:127;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:128;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:129;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:130;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:131;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";
a:0:{}}i:132;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:133;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:134;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:135;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:136;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:137;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:138;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:139;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:140;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:141;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:142;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:143;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:144;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:145;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:146;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:147;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:148;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:149;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:150;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:151;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:152;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:153;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:154;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:155;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:156;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:157;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:158;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:159;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:160;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:161;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:162;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:163;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:164;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:165;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:166;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:167;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:168;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:169;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:170;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:171;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:172;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:173;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:174;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:175;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:176;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:177;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:178;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:179;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:180;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:181;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:182;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:183;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:184;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:185;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:186;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:187;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:188;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:189;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:190;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:191;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:192;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:193;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:194;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:195;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:196;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:197;a:2:{s:6:"weight";s:4:"1446";s:5
:"attrs";a:0:{}}i:198;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:199;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:200;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:201;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:202;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:203;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:204;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:205;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:206;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:207;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:208;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:209;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:210;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:211;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:212;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:213;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:214;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:215;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:216;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:217;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:218;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:219;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:220;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:221;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:222;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:223;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:224;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:225;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:226;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:227;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:228;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:229;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:230;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:231;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:232;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:233;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:234;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:235;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:236;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:237;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:238;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:239;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:240;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:241;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:242;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:243;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:244;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:245;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:246;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:247;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:248;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:249;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:250;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:251;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:252;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:253;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:254;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:255;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:256;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:257;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:258;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:259;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:260;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:261;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:262;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:263;a:2:{s:6:"weight";s:4:"
1446";s:5:"attrs";a:0:{}}i:264;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:265;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:266;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:267;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:268;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:269;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:270;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:271;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:272;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:273;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:274;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:275;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:276;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:277;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:278;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:279;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:280;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:281;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:282;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:283;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:284;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:285;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:286;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:287;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:288;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:289;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:290;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:291;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:292;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:293;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:294;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:295;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:296;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:297;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:298;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:299;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:300;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:301;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:302;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:303;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:304;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:305;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:306;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:307;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:308;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:309;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:310;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:311;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:312;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:313;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:314;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:315;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:316;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:317;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:318;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:319;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:320;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:321;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:322;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:323;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:324;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:325;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:326;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:327;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:328;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:329;a:2:{s:6:"weig
ht";s:4:"1446";s:5:"attrs";a:0:{}}i:330;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:331;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:332;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:333;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:334;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:335;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:336;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:337;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:338;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:339;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:340;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:341;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:342;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:343;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:344;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:345;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:346;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:347;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:348;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:349;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:350;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:351;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:352;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:353;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:354;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:355;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:356;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:357;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:358;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:359;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:360;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:361;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:362;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:363;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:364;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:365;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:366;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:367;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:368;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:369;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:370;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:371;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:372;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:373;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:374;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:375;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:376;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:377;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:378;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:379;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:380;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:381;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:382;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:383;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:384;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:385;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:386;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:387;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:388;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:389;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:390;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:391;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:392;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:393;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:394;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:395;a:2:{
s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:396;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:397;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:398;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:399;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:400;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:401;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:402;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:403;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:404;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:405;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:406;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:407;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:408;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:409;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:410;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:411;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:412;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:413;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:414;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:415;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:416;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:417;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:418;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:419;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:420;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:421;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:422;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:423;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:424;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:425;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:426;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:427;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:428;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:429;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:430;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:431;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:432;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:433;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:434;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:435;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:436;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:437;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:438;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:439;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:440;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:441;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:442;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:443;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:444;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:445;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:446;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:447;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:448;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:449;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:450;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:451;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:452;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:453;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:454;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:455;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:456;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:457;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:458;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:459;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:460;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:
461;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:462;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:463;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:464;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:465;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:466;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:467;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:468;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:469;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:470;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:471;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:472;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:473;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:474;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:475;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:476;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:477;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:478;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:479;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:480;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:481;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:482;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:483;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:484;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:485;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:486;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:487;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:488;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:489;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:490;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:491;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:492;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:493;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:494;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:495;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:496;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:497;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:498;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:499;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:500;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:501;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:502;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:503;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:504;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:505;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:506;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:507;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:508;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:509;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:510;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:511;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:512;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}i:601;a:2:{s:6:"weight";s:4:"1446";s:5:"attrs";a:0:{}}}s:5:"total";s:3:"513";s:11:"total_found";s:3:"513";s:4:"time";s:5:"0.012";s:5:"words";a:6:{s:3:"one";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"two";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:7:"unused1";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:7:"unused2";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"space";a:2:{s:4:"docs";s:3:"514";s:4:"hits";s:3:"514";}s:5:"world";a:2:{s:4:"docs";s:3:"514";s:4:"hits";s:3:"514";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:39:""one two unused1 unused2 space world"/2";}}}sphinx-2.0.4-release/test/html_120.txt0000644000176700017710000000010711401115433017000 0ustar deogardeogar

    That paper also reminds me end point
    cool friendsphinx-2.0.4-release/test/test_024/0000755000176700017710000000000011724063141016265 5ustar deogardeogarsphinx-2.0.4-release/test/test_024/test.xml0000644000176700017710000000465311004433405017771 0ustar deogardeogar min_word_len vs queries (part 2) indexer { mem_limit = 16M } searchd { } source srctest { type = mysql sql_query = SELECT * FROM test_table } index test { source = srctest path = /test charset_type = sbcs enable_star = 1 min_word_len = 3 min_word_len = 4 min_word_len = 5 min_prefix_len = 3 min_prefix_len = 4 min_prefix_len = 5 } a bb ccc dddd eeeee ffffff ggggggg b* cc* ddd* eeee* fffff* gggggg* a bb ccc dddd eeeee ffffff ggggggg b* cc* ddd* eeee* fffff* gggggg* hello me world hello two world hello four world hello me* world hello two* world hello four* world CREATE TABLE `test_table` ( `document_id` int(11) NOT NULL default '0', `body` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 1, 'a' ), ( 2, 'bb' ), ( 3, 'ccc' ), ( 4, 'dddd' ), ( 5, 'eeeee' ), ( 6, 'ffffff' ), ( 7, 'ggggggg' ), ( 8, 'hello world' ), ( 9, 'hello a world' ), ( 10, 'hello aa world' ), ( 11, 'hello aaa world' ), ( 12, 'hello aaaa world' ), ( 13, 'hello aaaaa world' ), ( 14, 'hello me world' ), ( 15, 'hello two world' ), ( 16, 'hello four world' ) sphinx-2.0.4-release/test/test_024/model.bin0000644000176700017710000032033111004433405020054 0ustar deogardeogara:9:{i:0;a:32:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:2;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"ccc";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"ccc";}i:3;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dddd";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"dddd";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:6;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"wo
rds";a:1:{s:7:"ggggggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:7;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:8;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:9;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"ddd*";}i:10;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeee*";}i:11;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:12;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:13;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:14;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:15;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.013";s:5:"words";a:1:{s:3:"ccc";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"ccc";}i:16;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"dddd";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"dddd";}i:17;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:
"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:18;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:19;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:20;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:21;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:22;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"ddd*";}i:23;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeee*";}i:24;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:25;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:26;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:
5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:14:"hello me world";}i:27;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:15;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:3:"two";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello two world";}i:28;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:16;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:4:"four";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello four world";}i:29;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'me*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:3:"me*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello me* world";}i:30;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:15;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:4:"two*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello two* world";}i:31;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:16;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"four*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:17:"hello four* 
world";}}i:1;a:32:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:2;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:3:"ccc";}i:3;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dddd";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"dddd";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:6;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:7;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:8;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. 
word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:9;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"ddd*";}i:10;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeee*";}i:11;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:12;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:13;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:14;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:15;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:3:"ccc";}i:16;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"dddd";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"dddd";}i:17;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:18;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:19;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"wei
ght";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:20;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:21;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:22;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"ddd*";}i:23;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeee*";}i:24;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:25;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:26;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:14:"hello me 
world";}i:27;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello two world";}i:28;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:16;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:4:"four";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello four world";}i:29;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'me*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:3:"me*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello me* world";}i:30;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:4:"two*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello two* world";}i:31;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:16;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"four*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:17:"hello four* 
world";}}i:2;a:32:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:2;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:3:"ccc";}i:3;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:4:"dddd";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:6;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:7;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:8;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. 
word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:9;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"ddd*";}i:10;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeee*";}i:11;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:12;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:13;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:14;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:15;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:3:"ccc";}i:16;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:4:"dddd";}i:17;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:18;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:19;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:20;a
:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:21;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:22;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"ddd*";}i:23;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeee*";}i:24;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:25;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:26;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:14:"hello me 
world";}i:27;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello two world";}i:28;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello four world";}i:29;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'me*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:3:"me*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello me* world";}i:30;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:4:"two*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello two* world";}i:31;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"four*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:17:"hello four* 
world";}}i:3;a:32:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:2;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"ccc";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"ccc";}i:3;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dddd";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"dddd";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:6;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:7;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:8;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:9;a:10:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. 
word: 'ddd*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"ddd*";}i:10;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeee*";}i:11;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:12;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:13;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:14;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:15;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:3:"ccc";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"ccc";}i:16;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"dddd";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"dddd";}i:17;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:18;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:19;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg"
;}i:20;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:21;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:22;a:10:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. word: 'ddd*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"ddd*";}i:23;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeee*";}i:24;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:25;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:26;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:14:"hello me world";}i:27;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:15;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:3:"two";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello two 
world";}i:28;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:16;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:4:"four";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello four world";}i:29;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'me*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:3:"me*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello me* world";}i:30;a:11:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. word: 'two*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:15;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:4:"two*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello two* world";}i:31;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:16;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"four*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:17:"hello four* 
world";}}i:4;a:32:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:2;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:3:"ccc";}i:3;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dddd";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"dddd";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:6;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:7;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:8;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:9;a:10:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. 
word: 'ddd*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"ddd*";}i:10;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeee*";}i:11;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:12;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:13;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:14;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:15;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:3:"ccc";}i:16;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"dddd";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"dddd";}i:17;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:18;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:19;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:20;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. 
word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:21;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:22;a:10:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. word: 'ddd*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"ddd*";}i:23;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeee*";}i:24;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:25;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:26;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:14:"hello me 
world";}i:27;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello two world";}i:28;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:16;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:4:"four";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello four world";}i:29;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'me*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:3:"me*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello me* world";}i:30;a:10:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. 
word: 'two*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:4:"two*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello two* world";}i:31;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:16;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"four*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:17:"hello four* world";}}i:5;a:32:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:2;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:3:"ccc";}i:3;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:4:"dddd";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:6;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:7;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:8;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. 
word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:9;a:10:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. word: 'ddd*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"ddd*";}i:10;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeee*";}i:11;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:12;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:13;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:14;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:15;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:3:"ccc";}i:16;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:4:"dddd";}i:17;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:18;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:19;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs
";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:20;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:21;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:22;a:10:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. word: 'ddd*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"ddd*";}i:23;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeee*";}i:24;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:25;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:26;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:14:"hello me 
world";}i:27;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello two world";}i:28;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello four world";}i:29;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'me*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:3:"me*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello me* world";}i:30;a:10:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. 
word: 'two*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:4:"two*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello two* world";}i:31;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"four*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:17:"hello four* world";}}i:6;a:32:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:2;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"ccc";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"ccc";}i:3;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"dddd";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"dddd";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:6;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:7;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:8;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. 
word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:9;a:10:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. word: 'ddd*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"ddd*";}i:10;a:10:{s:5:"error";s:0:"";s:7:"warning";s:64:"Query word length is less than min prefix length. word: 'eeee*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"eeee*";}i:11;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:12;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:13;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:14;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:15;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:3:"ccc";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"ccc";}i:16;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"dddd";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"dddd";}i:17;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:18;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:19;a:11:{s:5:"erro
r";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:20;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:21;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:22;a:10:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. word: 'ddd*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"ddd*";}i:23;a:10:{s:5:"error";s:0:"";s:7:"warning";s:64:"Query word length is less than min prefix length. word: 'eeee*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"eeee*";}i:24;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:25;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:26;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:14:"hello me 
world";}i:27;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:15;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:3:"two";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello two world";}i:28;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:16;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:4:"four";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello four world";}i:29;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'me*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:3:"me*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello me* world";}i:30;a:11:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. word: 'two*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:15;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:4:"two*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello two* world";}i:31;a:11:{s:5:"error";s:0:"";s:7:"warning";s:64:"Query word length is less than min prefix length. 
word: 'four*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:16;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"four*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:17:"hello four* world";}}i:7;a:32:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:2;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:3:"ccc";}i:3;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dddd";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"dddd";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:6;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:7;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:8;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:9;a:10:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. 
word: 'ddd*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"ddd*";}i:10;a:10:{s:5:"error";s:0:"";s:7:"warning";s:64:"Query word length is less than min prefix length. word: 'eeee*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"eeee*";}i:11;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:12;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:13;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:14;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:15;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:3:"ccc";}i:16;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"dddd";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"dddd";}i:17;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:18;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:19;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:20;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. 
word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:21;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:22;a:10:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. word: 'ddd*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"ddd*";}i:23;a:10:{s:5:"error";s:0:"";s:7:"warning";s:64:"Query word length is less than min prefix length. word: 'eeee*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"eeee*";}i:24;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:25;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:26;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:14:"hello me 
world";}i:27;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello two world";}i:28;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:16;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:4:"four";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello four world";}i:29;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'me*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:3:"me*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello me* world";}i:30;a:10:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. word: 'two*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:4:"two*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello two* world";}i:31;a:11:{s:5:"error";s:0:"";s:7:"warning";s:64:"Query word length is less than min prefix length. 
word: 'four*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:16;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"four*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:17:"hello four* world";}}i:8;a:32:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:2;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:3:"ccc";}i:3;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:4:"dddd";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:6;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:7;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:8;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:9;a:10:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. word: 'ddd*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"ddd*";}i:10;a:10:{s:5:"error";s:0:"";s:7:"warning";s:64:"Query word length is less than min prefix length. 
word: 'eeee*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"eeee*";}i:11;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:12;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:13;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:14;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:15;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:3:"ccc";}i:16;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:4:"dddd";}i:17;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:18;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:19;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:20;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:21;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. 
word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:22;a:10:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. word: 'ddd*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"ddd*";}i:23;a:10:{s:5:"error";s:0:"";s:7:"warning";s:64:"Query word length is less than min prefix length. word: 'eeee*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"eeee*";}i:24;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:25;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:26;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:14:"hello me world";}i:27;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello two 
world";}i:28;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello four world";}i:29;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'me*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:3:"me*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello me* world";}i:30;a:10:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. word: 'two*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:4:"two*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello two* world";}i:31;a:10:{s:5:"error";s:0:"";s:7:"warning";s:64:"Query word length is less than min prefix length. 
word: 'four*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"four*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:17:"hello four* world";}}}sphinx-2.0.4-release/test/test_033/0000755000176700017710000000000011724063141016265 5ustar deogardeogarsphinx-2.0.4-release/test/test_033/test.xml0000644000176700017710000000174411053343313017771 0ustar deogardeogar ignore_chars vs specials indexer { mem_limit = 16M } searchd { } source srctest { type = mysql sql_query = SELECT * FROM test_table } index test { source = srctest path = /test charset_type = utf-8 ignore_chars = U+002D } hello -world -hello world hello world CREATE TABLE `test_table` ( `document_id` int(11) NOT NULL default '0', `body` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 1, 'hello world' ), ( 2, 'hello' ), ( 3, 'world' ) sphinx-2.0.4-release/test/test_033/model.bin0000644000176700017710000000501011053343313020050 0ustar deogardeogara:2:{i:0;a:3:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:4:"1500";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.033";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:5:"world";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:5:"query";s:12:"hello -world";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:4:"1500";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"world";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:5:"hello";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:5:"query";s:12:"-hello world";}i:2;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"2500";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:5:"world";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:5:"query";s:11:"hello world";}}i:1;a:3:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:4:"1500";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:5:"world";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:5:"query";s:12:"hello -world";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:4:"1500";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"world";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:5:"hello";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:5:"query";s:12:"-hello 
world";}i:2;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"2500";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:5:"world";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:5:"query";s:11:"hello world";}}}sphinx-2.0.4-release/test/test_158/0000755000176700017710000000000011724063141016275 5ustar deogardeogarsphinx-2.0.4-release/test/test_158/test.xml0000644000176700017710000000242511500145433017776 0ustar deogardeogar multiforms vs quorum indexer { mem_limit = 16M } searchd { workers = threads } index rt { type = rt docinfo = extern charset_type = utf-8 path = /rt wordforms = wordforms.txt rt_attr_uint = gid rt_field = body rt_mem_limit = 8M } insert into rt (id, gid, body) values ( 1, 123, 'Although various ancient traditions refer to a lost antediluvian world, the one that stands out is the tradition in the early chapters of Genesis.' ) insert into rt (id, gid, body) values ( 2, 123, 'There are many means of transportation all over India and Nepal which can be called shandrydan due to their awkwardness.' ) insert into rt (id, gid, body) values ( 3, 123, 'If Walker was bothered by my hard look, he hid it well.' ) select * from rt where match ('antediluvian') select * from rt where match ('shandrydan') select * from rt where match ('"something antediluvian"/1') select * from rt where match ('"battered shandrydan"/1') sphinx-2.0.4-release/test/test_158/model.bin0000644000176700017710000000265011500145433020067 0ustar deogardeogara:1:{i:0;a:7:{i:0;a:2:{s:8:"sphinxql";s:198:"insert into rt (id, gid, body) values ( 1, 123, 'Although various ancient traditions refer to a lost antediluvian world, the one that stands out is the tradition in the early chapters of Genesis.' )";s:14:"total_affected";i:1;}i:1;a:2:{s:8:"sphinxql";s:172:"insert into rt (id, gid, body) values ( 2, 123, 'There are many means of transportation all over India and Nepal which can be called shandrydan due to their awkwardness.' )";s:14:"total_affected";i:1;}i:2;a:2:{s:8:"sphinxql";s:107:"insert into rt (id, gid, body) values ( 3, 123, 'If Walker was bothered by my hard look, he hid it well.' 
)";s:14:"total_affected";i:1;}i:3;a:3:{s:8:"sphinxql";s:45:"select * from rt where match ('antediluvian')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1680";s:3:"gid";s:3:"123";}}}i:4;a:3:{s:8:"sphinxql";s:43:"select * from rt where match ('shandrydan')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1680";s:3:"gid";s:3:"123";}}}i:5;a:3:{s:8:"sphinxql";s:59:"select * from rt where match ('"something antediluvian"/1')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1590";s:3:"gid";s:3:"123";}}}i:6;a:3:{s:8:"sphinxql";s:56:"select * from rt where match ('"battered shandrydan"/1')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1590";s:3:"gid";s:3:"123";}}}}}sphinx-2.0.4-release/test/test_018/0000755000176700017710000000000011724063141016270 5ustar deogardeogarsphinx-2.0.4-release/test/test_018/test.xml0000644000176700017710000000461411662472433020007 0ustar deogardeogar snippets indexer { mem_limit = 16M } searchd { } source srctest { type = mysql sql_query = SELECT id, body FROM test_table } index test_idx { source = srctest path = /test charset_type = utf-8 charset_type = sbcs min_word_len = 1 min_word_len = 3 stopwords = stopwords.txt synonyms = synonyms.txt ngram_len = 1 ngram_chars = U+3000..U+2FA1F } CREATE TABLE `test_table` ( `id` int(11) NOT NULL default '0', `body` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 1, 'test' ) $docs = array ( "ab ab ab ab ab ab ab ab ab ab ab MS", " this, is . MS windows....?", "this , is a MS but not Windows", "this is the Microsoft Windows , ,", "pro\xEF\xAC\x81ts, lost savings" // latin small ligature 'fi', oh my ); $words = "MS Windows"; $opts = array ( "before_match" => "[B]", "after_match" => "[A]", "chunk_separator" => " ... ", "limit" => 100, "around" => 2 ); $results = array (); $results [] = $opts; $res = $client->BuildExcerpts ( $docs, "test_idx", $words, $opts ); if ( !$res ) { $results = false; return; } $results [] = $res; $opts = array ( "before_match" => "[B]", "after_match" => "[A]", "chunk_separator" => " ... ", "limit" => 50, "around" => 2 ); $results [] = $opts; $res = $client->BuildExcerpts ( $docs, "test_idx", $words, $opts ); if ( !$res ) { $results = false; return; } $results [] = $res; $opts = array ( "before_match" => "[B]", "after_match" => "[A]", "chunk_separator" => " ... ", "limit" => 30, "around" => 2 ); $results [] = $opts; $res = $client->BuildExcerpts ( $docs, "test_idx", $words, $opts ); if ( !$res ) { $results = false; return; } $results [] = $res; sphinx-2.0.4-release/test/test_018/model.bin0000644000176700017710000004435111662472433020102 0ustar deogardeogara:16:{i:0;a:1:{i:0;a:6:{i:0;a:5:{s:12:"before_match";s:3:"[B]";s:11:"after_match";s:3:"[A]";s:15:"chunk_separator";s:5:" ... ";s:5:"limit";i:100;s:6:"around";i:2;}i:1;a:5:{i:0;s:35:"ab ab ab ab ab ab ab ab ab ab ab MS";i:1;s:34:" this, is . MS windows....?";i:2;s:38:"this , is a MS but not [B]Windows[A]";i:3;s:40:"this is the [B]Microsoft Windows[A] , ,";i:4;s:22:"proï¬ts, lost savings";}i:2;a:5:{s:12:"before_match";s:3:"[B]";s:11:"after_match";s:3:"[A]";s:15:"chunk_separator";s:5:" ... ";s:5:"limit";i:50;s:6:"around";i:2;}i:3;a:5:{i:0;s:35:"ab ab ab ab ab ab ab ab ab ab ab MS";i:1;s:34:" this, is . 
MS windows....?";i:2;s:38:"this , is a MS but not [B]Windows[A]";i:3;s:40:"this is the [B]Microsoft Windows[A] , ,";i:4;s:22:"proï¬ts, lost savings";}i:4;a:5:{s:12:"before_match";s:3:"[B]";s:11:"after_match";s:3:"[A]";s:15:"chunk_separator";s:5:" ... ";s:5:"limit";i:30;s:6:"around";i:2;}i:5;a:5:{i:0;s:34:"ab ab ab ab ab ab ab ab ab ab ... ";i:1;s:34:" this, is . MS windows ... ";i:2;s:27:" ... but not [B]Windows[A]";i:3;s:41:" ... is the [B]Microsoft Windows[A] , ,";i:4;s:22:"proï¬ts, lost savings";}}}i:1;a:1:{i:0;a:6:{i:0;a:5:{s:12:"before_match";s:3:"[B]";s:11:"after_match";s:3:"[A]";s:15:"chunk_separator";s:5:" ... ";s:5:"limit";i:100;s:6:"around";i:2;}i:1;a:5:{i:0;s:35:"ab ab ab ab ab ab ab ab ab ab ab MS";i:1;s:34:" this, is . MS windows....?";i:2;s:38:"this , is a MS but not [B]Windows[A]";i:3;s:40:"this is the [B]Microsoft Windows[A] , ,";i:4;s:22:"proï¬ts, lost savings";}i:2;a:5:{s:12:"before_match";s:3:"[B]";s:11:"after_match";s:3:"[A]";s:15:"chunk_separator";s:5:" ... ";s:5:"limit";i:50;s:6:"around";i:2;}i:3;a:5:{i:0;s:35:"ab ab ab ab ab ab ab ab ab ab ab MS";i:1;s:34:" this, is . MS windows....?";i:2;s:38:"this , is a MS but not [B]Windows[A]";i:3;s:40:"this is the [B]Microsoft Windows[A] , ,";i:4;s:22:"proï¬ts, lost savings";}i:4;a:5:{s:12:"before_match";s:3:"[B]";s:11:"after_match";s:3:"[A]";s:15:"chunk_separator";s:5:" ... ";s:5:"limit";i:30;s:6:"around";i:2;}i:5;a:5:{i:0;s:34:"ab ab ab ab ab ab ab ab ab ab ... ";i:1;s:34:" this, is . MS windows ... ";i:2;s:27:" ... but not [B]Windows[A]";i:3;s:41:" ... is the [B]Microsoft Windows[A] , ,";i:4;s:22:"proï¬ts, lost savings";}}}i:2;a:1:{i:0;a:6:{i:0;a:5:{s:12:"before_match";s:3:"[B]";s:11:"after_match";s:3:"[A]";s:15:"chunk_separator";s:5:" ... ";s:5:"limit";i:100;s:6:"around";i:2;}i:1;a:5:{i:0;s:35:"ab ab ab ab ab ab ab ab ab ab ab MS";i:1;s:34:" this, is . MS windows....?";i:2;s:38:"this , is a MS but not [B]Windows[A]";i:3;s:40:"this is the [B]Microsoft Windows[A] , ,";i:4;s:22:"proï¬ts, lost savings";}i:2;a:5:{s:12:"before_match";s:3:"[B]";s:11:"after_match";s:3:"[A]";s:15:"chunk_separator";s:5:" ... ";s:5:"limit";i:50;s:6:"around";i:2;}i:3;a:5:{i:0;s:35:"ab ab ab ab ab ab ab ab ab ab ab MS";i:1;s:34:" this, is . MS windows....?";i:2;s:38:"this , is a MS but not [B]Windows[A]";i:3;s:40:"this is the [B]Microsoft Windows[A] , ,";i:4;s:22:"proï¬ts, lost savings";}i:4;a:5:{s:12:"before_match";s:3:"[B]";s:11:"after_match";s:3:"[A]";s:15:"chunk_separator";s:5:" ... ";s:5:"limit";i:30;s:6:"around";i:2;}i:5;a:5:{i:0;s:34:"ab ab ab ab ab ab ab ab ab ab ... ";i:1;s:34:" this, is . MS windows ... ";i:2;s:39:" ... , is a MS but not [B]Windows[A]";i:3;s:41:" ... is the [B]Microsoft Windows[A] , ,";i:4;s:22:"proï¬ts, lost savings";}}}i:3;a:1:{i:0;a:6:{i:0;a:5:{s:12:"before_match";s:3:"[B]";s:11:"after_match";s:3:"[A]";s:15:"chunk_separator";s:5:" ... ";s:5:"limit";i:100;s:6:"around";i:2;}i:1;a:5:{i:0;s:35:"ab ab ab ab ab ab ab ab ab ab ab MS";i:1;s:34:" this, is . MS windows....?";i:2;s:38:"this , is a MS but not [B]Windows[A]";i:3;s:40:"this is the [B]Microsoft Windows[A] , ,";i:4;s:22:"proï¬ts, lost savings";}i:2;a:5:{s:12:"before_match";s:3:"[B]";s:11:"after_match";s:3:"[A]";s:15:"chunk_separator";s:5:" ... ";s:5:"limit";i:50;s:6:"around";i:2;}i:3;a:5:{i:0;s:35:"ab ab ab ab ab ab ab ab ab ab ab MS";i:1;s:34:" this, is . 
MS windows....?";i:2;s:38:"this , is a MS but not [B]Windows[A]";i:3;s:40:"this is the [B]Microsoft Windows[A] , ,";i:4;s:22:"proï¬ts, lost savings";}i:4;a:5:{s:12:"before_match";s:3:"[B]";s:11:"after_match";s:3:"[A]";s:15:"chunk_separator";s:5:" ... ";s:5:"limit";i:30;s:6:"around";i:2;}i:5;a:5:{i:0;s:34:"ab ab ab ab ab ab ab ab ab ab ... ";i:1;s:34:" this, is . MS windows ... ";i:2;s:39:" ... , is a MS but not [B]Windows[A]";i:3;s:41:" ... is the [B]Microsoft Windows[A] , ,";i:4;s:22:"proï¬ts, lost savings";}}}i:4;a:1:{i:0;a:6:{i:0;a:5:{s:12:"before_match";s:3:"[B]";s:11:"after_match";s:3:"[A]";s:15:"chunk_separator";s:5:" ... ";s:5:"limit";i:100;s:6:"around";i:2;}i:1;a:5:{i:0;s:35:"ab ab ab ab ab ab ab ab ab ab ab MS";i:1;s:34:" this, is . MS windows....?";i:2;s:38:"this , is a MS but not [B]Windows[A]";i:3;s:40:"this is the [B]Microsoft Windows[A] , ,";i:4;s:22:"proï¬ts, lost savings";}i:2;a:5:{s:12:"before_match";s:3:"[B]";s:11:"after_match";s:3:"[A]";s:15:"chunk_separator";s:5:" ... ";s:5:"limit";i:50;s:6:"around";i:2;}i:3;a:5:{i:0;s:35:"ab ab ab ab ab ab ab ab ab ab ab MS";i:1;s:34:" this, is . MS windows....?";i:2;s:38:"this , is a MS but not [B]Windows[A]";i:3;s:40:"this is the [B]Microsoft Windows[A] , ,";i:4;s:22:"proï¬ts, lost savings";}i:4;a:5:{s:12:"before_match";s:3:"[B]";s:11:"after_match";s:3:"[A]";s:15:"chunk_separator";s:5:" ... ";s:5:"limit";i:30;s:6:"around";i:2;}i:5;a:5:{i:0;s:34:"ab ab ab ab ab ab ab ab ab ab ... ";i:1;s:34:" this, is . MS windows ... ";i:2;s:27:" ... but not [B]Windows[A]";i:3;s:41:" ... is the [B]Microsoft Windows[A] , ,";i:4;s:22:"proï¬ts, lost savings";}}}i:5;a:1:{i:0;a:6:{i:0;a:5:{s:12:"before_match";s:3:"[B]";s:11:"after_match";s:3:"[A]";s:15:"chunk_separator";s:5:" ... ";s:5:"limit";i:100;s:6:"around";i:2;}i:1;a:5:{i:0;s:35:"ab ab ab ab ab ab ab ab ab ab ab MS";i:1;s:34:" this, is . MS windows....?";i:2;s:38:"this , is a MS but not [B]Windows[A]";i:3;s:40:"this is the [B]Microsoft Windows[A] , ,";i:4;s:22:"proï¬ts, lost savings";}i:2;a:5:{s:12:"before_match";s:3:"[B]";s:11:"after_match";s:3:"[A]";s:15:"chunk_separator";s:5:" ... ";s:5:"limit";i:50;s:6:"around";i:2;}i:3;a:5:{i:0;s:35:"ab ab ab ab ab ab ab ab ab ab ab MS";i:1;s:34:" this, is . MS windows....?";i:2;s:38:"this , is a MS but not [B]Windows[A]";i:3;s:40:"this is the [B]Microsoft Windows[A] , ,";i:4;s:22:"proï¬ts, lost savings";}i:4;a:5:{s:12:"before_match";s:3:"[B]";s:11:"after_match";s:3:"[A]";s:15:"chunk_separator";s:5:" ... ";s:5:"limit";i:30;s:6:"around";i:2;}i:5;a:5:{i:0;s:34:"ab ab ab ab ab ab ab ab ab ab ... ";i:1;s:34:" this, is . MS windows ... ";i:2;s:27:" ... but not [B]Windows[A]";i:3;s:41:" ... is the [B]Microsoft Windows[A] , ,";i:4;s:22:"proï¬ts, lost savings";}}}i:6;a:1:{i:0;a:6:{i:0;a:5:{s:12:"before_match";s:3:"[B]";s:11:"after_match";s:3:"[A]";s:15:"chunk_separator";s:5:" ... ";s:5:"limit";i:100;s:6:"around";i:2;}i:1;a:5:{i:0;s:35:"ab ab ab ab ab ab ab ab ab ab ab MS";i:1;s:34:" this, is . MS windows....?";i:2;s:38:"this , is a MS but not [B]Windows[A]";i:3;s:40:"this is the [B]Microsoft Windows[A] , ,";i:4;s:22:"proï¬ts, lost savings";}i:2;a:5:{s:12:"before_match";s:3:"[B]";s:11:"after_match";s:3:"[A]";s:15:"chunk_separator";s:5:" ... ";s:5:"limit";i:50;s:6:"around";i:2;}i:3;a:5:{i:0;s:35:"ab ab ab ab ab ab ab ab ab ab ab MS";i:1;s:34:" this, is . 
MS windows....?";i:2;s:38:"this , is a MS but not [B]Windows[A]";i:3;s:40:"this is the [B]Microsoft Windows[A] , ,";i:4;s:22:"proï¬ts, lost savings";}i:4;a:5:{s:12:"before_match";s:3:"[B]";s:11:"after_match";s:3:"[A]";s:15:"chunk_separator";s:5:" ... ";s:5:"limit";i:30;s:6:"around";i:2;}i:5;a:5:{i:0;s:34:"ab ab ab ab ab ab ab ab ab ab ... ";i:1;s:34:" this, is . MS windows ... ";i:2;s:39:" ... , is a MS but not [B]Windows[A]";i:3;s:41:" ... is the [B]Microsoft Windows[A] , ,";i:4;s:22:"proï¬ts, lost savings";}}}i:7;a:1:{i:0;a:6:{i:0;a:5:{s:12:"before_match";s:3:"[B]";s:11:"after_match";s:3:"[A]";s:15:"chunk_separator";s:5:" ... ";s:5:"limit";i:100;s:6:"around";i:2;}i:1;a:5:{i:0;s:35:"ab ab ab ab ab ab ab ab ab ab ab MS";i:1;s:34:" this, is . MS windows....?";i:2;s:38:"this , is a MS but not [B]Windows[A]";i:3;s:40:"this is the [B]Microsoft Windows[A] , ,";i:4;s:22:"proï¬ts, lost savings";}i:2;a:5:{s:12:"before_match";s:3:"[B]";s:11:"after_match";s:3:"[A]";s:15:"chunk_separator";s:5:" ... ";s:5:"limit";i:50;s:6:"around";i:2;}i:3;a:5:{i:0;s:35:"ab ab ab ab ab ab ab ab ab ab ab MS";i:1;s:34:" this, is . MS windows....?";i:2;s:38:"this , is a MS but not [B]Windows[A]";i:3;s:40:"this is the [B]Microsoft Windows[A] , ,";i:4;s:22:"proï¬ts, lost savings";}i:4;a:5:{s:12:"before_match";s:3:"[B]";s:11:"after_match";s:3:"[A]";s:15:"chunk_separator";s:5:" ... ";s:5:"limit";i:30;s:6:"around";i:2;}i:5;a:5:{i:0;s:34:"ab ab ab ab ab ab ab ab ab ab ... ";i:1;s:34:" this, is . MS windows ... ";i:2;s:39:" ... , is a MS but not [B]Windows[A]";i:3;s:41:" ... is the [B]Microsoft Windows[A] , ,";i:4;s:22:"proï¬ts, lost savings";}}}i:8;a:1:{i:0;a:6:{i:0;a:5:{s:12:"before_match";s:3:"[B]";s:11:"after_match";s:3:"[A]";s:15:"chunk_separator";s:5:" ... ";s:5:"limit";i:100;s:6:"around";i:2;}i:1;a:5:{i:0;s:41:"ab ab ab ab ab ab ab ab ab ab ab [B]MS[A]";i:1;s:46:" this, is . [B]MS[A] [B]windows[A]....?";i:2;s:44:"this , is a [B]MS[A] but not [B]Windows[A]";i:3;s:40:"this is the Microsoft [B]Windows[A] , ,";i:4;s:22:"proï¬ts, lost savings";}i:2;a:5:{s:12:"before_match";s:3:"[B]";s:11:"after_match";s:3:"[A]";s:15:"chunk_separator";s:5:" ... ";s:5:"limit";i:50;s:6:"around";i:2;}i:3;a:5:{i:0;s:41:"ab ab ab ab ab ab ab ab ab ab ab [B]MS[A]";i:1;s:46:" this, is . [B]MS[A] [B]windows[A]....?";i:2;s:44:"this , is a [B]MS[A] but not [B]Windows[A]";i:3;s:40:"this is the Microsoft [B]Windows[A] , ,";i:4;s:22:"proï¬ts, lost savings";}i:4;a:5:{s:12:"before_match";s:3:"[B]";s:11:"after_match";s:3:"[A]";s:15:"chunk_separator";s:5:" ... ";s:5:"limit";i:30;s:6:"around";i:2;}i:5;a:5:{i:0;s:20:" ... ab ab [B]MS[A]";i:1;s:44:" ... , is . [B]MS[A] [B]windows[A]....?";i:2;s:45:" ... , is a [B]MS[A] but not [B]Windows[A]";i:3;s:38:" ... the Microsoft [B]Windows[A] , ,";i:4;s:22:"proï¬ts, lost savings";}}}i:9;a:1:{i:0;a:6:{i:0;a:5:{s:12:"before_match";s:3:"[B]";s:11:"after_match";s:3:"[A]";s:15:"chunk_separator";s:5:" ... ";s:5:"limit";i:100;s:6:"around";i:2;}i:1;a:5:{i:0;s:41:"ab ab ab ab ab ab ab ab ab ab ab [B]MS[A]";i:1;s:46:" this, is . [B]MS[A] [B]windows[A]....?";i:2;s:44:"this , is a [B]MS[A] but not [B]Windows[A]";i:3;s:40:"this is the Microsoft [B]Windows[A] , ,";i:4;s:22:"proï¬ts, lost savings";}i:2;a:5:{s:12:"before_match";s:3:"[B]";s:11:"after_match";s:3:"[A]";s:15:"chunk_separator";s:5:" ... ";s:5:"limit";i:50;s:6:"around";i:2;}i:3;a:5:{i:0;s:41:"ab ab ab ab ab ab ab ab ab ab ab [B]MS[A]";i:1;s:46:" this, is . 
[B]MS[A] [B]windows[A]....?";i:2;s:44:"this , is a [B]MS[A] but not [B]Windows[A]";i:3;s:40:"this is the Microsoft [B]Windows[A] , ,";i:4;s:22:"proï¬ts, lost savings";}i:4;a:5:{s:12:"before_match";s:3:"[B]";s:11:"after_match";s:3:"[A]";s:15:"chunk_separator";s:5:" ... ";s:5:"limit";i:30;s:6:"around";i:2;}i:5;a:5:{i:0;s:20:" ... ab ab [B]MS[A]";i:1;s:44:" ... , is . [B]MS[A] [B]windows[A]....?";i:2;s:45:" ... , is a [B]MS[A] but not [B]Windows[A]";i:3;s:38:" ... the Microsoft [B]Windows[A] , ,";i:4;s:22:"proï¬ts, lost savings";}}}i:10;a:1:{i:0;a:6:{i:0;a:5:{s:12:"before_match";s:3:"[B]";s:11:"after_match";s:3:"[A]";s:15:"chunk_separator";s:5:" ... ";s:5:"limit";i:100;s:6:"around";i:2;}i:1;a:5:{i:0;s:35:"ab ab ab ab ab ab ab ab ab ab ab MS";i:1;s:40:" this, is . MS [B]windows[A]....?";i:2;s:38:"this , is a MS but not [B]Windows[A]";i:3;s:40:"this is the Microsoft [B]Windows[A] , ,";i:4;s:22:"proï¬ts, lost savings";}i:2;a:5:{s:12:"before_match";s:3:"[B]";s:11:"after_match";s:3:"[A]";s:15:"chunk_separator";s:5:" ... ";s:5:"limit";i:50;s:6:"around";i:2;}i:3;a:5:{i:0;s:35:"ab ab ab ab ab ab ab ab ab ab ab MS";i:1;s:40:" this, is . MS [B]windows[A]....?";i:2;s:38:"this , is a MS but not [B]Windows[A]";i:3;s:40:"this is the Microsoft [B]Windows[A] , ,";i:4;s:22:"proï¬ts, lost savings";}i:4;a:5:{s:12:"before_match";s:3:"[B]";s:11:"after_match";s:3:"[A]";s:15:"chunk_separator";s:5:" ... ";s:5:"limit";i:30;s:6:"around";i:2;}i:5;a:5:{i:0;s:34:"ab ab ab ab ab ab ab ab ab ab ... ";i:1;s:38:" ... , is . MS [B]windows[A]....?";i:2;s:39:" ... , is a MS but not [B]Windows[A]";i:3;s:41:" ... is the Microsoft [B]Windows[A] , ,";i:4;s:22:"proï¬ts, lost savings";}}}i:11;a:1:{i:0;a:6:{i:0;a:5:{s:12:"before_match";s:3:"[B]";s:11:"after_match";s:3:"[A]";s:15:"chunk_separator";s:5:" ... ";s:5:"limit";i:100;s:6:"around";i:2;}i:1;a:5:{i:0;s:35:"ab ab ab ab ab ab ab ab ab ab ab MS";i:1;s:40:" this, is . MS [B]windows[A]....?";i:2;s:38:"this , is a MS but not [B]Windows[A]";i:3;s:40:"this is the Microsoft [B]Windows[A] , ,";i:4;s:22:"proï¬ts, lost savings";}i:2;a:5:{s:12:"before_match";s:3:"[B]";s:11:"after_match";s:3:"[A]";s:15:"chunk_separator";s:5:" ... ";s:5:"limit";i:50;s:6:"around";i:2;}i:3;a:5:{i:0;s:35:"ab ab ab ab ab ab ab ab ab ab ab MS";i:1;s:40:" this, is . MS [B]windows[A]....?";i:2;s:38:"this , is a MS but not [B]Windows[A]";i:3;s:40:"this is the Microsoft [B]Windows[A] , ,";i:4;s:22:"proï¬ts, lost savings";}i:4;a:5:{s:12:"before_match";s:3:"[B]";s:11:"after_match";s:3:"[A]";s:15:"chunk_separator";s:5:" ... ";s:5:"limit";i:30;s:6:"around";i:2;}i:5;a:5:{i:0;s:34:"ab ab ab ab ab ab ab ab ab ab ... ";i:1;s:38:" ... , is . MS [B]windows[A]....?";i:2;s:39:" ... , is a MS but not [B]Windows[A]";i:3;s:41:" ... is the Microsoft [B]Windows[A] , ,";i:4;s:22:"proï¬ts, lost savings";}}}i:12;a:1:{i:0;a:6:{i:0;a:5:{s:12:"before_match";s:3:"[B]";s:11:"after_match";s:3:"[A]";s:15:"chunk_separator";s:5:" ... ";s:5:"limit";i:100;s:6:"around";i:2;}i:1;a:5:{i:0;s:41:"ab ab ab ab ab ab ab ab ab ab ab [B]MS[A]";i:1;s:46:" this, is . [B]MS[A] [B]windows[A]....?";i:2;s:44:"this , is a [B]MS[A] but not [B]Windows[A]";i:3;s:40:"this is the Microsoft [B]Windows[A] , ,";i:4;s:22:"proï¬ts, lost savings";}i:2;a:5:{s:12:"before_match";s:3:"[B]";s:11:"after_match";s:3:"[A]";s:15:"chunk_separator";s:5:" ... ";s:5:"limit";i:50;s:6:"around";i:2;}i:3;a:5:{i:0;s:41:"ab ab ab ab ab ab ab ab ab ab ab [B]MS[A]";i:1;s:46:" this, is . 
[B]MS[A] [B]windows[A]....?";i:2;s:44:"this , is a [B]MS[A] but not [B]Windows[A]";i:3;s:40:"this is the Microsoft [B]Windows[A] , ,";i:4;s:22:"proï¬ts, lost savings";}i:4;a:5:{s:12:"before_match";s:3:"[B]";s:11:"after_match";s:3:"[A]";s:15:"chunk_separator";s:5:" ... ";s:5:"limit";i:30;s:6:"around";i:2;}i:5;a:5:{i:0;s:20:" ... ab ab [B]MS[A]";i:1;s:44:" ... , is . [B]MS[A] [B]windows[A]....?";i:2;s:45:" ... , is a [B]MS[A] but not [B]Windows[A]";i:3;s:38:" ... the Microsoft [B]Windows[A] , ,";i:4;s:22:"proï¬ts, lost savings";}}}i:13;a:1:{i:0;a:6:{i:0;a:5:{s:12:"before_match";s:3:"[B]";s:11:"after_match";s:3:"[A]";s:15:"chunk_separator";s:5:" ... ";s:5:"limit";i:100;s:6:"around";i:2;}i:1;a:5:{i:0;s:41:"ab ab ab ab ab ab ab ab ab ab ab [B]MS[A]";i:1;s:46:" this, is . [B]MS[A] [B]windows[A]....?";i:2;s:44:"this , is a [B]MS[A] but not [B]Windows[A]";i:3;s:40:"this is the Microsoft [B]Windows[A] , ,";i:4;s:22:"proï¬ts, lost savings";}i:2;a:5:{s:12:"before_match";s:3:"[B]";s:11:"after_match";s:3:"[A]";s:15:"chunk_separator";s:5:" ... ";s:5:"limit";i:50;s:6:"around";i:2;}i:3;a:5:{i:0;s:41:"ab ab ab ab ab ab ab ab ab ab ab [B]MS[A]";i:1;s:46:" this, is . [B]MS[A] [B]windows[A]....?";i:2;s:44:"this , is a [B]MS[A] but not [B]Windows[A]";i:3;s:40:"this is the Microsoft [B]Windows[A] , ,";i:4;s:22:"proï¬ts, lost savings";}i:4;a:5:{s:12:"before_match";s:3:"[B]";s:11:"after_match";s:3:"[A]";s:15:"chunk_separator";s:5:" ... ";s:5:"limit";i:30;s:6:"around";i:2;}i:5;a:5:{i:0;s:20:" ... ab ab [B]MS[A]";i:1;s:44:" ... , is . [B]MS[A] [B]windows[A]....?";i:2;s:45:" ... , is a [B]MS[A] but not [B]Windows[A]";i:3;s:38:" ... the Microsoft [B]Windows[A] , ,";i:4;s:22:"proï¬ts, lost savings";}}}i:14;a:1:{i:0;a:6:{i:0;a:5:{s:12:"before_match";s:3:"[B]";s:11:"after_match";s:3:"[A]";s:15:"chunk_separator";s:5:" ... ";s:5:"limit";i:100;s:6:"around";i:2;}i:1;a:5:{i:0;s:35:"ab ab ab ab ab ab ab ab ab ab ab MS";i:1;s:40:" this, is . MS [B]windows[A]....?";i:2;s:38:"this , is a MS but not [B]Windows[A]";i:3;s:40:"this is the Microsoft [B]Windows[A] , ,";i:4;s:22:"proï¬ts, lost savings";}i:2;a:5:{s:12:"before_match";s:3:"[B]";s:11:"after_match";s:3:"[A]";s:15:"chunk_separator";s:5:" ... ";s:5:"limit";i:50;s:6:"around";i:2;}i:3;a:5:{i:0;s:35:"ab ab ab ab ab ab ab ab ab ab ab MS";i:1;s:40:" this, is . MS [B]windows[A]....?";i:2;s:38:"this , is a MS but not [B]Windows[A]";i:3;s:40:"this is the Microsoft [B]Windows[A] , ,";i:4;s:22:"proï¬ts, lost savings";}i:4;a:5:{s:12:"before_match";s:3:"[B]";s:11:"after_match";s:3:"[A]";s:15:"chunk_separator";s:5:" ... ";s:5:"limit";i:30;s:6:"around";i:2;}i:5;a:5:{i:0;s:34:"ab ab ab ab ab ab ab ab ab ab ... ";i:1;s:38:" ... , is . MS [B]windows[A]....?";i:2;s:39:" ... , is a MS but not [B]Windows[A]";i:3;s:41:" ... is the Microsoft [B]Windows[A] , ,";i:4;s:22:"proï¬ts, lost savings";}}}i:15;a:1:{i:0;a:6:{i:0;a:5:{s:12:"before_match";s:3:"[B]";s:11:"after_match";s:3:"[A]";s:15:"chunk_separator";s:5:" ... ";s:5:"limit";i:100;s:6:"around";i:2;}i:1;a:5:{i:0;s:35:"ab ab ab ab ab ab ab ab ab ab ab MS";i:1;s:40:" this, is . MS [B]windows[A]....?";i:2;s:38:"this , is a MS but not [B]Windows[A]";i:3;s:40:"this is the Microsoft [B]Windows[A] , ,";i:4;s:22:"proï¬ts, lost savings";}i:2;a:5:{s:12:"before_match";s:3:"[B]";s:11:"after_match";s:3:"[A]";s:15:"chunk_separator";s:5:" ... ";s:5:"limit";i:50;s:6:"around";i:2;}i:3;a:5:{i:0;s:35:"ab ab ab ab ab ab ab ab ab ab ab MS";i:1;s:40:" this, is . 
MS [B]windows[A]....?";i:2;s:38:"this , is a MS but not [B]Windows[A]";i:3;s:40:"this is the Microsoft [B]Windows[A] , ,";i:4;s:22:"proï¬ts, lost savings";}i:4;a:5:{s:12:"before_match";s:3:"[B]";s:11:"after_match";s:3:"[A]";s:15:"chunk_separator";s:5:" ... ";s:5:"limit";i:30;s:6:"around";i:2;}i:5;a:5:{i:0;s:34:"ab ab ab ab ab ab ab ab ab ab ... ";i:1;s:38:" ... , is . MS [B]windows[A]....?";i:2;s:39:" ... , is a MS but not [B]Windows[A]";i:3;s:41:" ... is the Microsoft [B]Windows[A] , ,";i:4;s:22:"proï¬ts, lost savings";}}}}sphinx-2.0.4-release/test/test_043/0000755000176700017710000000000011724063141016266 5ustar deogardeogarsphinx-2.0.4-release/test/test_043/test.xml0000644000176700017710000000333211712323447017775 0ustar deogardeogar unpack indexer { mem_limit = 16M } searchd { } source srctest { type = mysql sql_query = SELECT id, t_zlib, at_1, t_mysql, plain, at_2, at_3, t_mysql_2 FROM test_table unpack_zlib = t_zlib unpack_mysqlcompress = t_mysql unpack_mysqlcompress = t_mysql_2 sql_attr_uint = at_1 sql_attr_uint = at_2 sql_attr_uint = at_3 } index test_idx { source = srctest path = /test } CREATE TABLE test_table ( id integer primary key not null auto_increment, t_zlib blob, t_mysql blob, t_mysql_2 blob, plain varchar(256), at_1 int not null default 1, at_2 int not null default 2, at_3 int not null default 3 ) ENGINE=MYISAM DROP TABLE IF EXISTS test_table; INSERT INTO test_table (t_zlib, t_mysql, t_mysql_2, plain) VALUES ( substr(compress('zlib'), 5), NULL, NULL, 'plain' ), ( NULL, compress('mysql'), NULL, NULL ), ( substr(compress('test'), 5), compress('hello'), compress('world'), '' ), ( 'malformed', 'broken', NULL, NULL ), ( substr(COMPRESS ( REPEAT ( 'the dog with snoopy this book like ', 24000 ) ), 5), COMPRESS ( REPEAT ( 'spaceman is a hero not good mooman ', 24001 ) ), NULL, NULL ), ( COMPRESS ( REPEAT ( 'the dog with snoopy ', 16384 ) ), COMPRESS ( REPEAT ( 'spaceman is a hero not as we ', 16384 ) ), NULL, NULL ), ( NULL, 'a', NULL, NULL ), ( '', '', '', '' ); zlib mysql hello world plain malformed broken the a sphinx-2.0.4-release/test/test_043/model.bin0000644000176700017710000001041111712323447020062 0ustar 
deogardeogara:1:{i:0;a:8:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:4:{i:0;s:6:"t_zlib";i:1;s:7:"t_mysql";i:2;s:5:"plain";i:3;s:9:"t_mysql_2";}s:5:"attrs";a:3:{s:4:"at_1";i:1;s:4:"at_2";i:1;s:4:"at_3";i:1;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:4:"at_1";s:1:"1";s:4:"at_2";s:1:"2";s:4:"at_3";s:1:"3";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"zlib";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"zlib";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:4:{i:0;s:6:"t_zlib";i:1;s:7:"t_mysql";i:2;s:5:"plain";i:3;s:9:"t_mysql_2";}s:5:"attrs";a:3:{s:4:"at_1";i:1;s:4:"at_2";i:1;s:4:"at_3";i:1;}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:4:"at_1";s:1:"1";s:4:"at_2";s:1:"2";s:4:"at_3";s:1:"3";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"mysql";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"mysql";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:4:{i:0;s:6:"t_zlib";i:1;s:7:"t_mysql";i:2;s:5:"plain";i:3;s:9:"t_mysql_2";}s:5:"attrs";a:3:{s:4:"at_1";i:1;s:4:"at_2";i:1;s:4:"at_3";i:1;}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:3:{s:4:"at_1";s:1:"1";s:4:"at_2";s:1:"2";s:4:"at_3";s:1:"3";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:11:"hello world";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:4:{i:0;s:6:"t_zlib";i:1;s:7:"t_mysql";i:2;s:5:"plain";i:3;s:9:"t_mysql_2";}s:5:"attrs";a:3:{s:4:"at_1";i:1;s:4:"at_2";i:1;s:4:"at_3";i:1;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:4:"at_1";s:1:"1";s:4:"at_2";s:1:"2";s:4:"at_3";s:1:"3";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"plain";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"plain";}i:4;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:4:{i:0;s:6:"t_zlib";i:1;s:7:"t_mysql";i:2;s:5:"plain";i:3;s:9:"t_mysql_2";}s:5:"attrs";a:3:{s:4:"at_1";i:1;s:4:"at_2";i:1;s:4:"at_3";i:1;}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:9:"malformed";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:9:"malformed";}i:5;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:4:{i:0;s:6:"t_zlib";i:1;s:7:"t_mysql";i:2;s:5:"plain";i:3;s:9:"t_mysql_2";}s:5:"attrs";a:3:{s:4:"at_1";i:1;s:4:"at_2";i:1;s:4:"at_3";i:1;}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"broken";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:6:"broken";}i:6;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:4:{i:0;s:6:"t_zlib";i:1;s:7:"t_mysql";i:2;s:5:"plain";i:3;s:9:"t_mysql_2";}s:5:"attrs";a:3:{s:4:"at_1";i:1;s:4:"at_2";i:1;s:4:"at_3";i:1;}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:4:"at_1";s:1:"1";s:4:"at_2";s:1:"2";s:4:"at_3";s:1:"3";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1"
;s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"the";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:5:"24000";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:3:"the";}i:7;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:4:{i:0;s:6:"t_zlib";i:1;s:7:"t_mysql";i:2;s:5:"plain";i:3;s:9:"t_mysql_2";}s:5:"attrs";a:3:{s:4:"at_1";i:1;s:4:"at_2";i:1;s:4:"at_3";i:1;}s:7:"matches";a:2:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:4:"at_1";s:1:"1";s:4:"at_2";s:1:"2";s:4:"at_3";s:1:"3";}}i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:4:"at_1";s:1:"1";s:4:"at_2";s:1:"2";s:4:"at_3";s:1:"3";}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:1:"a";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:5:"40385";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:1:"a";}}}sphinx-2.0.4-release/test/test_026/0000755000176700017710000000000011724063141016267 5ustar deogardeogarsphinx-2.0.4-release/test/test_026/test.xml0000644000176700017710000000314311323636205017773 0ustar deogardeogar merge vs basic main/delta indexer { mem_limit = 16M } searchd { } source srcmain { type = mysql sql_query = SELECT document_id, body, body sbody FROM test_table WHERE document_id in (1,2,11,12,13) sql_attr_string = sbody } source srcdelta { type = mysql sql_query = SELECT document_id, body, body sbody FROM test_table WHERE document_id in (3,4,14,15,16) sql_attr_string = sbody } index main { source = srcmain path = /main charset_type = utf-8 } index delta { source = srcdelta path = /delta charset_type = utf-8 } --merge main delta main1 main2 delta1 delta2 gamma0 aaa xxx yyy zzz CREATE TABLE `test_table` ( `document_id` int(11) NOT NULL default '0', `body` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 1, 'main1' ), ( 2, 'main2' ), ( 3, 'delta1' ), ( 4, 'delta2' ), ( 11, 'xxx yyy zzz aaa' ), ( 12, 'xxx aaa yyy' ), ( 13, 'zzz' ), ( 14, 'zzz' ), ( 15, 'xxx aaa yyy' ), ( 16, 'xxx aaa' ) sphinx-2.0.4-release/test/test_026/model.bin0000644000176700017710000001106411216572754020076 0ustar 
deogardeogara:1:{i:0;a:9:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:1:{s:5:"sbody";i:7;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:5:"sbody";s:5:"main1";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"main1";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"main1";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:1:{s:5:"sbody";i:7;}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:5:"sbody";s:5:"main2";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"main2";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"main2";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:1:{s:5:"sbody";i:7;}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:5:"sbody";s:6:"delta1";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"delta1";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:6:"delta1";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:1:{s:5:"sbody";i:7;}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:5:"sbody";s:6:"delta2";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"delta2";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:6:"delta2";}i:4;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:1:{s:5:"sbody";i:7;}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"gamma0";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:6:"gamma0";}i:5;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:1:{s:5:"sbody";i:7;}s:7:"matches";a:4:{i:11;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:5:"sbody";s:15:"xxx yyy zzz aaa";}}i:12;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:5:"sbody";s:11:"xxx aaa yyy";}}i:15;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:5:"sbody";s:11:"xxx aaa yyy";}}i:16;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:5:"sbody";s:7:"xxx aaa";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:3:"aaa";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:3:"aaa";}i:6;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:1:{s:5:"sbody";i:7;}s:7:"matches";a:4:{i:11;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:5:"sbody";s:15:"xxx yyy zzz aaa";}}i:12;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:5:"sbody";s:11:"xxx aaa yyy";}}i:15;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:5:"sbody";s:11:"xxx aaa yyy";}}i:16;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:5:"sbody";s:7:"xxx 
aaa";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:3:"xxx";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:3:"xxx";}i:7;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:1:{s:5:"sbody";i:7;}s:7:"matches";a:3:{i:11;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:5:"sbody";s:15:"xxx yyy zzz aaa";}}i:12;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:5:"sbody";s:11:"xxx aaa yyy";}}i:15;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:5:"sbody";s:11:"xxx aaa yyy";}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:3:"yyy";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:3:"yyy";}i:8;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:1:{s:5:"sbody";i:7;}s:7:"matches";a:3:{i:11;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:5:"sbody";s:15:"xxx yyy zzz aaa";}}i:13;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:5:"sbody";s:3:"zzz";}}i:14;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:5:"sbody";s:3:"zzz";}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:3:"zzz";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:3:"zzz";}}}sphinx-2.0.4-release/test/test_062/0000755000176700017710000000000011724063141016267 5ustar deogardeogarsphinx-2.0.4-release/test/test_062/test.xml0000644000176700017710000000313411662472433020002 0ustar deogardeogar snippets vs query highlighting indexer { mem_limit = 16M } searchd { } source test { type = mysql sql_query = SELECT 1, 'title' as title, 'text' as text; } index test { source = test path = /test charset_type = utf-8 phrase_boundary = U+002C phrase_boundary_step = 100 } select 1; '[B]', 'after_match' => '[A]', 'chunk_separator' => ' ... 
', 'limit' => 255, 'around' => 2, 'query_mode' => 1 ); $text = 'Sphinx clusters scale to billions of documents, terabytes of data, and billions of queries per month.'; $queries = array ( '^sphinx month$', '^sphinx queries$', '^clusters month$', '^*inx *bytes', '*i*', '*on*', '*s', '"clusters scale"', '"clusters do not scale"', // false claims don't get highlighted '"of d*"', 'terabyte* << quer*', 'data << terabyte*', '"sphinx scale"~3', '"sphinx billions"~3', '"silly documents"/1', '"clusters scale to billions"', '"queries per month" | month | "per month"', '"of d*" | "of data"', '"of data" -"of hedgedogs"', '"documents terabytes"', // crosses boundary '@title sphinx', '@text sphinx', '@text[3] sphinx', '@text[3] documents', '@text[7] documents', // case shouldn't matter 'SPHINX', 'SPH*', '*PHI*', '*INX', ); $results = array(); foreach ( $queries as $query ) { $reply = $client->BuildExcerpts ( array($text), 'test', $query, $opts ); $results [] = $query; $results [] = $reply; } ]]> sphinx-2.0.4-release/test/test_062/model.bin0000644000176700017710000001107611560651166020076 0ustar deogardeogara:1:{i:0;a:1:{i:0;a:58:{i:0;s:14:"^sphinx month$";i:1;a:1:{i:0;s:113:"[B]Sphinx[A] clusters scale to billions of documents, terabytes of data, and billions of queries per [B]month[A].";}i:2;s:16:"^sphinx queries$";i:3;a:1:{i:0;s:101:"Sphinx clusters scale to billions of documents, terabytes of data, and billions of queries per month.";}i:4;s:16:"^clusters month$";i:5;a:1:{i:0;s:101:"Sphinx clusters scale to billions of documents, terabytes of data, and billions of queries per month.";}i:6;s:12:"^*inx *bytes";i:7;a:1:{i:0;s:113:"[B]Sphinx[A] clusters scale to billions of documents, [B]terabytes[A] of data, and billions of queries per month.";}i:8;s:3:"*i*";i:9;a:1:{i:0;s:125:"[B]Sphinx[A] clusters scale to [B]billions[A] of documents, terabytes of data, and [B]billions[A] of [B]queries[A] per month.";}i:10;s:4:"*on*";i:11;a:1:{i:0;s:119:"Sphinx clusters scale to [B]billions[A] of documents, terabytes of data, and [B]billions[A] of queries per [B]month[A].";}i:12;s:2:"*s";i:13;a:1:{i:0;s:137:"Sphinx [B]clusters[A] scale to [B]billions[A] of [B]documents[A], [B]terabytes[A] of data, and [B]billions[A] of [B]queries[A] per month.";}i:14;s:16:""clusters scale"";i:15;a:1:{i:0;s:107:"Sphinx [B]clusters scale[A] to billions of documents, terabytes of data, and billions of queries per month.";}i:16;s:23:""clusters do not scale"";i:17;a:1:{i:0;s:101:"Sphinx clusters scale to billions of documents, terabytes of data, and billions of queries per month.";}i:18;s:7:""of d*"";i:19;a:1:{i:0;s:113:"Sphinx clusters scale to billions [B]of documents[A], terabytes [B]of data[A], and billions of queries per month.";}i:20;s:18:"terabyte* << quer*";i:21;a:1:{i:0;s:113:"Sphinx clusters scale to billions of documents, [B]terabytes[A] of data, and billions of [B]queries[A] per month.";}i:22;s:17:"data << terabyte*";i:23;a:1:{i:0;s:101:"Sphinx clusters scale to billions of documents, terabytes of data, and billions of queries per month.";}i:24;s:16:""sphinx scale"~3";i:25;a:1:{i:0;s:113:"[B]Sphinx[A] clusters [B]scale[A] to billions of documents, terabytes of data, and billions of queries per month.";}i:26;s:19:""sphinx billions"~3";i:27;a:1:{i:0;s:101:"Sphinx clusters scale to billions of documents, terabytes of data, and billions of queries per month.";}i:28;s:19:""silly documents"/1";i:29;a:1:{i:0;s:107:"Sphinx clusters scale to billions of [B]documents[A], terabytes of data, and billions of queries per 
month.";}i:30;s:28:""clusters scale to billions"";i:31;a:1:{i:0;s:107:"Sphinx [B]clusters scale to billions[A] of documents, terabytes of data, and billions of queries per month.";}i:32;s:41:""queries per month" | month | "per month"";i:33;a:1:{i:0;s:107:"Sphinx clusters scale to billions of documents, terabytes of data, and billions of [B]queries per month[A].";}i:34;s:19:""of d*" | "of data"";i:35;a:1:{i:0;s:113:"Sphinx clusters scale to billions [B]of documents[A], terabytes [B]of data[A], and billions of queries per month.";}i:36;s:25:""of data" -"of hedgedogs"";i:37;a:1:{i:0;s:107:"Sphinx clusters scale to billions of documents, terabytes [B]of data[A], and billions of queries per month.";}i:38;s:21:""documents terabytes"";i:39;a:1:{i:0;s:101:"Sphinx clusters scale to billions of documents, terabytes of data, and billions of queries per month.";}i:40;s:13:"@title sphinx";i:41;a:1:{i:0;s:107:"[B]Sphinx[A] clusters scale to billions of documents, terabytes of data, and billions of queries per month.";}i:42;s:12:"@text sphinx";i:43;a:1:{i:0;s:107:"[B]Sphinx[A] clusters scale to billions of documents, terabytes of data, and billions of queries per month.";}i:44;s:15:"@text[3] sphinx";i:45;a:1:{i:0;s:107:"[B]Sphinx[A] clusters scale to billions of documents, terabytes of data, and billions of queries per month.";}i:46;s:18:"@text[3] documents";i:47;a:1:{i:0;s:101:"Sphinx clusters scale to billions of documents, terabytes of data, and billions of queries per month.";}i:48;s:18:"@text[7] documents";i:49;a:1:{i:0;s:107:"Sphinx clusters scale to billions of [B]documents[A], terabytes of data, and billions of queries per month.";}i:50;s:6:"SPHINX";i:51;a:1:{i:0;s:107:"[B]Sphinx[A] clusters scale to billions of documents, terabytes of data, and billions of queries per month.";}i:52;s:4:"SPH*";i:53;a:1:{i:0;s:107:"[B]Sphinx[A] clusters scale to billions of documents, terabytes of data, and billions of queries per month.";}i:54;s:5:"*PHI*";i:55;a:1:{i:0;s:107:"[B]Sphinx[A] clusters scale to billions of documents, terabytes of data, and billions of queries per month.";}i:56;s:4:"*INX";i:57;a:1:{i:0;s:107:"[B]Sphinx[A] clusters scale to billions of documents, terabytes of data, and billions of queries per month.";}}}}sphinx-2.0.4-release/test/test_025/0000755000176700017710000000000011724063141016266 5ustar deogardeogarsphinx-2.0.4-release/test/test_025/test.xml0000644000176700017710000000471111004433405017765 0ustar deogardeogar min_word_len vs queries (part 3) indexer { mem_limit = 16M } searchd { } source srctest { type = mysql sql_query = SELECT * FROM test_table } index test { source = srctest path = /test charset_type = utf-8 exceptions = synonyms.txt enable_star = 1 min_word_len = 3 min_word_len = 4 min_word_len = 5 min_prefix_len = 3 min_prefix_len = 4 min_prefix_len = 5 } a bb ccc dddd eeeee ffffff ggggggg b* cc* ddd* eeee* fffff* gggggg* a bb ccc dddd eeeee ffffff ggggggg b* cc* ddd* eeee* fffff* gggggg* hello me world hello two world hello four world hello me* world hello two* world hello four* world CREATE TABLE `test_table` ( `document_id` int(11) NOT NULL default '0', `body` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 1, 'a' ), ( 2, 'bb' ), ( 3, 'ccc' ), ( 4, 'dddd' ), ( 5, 'eeeee' ), ( 6, 'ffffff' ), ( 7, 'ggggggg' ), ( 8, 'hello world' ), ( 9, 'hello a world' ), ( 10, 'hello aa world' ), ( 11, 'hello aaa world' ), ( 12, 'hello aaaa world' ), ( 13, 'hello aaaaa world' ), ( 14, 'hello me world' ), ( 15, 'hello two world' ), ( 
16, 'hello four world' ) sphinx-2.0.4-release/test/test_025/model.bin0000644000176700017710000032033111004433405020055 0ustar deogardeogara:9:{i:0;a:32:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:2;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"ccc";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"ccc";}i:3;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dddd";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"dddd";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:6;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:7;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:8;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. 
word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:9;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"ddd*";}i:10;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeee*";}i:11;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:12;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:13;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:14;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:15;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:3:"ccc";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"ccc";}i:16;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"dddd";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"dddd";}i:17;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:18;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:19;a:11:{
s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:20;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:21;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:22;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"ddd*";}i:23;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeee*";}i:24;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:25;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:26;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:14:"hello me 
world";}i:27;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:15;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:3:"two";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello two world";}i:28;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:16;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:4:"four";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello four world";}i:29;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'me*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:3:"me*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello me* world";}i:30;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:15;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:4:"two*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello two* world";}i:31;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:16;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"four*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:17:"hello four* 
world";}}i:1;a:32:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:2;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:3:"ccc";}i:3;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dddd";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"dddd";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:6;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:7;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:8;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. 
word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:9;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"ddd*";}i:10;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeee*";}i:11;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:12;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:13;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:5:"query";s:1:"a";}i:14;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:15;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:3:"ccc";}i:16;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"dddd";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"dddd";}i:17;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:18;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:19;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"wei
ght";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:20;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:21;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:22;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"ddd*";}i:23;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeee*";}i:24;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:25;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:26;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:14:"hello me 
world";}i:27;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello two world";}i:28;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:16;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:4:"four";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello four world";}i:29;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'me*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:3:"me*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello me* world";}i:30;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:4:"two*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello two* world";}i:31;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:16;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"four*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:17:"hello four* 
world";}}i:2;a:32:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:2;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:3:"ccc";}i:3;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:4:"dddd";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:6;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:7;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:8;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. 
word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:9;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"ddd*";}i:10;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeee*";}i:11;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:12;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:13;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:14;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:15;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:3:"ccc";}i:16;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:4:"dddd";}i:17;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:18;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:19;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:20;a
:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:21;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:22;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"ddd*";}i:23;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeee*";}i:24;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:25;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:26;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:14:"hello me 
world";}i:27;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello two world";}i:28;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello four world";}i:29;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'me*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:3:"me*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello me* world";}i:30;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:4:"two*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello two* world";}i:31;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"four*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:17:"hello four* 
world";}}i:3;a:32:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:2;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"ccc";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"ccc";}i:3;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dddd";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"dddd";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:6;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:7;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:8;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:9;a:10:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. 
word: 'ddd*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"ddd*";}i:10;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeee*";}i:11;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:12;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:13;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:14;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:15;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:3:"ccc";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"ccc";}i:16;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"dddd";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"dddd";}i:17;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:18;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:19;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg"
;}i:20;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:21;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:22;a:10:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. word: 'ddd*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"ddd*";}i:23;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeee*";}i:24;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:25;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:26;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:14:"hello me world";}i:27;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:15;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:3:"two";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello two 
world";}i:28;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:16;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:4:"four";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello four world";}i:29;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'me*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:3:"me*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello me* world";}i:30;a:11:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. word: 'two*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:15;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:4:"two*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello two* world";}i:31;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:16;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"four*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:17:"hello four* 
world";}}i:4;a:32:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:2;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:3:"ccc";}i:3;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dddd";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"dddd";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:6;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:7;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:8;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:9;a:10:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. 
word: 'ddd*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"ddd*";}i:10;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeee*";}i:11;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:12;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:13;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:14;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:15;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:5:"query";s:3:"ccc";}i:16;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"dddd";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"dddd";}i:17;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:18;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:19;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:20;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. 
word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:21;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:22;a:10:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. word: 'ddd*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"ddd*";}i:23;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeee*";}i:24;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:25;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:26;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:14:"hello me 
world";}i:27;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello two world";}i:28;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:16;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:4:"four";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello four world";}i:29;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'me*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:3:"me*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello me* world";}i:30;a:10:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. 
word: 'two*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:4:"two*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello two* world";}i:31;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:16;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"four*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:17:"hello four* world";}}i:5;a:32:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:2;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:3:"ccc";}i:3;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:4:"dddd";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:6;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:7;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:8;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. 
word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:9;a:10:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. word: 'ddd*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"ddd*";}i:10;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeee*";}i:11;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:12;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:13;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:14;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:15;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:3:"ccc";}i:16;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:4:"dddd";}i:17;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:18;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:19;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs
";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:20;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:21;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:22;a:10:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. word: 'ddd*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"ddd*";}i:23;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeee*";}i:24;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:25;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:26;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:14:"hello me 
world";}i:27;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello two world";}i:28;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello four world";}i:29;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'me*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:3:"me*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello me* world";}i:30;a:10:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. 
word: 'two*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:4:"two*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello two* world";}i:31;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"four*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:17:"hello four* world";}}i:6;a:32:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:2;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"ccc";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"ccc";}i:3;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dddd";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"dddd";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:6;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:7;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:8;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. 
word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:9;a:10:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. word: 'ddd*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"ddd*";}i:10;a:10:{s:5:"error";s:0:"";s:7:"warning";s:64:"Query word length is less than min prefix length. word: 'eeee*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"eeee*";}i:11;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:12;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:13;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:14;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:15;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:3:"ccc";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"ccc";}i:16;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"dddd";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"dddd";}i:17;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:18;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:19;a:11:{s:5:"erro
r";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:20;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:21;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:22;a:10:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. word: 'ddd*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"ddd*";}i:23;a:10:{s:5:"error";s:0:"";s:7:"warning";s:64:"Query word length is less than min prefix length. word: 'eeee*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"eeee*";}i:24;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:25;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:26;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:14:"hello me 
world";}i:27;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:15;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:3:"two";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello two world";}i:28;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:16;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:4:"four";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello four world";}i:29;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'me*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:3:"me*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello me* world";}i:30;a:11:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. word: 'two*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:15;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:4:"two*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello two* world";}i:31;a:11:{s:5:"error";s:0:"";s:7:"warning";s:64:"Query word length is less than min prefix length. 
word: 'four*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:16;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"four*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:17:"hello four* world";}}i:7;a:32:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:2;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:3:"ccc";}i:3;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dddd";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"dddd";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:6;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:7;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:8;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:9;a:10:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. 
word: 'ddd*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"ddd*";}i:10;a:10:{s:5:"error";s:0:"";s:7:"warning";s:64:"Query word length is less than min prefix length. word: 'eeee*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"eeee*";}i:11;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:12;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:13;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:14;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:15;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:3:"ccc";}i:16;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"dddd";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"dddd";}i:17;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:18;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:19;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:20;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. 
word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:21;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:22;a:10:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. word: 'ddd*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"ddd*";}i:23;a:10:{s:5:"error";s:0:"";s:7:"warning";s:64:"Query word length is less than min prefix length. word: 'eeee*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"eeee*";}i:24;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:25;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:26;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:14:"hello me 
world";}i:27;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello two world";}i:28;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:16;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:4:"four";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello four world";}i:29;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'me*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:3:"me*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello me* world";}i:30;a:10:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. word: 'two*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:4:"two*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello two* world";}i:31;a:11:{s:5:"error";s:0:"";s:7:"warning";s:64:"Query word length is less than min prefix length. 
word: 'four*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:16;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"four*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:17:"hello four* world";}}i:8;a:32:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:2;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:3:"ccc";}i:3;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:4:"dddd";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:6;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:7;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:8;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:9;a:10:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. word: 'ddd*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"ddd*";}i:10;a:10:{s:5:"error";s:0:"";s:7:"warning";s:64:"Query word length is less than min prefix length. 
word: 'eeee*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"eeee*";}i:11;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:12;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:13;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"a";}i:14;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"bb";}i:15;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:3:"ccc";}i:16;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:4:"dddd";}i:17;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"eeeee";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"eeeee";}i:18;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"ffffff";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ffffff";}i:19;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"ggggggg";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"ggggggg";}i:20;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. word: 'b*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"b*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"b*";}i:21;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. 
word: 'cc*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"cc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"cc*";}i:22;a:10:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. word: 'ddd*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"ddd*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"ddd*";}i:23;a:10:{s:5:"error";s:0:"";s:7:"warning";s:64:"Query word length is less than min prefix length. word: 'eeee*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"eeee*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"eeee*";}i:24;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"fffff*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"fffff*";}i:25;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"gggggg*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:7:"gggggg*";}i:26;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:14:"hello me world";}i:27;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello two 
world";}i:28;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:9;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello four world";}i:29;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. word: 'me*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:3:"me*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:15:"hello me* world";}i:30;a:10:{s:5:"error";s:0:"";s:7:"warning";s:63:"Query word length is less than min prefix length. word: 'two*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:4:"two*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:16:"hello two* world";}i:31;a:10:{s:5:"error";s:0:"";s:7:"warning";s:64:"Query word length is less than min prefix length. 
word: 'four*' ";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:5:"hello";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}s:5:"four*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"world";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:17:"hello four* world";}}}sphinx-2.0.4-release/test/test_048/0000755000176700017710000000000011724063141016273 5ustar deogardeogarsphinx-2.0.4-release/test/test_048/test.xml0000644000176700017710000000232511102563613017775 0ustar deogardeogar @custom sorting indexer { mem_limit = 16M } searchd { } source test { type = mysql sql_query_pre = SET time_zone='+0:00' sql_query = SELECT id, group_id, UNIX_TIMESTAMP(date_added) as date_added, text FROM test_table sql_attr_uint = group_id sql_attr_timestamp = date_added } index test { source = test path = /test docinfo = extern } create table test_table ( id int not null key auto_increment, group_id int not null, date_added date not null, text varchar(255) not null default 'text' ); drop table if exists test_table; insert into test_table (group_id, date_added) values ( 10, '2008-10-01' ), ( 20, '2008-10-01' ), ( 30, '2008-10-01' ), ( 1, '2007-10-01' ), ( 1, '2007-10-02' ), ( 1, '2007-10-03' ); group_id date_added text text sphinx-2.0.4-release/test/test_048/model.bin0000644000176700017710000000373411102563613020073 0ustar deogardeogara:1:{i:0;a:2:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:2:{s:8:"group_id";i:1;s:10:"date_added";i:2;}s:7:"matches";a:6:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:8:"group_id";s:2:"30";s:10:"date_added";s:10:"1222819200";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:8:"group_id";s:2:"20";s:10:"date_added";s:10:"1222819200";}}i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:8:"group_id";s:2:"10";s:10:"date_added";s:10:"1222819200";}}i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:8:"group_id";s:1:"1";s:10:"date_added";s:10:"1191369600";}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:8:"group_id";s:1:"1";s:10:"date_added";s:10:"1191283200";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:8:"group_id";s:1:"1";s:10:"date_added";s:10:"1191196800";}}}s:5:"total";s:1:"6";s:11:"total_found";s:1:"6";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"text";a:2:{s:4:"docs";s:1:"6";s:4:"hits";s:1:"6";}}s:5:"query";s:4:"text";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:2:{s:8:"group_id";i:1;s:10:"date_added";i:2;}s:7:"matches";a:6:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:8:"group_id";s:2:"30";s:10:"date_added";s:10:"1222819200";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:8:"group_id";s:2:"20";s:10:"date_added";s:10:"1222819200";}}i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:8:"group_id";s:2:"10";s:10:"date_added";s:10:"1222819200";}}i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:8:"group_id";s:1:"1";s:10:"date_added";s:10:"1191369600";}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:8:"group_id";s:1:"1";s:10:"date_added";s:10:"1191283200";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:8:"group_id";s:1:"1";s:10:"date_added";s:10:"1191196800";}}}s:5:"total";s:1:"6";s:11:"total_found";s:1:"6";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"text";a:2:{s:4:"docs";s:1:"6";s:4:"hits";s:1:"6";}}s:5:"query";s:4:"text";}}}sphinx-2.0.4-release/test/test_091/0000755000176700017710000000000011724063141016271 5ustar 
deogardeogarsphinx-2.0.4-release/test/test_091/test.xml0000644000176700017710000000110411421075337017772 0ustar deogardeogar RT: aggregate functions indexer { mem_limit = 16M } searchd { workers = threads binlog_path = } index test { type = rt path = data/test rt_attr_uint = idd rt_attr_uint = grp rt_field = content } insert into test (id,idd,grp,content) values (1,5,4,'content'),(2,2,4,'you') select * from test select max(idd) as mx from test group by grp sphinx-2.0.4-release/test/test_091/model.bin0000644000176700017710000000114011455516446020073 0ustar deogardeogara:1:{i:0;a:3:{i:0;a:2:{s:8:"sphinxql";s:76:"insert into test (id,idd,grp,content) values (1,5,4,'content'),(2,2,4,'you')";s:14:"total_affected";i:2;}i:1;a:3:{s:8:"sphinxql";s:18:"select * from test";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:4:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"idd";s:1:"5";s:3:"grp";s:1:"4";}i:1;a:4:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:3:"idd";s:1:"2";s:3:"grp";s:1:"4";}}}i:2;a:3:{s:8:"sphinxql";s:44:"select max(idd) as mx from test group by grp";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:4:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"idd";s:1:"5";s:2:"mx";s:1:"5";}}}}}sphinx-2.0.4-release/test/test_142/0000755000176700017710000000000011724063141016266 5ustar deogardeogarsphinx-2.0.4-release/test/test_142/test.xml0000644000176700017710000000521711663131536020002 0ustar deogardeogar RT on *nix: watchdog and replaying binlog searchd { workers = threads binlog_path = binlog_path = data } index crc { type = rt path = data/crc charset_type = utf-8 rt_field = content rt_attr_uint = idd dict = crc } index kw { type = rt path = data/kw charset_type = utf-8 rt_field = content rt_attr_uint = idd dict = keywords min_prefix_len = 1 } $value) { $results[] = $key . " => " . 
$value; } } } '); $sock = $make_sock( $results ); if ( $sock === false ) { return; } // regression // binlog crashes on replayng commit with delete only statement $query ( $results, "DELETE FROM kw WHERE id=1000", $sock); $query ( $results, "INSERT INTO crc VALUES (1, 'test work', 11)", $sock); $query ( $results, "INSERT INTO kw VALUES (100, 'test stuff', 111)", $sock); $query ( $results, "SELECT * FROM crc,kw WHERE MATCH('test')", $sock ); $query ( $results, "SELECT * FROM crc,kw", $sock ); $results[]= "killing"; KillSearchd ('config.conf', 'searchd.pid',9,false); StartSearchd ( "config.conf", "error.txt", 'searchd.pid', $error, true ); $results[] = "connecting again"; $sock = $make_sock( $results ); if ( $sock === false ) { return; } $query ( $results, "SELECT * FROM crc,kw WHERE MATCH('test')", $sock ); $query ( $results, "SELECT * FROM crc,kw", $sock ); ]]> sphinx-2.0.4-release/test/test_142/model.bin0000644000176700017710000000356411663131536020076 0ustar deogardeogara:2:{i:0;a:1:{i:0;a:21:{i:0;s:53:"query: DELETE FROM kw WHERE id=1000; total_affected:0";i:1;s:68:"query: INSERT INTO crc VALUES (1, 'test work', 11); total_affected:1";i:2;s:71:"query: INSERT INTO kw VALUES (100, 'test stuff', 111); total_affected:1";i:3;s:61:"query: SELECT * FROM crc,kw WHERE MATCH('test'); total_rows:2";i:4;s:7:"id => 1";i:5;s:14:"weight => 1500";i:6;s:9:"idd => 11";i:7;s:9:"id => 100";i:8;s:14:"weight => 1500";i:9;s:10:"idd => 111";i:10;s:41:"query: SELECT * FROM crc,kw; total_rows:2";i:11;s:7:"id => 1";i:12;s:11:"weight => 1";i:13;s:9:"idd => 11";i:14;s:9:"id => 100";i:15;s:11:"weight => 1";i:16;s:10:"idd => 111";i:17;s:7:"killing";i:18;s:16:"connecting again";i:19;s:61:"query: SELECT * FROM crc,kw WHERE MATCH('test'); total_rows:0";i:20;s:41:"query: SELECT * FROM crc,kw; total_rows:0";}}i:1;a:1:{i:0;a:33:{i:0;s:53:"query: DELETE FROM kw WHERE id=1000; total_affected:0";i:1;s:68:"query: INSERT INTO crc VALUES (1, 'test work', 11); total_affected:1";i:2;s:71:"query: INSERT INTO kw VALUES (100, 'test stuff', 111); total_affected:1";i:3;s:61:"query: SELECT * FROM crc,kw WHERE MATCH('test'); total_rows:2";i:4;s:7:"id => 1";i:5;s:14:"weight => 1500";i:6;s:9:"idd => 11";i:7;s:9:"id => 100";i:8;s:14:"weight => 1500";i:9;s:10:"idd => 111";i:10;s:41:"query: SELECT * FROM crc,kw; total_rows:2";i:11;s:7:"id => 1";i:12;s:11:"weight => 1";i:13;s:9:"idd => 11";i:14;s:9:"id => 100";i:15;s:11:"weight => 1";i:16;s:10:"idd => 111";i:17;s:7:"killing";i:18;s:16:"connecting again";i:19;s:61:"query: SELECT * FROM crc,kw WHERE MATCH('test'); total_rows:2";i:20;s:7:"id => 1";i:21;s:14:"weight => 1500";i:22;s:9:"idd => 11";i:23;s:9:"id => 100";i:24;s:14:"weight => 1500";i:25;s:10:"idd => 111";i:26;s:41:"query: SELECT * FROM crc,kw; total_rows:2";i:27;s:7:"id => 1";i:28;s:11:"weight => 1";i:29;s:9:"idd => 11";i:30;s:9:"id => 100";i:31;s:11:"weight => 1";i:32;s:10:"idd => 111";}}}sphinx-2.0.4-release/test/test_006/0000755000176700017710000000000011724063141016265 5ustar deogardeogarsphinx-2.0.4-release/test/test_006/test.xml0000644000176700017710000000400310744717627020003 0ustar deogardeogar prefix_fields/infix_fields (part 3) indexer { mem_limit = 16M } searchd { } source srclj { type = mysql sql_query = SELECT id, subject, body, author FROM test_table } index lj { source = srclj path = /lj charset_type = utf-8 min_word_len = 0 enable_star = 1 min_infix_len = 0 min_prefix_len = 0 prefix_fields = prefix_fields = subject prefix_fields = body prefix_fields = body, author infix_fields = infix_fields = SUBJECT infix_fields = body 
infix_fields = subject, AUTHOR } enab* grea* Mak* *ble* *thor* *oda* CREATE TABLE `test_table` ( `id` int(11) NOT NULL default '0', `document_id` int(5) NOT NULL default '0', `subject` varchar(255) NOT NULL default '', `body` varchar(255) NOT NULL default '', `author` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES (1,1,'Problem with enable_star searches','Having star searches is great! Just what we needed','Maurice Makaay'), (2,2,'Problem with enable_star searches',' But the thing is, that I cannot search for authors anymore','admin'), (3,3,'Problem with enable_star searches','I will try to repro it here and update you','shodan') sphinx-2.0.4-release/test/test_006/model.bin0000644000176700017710000007447510725372373020112 0ustar deogardeogara:16:{i:0;a:6:{i:0;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"enab";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"enab*";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"grea";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"grea*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"mak";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"Mak*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"ble";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*ble*";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"thor";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*thor*";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"oda";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*oda*";}}i:1;a:6:{i:0;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"enab";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"enab*";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"grea";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"grea*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:
{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"mak";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"Mak*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"ble";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*ble*";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"thor";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*thor*";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"oda";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*oda*";}}i:2;a:6:{i:0;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"enab";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"enab*";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"grea";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"grea*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"mak";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"Mak*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"ble";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*ble*";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"thor";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*thor*";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"oda";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*oda*";}}i:3;a:6:{i:0;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"enab";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"enab*";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0
:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"grea";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"grea*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"mak";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"Mak*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"ble";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*ble*";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"thor";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*thor*";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:3:"oda";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*oda*";}}i:4;a:6:{i:0;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"enab";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"enab*";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"grea";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"grea*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"mak";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"Mak*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"ble";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*ble*";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"thor";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*thor*";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"oda";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*oda*";}}i:5;a:6:{i:0;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:
0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"enab";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"enab*";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"grea";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"grea*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"mak";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"Mak*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"ble";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*ble*";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"thor";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*thor*";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"oda";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*oda*";}}i:6;a:6:{i:0;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"enab";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"enab*";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"grea";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"grea*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"mak";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"Mak*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"ble";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*ble*";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"thor";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*thor*";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:
"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"oda";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*oda*";}}i:7;a:6:{i:0;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"enab";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"enab*";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"grea";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"grea*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"mak";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"Mak*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"ble";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*ble*";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"thor";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*thor*";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"oda";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*oda*";}}i:8;a:6:{i:0;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"enab";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"enab*";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"grea";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"grea*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"mak";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"Mak*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"ble";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*ble*";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:
"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"thor";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*thor*";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"oda";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*oda*";}}i:9;a:6:{i:0;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.015";s:5:"words";a:1:{s:4:"enab";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"enab*";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"grea";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"grea*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"mak";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"Mak*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"ble";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*ble*";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"thor";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*thor*";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"oda";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*oda*";}}i:10;a:6:{i:0;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"enab";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"enab*";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"grea";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"grea*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"mak";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"Mak*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s
:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"ble";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*ble*";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"thor";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*thor*";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"oda";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*oda*";}}i:11;a:6:{i:0;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"enab";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"enab*";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"grea";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"grea*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"mak";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"Mak*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"ble";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*ble*";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"thor";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*thor*";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"oda";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*oda*";}}i:12;a:6:{i:0;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"enab";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"enab*";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"grea";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"grea*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:
0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"mak";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"Mak*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"ble";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*ble*";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"thor";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*thor*";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"oda";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*oda*";}}i:13;a:6:{i:0;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"enab";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"enab*";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"grea";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"grea*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"mak";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"Mak*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"ble";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*ble*";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"thor";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*thor*";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"oda";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*oda*";}}i:14;a:6:{i:0;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"enab";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"enab*";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs"
;a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"grea";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"grea*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"mak";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"Mak*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"ble";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*ble*";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"thor";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*thor*";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"oda";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*oda*";}}i:15;a:6:{i:0;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"enab";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"enab*";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"grea";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"grea*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"mak";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"Mak*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"ble";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*ble*";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"thor";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*thor*";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"oda";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*oda*";}}}sphinx-2.0.4-release/test/test_180/0000755000176700017710000000000011724063141016270 5ustar 
deogardeogarsphinx-2.0.4-release/test/test_180/test.xml0000644000176700017710000000472511577310312020002 0ustar deogardeogar rt MVA searchd { workers = threads } index rt1 { type = rt docinfo = extern charset_type = sbcs path = /rt1 rt_attr_uint = idd rt_attr_string = str1 rt_attr_multi = mva1 rt_attr_multi = mva2 rt_field = body } insert into rt1 (id, idd, str1, mva1, body) values ( 1, 1, '', ( 100, 10, 1), 'the' ) insert into rt1 (id, idd, str1, mva2, body) values ( 2, 2, '', ( 11, 13, 12), 'the' ) select * from rt1 where mva1>12 select * from rt1 where match ('the') and mva1>12 select * from rt1 where mva1=10 select * from rt1 where match ('the') and mva1=10 select * from rt1 where mva1!=100 select * from rt1 where match ('the') and mva1!=100 insert into rt1 (id, idd, str1, mva1, mva2, body) values ( 3, 3, '', ( 200, 201, 210), (300, 301, 310), 'the' ) select * from rt1 order by mva1 asc select * from rt1 where match ('the') order by mva1 asc select * from rt1 order by mva1 desc select * from rt1 where match ('the') order by mva1 desc select * from rt1 group by mva1 within group order by mva2 desc select * from rt1 where match ('the') group by mva1 within group order by mva2 desc insert into rt1 (id, idd, str1, mva1, mva2, body) values ( 4, 4, '', ( 200, 210), (100, 101, 102), 'the' ) select *, count (*) as c from rt1 group by mva1 within group order by mva2 desc select *, count (*) as c from rt1 where match ('the') group by mva1 within group order by mva2 desc select *, count (distinct mva2) as c from rt1 group by mva1 within group order by mva2 desc select *, count (*) as c from rt1 where match ('the') group by mva1 within group order by mva2 desc select *, count (distinct mva1) as c from rt1 group by mva1 within group order by mva2 desc select *, count (distinct mva2) as c from rt1 where match ('the') group by mva1 within group order by mva2 desc sphinx-2.0.4-release/test/test_180/model.bin0000644000176700017710000003456511577310312020100 0ustar deogardeogara:1:{i:0;a:22:{i:0;a:2:{s:8:"sphinxql";s:85:"insert into rt1 (id, idd, str1, mva1, body) values ( 1, 1, '', ( 100, 10, 1), 'the' )";s:14:"total_affected";i:1;}i:1;a:2:{s:8:"sphinxql";s:85:"insert into rt1 (id, idd, str1, mva2, body) values ( 2, 2, '', ( 11, 13, 12), 'the' )";s:14:"total_affected";i:1;}i:2;a:3:{s:8:"sphinxql";s:31:"select * from rt1 where mva1>12";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:6:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"idd";s:1:"1";s:4:"str1";s:0:"";s:4:"mva1";s:8:"1,10,100";s:4:"mva2";s:0:"";}}}i:3;a:3:{s:8:"sphinxql";s:50:"select * from rt1 where match ('the') and mva1>12";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:6:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1356";s:3:"idd";s:1:"1";s:4:"str1";s:0:"";s:4:"mva1";s:8:"1,10,100";s:4:"mva2";s:0:"";}}}i:4;a:3:{s:8:"sphinxql";s:31:"select * from rt1 where mva1=10";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:6:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"idd";s:1:"1";s:4:"str1";s:0:"";s:4:"mva1";s:8:"1,10,100";s:4:"mva2";s:0:"";}}}i:5;a:3:{s:8:"sphinxql";s:50:"select * from rt1 where match ('the') and mva1=10";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:6:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1356";s:3:"idd";s:1:"1";s:4:"str1";s:0:"";s:4:"mva1";s:8:"1,10,100";s:4:"mva2";s:0:"";}}}i:6;a:3:{s:8:"sphinxql";s:33:"select * from rt1 where mva1!=100";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:6:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:3:"idd";s:1:"2";s:4:"str1";s:0:"";s:4:"mva1";s:0:"";s:4:"mva2";s:8:"11,12,13";}}}i:7;a:3:{s:8:"sphinxql";s:52:"select * from rt1 where 
match ('the') and mva1!=100";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:6:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1356";s:3:"idd";s:1:"2";s:4:"str1";s:0:"";s:4:"mva1";s:0:"";s:4:"mva2";s:8:"11,12,13";}}}i:8;a:2:{s:8:"sphinxql";s:112:"insert into rt1 (id, idd, str1, mva1, mva2, body) values ( 3, 3, '', ( 200, 201, 210), (300, 301, 310), 'the' )";s:14:"total_affected";i:1;}i:9;a:3:{s:8:"sphinxql";s:35:"select * from rt1 order by mva1 asc";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:6:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:3:"idd";s:1:"2";s:4:"str1";s:0:"";s:4:"mva1";s:0:"";s:4:"mva2";s:8:"11,12,13";}i:1;a:6:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"idd";s:1:"1";s:4:"str1";s:0:"";s:4:"mva1";s:8:"1,10,100";s:4:"mva2";s:0:"";}i:2;a:6:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"idd";s:1:"3";s:4:"str1";s:0:"";s:4:"mva1";s:11:"200,201,210";s:4:"mva2";s:11:"300,301,310";}}}i:10;a:3:{s:8:"sphinxql";s:55:"select * from rt1 where match ('the') order by mva1 asc";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:6:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1319";s:3:"idd";s:1:"2";s:4:"str1";s:0:"";s:4:"mva1";s:0:"";s:4:"mva2";s:8:"11,12,13";}i:1;a:6:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1319";s:3:"idd";s:1:"1";s:4:"str1";s:0:"";s:4:"mva1";s:8:"1,10,100";s:4:"mva2";s:0:"";}i:2;a:6:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1319";s:3:"idd";s:1:"3";s:4:"str1";s:0:"";s:4:"mva1";s:11:"200,201,210";s:4:"mva2";s:11:"300,301,310";}}}i:11;a:3:{s:8:"sphinxql";s:36:"select * from rt1 order by mva1 desc";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:6:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"idd";s:1:"3";s:4:"str1";s:0:"";s:4:"mva1";s:11:"200,201,210";s:4:"mva2";s:11:"300,301,310";}i:1;a:6:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"idd";s:1:"1";s:4:"str1";s:0:"";s:4:"mva1";s:8:"1,10,100";s:4:"mva2";s:0:"";}i:2;a:6:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:3:"idd";s:1:"2";s:4:"str1";s:0:"";s:4:"mva1";s:0:"";s:4:"mva2";s:8:"11,12,13";}}}i:12;a:3:{s:8:"sphinxql";s:56:"select * from rt1 where match ('the') order by mva1 desc";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:6:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1319";s:3:"idd";s:1:"3";s:4:"str1";s:0:"";s:4:"mva1";s:11:"200,201,210";s:4:"mva2";s:11:"300,301,310";}i:1;a:6:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1319";s:3:"idd";s:1:"1";s:4:"str1";s:0:"";s:4:"mva1";s:8:"1,10,100";s:4:"mva2";s:0:"";}i:2;a:6:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1319";s:3:"idd";s:1:"2";s:4:"str1";s:0:"";s:4:"mva1";s:0:"";s:4:"mva2";s:8:"11,12,13";}}}i:13;a:3:{s:8:"sphinxql";s:63:"select * from rt1 group by mva1 within group order by mva2 
desc";s:10:"total_rows";i:6;s:4:"rows";a:6:{i:0;a:8:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"idd";s:1:"1";s:4:"str1";s:0:"";s:4:"mva1";s:8:"1,10,100";s:4:"mva2";s:0:"";s:8:"@groupby";s:3:"100";s:6:"@count";s:1:"1";}i:1;a:8:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"idd";s:1:"1";s:4:"str1";s:0:"";s:4:"mva1";s:8:"1,10,100";s:4:"mva2";s:0:"";s:8:"@groupby";s:2:"10";s:6:"@count";s:1:"1";}i:2;a:8:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"idd";s:1:"1";s:4:"str1";s:0:"";s:4:"mva1";s:8:"1,10,100";s:4:"mva2";s:0:"";s:8:"@groupby";s:1:"1";s:6:"@count";s:1:"1";}i:3;a:8:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"idd";s:1:"3";s:4:"str1";s:0:"";s:4:"mva1";s:11:"200,201,210";s:4:"mva2";s:11:"300,301,310";s:8:"@groupby";s:3:"210";s:6:"@count";s:1:"1";}i:4;a:8:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"idd";s:1:"3";s:4:"str1";s:0:"";s:4:"mva1";s:11:"200,201,210";s:4:"mva2";s:11:"300,301,310";s:8:"@groupby";s:3:"201";s:6:"@count";s:1:"1";}i:5;a:8:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"idd";s:1:"3";s:4:"str1";s:0:"";s:4:"mva1";s:11:"200,201,210";s:4:"mva2";s:11:"300,301,310";s:8:"@groupby";s:3:"200";s:6:"@count";s:1:"1";}}}i:14;a:3:{s:8:"sphinxql";s:83:"select * from rt1 where match ('the') group by mva1 within group order by mva2 desc";s:10:"total_rows";i:6;s:4:"rows";a:6:{i:0;a:8:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1319";s:3:"idd";s:1:"1";s:4:"str1";s:0:"";s:4:"mva1";s:8:"1,10,100";s:4:"mva2";s:0:"";s:8:"@groupby";s:3:"100";s:6:"@count";s:1:"1";}i:1;a:8:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1319";s:3:"idd";s:1:"1";s:4:"str1";s:0:"";s:4:"mva1";s:8:"1,10,100";s:4:"mva2";s:0:"";s:8:"@groupby";s:2:"10";s:6:"@count";s:1:"1";}i:2;a:8:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1319";s:3:"idd";s:1:"1";s:4:"str1";s:0:"";s:4:"mva1";s:8:"1,10,100";s:4:"mva2";s:0:"";s:8:"@groupby";s:1:"1";s:6:"@count";s:1:"1";}i:3;a:8:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1319";s:3:"idd";s:1:"3";s:4:"str1";s:0:"";s:4:"mva1";s:11:"200,201,210";s:4:"mva2";s:11:"300,301,310";s:8:"@groupby";s:3:"210";s:6:"@count";s:1:"1";}i:4;a:8:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1319";s:3:"idd";s:1:"3";s:4:"str1";s:0:"";s:4:"mva1";s:11:"200,201,210";s:4:"mva2";s:11:"300,301,310";s:8:"@groupby";s:3:"201";s:6:"@count";s:1:"1";}i:5;a:8:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1319";s:3:"idd";s:1:"3";s:4:"str1";s:0:"";s:4:"mva1";s:11:"200,201,210";s:4:"mva2";s:11:"300,301,310";s:8:"@groupby";s:3:"200";s:6:"@count";s:1:"1";}}}i:15;a:2:{s:8:"sphinxql";s:107:"insert into rt1 (id, idd, str1, mva1, mva2, body) values ( 4, 4, '', ( 200, 210), (100, 101, 102), 'the' )";s:14:"total_affected";i:1;}i:16;a:3:{s:8:"sphinxql";s:79:"select *, count (*) as c from rt1 group by mva1 within group order by mva2 
desc";s:10:"total_rows";i:6;s:4:"rows";a:6:{i:0;a:8:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"idd";s:1:"1";s:4:"str1";s:0:"";s:4:"mva1";s:8:"1,10,100";s:4:"mva2";s:0:"";s:8:"@groupby";s:3:"100";s:6:"@count";s:1:"1";}i:1;a:8:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"idd";s:1:"1";s:4:"str1";s:0:"";s:4:"mva1";s:8:"1,10,100";s:4:"mva2";s:0:"";s:8:"@groupby";s:2:"10";s:6:"@count";s:1:"1";}i:2;a:8:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"idd";s:1:"1";s:4:"str1";s:0:"";s:4:"mva1";s:8:"1,10,100";s:4:"mva2";s:0:"";s:8:"@groupby";s:1:"1";s:6:"@count";s:1:"1";}i:3;a:8:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"idd";s:1:"3";s:4:"str1";s:0:"";s:4:"mva1";s:11:"200,201,210";s:4:"mva2";s:11:"300,301,310";s:8:"@groupby";s:3:"201";s:6:"@count";s:1:"1";}i:4;a:8:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"idd";s:1:"3";s:4:"str1";s:0:"";s:4:"mva1";s:11:"200,201,210";s:4:"mva2";s:11:"300,301,310";s:8:"@groupby";s:3:"210";s:6:"@count";s:1:"2";}i:5;a:8:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"idd";s:1:"3";s:4:"str1";s:0:"";s:4:"mva1";s:11:"200,201,210";s:4:"mva2";s:11:"300,301,310";s:8:"@groupby";s:3:"200";s:6:"@count";s:1:"2";}}}i:17;a:3:{s:8:"sphinxql";s:99:"select *, count (*) as c from rt1 where match ('the') group by mva1 within group order by mva2 desc";s:10:"total_rows";i:6;s:4:"rows";a:6:{i:0;a:8:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1304";s:3:"idd";s:1:"1";s:4:"str1";s:0:"";s:4:"mva1";s:8:"1,10,100";s:4:"mva2";s:0:"";s:8:"@groupby";s:3:"100";s:6:"@count";s:1:"1";}i:1;a:8:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1304";s:3:"idd";s:1:"1";s:4:"str1";s:0:"";s:4:"mva1";s:8:"1,10,100";s:4:"mva2";s:0:"";s:8:"@groupby";s:2:"10";s:6:"@count";s:1:"1";}i:2;a:8:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1304";s:3:"idd";s:1:"1";s:4:"str1";s:0:"";s:4:"mva1";s:8:"1,10,100";s:4:"mva2";s:0:"";s:8:"@groupby";s:1:"1";s:6:"@count";s:1:"1";}i:3;a:8:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1304";s:3:"idd";s:1:"3";s:4:"str1";s:0:"";s:4:"mva1";s:11:"200,201,210";s:4:"mva2";s:11:"300,301,310";s:8:"@groupby";s:3:"201";s:6:"@count";s:1:"1";}i:4;a:8:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1304";s:3:"idd";s:1:"3";s:4:"str1";s:0:"";s:4:"mva1";s:11:"200,201,210";s:4:"mva2";s:11:"300,301,310";s:8:"@groupby";s:3:"210";s:6:"@count";s:1:"2";}i:5;a:8:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1304";s:3:"idd";s:1:"3";s:4:"str1";s:0:"";s:4:"mva1";s:11:"200,201,210";s:4:"mva2";s:11:"300,301,310";s:8:"@groupby";s:3:"200";s:6:"@count";s:1:"2";}}}i:18;a:3:{s:8:"sphinxql";s:91:"select *, count (distinct mva2) as c from rt1 group by mva1 within group order by mva2 
desc";s:10:"total_rows";i:6;s:4:"rows";a:6:{i:0;a:9:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"idd";s:1:"1";s:4:"str1";s:0:"";s:4:"mva1";s:8:"1,10,100";s:4:"mva2";s:0:"";s:8:"@groupby";s:3:"100";s:6:"@count";s:1:"1";s:9:"@distinct";s:1:"1";}i:1;a:9:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"idd";s:1:"1";s:4:"str1";s:0:"";s:4:"mva1";s:8:"1,10,100";s:4:"mva2";s:0:"";s:8:"@groupby";s:2:"10";s:6:"@count";s:1:"1";s:9:"@distinct";s:1:"1";}i:2;a:9:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"idd";s:1:"1";s:4:"str1";s:0:"";s:4:"mva1";s:8:"1,10,100";s:4:"mva2";s:0:"";s:8:"@groupby";s:1:"1";s:6:"@count";s:1:"1";s:9:"@distinct";s:1:"1";}i:3;a:9:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"idd";s:1:"3";s:4:"str1";s:0:"";s:4:"mva1";s:11:"200,201,210";s:4:"mva2";s:11:"300,301,310";s:8:"@groupby";s:3:"201";s:6:"@count";s:1:"1";s:9:"@distinct";s:1:"1";}i:4;a:9:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"idd";s:1:"3";s:4:"str1";s:0:"";s:4:"mva1";s:11:"200,201,210";s:4:"mva2";s:11:"300,301,310";s:8:"@groupby";s:3:"210";s:6:"@count";s:1:"2";s:9:"@distinct";s:1:"2";}i:5;a:9:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"idd";s:1:"3";s:4:"str1";s:0:"";s:4:"mva1";s:11:"200,201,210";s:4:"mva2";s:11:"300,301,310";s:8:"@groupby";s:3:"200";s:6:"@count";s:1:"2";s:9:"@distinct";s:1:"2";}}}i:19;a:3:{s:8:"sphinxql";s:99:"select *, count (*) as c from rt1 where match ('the') group by mva1 within group order by mva2 desc";s:10:"total_rows";i:6;s:4:"rows";a:6:{i:0;a:8:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1304";s:3:"idd";s:1:"1";s:4:"str1";s:0:"";s:4:"mva1";s:8:"1,10,100";s:4:"mva2";s:0:"";s:8:"@groupby";s:3:"100";s:6:"@count";s:1:"1";}i:1;a:8:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1304";s:3:"idd";s:1:"1";s:4:"str1";s:0:"";s:4:"mva1";s:8:"1,10,100";s:4:"mva2";s:0:"";s:8:"@groupby";s:2:"10";s:6:"@count";s:1:"1";}i:2;a:8:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1304";s:3:"idd";s:1:"1";s:4:"str1";s:0:"";s:4:"mva1";s:8:"1,10,100";s:4:"mva2";s:0:"";s:8:"@groupby";s:1:"1";s:6:"@count";s:1:"1";}i:3;a:8:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1304";s:3:"idd";s:1:"3";s:4:"str1";s:0:"";s:4:"mva1";s:11:"200,201,210";s:4:"mva2";s:11:"300,301,310";s:8:"@groupby";s:3:"201";s:6:"@count";s:1:"1";}i:4;a:8:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1304";s:3:"idd";s:1:"3";s:4:"str1";s:0:"";s:4:"mva1";s:11:"200,201,210";s:4:"mva2";s:11:"300,301,310";s:8:"@groupby";s:3:"210";s:6:"@count";s:1:"2";}i:5;a:8:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1304";s:3:"idd";s:1:"3";s:4:"str1";s:0:"";s:4:"mva1";s:11:"200,201,210";s:4:"mva2";s:11:"300,301,310";s:8:"@groupby";s:3:"200";s:6:"@count";s:1:"2";}}}i:20;a:3:{s:8:"sphinxql";s:91:"select *, count (distinct mva1) as c from rt1 group by mva1 within group order by mva2 
desc";s:10:"total_rows";i:6;s:4:"rows";a:6:{i:0;a:9:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"idd";s:1:"1";s:4:"str1";s:0:"";s:4:"mva1";s:8:"1,10,100";s:4:"mva2";s:0:"";s:8:"@groupby";s:3:"100";s:6:"@count";s:1:"1";s:9:"@distinct";s:1:"1";}i:1;a:9:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"idd";s:1:"1";s:4:"str1";s:0:"";s:4:"mva1";s:8:"1,10,100";s:4:"mva2";s:0:"";s:8:"@groupby";s:2:"10";s:6:"@count";s:1:"1";s:9:"@distinct";s:1:"1";}i:2;a:9:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"idd";s:1:"1";s:4:"str1";s:0:"";s:4:"mva1";s:8:"1,10,100";s:4:"mva2";s:0:"";s:8:"@groupby";s:1:"1";s:6:"@count";s:1:"1";s:9:"@distinct";s:1:"1";}i:3;a:9:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"idd";s:1:"3";s:4:"str1";s:0:"";s:4:"mva1";s:11:"200,201,210";s:4:"mva2";s:11:"300,301,310";s:8:"@groupby";s:3:"201";s:6:"@count";s:1:"1";s:9:"@distinct";s:1:"1";}i:4;a:9:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"idd";s:1:"3";s:4:"str1";s:0:"";s:4:"mva1";s:11:"200,201,210";s:4:"mva2";s:11:"300,301,310";s:8:"@groupby";s:3:"210";s:6:"@count";s:1:"2";s:9:"@distinct";s:1:"1";}i:5;a:9:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"idd";s:1:"3";s:4:"str1";s:0:"";s:4:"mva1";s:11:"200,201,210";s:4:"mva2";s:11:"300,301,310";s:8:"@groupby";s:3:"200";s:6:"@count";s:1:"2";s:9:"@distinct";s:1:"1";}}}i:21;a:3:{s:8:"sphinxql";s:111:"select *, count (distinct mva2) as c from rt1 where match ('the') group by mva1 within group order by mva2 desc";s:10:"total_rows";i:6;s:4:"rows";a:6:{i:0;a:9:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1304";s:3:"idd";s:1:"1";s:4:"str1";s:0:"";s:4:"mva1";s:8:"1,10,100";s:4:"mva2";s:0:"";s:8:"@groupby";s:3:"100";s:6:"@count";s:1:"1";s:9:"@distinct";s:1:"1";}i:1;a:9:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1304";s:3:"idd";s:1:"1";s:4:"str1";s:0:"";s:4:"mva1";s:8:"1,10,100";s:4:"mva2";s:0:"";s:8:"@groupby";s:2:"10";s:6:"@count";s:1:"1";s:9:"@distinct";s:1:"1";}i:2;a:9:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1304";s:3:"idd";s:1:"1";s:4:"str1";s:0:"";s:4:"mva1";s:8:"1,10,100";s:4:"mva2";s:0:"";s:8:"@groupby";s:1:"1";s:6:"@count";s:1:"1";s:9:"@distinct";s:1:"1";}i:3;a:9:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1304";s:3:"idd";s:1:"3";s:4:"str1";s:0:"";s:4:"mva1";s:11:"200,201,210";s:4:"mva2";s:11:"300,301,310";s:8:"@groupby";s:3:"201";s:6:"@count";s:1:"1";s:9:"@distinct";s:1:"1";}i:4;a:9:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1304";s:3:"idd";s:1:"3";s:4:"str1";s:0:"";s:4:"mva1";s:11:"200,201,210";s:4:"mva2";s:11:"300,301,310";s:8:"@groupby";s:3:"210";s:6:"@count";s:1:"2";s:9:"@distinct";s:1:"2";}i:5;a:9:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1304";s:3:"idd";s:1:"3";s:4:"str1";s:0:"";s:4:"mva1";s:11:"200,201,210";s:4:"mva2";s:11:"300,301,310";s:8:"@groupby";s:3:"200";s:6:"@count";s:1:"2";s:9:"@distinct";s:1:"2";}}}}}sphinx-2.0.4-release/test/test_131/0000755000176700017710000000000011724063141016264 5ustar deogardeogarsphinx-2.0.4-release/test/test_131/test.xml0000644000176700017710000000220011417531067017765 0ustar deogardeogar 0 hit vs 0+ documents indexer { mem_limit = 16M } searchd { } source src0doc { type = mysql sql_query = SELECT id, gid, title FROM test_table where id=111 sql_attr_uint = gid } index test0doc { source = src0doc path = /test0doc charset_type = utf-8 docinfo = extern } source src1doc { type = mysql sql_query = SELECT id, gid, title FROM test_table where id=11 sql_attr_uint = gid } index test1doc { source = src1doc path = /test1doc charset_type = utf-8 docinfo = extern } the CREATE TABLE `test_table` ( `id` int(11) NOT NULL default '0', `gid` int(11) NOT NULL default '0', `title` varchar(255) NOT NULL default '' ) 
DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 11, 1011, '' ) sphinx-2.0.4-release/test/test_131/model.bin0000644000176700017710000000114711417531067020067 0ustar deogardeogara:1:{i:0;a:2:{i:0;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.003";s:5:"words";a:1:{s:3:"the";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:3:"the";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}}}sphinx-2.0.4-release/test/test_001/0000755000176700017710000000000011724063141016260 5ustar deogardeogarsphinx-2.0.4-release/test/test_001/test.xml0000644000176700017710000000370210744717627020003 0ustar deogardeogar prefix/infix indexing (part 1) indexer { mem_limit = 16M } searchd { } source srclj { type = mysql sql_query = SELECT id, subject, body, author FROM test_table } index lj { source = srclj path = /lj charset_type = utf-8 min_word_len = 0 min_prefix_len = 0 min_prefix_len = 1 min_prefix_len = 3 min_infix_len = 0 min_infix_len = 1 min_infix_len = 3 enable_star = 0 enable_star = 1 } admin *earc* up* dmin rep pda CREATE TABLE `test_table` ( `id` int(11) NOT NULL default '0', `document_id` int(5) NOT NULL default '0', `subject` varchar(255) NOT NULL default '', `body` varchar(255) NOT NULL default '', `author` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES (1,1,'Problem with enable_star searches','Having star searches is great! 
Just what we needed','Maurice Makaay'), (2,2,'Problem with enable_star searches',' But the thing is, that I cannot search for authors anymore','admin'), (3,3,'Problem with enable_star searches','I will try to repro it here and update you','shodan') sphinx-2.0.4-release/test/test_001/model.bin0000644000176700017710000005237710723664234020100 0ustar deogardeogara:18:{i:0;a:6:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"admin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"admin";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"earc";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*earc*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"up";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"up*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dmin";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"dmin";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"rep";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"rep";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"pda";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"pda";}}i:1;a:6:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"admin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"admin";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"earc";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*earc*";}i:2;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"up";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"up*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fi
elds";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dmin";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"dmin";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"rep";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"rep";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"pda";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"pda";}}i:2;a:6:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"admin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"admin";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"earc";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*earc*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"up";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"up*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dmin";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"dmin";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"rep";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"rep";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"pda";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"pda";}}i:3;a:6:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"admin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"admin";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"a
uthor";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"earc";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"5";}}s:5:"query";s:6:"*earc*";}i:2;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"up";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"up*";}i:3;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dmin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"dmin";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"rep";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"rep";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"pda";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"pda";}}i:4;a:1:{i:0;s:6:"failed";}i:5;a:1:{i:0;s:6:"failed";}i:6;a:6:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"admin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"admin";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"earc";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"5";}}s:5:"query";s:6:"*earc*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"up";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"up*";}i:3;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"dmin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query
";s:4:"dmin";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"rep";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"rep";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"pda";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"pda";}}i:7;a:1:{i:0;s:6:"failed";}i:8;a:1:{i:0;s:6:"failed";}i:9;a:6:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"admin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"admin";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"earc";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*earc*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"up";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"up*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dmin";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"dmin";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"rep";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"rep";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"pda";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"pda";}}i:10;a:6:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"admin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"admin";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"*earc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"qu
ery";s:6:"*earc*";}i:2;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"up*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"up*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dmin";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"dmin";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"rep";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"rep";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"pda";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"pda";}}i:11;a:6:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"admin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"admin";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"*earc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*earc*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"up*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"up*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dmin";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"dmin";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"rep";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"rep";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"pda";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"pda";}}i:12;a:6:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:
{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"admin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"admin";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"*earc*";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"5";}}s:5:"query";s:6:"*earc*";}i:2;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"up*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"up*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dmin";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"dmin";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"rep";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"rep";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"pda";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"pda";}}i:13;a:1:{i:0;s:6:"failed";}i:14;a:1:{i:0;s:6:"failed";}i:15;a:6:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"admin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"admin";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"*earc*";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"5";}}s:5:"query";s:6:"*earc*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"up*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"up*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:
5:"words";a:1:{s:4:"dmin";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"dmin";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"rep";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"rep";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"pda";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"pda";}}i:16;a:1:{i:0;s:6:"failed";}i:17;a:1:{i:0;s:6:"failed";}}sphinx-2.0.4-release/test/test_047/0000755000176700017710000000000011724063141016272 5ustar deogardeogarsphinx-2.0.4-release/test/test_047/test.xml0000644000176700017710000000263711521500542017777 0ustar deogardeogar id64 indexer { mem_limit = 16M } searchd { } source test { type = mysql sql_query = SELECT id, n, text FROM test_table; sql_attr_uint = n } index test { source = test path = /test docinfo = extern } CREATE TABLE test_table ( id BIGINT UNSIGNED NOT NULL, n INT NOT NULL DEFAULT 42, text VARCHAR(255) NOT NULL ); DROP TABLE IF EXISTS test_table; INSERT INTO test_table (id, text) VALUES ( 50000000000, 'five' ), ( 50000000000, 'dup' ), ( 50000000001, 'five+1' ), ( 60000000000, 'six' ), ( 60000000001, 'six+1' ), ( 9223372036854775806, 'mid -1' ), ( 9223372036854775807, 'mid' ), ( 9223372036854775808, 'mid +1' ), ( 18446744073709551613, 'last' ), ( 18446744073709551614, 'last' ), ( 18446744073709551615, 'max (wont make it)' ); sel @groupby @count five last max mid sphinx-2.0.4-release/test/test_047/model.bin0000644000176700017710000001256311547647352020112 0ustar 
deogardeogara:1:{i:0;a:8:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:1:{s:1:"n";i:1;}s:7:"matches";a:2:{s:11:"50000000000";a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:1:"n";s:2:"42";}}s:11:"50000000001";a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:1:"n";s:2:"42";}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"five";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"five";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:1:{s:1:"n";i:1;}s:7:"matches";a:2:{s:20:"18446744073709551613";a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:1:"n";s:2:"42";}}s:20:"18446744073709551614";a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:1:"n";s:2:"42";}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"last";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"last";}i:2;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:1:{s:1:"n";i:1;}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"max";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:3:"max";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:1:{s:1:"n";i:1;}s:7:"matches";a:3:{s:19:"9223372036854775806";a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:1:"n";s:2:"42";}}s:19:"9223372036854775807";a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:1:"n";s:2:"42";}}s:19:"9223372036854775808";a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:1:"n";s:2:"42";}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"mid";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:3:"mid";}i:4;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:1:{s:1:"n";i:1;}s:7:"matches";a:3:{s:11:"50000000000";a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:1:"n";s:2:"42";}}s:11:"50000000001";a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:1:"n";s:2:"42";}}s:11:"60000000000";a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:1:"n";s:2:"42";}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:5;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:1:{s:1:"n";i:1;}s:7:"matches";a:5:{s:19:"9223372036854775806";a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:1:"n";s:2:"42";}}s:19:"9223372036854775807";a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:1:"n";s:2:"42";}}s:19:"9223372036854775808";a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:1:"n";s:2:"42";}}s:20:"18446744073709551613";a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:1:"n";s:2:"42";}}s:20:"18446744073709551614";a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:1:"n";s:2:"42";}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:6;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:1:{s:1:"n";i:1;}s:7:"matches";a:1:{s:19:"9223372036854775808";a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:1:"n";s:2:"42";}}}s:5:"total";s:1:"1";s:11:"total_
found";s:1:"1";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:7;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:4:{s:1:"n";i:1;s:3:"sel";i:6;s:8:"@groupby";i:6;s:6:"@count";i:1;}s:7:"matches";a:9:{s:19:"9223372036854775807";a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:1:"n";s:2:"42";s:3:"sel";s:19:"9223372036854775807";s:8:"@groupby";s:19:"9223372036854775807";s:6:"@count";s:1:"1";}}s:19:"9223372036854775806";a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:1:"n";s:2:"42";s:3:"sel";s:19:"9223372036854775806";s:8:"@groupby";s:19:"9223372036854775806";s:6:"@count";s:1:"1";}}s:11:"60000000001";a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:1:"n";s:2:"42";s:3:"sel";s:11:"60000000001";s:8:"@groupby";s:11:"60000000001";s:6:"@count";s:1:"1";}}s:11:"60000000000";a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:1:"n";s:2:"42";s:3:"sel";s:11:"60000000000";s:8:"@groupby";s:11:"60000000000";s:6:"@count";s:1:"1";}}s:11:"50000000001";a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:1:"n";s:2:"42";s:3:"sel";s:11:"50000000001";s:8:"@groupby";s:11:"50000000001";s:6:"@count";s:1:"1";}}s:11:"50000000000";a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:1:"n";s:2:"42";s:3:"sel";s:11:"50000000000";s:8:"@groupby";s:11:"50000000000";s:6:"@count";s:1:"1";}}s:20:"18446744073709551614";a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:1:"n";s:2:"42";s:3:"sel";i:-2;s:8:"@groupby";i:-2;s:6:"@count";s:1:"1";}}s:20:"18446744073709551613";a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:1:"n";s:2:"42";s:3:"sel";i:-3;s:8:"@groupby";i:-3;s:6:"@count";s:1:"1";}}s:19:"9223372036854775808";a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:1:"n";s:2:"42";s:3:"sel";s:20:"-9223372036854775808";s:8:"@groupby";s:20:"-9223372036854775808";s:6:"@count";s:1:"1";}}}s:5:"total";s:1:"9";s:11:"total_found";s:1:"9";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}}}sphinx-2.0.4-release/test/test_051/0000755000176700017710000000000011724063141016265 5ustar deogardeogarsphinx-2.0.4-release/test/test_051/test.xml0000644000176700017710000001210511565144030017765 0ustar deogardeogar snippets vs exact_phrase + snippets vs passage duplication searchd { } source src { type = mysql sql_query = SELECT 1, 'text'; } index idx { source = src path = /test } index exact { source = src path = /exact morphology = stem_en index_exact_words = 1 blend_chars = (, ), - blend_mode = trim_none, trim_head, trim_tail, trim_both, skip_pure } select 1; BuildExcerpts ( array($text), 'idx', $query, array('exact_phrase' => true) ); $results [] = $query; $results [] = $reply; } $text = 'A native of Honolulu, Hawaii, Obama is a graduate of Columbia University and Harvard Law School, where he waas the president of the Harvard Law Review. He was a community organizer in Chicago before earning his law degree. He worrked as a civil rights attorney in Chicago and taught constitutional law at the University of Chicago Law School from 19992 to 2004.Obama served three terms in the Illinois Senate from 1997 to 2004.'; $query = '"University and Harvard Law School" obama'; $reply = $client->BuildExcerpts ( array($text), 'idx', $query ,array('query_mode'=>true, 'around'=>2) ); $results [] = $query; $results [] = $reply; $text = 'This is a large house. Its a doggy house. The doggy house is most doggy here. 
There is no any doggy house around.'; $query = '"the doggy house"'; $reply = $client->BuildExcerpts ( array($text), 'idx', $query, array('query_mode'=>true, 'around'=> 2, 'weight_order'=>true, 'limit_words'=>20) ); $results [] = $query; $results [] = $reply; $query = 'the doggy house'; $reply = $client->BuildExcerpts ( array($text), 'idx', $query, array( 'around'=> 3, 'limit'=>100, 'limit_passages'=>1 ) ); $results [] = $query; $results [] = $reply; $reply = $client->BuildExcerpts ( array($text), 'idx', $query, array( 'around'=> 3, 'exact_phrase'=>true, 'limit'=>100, 'limit_passages'=>1 ) ); $results [] = $query; $results [] = $reply; $reply = $client->BuildExcerpts ( array($text), 'idx', $query, array('around'=> 3, 'limit_words'=>6, 'exact_phrase'=>true) ); $results [] = $query; $results [] = $reply; $query = 'is most'; $reply = $client->BuildExcerpts ( array($text), 'idx', $query, array('around'=> 2, 'limit_words'=>4) ); $results [] = $query; $results [] = $reply; $query = 'is house'; $reply = $client->BuildExcerpts ( array($text), 'idx', $query, array('around'=> 2, 'before_match'=>'', 'after_match'=>'<%PASSAGE_ID% ends>') ); $results [] = $query; $results [] = $reply; $query = 'is house'; $reply = $client->BuildExcerpts ( array($text), 'idx', $query, array('around'=> 2, 'limit_words'=>10, 'before_match'=>'%PASSAGE_ID% !-! ', 'after_match'=>' !-! %PASSAGE_ID%', 'start_passage_id'=>1000) ); $results [] = $query; $results [] = $reply; $query = 'is house'; $reply = $client->BuildExcerpts ( array($text), 'idx', $query, array('around'=> 2, 'limit_words'=>10, 'before_match'=>'', 'after_match'=>'', 'start_passage_id'=>1000) ); $results [] = $query; $results [] = $reply; $query = 'is house'; $reply = $client->BuildExcerpts ( array($text), 'idx', $query, array('around'=> 2, 'limit_words'=>10, 'before_match'=>'', 'after_match'=>'', 'start_passage_id'=>1000) ); $results [] = $query; $results [] = $reply; $query = '=welcome'; $reply = $client->BuildExcerpts ( array('=welcome', '=welcome'), 'exact', $query, array('query_mode'=>1) ); $results [] = "crash on exact words and 2 documents with '=' symbol"; $results [] = $reply; $query = ' =\(12b\-1\) '; $docs = array ( 'Distribution and Service (12b-1) fees' ); $opts =array ( 'query_mode'=>1, 'limit'=>15, 'around'=>2, 'allows_empty'=>1 ); $reply = $client->BuildExcerpts ( $docs, 'exact', $query, $opts ); $results [] = 'exact-blened: plain path'; $results [] = $reply; $opts =array ( 'query_mode'=>1, 'limit'=>0, 'around'=>0, 'allows_empty'=>1 ); $reply = $client->BuildExcerpts ( $docs, 'exact', $query, $opts ); $results [] = 'exact-blened: fast path'; $results [] = $reply; ]]> sphinx-2.0.4-release/test/test_051/model.bin0000644000176700017710000000473211565144030020065 0ustar deogardeogara:1:{i:0;a:1:{i:0;a:30:{i:0;s:15:"cajuput invalid";i:1;a:1:{i:0;s:244:" ... whirligig INVALID three pilotfish promontory CAJUPUT INVALID CAJUPUT INVALID EXPOSITOR whirligig felspar disposition pilotfish ... humanism detestable promontory comforter chubby CAJUPUT INVALID humanism CAJUPUT";}i:2;s:25:"cajuput invalid expositor";i:3;a:1:{i:0;s:136:" ... three pilotfish promontory CAJUPUT INVALID CAJUPUT INVALID EXPOSITOR whirligig felspar disposition pilotfish chubby ... ";}i:4;s:41:""University and Harvard Law School" obama";i:5;a:1:{i:0;s:181:" ... Honolulu, Hawaii, Obama is a ... of Columbia University and Harvard Law School, where he ... to 2004.Obama served three ... ";}i:6;s:17:""the doggy house"";i:7;a:1:{i:0;s:68:" ... doggy house. The doggy house is most ... 
";}i:8;s:15:"the doggy house";i:9;a:1:{i:0;s:126:" ... large house. Its a doggy house. The doggy house is most doggy here ... ";}i:10;s:15:"the doggy house";i:11;a:1:{i:0;s:62:" ... a doggy house. The doggy house is most doggy ... ";}i:12;s:15:"the doggy house";i:13;a:1:{i:0;s:48:" ... house. The doggy house is most ... ";}i:14;s:7:"is most";i:15;a:1:{i:0;s:73:" ... house is most doggy ... . There is no any ... ";}i:16;s:8:"is house";i:17;a:1:{i:0;s:253:"This is<1 ends> a large house<2 ends>. Its a doggy house<3 ends>. The doggy house<4 ends> is<5 ends> most doggy here. There is<6 ends> no any doggy house<7 ends> around.";}i:18;s:8:"is house";i:19;a:1:{i:0;s:140:" ... doggy 1000 !-! house !-! 1000. The doggy 1001 !-! house !-! 1001 1002 !-! is !-! 1002 most doggy here. There 1003 !-! is !-! 1003 ... ";}i:20;s:8:"is house";i:21;a:1:{i:0;s:116:" ... doggy house. The doggy house is most doggy here. There is ... ";}i:22;s:8:"is house";i:23;a:1:{i:0;s:116:" ... doggy house. The doggy house is most doggy here. There is ... ";}i:24;s:52:"crash on exact words and 2 documents with '=' symbol";i:25;a:2:{i:0;s:15:"=welcome";i:1;s:15:"=welcome";}i:26;s:24:"exact-blened: plain path";i:27;a:1:{i:0;s:32:" ... (12b-1) fees";}i:28;s:23:"exact-blened: fast path";i:29;a:1:{i:0;s:44:"Distribution and Service (12b-1) fees";}}}}sphinx-2.0.4-release/test/test_035/0000755000176700017710000000000011724063141016267 5ustar deogardeogarsphinx-2.0.4-release/test/test_035/test.xml0000644000176700017710000000170311055570774020005 0ustar deogardeogar infixes vs stopwords indexer { mem_limit = 16M } searchd { } source srctest { type = mysql sql_query = SELECT * FROM test_table } index test { source = srctest path = /test charset_type = utf-8 stopwords = stopwords.txt enable_star = 1 min_prefix_len = 3 min_infix_len = 3 } the the* *the* CREATE TABLE `test_table` ( `document_id` int(11) NOT NULL default '0', `body` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 1, 'their' ), ( 2, 'theta' ), ( 3, 'xthex' ) sphinx-2.0.4-release/test/test_035/model.bin0000644000176700017710000000445311302676725020101 0ustar 
deogardeogara:2:{i:0;a:3:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:3:"the";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.003";s:5:"words";a:1:{s:4:"the*";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"the*";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:5:"*the*";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"*the*";}}i:1;a:3:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.002";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:3:"the";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.003";s:5:"words";a:1:{s:4:"the*";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"the*";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.003";s:5:"words";a:1:{s:5:"*the*";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"*the*";}}}sphinx-2.0.4-release/test/test_096/0000755000176700017710000000000011724063141016276 5ustar deogardeogarsphinx-2.0.4-release/test/test_096/test.xml0000644000176700017710000000761311634664540020020 0ustar deogardeogar attribute updates indexer { mem_limit = 16M } searchd { } source src { type = mysql sql_query = SELECT id, adtext, heading, postcode, lng, lat, section, make_id, transmission_id FROM test_table sql_attr_float = lng sql_attr_float = lat sql_attr_uint = section sql_attr_uint = make_id sql_attr_uint = transmission_id } index idx { source = src path = /main charset_type = utf-8 docinfo = extern } UpdateAttributes ( "idx", array("section", "make_id"), array(1=>array(4, 15), 3=>array(221, 31))); if ( $up >= 0 ) $results[] = sprintf("up.ok=%d", $up); else $results[] = sprintf("up.err=%s", $client->GetLastError()); StopSearchd ( 'config.conf', 'searchd.pid' ); usleep ( 50000 ); $error = ""; $startSta = StartSearchd ( 'config.conf', 'error.txt', 'searchd.pid', $error ); if ( $startSta == 0 || $startSta == 2 ) { $results[] = "started=ok"; } else $results[] = sprintf("start.err=%d local=%s client=%s", $startSta, $error, $client->GetLastError()); } // find the updated match 
$results[] = $client->Query ( "Corsa4" ); // final fixup; we don't want to compare times for ( $i=0; $i CREATE TABLE `test_table` ( `id` int(11) NOT NULL auto_increment, `section` int(11) NOT NULL, `system_id` tinyint(4) NOT NULL, `adtext` varchar(255) NOT NULL, `heading` varchar(500) NOT NULL, `price` int(11) NOT NULL default '0', `postcode` varchar(10) NOT NULL, `gre` int(11) NOT NULL, `grn` int(11) NOT NULL, `str_at1` varchar(255) NOT NULL, `str_at2` varchar(255) NOT NULL, `str_at3` varchar(255) NOT NULL, `str_at4` varchar(255) NOT NULL, `str_at5` varchar(255) NOT NULL, `int_at1` int(11) default NULL, `int_at2` int(11) default NULL, `int_at3` int(11) default NULL, `int_at4` int(11) default NULL, `int_at5` int(11) default NULL, `float_at1` float default NULL, `float_at2` float default NULL, `float_at3` float default NULL, `lng` float NOT NULL default '0', `lat` float NOT NULL default '0', `make_id` int(11) NOT NULL, `transmission_id` tinyint(4) NOT NULL, PRIMARY KEY (`id`) ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` (`id`, `section`, `system_id`, `adtext`, `heading`, `price`, `postcode`, `gre`, `grn`, `str_at1`, `str_at2`, `str_at3`, `str_at4`, `str_at5`, `int_at1`, `int_at2`, `int_at3`, `int_at4`, `int_at5`, `float_at1`, `float_at2`, `float_at3`, `lng`, `lat`, `make_id`, `transmission_id`) VALUES (1, 1, 2, 'FORD', 'Ford KA', 2790, 'EN3 5BT', 535000, 197400, 'Ford', 'KA', 'Grey', 'Diesel', '', 18662, NULL, NULL, NULL, NULL, NULL, NULL, NULL, -0.1798578, 0.937717, 8, 1), (2, 1, 3, 'FORDE', 'Vauxhall Corsa1', 5800, 'BN42 4RN', 524000, 106100, 'Vauxhall', 'Corsa', 'Red', 'Petrol', '', 19296, NULL, NULL, NULL, NULL, NULL, NULL, NULL, -0.2799989, 0.891975, 7, 2), (3, 1, 4, 'FORDZ', 'Vauxhall Corsa2', 5200, 'BN42 4RN', 524000, 106100, 'Vauxhall', 'Corsa', 'Red', 'Petrol', '', 19296, NULL, NULL, NULL, NULL, NULL, NULL, NULL, -0.3799989, 0.791975, 6, 3), (4, 1, 5, 'FORDT', 'Vauxhall Corsa4', 3800, 'BN42 4N', 524000, 106100, 'Vauxhall', 'Corsa', 'Red', 'Petrol', '', 19296, NULL, NULL, NULL, NULL, NULL, NULL, NULL, -0.4799989, 0.691975, 5, 4), (211250, 0, 1, 'Quattro Roadster', 'Audi TT', 13995, 'E9 7DG', 535600, 184200, '', '', '', '', '', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, -0.5721455, 0.526761, 29, 5); sphinx-2.0.4-release/test/test_096/model.bin0000644000176700017710000000153711305012665020076 0ustar deogardeogara:1:{i:0;a:1:{i:0;a:13:{i:0;s:11:"iteration=0";i:1;s:7:"up.ok=2";i:2;s:10:"started=ok";i:3;s:11:"iteration=1";i:4;s:7:"up.ok=2";i:5;s:10:"started=ok";i:6;s:11:"iteration=2";i:7;s:7:"up.ok=2";i:8;s:10:"started=ok";i:9;s:11:"iteration=3";i:10;s:7:"up.ok=2";i:11;s:10:"started=ok";i:12;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:6:"adtext";i:1;s:7:"heading";i:2;s:8:"postcode";}s:5:"attrs";a:5:{s:3:"lng";i:5;s:3:"lat";i:5;s:7:"section";i:1;s:7:"make_id";i:1;s:15:"transmission_id";i:1;}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"lng";d:-0.479999005794525146484375;s:3:"lat";d:0.69197499752044677734375;s:7:"section";i:1;s:7:"make_id";i:5;s:15:"transmission_id";i:4;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:1:{s:6:"corsa4";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}}}}sphinx-2.0.4-release/test/test_146/0000755000176700017710000000000011724063141016272 5ustar deogardeogarsphinx-2.0.4-release/test/test_146/stopwords.txt0000644000176700017710000000001111444636030021071 0ustar deogardeogarnot as 
sphinx-2.0.4-release/test/test_146/test.xml0000644000176700017710000000473211444636030020003 0ustar deogardeogar joined fields indexing indexer { mem_limit = 16M } searchd { } source test { type = mysql sql_query = select id, text, idd from test; sql_joined_field = text from query; select id, text from test_joined; sql_attr_uint = idd } index test { source = test path = /test charset_type = utf-8 docinfo = extern phrase_boundary = . phrase_boundary_step = 2 stopwords = test_146/stopwords.txt } create table test ( id int, text varchar(255), idd int ); create table test_joined ( id int, text varchar(255) ); drop table if exists test drop table if exists test_joined insert into test (id, text, idd) values ( 1, 'aaa', 1 ), ( 2, 'aaa bbb', 2 ), ( 3, 'bbb ccc', 3 ); insert into test_joined (id, text) values ( 1, 'jjj kkk' ), ( 1, 'zzz. my' ), ( 1, 'cool' ), ( 2, 'yyy' ), ( 2, 'ttt' ), ( 3, 'ccc do. dog' ), ( 3, 'sleepy' ); aaa bbb aaa | bbb "aaa bbb" ( kkk zzz ) | "do dog" ( kkk zzz ) | "do not as dog" "kkk zzz" "zzz not as not cool" "zzz do dog look cool"/2 dog not as do sleepy "dog not as do sleepy" "do not as dog sleepy" sphinx-2.0.4-release/test/test_146/model.bin0000644000176700017710000001535311444636030020075 0ustar deogardeogara:1:{i:0;a:12:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:4:"text";i:1;s:4:"text";}s:5:"attrs";a:1:{s:3:"idd";i:1;}s:7:"matches";a:2:{i:1;a:2:{s:6:"weight";s:4:"1500";s:5:"attrs";a:1:{s:3:"idd";s:1:"1";}}i:2;a:2:{s:6:"weight";s:4:"1500";s:5:"attrs";a:1:{s:3:"idd";s:1:"2";}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:3:"aaa";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:3:"aaa";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:4:"text";i:1;s:4:"text";}s:5:"attrs";a:1:{s:3:"idd";i:1;}s:7:"matches";a:2:{i:2;a:2:{s:6:"weight";s:4:"1500";s:5:"attrs";a:1:{s:3:"idd";s:1:"2";}}i:3;a:2:{s:6:"weight";s:4:"1500";s:5:"attrs";a:1:{s:3:"idd";s:1:"3";}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:3:"bbb";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:3:"bbb";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:4:"text";i:1;s:4:"text";}s:5:"attrs";a:1:{s:3:"idd";i:1;}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:4:"1500";s:5:"attrs";a:1:{s:3:"idd";s:1:"1";}}i:2;a:2:{s:6:"weight";s:4:"2500";s:5:"attrs";a:1:{s:3:"idd";s:1:"2";}}i:3;a:2:{s:6:"weight";s:4:"1500";s:5:"attrs";a:1:{s:3:"idd";s:1:"3";}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:3:"aaa";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:3:"bbb";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:9:"aaa | bbb";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:4:"text";i:1;s:4:"text";}s:5:"attrs";a:1:{s:3:"idd";i:1;}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:4:"2500";s:5:"attrs";a:1:{s:3:"idd";s:1:"2";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:3:"aaa";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:3:"bbb";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:9:""aaa 
bbb"";}i:4;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:4:"text";i:1;s:4:"text";}s:5:"attrs";a:1:{s:3:"idd";i:1;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"2590";s:5:"attrs";a:1:{s:3:"idd";s:1:"1";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:4:{s:3:"kkk";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"zzz";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:2:"do";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"dog";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:22:"( kkk zzz ) | "do dog"";}i:5;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:4:"text";i:1;s:4:"text";}s:5:"attrs";a:1:{s:3:"idd";i:1;}s:7:"matches";a:2:{i:1;a:2:{s:6:"weight";s:4:"2590";s:5:"attrs";a:1:{s:3:"idd";s:1:"1";}}i:3;a:2:{s:6:"weight";s:4:"2590";s:5:"attrs";a:1:{s:3:"idd";s:1:"3";}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.003";s:5:"words";a:4:{s:3:"kkk";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"zzz";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:2:"do";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"dog";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:29:"( kkk zzz ) | "do not as dog"";}i:6;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:4:"text";i:1;s:4:"text";}s:5:"attrs";a:1:{s:3:"idd";i:1;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"2680";s:5:"attrs";a:1:{s:3:"idd";s:1:"1";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.009";s:5:"words";a:2:{s:3:"kkk";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"zzz";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:9:""kkk zzz"";}i:7;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:4:"text";i:1;s:4:"text";}s:5:"attrs";a:1:{s:3:"idd";i:1;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"2680";s:5:"attrs";a:1:{s:3:"idd";s:1:"1";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:3:"zzz";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:4:"cool";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:21:""zzz not as not cool"";}i:8;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:4:"text";i:1;s:4:"text";}s:5:"attrs";a:1:{s:3:"idd";i:1;}s:7:"matches";a:2:{i:1;a:2:{s:6:"weight";s:4:"2572";s:5:"attrs";a:1:{s:3:"idd";s:1:"1";}}i:3;a:2:{s:6:"weight";s:4:"1572";s:5:"attrs";a:1:{s:3:"idd";s:1:"3";}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.003";s:5:"words";a:5:{s:3:"zzz";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:2:"do";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"dog";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:4:"look";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:4:"cool";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:24:""zzz do dog look 
cool"/2";}i:9;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:4:"text";i:1;s:4:"text";}s:5:"attrs";a:1:{s:3:"idd";i:1;}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:4:"1680";s:5:"attrs";a:1:{s:3:"idd";s:1:"3";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:5:"words";a:3:{s:3:"dog";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:2:"do";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:6:"sleepy";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:20:"dog not as do sleepy";}i:10;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:4:"text";i:1;s:4:"text";}s:5:"attrs";a:1:{s:3:"idd";i:1;}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.003";s:5:"words";a:3:{s:3:"dog";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:2:"do";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:6:"sleepy";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:22:""dog not as do sleepy"";}i:11;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:4:"text";i:1;s:4:"text";}s:5:"attrs";a:1:{s:3:"idd";i:1;}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:4:"3680";s:5:"attrs";a:1:{s:3:"idd";s:1:"3";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.005";s:5:"words";a:3:{s:2:"do";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"dog";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:6:"sleepy";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:22:""do not as dog sleepy"";}}}sphinx-2.0.4-release/test/test_049/0000755000176700017710000000000011724063141016274 5ustar deogardeogarsphinx-2.0.4-release/test/test_049/test.xml0000644000176700017710000000563011605620330017776 0ustar deogardeogar multi-index queries vs index weights indexer { mem_limit = 16M } searchd { } source src_a { type = mysql sql_query = SELECT id, a, b, c, text FROM test_table WHERE idx = 'a'; sql_attr_uint = a sql_attr_uint = b sql_attr_uint = c:1 } source src_delta { type = mysql sql_query = SELECT id, b, a, c, text FROM test_table WHERE idx = 'delta'; sql_attr_uint = a sql_attr_uint = b sql_attr_uint = c:1 } source src_a2 { type = mysql sql_query = SELECT id, a, b, c, text FROM test_table WHERE idx = 'a'; sql_attr_uint = a sql_attr_uint = b sql_attr_uint = c:2 } source src_b { type = mysql sql_query = SELECT id, a, b, text FROM test_table WHERE idx = 'b'; sql_attr_uint = a sql_attr_uint = b } source src_c { type = mysql sql_query = SELECT id, mva as a, text FROM test_table WHERE idx = 'c'; sql_attr_multi = uint a from field sql_attr_multi = bigint a from field } index a { source = src_a path = /a } index delta { source = src_delta path = /delta } index a2 { source = src_a2 path = /a2 } index b { source = src_b path = /b } index c { source = src_c path = /c } CREATE TABLE test_table ( id INT NOT NULL, a INT NOT NULL, b INT NOT NULL, c INT NOT NULL, mva VARCHAR(255) NOT NULL, text VARCHAR(255) NOT NULL DEFAULT 'text', idx VARCHAR(255) NOT NULL ); DROP TABLE IF EXISTS test_table; INSERT INTO test_table (id, a, b, c, mva, idx) VALUES ( 1, 10, 20, 3, '', 'a' ), ( 2, 10, 20, 3, '', 'a' ), ( 3, 10, 20, 3, '', 'a' ), ( 4, 10, 20, 3, '', 'a' ), ( 1, 1, 21, 3, '', 'b' ), ( 2, 2, 22, 3, '', 'b' ), ( 3, 3, 23, 3, '', 'b' ), ( 4, 4, 24, 3, '', 'b' ), ( 1, 0, 0, 0, '1, 2, 3', 'c' ), ( 3, 0, 0, 0, '4', 'c' ), ( 10, 0, 0, 0, '5, 6', 'c' ), ( 1, 101, 201, 0, 5, 'delta' ), ( 2, 102, 202, 0, 5, 'delta' ), ( 
12, 112, 212, 0, 5, 'delta' ); a b c text text text text text text text text text text text text text text text sphinx-2.0.4-release/test/test_049/model.bin0000644000176700017710000005340411605620330020071 0ustar deogardeogara:2:{i:0;a:15:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:3:{s:1:"a";i:1;s:1:"b";i:1;s:1:"c";i:1;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"10";s:1:"b";s:2:"20";s:1:"c";s:1:"1";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"10";s:1:"b";s:2:"20";s:1:"c";s:1:"1";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"10";s:1:"b";s:2:"20";s:1:"c";s:1:"1";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"10";s:1:"b";s:2:"20";s:1:"c";s:1:"1";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"text";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"text";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:3:{s:1:"a";i:1;s:1:"b";i:1;s:1:"c";i:1;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"10";s:1:"b";s:2:"20";s:1:"c";s:1:"3";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"10";s:1:"b";s:2:"20";s:1:"c";s:1:"3";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"10";s:1:"b";s:2:"20";s:1:"c";s:1:"3";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"10";s:1:"b";s:2:"20";s:1:"c";s:1:"3";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:4:"text";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"text";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:3:{s:1:"a";i:1;s:1:"b";i:1;s:1:"c";i:1;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"10";s:1:"b";s:2:"20";s:1:"c";s:1:"3";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"10";s:1:"b";s:2:"20";s:1:"c";s:1:"3";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"10";s:1:"b";s:2:"20";s:1:"c";s:1:"3";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"10";s:1:"b";s:2:"20";s:1:"c";s:1:"3";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.004";s:5:"words";a:1:{s:4:"text";a:2:{s:4:"docs";s:1:"8";s:4:"hits";s:1:"8";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"text";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:3:{s:1:"a";i:1;s:1:"b";i:1;s:1:"c";i:1;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"10";s:1:"b";s:2:"20";s:1:"c";s:1:"1";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"10";s:1:"b";s:2:"20";s:1:"c";s:1:"1";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"10";s:1:"b";s:2:"20";s:1:"c";s:1:"1";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"10";s:1:"b";s:2:"20";s:1:"c";s:1:"1";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.004";s:5:"words";a:1:{s:4:"text";a:2:{s:4:"docs";s:1:"8";s:4:"hits";s:1:"8";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"text";}i:4;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:2:{s:1:"a";i:1;s:1:"b";i:1;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:
1:"a";s:1:"1";s:1:"b";s:2:"21";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"a";s:1:"2";s:1:"b";s:2:"22";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"a";s:1:"3";s:1:"b";s:2:"23";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"a";s:1:"4";s:1:"b";s:2:"24";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.004";s:5:"words";a:1:{s:4:"text";a:2:{s:4:"docs";s:1:"8";s:4:"hits";s:1:"8";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"text";}i:5;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:2:{s:1:"a";i:1;s:1:"b";i:1;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"a";s:2:"10";s:1:"b";s:2:"20";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"a";s:2:"10";s:1:"b";s:2:"20";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"a";s:2:"10";s:1:"b";s:2:"20";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"a";s:2:"10";s:1:"b";s:2:"20";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.004";s:5:"words";a:1:{s:4:"text";a:2:{s:4:"docs";s:1:"8";s:4:"hits";s:1:"8";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"text";}i:6;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:3:{s:1:"b";i:1;s:1:"a";i:1;s:1:"c";i:1;}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"b";s:3:"201";s:1:"a";s:3:"101";s:1:"c";s:1:"0";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"b";s:3:"202";s:1:"a";s:3:"102";s:1:"c";s:1:"0";}}i:12;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"b";s:3:"212";s:1:"a";s:3:"112";s:1:"c";s:1:"0";}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:4:"text";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"text";}i:7;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:3:{s:1:"a";i:1;s:1:"b";i:1;s:1:"c";i:1;}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:3:"101";s:1:"b";s:3:"201";s:1:"c";s:1:"0";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:3:"102";s:1:"b";s:3:"202";s:1:"c";s:1:"0";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"10";s:1:"b";s:2:"20";s:1:"c";s:1:"1";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"10";s:1:"b";s:2:"20";s:1:"c";s:1:"1";}}i:12;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:3:"112";s:1:"b";s:3:"212";s:1:"c";s:1:"0";}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:4:"time";s:5:"0.004";s:5:"words";a:1:{s:4:"text";a:2:{s:4:"docs";s:1:"7";s:4:"hits";s:1:"7";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"text";}i:8;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:3:{s:1:"b";i:1;s:1:"a";i:1;s:1:"c";i:1;}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"b";s:2:"20";s:1:"a";s:2:"10";s:1:"c";s:1:"1";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"b";s:2:"20";s:1:"a";s:2:"10";s:1:"c";s:1:"1";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"b";s:2:"20";s:1:"a";s:2:"10";s:1:"c";s:1:"1";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"b";s:2:"20";s:1:"a";s:2:"10";s:1:"c";s:1:"1";}}i:12;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"b";s:3:"212";s:1:"a";s:3:"112";s:1:"c";s:1:"0";}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:4:"time";s:5:"0.004";s:5:"words";a:1:{s:4:"text";a:2:{s:4:"docs";s:1:"7";s:4:"hits
";s:1:"7";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"text";}i:9;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:1:{s:1:"a";i:1073741825;}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:1:"a";a:3:{i:0;s:1:"1";i:1;s:1:"2";i:2;s:1:"3";}}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:1:"a";a:1:{i:0;s:1:"4";}}}i:10;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:1:"a";a:2:{i:0;s:1:"5";i:1;s:1:"6";}}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:4:"text";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"text";}i:10;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:4:"time";s:5:"0.004";s:5:"words";a:1:{s:4:"text";a:2:{s:4:"docs";s:1:"7";s:4:"hits";s:1:"7";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"text";}i:11;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:4:"time";s:5:"0.006";s:5:"words";a:1:{s:4:"text";a:2:{s:4:"docs";s:2:"11";s:4:"hits";s:2:"11";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"text";}i:12;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:4:"time";s:5:"0.006";s:5:"words";a:1:{s:4:"text";a:2:{s:4:"docs";s:2:"11";s:4:"hits";s:2:"11";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"text";}i:13;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:3:{s:1:"a";i:1;s:1:"b";i:1;s:1:"c";i:1;}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:3:"101";s:5:"attrs";a:3:{s:1:"a";s:3:"101";s:1:"b";s:3:"201";s:1:"c";s:1:"0";}}i:2;a:2:{s:6:"weight";s:3:"101";s:5:"attrs";a:3:{s:1:"a";s:3:"102";s:1:"b";s:3:"202";s:1:"c";s:1:"0";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"10";s:1:"b";s:2:"20";s:1:"c";s:1:"1";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"10";s:1:"b";s:2:"20";s:1:"c";s:1:"1";}}i:12;a:2:{s:6:"weight";s:3:"100";s:5:"attrs";a:3:{s:1:"a";s:3:"112";s:1:"b";s:3:"212";s:1:"c";s:1:"0";}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:4:"time";s:5:"0.004";s:5:"words";a:1:{s:4:"text";a:2:{s:4:"docs";s:1:"7";s:4:"hits";s:1:"7";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"text";}i:14;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:3:{s:1:"b";i:1;s:1:"a";i:1;s:1:"c";i:1;}s:7:"matches";a:5:{i:1;a:2:{s:6:
"weight";s:3:"101";s:5:"attrs";a:3:{s:1:"b";s:2:"20";s:1:"a";s:2:"10";s:1:"c";s:1:"1";}}i:2;a:2:{s:6:"weight";s:3:"101";s:5:"attrs";a:3:{s:1:"b";s:2:"20";s:1:"a";s:2:"10";s:1:"c";s:1:"1";}}i:3;a:2:{s:6:"weight";s:3:"100";s:5:"attrs";a:3:{s:1:"b";s:2:"20";s:1:"a";s:2:"10";s:1:"c";s:1:"1";}}i:4;a:2:{s:6:"weight";s:3:"100";s:5:"attrs";a:3:{s:1:"b";s:2:"20";s:1:"a";s:2:"10";s:1:"c";s:1:"1";}}i:12;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"b";s:3:"212";s:1:"a";s:3:"112";s:1:"c";s:1:"0";}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:4:"text";a:2:{s:4:"docs";s:1:"7";s:4:"hits";s:1:"7";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"text";}}i:1;a:15:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:3:{s:1:"a";i:1;s:1:"b";i:1;s:1:"c";i:1;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"10";s:1:"b";s:2:"20";s:1:"c";s:1:"1";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"10";s:1:"b";s:2:"20";s:1:"c";s:1:"1";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"10";s:1:"b";s:2:"20";s:1:"c";s:1:"1";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"10";s:1:"b";s:2:"20";s:1:"c";s:1:"1";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"text";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"text";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:3:{s:1:"a";i:1;s:1:"b";i:1;s:1:"c";i:1;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"10";s:1:"b";s:2:"20";s:1:"c";s:1:"3";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"10";s:1:"b";s:2:"20";s:1:"c";s:1:"3";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"10";s:1:"b";s:2:"20";s:1:"c";s:1:"3";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"10";s:1:"b";s:2:"20";s:1:"c";s:1:"3";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"text";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"text";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:3:{s:1:"a";i:1;s:1:"b";i:1;s:1:"c";i:1;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"10";s:1:"b";s:2:"20";s:1:"c";s:1:"3";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"10";s:1:"b";s:2:"20";s:1:"c";s:1:"3";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"10";s:1:"b";s:2:"20";s:1:"c";s:1:"3";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"10";s:1:"b";s:2:"20";s:1:"c";s:1:"3";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"text";a:2:{s:4:"docs";s:1:"8";s:4:"hits";s:1:"8";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"text";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:3:{s:1:"a";i:1;s:1:"b";i:1;s:1:"c";i:1;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"10";s:1:"b";s:2:"20";s:1:"c";s:1:"1";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"10";s:1:"b";s:2:"20";s:1:"c";s:1:"1";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"10";s:1:"b";s:2:"20";s:1:"c";s:1:"1";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:
{s:1:"a";s:2:"10";s:1:"b";s:2:"20";s:1:"c";s:1:"1";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"text";a:2:{s:4:"docs";s:1:"8";s:4:"hits";s:1:"8";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"text";}i:4;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:2:{s:1:"a";i:1;s:1:"b";i:1;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"a";s:1:"1";s:1:"b";s:2:"21";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"a";s:1:"2";s:1:"b";s:2:"22";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"a";s:1:"3";s:1:"b";s:2:"23";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"a";s:1:"4";s:1:"b";s:2:"24";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"text";a:2:{s:4:"docs";s:1:"8";s:4:"hits";s:1:"8";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"text";}i:5;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:2:{s:1:"a";i:1;s:1:"b";i:1;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"a";s:2:"10";s:1:"b";s:2:"20";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"a";s:2:"10";s:1:"b";s:2:"20";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"a";s:2:"10";s:1:"b";s:2:"20";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"a";s:2:"10";s:1:"b";s:2:"20";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"text";a:2:{s:4:"docs";s:1:"8";s:4:"hits";s:1:"8";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"text";}i:6;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:3:{s:1:"b";i:1;s:1:"a";i:1;s:1:"c";i:1;}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"b";s:3:"201";s:1:"a";s:3:"101";s:1:"c";s:1:"0";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"b";s:3:"202";s:1:"a";s:3:"102";s:1:"c";s:1:"0";}}i:12;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"b";s:3:"212";s:1:"a";s:3:"112";s:1:"c";s:1:"0";}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"text";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"text";}i:7;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:3:{s:1:"a";i:1;s:1:"b";i:1;s:1:"c";i:1;}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:3:"101";s:1:"b";s:3:"201";s:1:"c";s:1:"0";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:3:"102";s:1:"b";s:3:"202";s:1:"c";s:1:"0";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"10";s:1:"b";s:2:"20";s:1:"c";s:1:"1";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"10";s:1:"b";s:2:"20";s:1:"c";s:1:"1";}}i:12;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:3:"112";s:1:"b";s:3:"212";s:1:"c";s:1:"0";}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"text";a:2:{s:4:"docs";s:1:"7";s:4:"hits";s:1:"7";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"text";}i:8;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:3:{s:1:"b";i:1;s:1:"a";i:1;s:1:"c";i:1;}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"b";s:2:"20";s:1:"a";s:2:"10";s:1:"c";s:1:"1";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"b";s:2:"2
0";s:1:"a";s:2:"10";s:1:"c";s:1:"1";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"b";s:2:"20";s:1:"a";s:2:"10";s:1:"c";s:1:"1";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"b";s:2:"20";s:1:"a";s:2:"10";s:1:"c";s:1:"1";}}i:12;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"b";s:3:"212";s:1:"a";s:3:"112";s:1:"c";s:1:"0";}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"text";a:2:{s:4:"docs";s:1:"7";s:4:"hits";s:1:"7";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"text";}i:9;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:1:{s:1:"a";i:1073741825;}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:1:"a";a:3:{i:0;s:1:"1";i:1;s:1:"2";i:2;s:1:"3";}}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:1:"a";a:1:{i:0;s:1:"4";}}}i:10;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:1:"a";a:2:{i:0;s:1:"5";i:1;s:1:"6";}}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"text";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"text";}i:10;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"text";a:2:{s:4:"docs";s:1:"7";s:4:"hits";s:1:"7";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"text";}i:11;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:4:"text";a:2:{s:4:"docs";s:2:"11";s:4:"hits";s:2:"11";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"text";}i:12;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:4:"time";s:5:"0.006";s:5:"words";a:1:{s:4:"text";a:2:{s:4:"docs";s:2:"11";s:4:"hits";s:2:"11";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"text";}i:13;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:3:{s:1:"a";i:1;s:1:"b";i:1;s:1:"c";i:1;}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:3:"101";s:5:"attrs";a:3:{s:1:"a";s:3:"101";s:1:"b";s:3:"201";s:1:"c";s:1:"0";}}i:2;a:2:{s:6:"weight";s:3:"101";s:5:"attrs";a:3:{s:1:"a";s:3:"102";s:1:"b";s:3:"202";s:1:"c";s:1:"0";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"10";s:1:"b";s:2:"20";s:1:"c";s:1:"1";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"10";s:1:"b";s:2:"20";s:1:"c";s:1:"1";}}i:12;a:2:{s:6:"weight";s:3:"
100";s:5:"attrs";a:3:{s:1:"a";s:3:"112";s:1:"b";s:3:"212";s:1:"c";s:1:"0";}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:4:"time";s:5:"0.004";s:5:"words";a:1:{s:4:"text";a:2:{s:4:"docs";s:1:"7";s:4:"hits";s:1:"7";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"text";}i:14;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:3:{s:1:"b";i:1;s:1:"a";i:1;s:1:"c";i:1;}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:3:"101";s:5:"attrs";a:3:{s:1:"b";s:2:"20";s:1:"a";s:2:"10";s:1:"c";s:1:"1";}}i:2;a:2:{s:6:"weight";s:3:"101";s:5:"attrs";a:3:{s:1:"b";s:2:"20";s:1:"a";s:2:"10";s:1:"c";s:1:"1";}}i:3;a:2:{s:6:"weight";s:3:"100";s:5:"attrs";a:3:{s:1:"b";s:2:"20";s:1:"a";s:2:"10";s:1:"c";s:1:"1";}}i:4;a:2:{s:6:"weight";s:3:"100";s:5:"attrs";a:3:{s:1:"b";s:2:"20";s:1:"a";s:2:"10";s:1:"c";s:1:"1";}}i:12;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"b";s:3:"212";s:1:"a";s:3:"112";s:1:"c";s:1:"0";}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:4:"time";s:5:"0.004";s:5:"words";a:1:{s:4:"text";a:2:{s:4:"docs";s:1:"7";s:4:"hits";s:1:"7";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"text";}}}sphinx-2.0.4-release/test/test_100/0000755000176700017710000000000011724063141016260 5ustar deogardeogarsphinx-2.0.4-release/test/test_100/test.xml0000644000176700017710000000267211662472433020001 0ustar deogardeogar snippets vs force_all_words indexer { mem_limit = 16M } searchd { } source main { type = mysql sql_query = select 1 as id, 'body' as body; } index test_idx { source = main path = /main docinfo = extern charset_type = utf-8 min_word_len = 1 } "", "after_match" => "", "chunk_separator" => " ... ", "around" => 3 ); $results = array(); for ($limit = 4; $limit < 60; ++$limit ) foreach ( array(0,1) as $exact ) foreach ( array(0,1) as $force ) { $opts["exact_phrase"] = $exact; $opts["force_all_words"] = $force; $opts["limit"] = $limit; $rs["opts"]="phrase $exact, force $force, limit $limit"; $res = $client->BuildExcerpts ( $docs, $index, $words, $opts ); if ( !$res ) $rs["result"] = $client->GetLastError(); else $rs["result"] = $res; $results[] = $rs; } ]]> sphinx-2.0.4-release/test/test_100/model.bin0000644000176700017710000017554211607065661020101 0ustar deogardeogara:1:{i:0;a:1:{i:0;a:224:{i:0;a:2:{s:4:"opts";s:26:"phrase 0, force 0, limit 4";s:6:"result";a:4:{i:0;s:21:" ... test ... ";i:1;s:21:" ... test ... ";i:2;s:16:"test ... ";i:3;s:21:" ... test ... ";}}i:1;a:2:{s:4:"opts";s:26:"phrase 0, force 1, limit 4";s:6:"result";a:4:{i:0;s:37:" ... test ... text ... ";i:1;s:37:" ... test ... text ... ";i:2;s:16:"test ... ";i:3;s:37:" ... test ... text ... ";}}i:2;a:2:{s:4:"opts";s:26:"phrase 1, force 0, limit 4";s:6:"result";a:4:{i:0;s:9:"this ... ";i:1;s:12:"another ... ";i:2;s:9:"test ... ";i:3;s:10:"final ... ";}}i:3;a:2:{s:4:"opts";s:26:"phrase 1, force 1, limit 4";s:6:"result";a:4:{i:0;s:9:"this ... ";i:1;s:12:"another ... ";i:2;s:9:"test ... ";i:3;s:10:"final ... ";}}i:4;a:2:{s:4:"opts";s:26:"phrase 0, force 0, limit 5";s:6:"result";a:4:{i:0;s:21:" ... test ... ";i:1;s:21:" ... test ... ";i:2;s:17:"test ... ";i:3;s:21:" ... test ... ";}}i:5;a:2:{s:4:"opts";s:26:"phrase 0, force 1, limit 5";s:6:"result";a:4:{i:0;s:37:" ... test ... text ... ";i:1;s:37:" ... test ... text ... ";i:2;s:17:"test ... ";i:3;s:37:" ... test ... text ... ";}}i:6;a:2:{s:4:"opts";s:26:"phrase 1, force 0, limit 5";s:6:"result";a:4:{i:0;s:9:"this ... ";i:1;s:12:"another ... ";i:2;s:9:"test ... ";i:3;s:10:"final ... 
";}}i:7;a:2:{s:4:"opts";s:26:"phrase 1, force 1, limit 5";s:6:"result";a:4:{i:0;s:9:"this ... ";i:1;s:12:"another ... ";i:2;s:9:"test ... ";i:3;s:10:"final ... ";}}i:8;a:2:{s:4:"opts";s:26:"phrase 0, force 0, limit 6";s:6:"result";a:4:{i:0;s:21:" ... test ... ";i:1;s:21:" ... test ... ";i:2;s:17:"test ... ";i:3;s:21:" ... test ... ";}}i:9;a:2:{s:4:"opts";s:26:"phrase 0, force 1, limit 6";s:6:"result";a:4:{i:0;s:37:" ... test ... text ... ";i:1;s:37:" ... test ... text ... ";i:2;s:17:"test ... ";i:3;s:37:" ... test ... text ... ";}}i:10;a:2:{s:4:"opts";s:26:"phrase 1, force 0, limit 6";s:6:"result";a:4:{i:0;s:10:"this ... ";i:1;s:12:"another ... ";i:2;s:10:"test ... ";i:3;s:10:"final ... ";}}i:11;a:2:{s:4:"opts";s:26:"phrase 1, force 1, limit 6";s:6:"result";a:4:{i:0;s:10:"this ... ";i:1;s:12:"another ... ";i:2;s:10:"test ... ";i:3;s:10:"final ... ";}}i:12;a:2:{s:4:"opts";s:26:"phrase 0, force 0, limit 7";s:6:"result";a:4:{i:0;s:21:" ... test ... ";i:1;s:21:" ... test ... ";i:2;s:17:"test ... ";i:3;s:21:" ... test ... ";}}i:13;a:2:{s:4:"opts";s:26:"phrase 0, force 1, limit 7";s:6:"result";a:4:{i:0;s:37:" ... test ... text ... ";i:1;s:37:" ... test ... text ... ";i:2;s:17:"test ... ";i:3;s:37:" ... test ... text ... ";}}i:14;a:2:{s:4:"opts";s:26:"phrase 1, force 0, limit 7";s:6:"result";a:4:{i:0;s:10:"this ... ";i:1;s:12:"another ... ";i:2;s:10:"test ... ";i:3;s:11:"final ... ";}}i:15;a:2:{s:4:"opts";s:26:"phrase 1, force 1, limit 7";s:6:"result";a:4:{i:0;s:10:"this ... ";i:1;s:12:"another ... ";i:2;s:10:"test ... ";i:3;s:11:"final ... ";}}i:16;a:2:{s:4:"opts";s:26:"phrase 0, force 0, limit 8";s:6:"result";a:4:{i:0;s:37:" ... test ... text ... ";i:1;s:37:" ... test ... text ... ";i:2;s:17:"test ... ";i:3;s:37:" ... test ... text ... ";}}i:17;a:2:{s:4:"opts";s:26:"phrase 0, force 1, limit 8";s:6:"result";a:4:{i:0;s:37:" ... test ... text ... ";i:1;s:37:" ... test ... text ... ";i:2;s:17:"test ... ";i:3;s:37:" ... test ... text ... ";}}i:18;a:2:{s:4:"opts";s:26:"phrase 1, force 0, limit 8";s:6:"result";a:4:{i:0;s:12:"this is ... ";i:1;s:12:"another ... ";i:2;s:10:"test ... ";i:3;s:11:"final ... ";}}i:19;a:2:{s:4:"opts";s:26:"phrase 1, force 1, limit 8";s:6:"result";a:4:{i:0;s:12:"this is ... ";i:1;s:12:"another ... ";i:2;s:10:"test ... ";i:3;s:11:"final ... ";}}i:20;a:2:{s:4:"opts";s:26:"phrase 0, force 0, limit 9";s:6:"result";a:4:{i:0;s:33:" ... test text ... ";i:1;s:33:" ... test text ... ";i:2;s:17:"test ... ";i:3;s:33:" ... text test ... ";}}i:21;a:2:{s:4:"opts";s:26:"phrase 0, force 1, limit 9";s:6:"result";a:4:{i:0;s:33:" ... test text ... ";i:1;s:33:" ... test text ... ";i:2;s:17:"test ... ";i:3;s:33:" ... text test ... ";}}i:22;a:2:{s:4:"opts";s:26:"phrase 1, force 0, limit 9";s:6:"result";a:4:{i:0;s:26:" ... test text ... ";i:1;s:26:" ... test text ... ";i:2;s:10:"test ... ";i:3;s:11:"final ... ";}}i:23;a:2:{s:4:"opts";s:26:"phrase 1, force 1, limit 9";s:6:"result";a:4:{i:0;s:26:" ... test text ... ";i:1;s:26:" ... test text ... ";i:2;s:10:"test ... ";i:3;s:11:"final ... ";}}i:24;a:2:{s:4:"opts";s:27:"phrase 0, force 0, limit 10";s:6:"result";a:4:{i:0;s:34:" ... test text ... ";i:1;s:34:" ... test text ... ";i:2;s:17:"test ... ";i:3;s:34:" ... text test ... ";}}i:25;a:2:{s:4:"opts";s:27:"phrase 0, force 1, limit 10";s:6:"result";a:4:{i:0;s:34:" ... test text ... ";i:1;s:34:" ... test text ... ";i:2;s:17:"test ... ";i:3;s:34:" ... text test ... ";}}i:26;a:2:{s:4:"opts";s:27:"phrase 1, force 0, limit 10";s:6:"result";a:4:{i:0;s:27:" ... test text ... ";i:1;s:27:" ... test text ... 
";i:2;s:10:"test ... ";i:3;s:11:"final ... ";}}i:27;a:2:{s:4:"opts";s:27:"phrase 1, force 1, limit 10";s:6:"result";a:4:{i:0;s:27:" ... test text ... ";i:1;s:27:" ... test text ... ";i:2;s:10:"test ... ";i:3;s:11:"final ... ";}}i:28;a:2:{s:4:"opts";s:27:"phrase 0, force 0, limit 11";s:6:"result";a:4:{i:0;s:34:" ... test text ... ";i:1;s:34:" ... test text ... ";i:2;s:23:"test number ... ";i:3;s:34:" ... text test ... ";}}i:29;a:2:{s:4:"opts";s:27:"phrase 0, force 1, limit 11";s:6:"result";a:4:{i:0;s:34:" ... test text ... ";i:1;s:34:" ... test text ... ";i:2;s:23:"test number ... ";i:3;s:34:" ... text test ... ";}}i:30;a:2:{s:4:"opts";s:27:"phrase 1, force 0, limit 11";s:6:"result";a:4:{i:0;s:27:" ... test text ... ";i:1;s:27:" ... test text ... ";i:2;s:10:"test ... ";i:3;s:15:"final test ... ";}}i:31;a:2:{s:4:"opts";s:27:"phrase 1, force 1, limit 11";s:6:"result";a:4:{i:0;s:27:" ... test text ... ";i:1;s:27:" ... test text ... ";i:2;s:10:"test ... ";i:3;s:15:"final test ... ";}}i:32;a:2:{s:4:"opts";s:27:"phrase 0, force 0, limit 12";s:6:"result";a:4:{i:0;s:36:" ... my test text ... ";i:1;s:34:" ... test text ... ";i:2;s:24:"test number ... ";i:3;s:34:" ... text test ... ";}}i:33;a:2:{s:4:"opts";s:27:"phrase 0, force 1, limit 12";s:6:"result";a:4:{i:0;s:36:" ... my test text ... ";i:1;s:34:" ... test text ... ";i:2;s:24:"test number ... ";i:3;s:34:" ... text test ... ";}}i:34;a:2:{s:4:"opts";s:27:"phrase 1, force 0, limit 12";s:6:"result";a:4:{i:0;s:29:" ... my test text ... ";i:1;s:27:" ... test text ... ";i:2;s:16:"test number ... ";i:3;s:16:"final test, ... ";}}i:35;a:2:{s:4:"opts";s:27:"phrase 1, force 1, limit 12";s:6:"result";a:4:{i:0;s:29:" ... my test text ... ";i:1;s:27:" ... test text ... ";i:2;s:16:"test number ... ";i:3;s:16:"final test, ... ";}}i:36;a:2:{s:4:"opts";s:27:"phrase 0, force 0, limit 13";s:6:"result";a:4:{i:0;s:37:" ... my test text ... ";i:1;s:34:" ... test text ... ";i:2;s:24:"test number ... ";i:3;s:34:" ... text test ... ";}}i:37;a:2:{s:4:"opts";s:27:"phrase 0, force 1, limit 13";s:6:"result";a:4:{i:0;s:37:" ... my test text ... ";i:1;s:34:" ... test text ... ";i:2;s:24:"test number ... ";i:3;s:34:" ... text test ... ";}}i:38;a:2:{s:4:"opts";s:27:"phrase 1, force 0, limit 13";s:6:"result";a:4:{i:0;s:30:" ... my test text ... ";i:1;s:27:" ... test text ... ";i:2;s:17:"test number ... ";i:3;s:17:"final test, ... ";}}i:39;a:2:{s:4:"opts";s:27:"phrase 1, force 1, limit 13";s:6:"result";a:4:{i:0;s:30:" ... my test text ... ";i:1;s:27:" ... test text ... ";i:2;s:17:"test number ... ";i:3;s:17:"final test, ... ";}}i:40;a:2:{s:4:"opts";s:27:"phrase 0, force 0, limit 14";s:6:"result";a:4:{i:0;s:37:" ... my test text ... ";i:1;s:34:" ... test text ... ";i:2;s:24:"test number ... ";i:3;s:34:" ... text test ... ";}}i:41;a:2:{s:4:"opts";s:27:"phrase 0, force 1, limit 14";s:6:"result";a:4:{i:0;s:37:" ... my test text ... ";i:1;s:34:" ... test text ... ";i:2;s:24:"test number ... ";i:3;s:34:" ... text test ... ";}}i:42;a:2:{s:4:"opts";s:27:"phrase 1, force 0, limit 14";s:6:"result";a:4:{i:0;s:30:" ... my test text ... ";i:1;s:27:" ... test text ... ";i:2;s:17:"test number ... ";i:3;s:17:"final test, ... ";}}i:43;a:2:{s:4:"opts";s:27:"phrase 1, force 1, limit 14";s:6:"result";a:4:{i:0;s:30:" ... my test text ... ";i:1;s:27:" ... test text ... ";i:2;s:17:"test number ... ";i:3;s:17:"final test, ... ";}}i:44;a:2:{s:4:"opts";s:27:"phrase 0, force 0, limit 15";s:6:"result";a:4:{i:0;s:39:" ... my test text to ... ";i:1;s:34:" ... test text ... ";i:2;s:24:"test number ... 
";i:3;s:34:" ... text test ... ";}}i:45;a:2:{s:4:"opts";s:27:"phrase 0, force 1, limit 15";s:6:"result";a:4:{i:0;s:39:" ... my test text to ... ";i:1;s:34:" ... test text ... ";i:2;s:24:"test number ... ";i:3;s:34:" ... text test ... ";}}i:46;a:2:{s:4:"opts";s:27:"phrase 1, force 0, limit 15";s:6:"result";a:4:{i:0;s:32:" ... my test text to ... ";i:1;s:27:" ... test text ... ";i:2;s:17:"test number ... ";i:3;s:17:"final test, ... ";}}i:47;a:2:{s:4:"opts";s:27:"phrase 1, force 1, limit 15";s:6:"result";a:4:{i:0;s:32:" ... my test text to ... ";i:1;s:27:" ... test text ... ";i:2;s:17:"test number ... ";i:3;s:17:"final test, ... ";}}i:48;a:2:{s:4:"opts";s:27:"phrase 0, force 0, limit 16";s:6:"result";a:4:{i:0;s:40:" ... my test text to ... ";i:1;s:34:" ... test text ... ";i:2;s:24:"test number ... ";i:3;s:40:" ... phrase text test ... ";}}i:49;a:2:{s:4:"opts";s:27:"phrase 0, force 1, limit 16";s:6:"result";a:4:{i:0;s:40:" ... my test text to ... ";i:1;s:34:" ... test text ... ";i:2;s:24:"test number ... ";i:3;s:40:" ... phrase text test ... ";}}i:50;a:2:{s:4:"opts";s:27:"phrase 1, force 0, limit 16";s:6:"result";a:4:{i:0;s:33:" ... my test text to ... ";i:1;s:27:" ... test text ... ";i:2;s:17:"test number ... ";i:3;s:20:"final test, not ... ";}}i:51;a:2:{s:4:"opts";s:27:"phrase 1, force 1, limit 16";s:6:"result";a:4:{i:0;s:33:" ... my test text to ... ";i:1;s:27:" ... test text ... ";i:2;s:17:"test number ... ";i:3;s:20:"final test, not ... ";}}i:52;a:2:{s:4:"opts";s:27:"phrase 0, force 0, limit 17";s:6:"result";a:4:{i:0;s:40:" ... my test text to ... ";i:1;s:36:"another test text ... ";i:2;s:29:"test number three ... ";i:3;s:41:" ... phrase text test ... ";}}i:53;a:2:{s:4:"opts";s:27:"phrase 0, force 1, limit 17";s:6:"result";a:4:{i:0;s:40:" ... my test text to ... ";i:1;s:36:"another test text ... ";i:2;s:29:"test number three ... ";i:3;s:41:" ... phrase text test ... ";}}i:54;a:2:{s:4:"opts";s:27:"phrase 1, force 0, limit 17";s:6:"result";a:4:{i:0;s:33:" ... my test text to ... ";i:1;s:29:"another test text ... ";i:2;s:17:"test number ... ";i:3;s:21:"final test, not ... ";}}i:55;a:2:{s:4:"opts";s:27:"phrase 1, force 1, limit 17";s:6:"result";a:4:{i:0;s:33:" ... my test text to ... ";i:1;s:29:"another test text ... ";i:2;s:17:"test number ... ";i:3;s:21:"final test, not ... ";}}i:56;a:2:{s:4:"opts";s:27:"phrase 0, force 0, limit 18";s:6:"result";a:4:{i:0;s:42:" ... is my test text to ... ";i:1;s:37:"another test text ... ";i:2;s:30:"test number three, ... ";i:3;s:41:" ... phrase text test ... ";}}i:57;a:2:{s:4:"opts";s:27:"phrase 0, force 1, limit 18";s:6:"result";a:4:{i:0;s:42:" ... is my test text to ... ";i:1;s:37:"another test text ... ";i:2;s:30:"test number three, ... ";i:3;s:41:" ... phrase text test ... ";}}i:58;a:2:{s:4:"opts";s:27:"phrase 1, force 0, limit 18";s:6:"result";a:4:{i:0;s:35:" ... is my test text to ... ";i:1;s:30:"another test text ... ";i:2;s:22:"test number three ... ";i:3;s:21:"final test, not ... ";}}i:59;a:2:{s:4:"opts";s:27:"phrase 1, force 1, limit 18";s:6:"result";a:4:{i:0;s:35:" ... is my test text to ... ";i:1;s:30:"another test text ... ";i:2;s:22:"test number three ... ";i:3;s:21:"final test, not ... ";}}i:60;a:2:{s:4:"opts";s:27:"phrase 0, force 0, limit 19";s:6:"result";a:4:{i:0;s:43:" ... is my test text to ... ";i:1;s:37:"another test text ... ";i:2;s:31:"test number three, ... ";i:3;s:43:" ... phrase text test as ... ";}}i:61;a:2:{s:4:"opts";s:27:"phrase 0, force 1, limit 19";s:6:"result";a:4:{i:0;s:43:" ... is my test text to ... 
";i:1;s:37:"another test text ... ";i:2;s:31:"test number three, ... ";i:3;s:43:" ... phrase text test as ... ";}}i:62;a:2:{s:4:"opts";s:27:"phrase 1, force 0, limit 19";s:6:"result";a:4:{i:0;s:36:" ... is my test text to ... ";i:1;s:30:"another test text ... ";i:2;s:23:"test number three, ... ";i:3;s:21:"final test, not ... ";}}i:63;a:2:{s:4:"opts";s:27:"phrase 1, force 1, limit 19";s:6:"result";a:4:{i:0;s:36:" ... is my test text to ... ";i:1;s:30:"another test text ... ";i:2;s:23:"test number three, ... ";i:3;s:21:"final test, not ... ";}}i:64;a:2:{s:4:"opts";s:27:"phrase 0, force 0, limit 20";s:6:"result";a:4:{i:0;s:43:" ... is my test text to ... ";i:1;s:39:"another test text to ... ";i:2;s:31:"test number three, ... ";i:3;s:44:" ... phrase text test as ... ";}}i:65;a:2:{s:4:"opts";s:27:"phrase 0, force 1, limit 20";s:6:"result";a:4:{i:0;s:43:" ... is my test text to ... ";i:1;s:39:"another test text to ... ";i:2;s:31:"test number three, ... ";i:3;s:44:" ... phrase text test as ... ";}}i:66;a:2:{s:4:"opts";s:27:"phrase 1, force 0, limit 20";s:6:"result";a:4:{i:0;s:36:" ... is my test text to ... ";i:1;s:32:"another test text to ... ";i:2;s:24:"test number three, ... ";i:3;s:21:"final test, not ... ";}}i:67;a:2:{s:4:"opts";s:27:"phrase 1, force 1, limit 20";s:6:"result";a:4:{i:0;s:36:" ... is my test text to ... ";i:1;s:32:"another test text to ... ";i:2;s:24:"test number three, ... ";i:3;s:21:"final test, not ... ";}}i:68;a:2:{s:4:"opts";s:27:"phrase 0, force 0, limit 21";s:6:"result";a:4:{i:0;s:45:" ... is my test text to be ... ";i:1;s:40:"another test text to ... ";i:2;s:31:"test number three, ... ";i:3;s:44:" ... phrase text test as ... ";}}i:69;a:2:{s:4:"opts";s:27:"phrase 0, force 1, limit 21";s:6:"result";a:4:{i:0;s:45:" ... is my test text to be ... ";i:1;s:40:"another test text to ... ";i:2;s:31:"test number three, ... ";i:3;s:44:" ... phrase text test as ... ";}}i:70;a:2:{s:4:"opts";s:27:"phrase 1, force 0, limit 21";s:6:"result";a:4:{i:0;s:38:" ... is my test text to be ... ";i:1;s:33:"another test text to ... ";i:2;s:24:"test number three, ... ";i:3;s:25:"final test, not only ... ";}}i:71;a:2:{s:4:"opts";s:27:"phrase 1, force 1, limit 21";s:6:"result";a:4:{i:0;s:38:" ... is my test text to be ... ";i:1;s:33:"another test text to ... ";i:2;s:24:"test number three, ... ";i:3;s:25:"final test, not only ... ";}}i:72;a:2:{s:4:"opts";s:27:"phrase 0, force 0, limit 22";s:6:"result";a:4:{i:0;s:46:" ... is my test text to be ... ";i:1;s:40:"another test text to ... ";i:2;s:31:"test number three, ... ";i:3;s:44:" ... phrase text test as ... ";}}i:73;a:2:{s:4:"opts";s:27:"phrase 0, force 1, limit 22";s:6:"result";a:4:{i:0;s:46:" ... is my test text to be ... ";i:1;s:40:"another test text to ... ";i:2;s:31:"test number three, ... ";i:3;s:44:" ... phrase text test as ... ";}}i:74;a:2:{s:4:"opts";s:27:"phrase 1, force 0, limit 22";s:6:"result";a:4:{i:0;s:39:" ... is my test text to be ... ";i:1;s:33:"another test text to ... ";i:2;s:24:"test number three, ... ";i:3;s:26:"final test, not only ... ";}}i:75;a:2:{s:4:"opts";s:27:"phrase 1, force 1, limit 22";s:6:"result";a:4:{i:0;s:39:" ... is my test text to be ... ";i:1;s:33:"another test text to ... ";i:2;s:24:"test number three, ... ";i:3;s:26:"final test, not only ... ";}}i:76;a:2:{s:4:"opts";s:27:"phrase 0, force 0, limit 23";s:6:"result";a:4:{i:0;s:46:" ... is my test text to be ... ";i:1;s:42:"another test text to be ... ";i:2;s:31:"test number three, ... ";i:3;s:44:" ... phrase text test as ... 
";}}i:77;a:2:{s:4:"opts";s:27:"phrase 0, force 1, limit 23";s:6:"result";a:4:{i:0;s:46:" ... is my test text to be ... ";i:1;s:42:"another test text to be ... ";i:2;s:31:"test number three, ... ";i:3;s:44:" ... phrase text test as ... ";}}i:78;a:2:{s:4:"opts";s:27:"phrase 1, force 0, limit 23";s:6:"result";a:4:{i:0;s:39:" ... is my test text to be ... ";i:1;s:35:"another test text to be ... ";i:2;s:24:"test number three, ... ";i:3;s:26:"final test, not only ... ";}}i:79;a:2:{s:4:"opts";s:27:"phrase 1, force 1, limit 23";s:6:"result";a:4:{i:0;s:39:" ... is my test text to be ... ";i:1;s:35:"another test text to be ... ";i:2;s:24:"test number three, ... ";i:3;s:26:"final test, not only ... ";}}i:80;a:2:{s:4:"opts";s:27:"phrase 0, force 0, limit 24";s:6:"result";a:4:{i:0;s:46:" ... is my test text to be ... ";i:1;s:43:"another test text to be ... ";i:2;s:31:"test number three, ... ";i:3;s:44:" ... phrase text test as ... ";}}i:81;a:2:{s:4:"opts";s:27:"phrase 0, force 1, limit 24";s:6:"result";a:4:{i:0;s:46:" ... is my test text to be ... ";i:1;s:43:"another test text to be ... ";i:2;s:31:"test number three, ... ";i:3;s:44:" ... phrase text test as ... ";}}i:82;a:2:{s:4:"opts";s:27:"phrase 1, force 0, limit 24";s:6:"result";a:4:{i:0;s:39:" ... is my test text to be ... ";i:1;s:36:"another test text to be ... ";i:2;s:24:"test number three, ... ";i:3;s:26:"final test, not only ... ";}}i:83;a:2:{s:4:"opts";s:27:"phrase 1, force 1, limit 24";s:6:"result";a:4:{i:0;s:39:" ... is my test text to be ... ";i:1;s:36:"another test text to be ... ";i:2;s:24:"test number three, ... ";i:3;s:26:"final test, not only ... ";}}i:84;a:2:{s:4:"opts";s:27:"phrase 0, force 0, limit 25";s:6:"result";a:4:{i:0;s:46:" ... is my test text to be ... ";i:1;s:43:"another test text to be ... ";i:2;s:31:"test number three, ... ";i:3;s:44:" ... phrase text test as ... ";}}i:85;a:2:{s:4:"opts";s:27:"phrase 0, force 1, limit 25";s:6:"result";a:4:{i:0;s:46:" ... is my test text to be ... ";i:1;s:43:"another test text to be ... ";i:2;s:31:"test number three, ... ";i:3;s:44:" ... phrase text test as ... ";}}i:86;a:2:{s:4:"opts";s:27:"phrase 1, force 0, limit 25";s:6:"result";a:4:{i:0;s:39:" ... is my test text to be ... ";i:1;s:36:"another test text to be ... ";i:2;s:24:"test number three, ... ";i:3;s:26:"final test, not only ... ";}}i:87;a:2:{s:4:"opts";s:27:"phrase 1, force 1, limit 25";s:6:"result";a:4:{i:0;s:39:" ... is my test text to be ... ";i:1;s:36:"another test text to be ... ";i:2;s:24:"test number three, ... ";i:3;s:26:"final test, not only ... ";}}i:88;a:2:{s:4:"opts";s:27:"phrase 0, force 0, limit 26";s:6:"result";a:4:{i:0;s:45:"this is my test text to be ... ";i:1;s:43:"another test text to be ... ";i:2;s:38:"test number three, without ... ";i:3;s:44:" ... phrase text test as ... ";}}i:89;a:2:{s:4:"opts";s:27:"phrase 0, force 1, limit 26";s:6:"result";a:4:{i:0;s:45:"this is my test text to be ... ";i:1;s:43:"another test text to be ... ";i:2;s:38:"test number three, without ... ";i:3;s:44:" ... phrase text test as ... ";}}i:90;a:2:{s:4:"opts";s:27:"phrase 1, force 0, limit 26";s:6:"result";a:4:{i:0;s:38:"this is my test text to be ... ";i:1;s:36:"another test text to be ... ";i:2;s:24:"test number three, ... ";i:3;s:26:"final test, not only ... ";}}i:91;a:2:{s:4:"opts";s:27:"phrase 1, force 1, limit 26";s:6:"result";a:4:{i:0;s:38:"this is my test text to be ... ";i:1;s:36:"another test text to be ... ";i:2;s:24:"test number three, ... ";i:3;s:26:"final test, not only ... 
";}}i:92;a:2:{s:4:"opts";s:27:"phrase 0, force 0, limit 27";s:6:"result";a:4:{i:0;s:46:"this is my test text to be ... ";i:1;s:43:"another test text to be ... ";i:2;s:39:"test number three, without ... ";i:3;s:51:" ... swapped phrase text test as ... ";}}i:93;a:2:{s:4:"opts";s:27:"phrase 0, force 1, limit 27";s:6:"result";a:4:{i:0;s:46:"this is my test text to be ... ";i:1;s:43:"another test text to be ... ";i:2;s:39:"test number three, without ... ";i:3;s:51:" ... swapped phrase text test as ... ";}}i:94;a:2:{s:4:"opts";s:27:"phrase 1, force 0, limit 27";s:6:"result";a:4:{i:0;s:39:"this is my test text to be ... ";i:1;s:36:"another test text to be ... ";i:2;s:31:"test number three, without ... ";i:3;s:26:"final test, not only ... ";}}i:95;a:2:{s:4:"opts";s:27:"phrase 1, force 1, limit 27";s:6:"result";a:4:{i:0;s:39:"this is my test text to be ... ";i:1;s:36:"another test text to be ... ";i:2;s:31:"test number three, without ... ";i:3;s:26:"final test, not only ... ";}}i:96;a:2:{s:4:"opts";s:27:"phrase 0, force 0, limit 28";s:6:"result";a:4:{i:0;s:46:"this is my test text to be ... ";i:1;s:43:"another test text to be ... ";i:2;s:39:"test number three, without ... ";i:3;s:52:" ... swapped phrase text test as ... ";}}i:97;a:2:{s:4:"opts";s:27:"phrase 0, force 1, limit 28";s:6:"result";a:4:{i:0;s:46:"this is my test text to be ... ";i:1;s:43:"another test text to be ... ";i:2;s:39:"test number three, without ... ";i:3;s:52:" ... swapped phrase text test as ... ";}}i:98;a:2:{s:4:"opts";s:27:"phrase 1, force 0, limit 28";s:6:"result";a:4:{i:0;s:39:"this is my test text to be ... ";i:1;s:36:"another test text to be ... ";i:2;s:32:"test number three, without ... ";i:3;s:26:"final test, not only ... ";}}i:99;a:2:{s:4:"opts";s:27:"phrase 1, force 1, limit 28";s:6:"result";a:4:{i:0;s:39:"this is my test text to be ... ";i:1;s:36:"another test text to be ... ";i:2;s:32:"test number three, without ... ";i:3;s:26:"final test, not only ... ";}}i:100;a:2:{s:4:"opts";s:27:"phrase 0, force 0, limit 29";s:6:"result";a:4:{i:0;s:46:"this is my test text to be ... ";i:1;s:43:"another test text to be ... ";i:2;s:39:"test number three, without ... ";i:3;s:52:" ... swapped phrase text test as ... ";}}i:101;a:2:{s:4:"opts";s:27:"phrase 0, force 1, limit 29";s:6:"result";a:4:{i:0;s:46:"this is my test text to be ... ";i:1;s:43:"another test text to be ... ";i:2;s:39:"test number three, without ... ";i:3;s:52:" ... swapped phrase text test as ... ";}}i:102;a:2:{s:4:"opts";s:27:"phrase 1, force 0, limit 29";s:6:"result";a:4:{i:0;s:39:"this is my test text to be ... ";i:1;s:36:"another test text to be ... ";i:2;s:32:"test number three, without ... ";i:3;s:33:"final test, not only without ... ";}}i:103;a:2:{s:4:"opts";s:27:"phrase 1, force 1, limit 29";s:6:"result";a:4:{i:0;s:39:"this is my test text to be ... ";i:1;s:36:"another test text to be ... ";i:2;s:32:"test number three, without ... ";i:3;s:33:"final test, not only without ... ";}}i:104;a:2:{s:4:"opts";s:27:"phrase 0, force 0, limit 30";s:6:"result";a:4:{i:0;s:46:"this is my test text to be ... ";i:1;s:43:"another test text to be ... ";i:2;s:39:"test number three, without ... ";i:3;s:52:" ... swapped phrase text test as ... ";}}i:105;a:2:{s:4:"opts";s:27:"phrase 0, force 1, limit 30";s:6:"result";a:4:{i:0;s:46:"this is my test text to be ... ";i:1;s:43:"another test text to be ... ";i:2;s:39:"test number three, without ... ";i:3;s:52:" ... swapped phrase text test as ... 
";}}i:106;a:2:{s:4:"opts";s:27:"phrase 1, force 0, limit 30";s:6:"result";a:4:{i:0;s:39:"this is my test text to be ... ";i:1;s:36:"another test text to be ... ";i:2;s:32:"test number three, without ... ";i:3;s:34:"final test, not only without ... ";}}i:107;a:2:{s:4:"opts";s:27:"phrase 1, force 1, limit 30";s:6:"result";a:4:{i:0;s:39:"this is my test text to be ... ";i:1;s:36:"another test text to be ... ";i:2;s:32:"test number three, without ... ";i:3;s:34:"final test, not only without ... ";}}i:108;a:2:{s:4:"opts";s:27:"phrase 0, force 0, limit 31";s:6:"result";a:4:{i:0;s:46:"this is my test text to be ... ";i:1;s:43:"another test text to be ... ";i:2;s:39:"test number three, without ... ";i:3;s:52:" ... swapped phrase text test as ... ";}}i:109;a:2:{s:4:"opts";s:27:"phrase 0, force 1, limit 31";s:6:"result";a:4:{i:0;s:46:"this is my test text to be ... ";i:1;s:43:"another test text to be ... ";i:2;s:39:"test number three, without ... ";i:3;s:52:" ... swapped phrase text test as ... ";}}i:110;a:2:{s:4:"opts";s:27:"phrase 1, force 0, limit 31";s:6:"result";a:4:{i:0;s:39:"this is my test text to be ... ";i:1;s:36:"another test text to be ... ";i:2;s:32:"test number three, without ... ";i:3;s:34:"final test, not only without ... ";}}i:111;a:2:{s:4:"opts";s:27:"phrase 1, force 1, limit 31";s:6:"result";a:4:{i:0;s:39:"this is my test text to be ... ";i:1;s:36:"another test text to be ... ";i:2;s:32:"test number three, without ... ";i:3;s:34:"final test, not only without ... ";}}i:112;a:2:{s:4:"opts";s:27:"phrase 0, force 0, limit 32";s:6:"result";a:4:{i:0;s:46:"this is my test text to be ... ";i:1;s:43:"another test text to be ... ";i:2;s:39:"test number three, without ... ";i:3;s:51:" ... swapped phrase text test as well";}}i:113;a:2:{s:4:"opts";s:27:"phrase 0, force 1, limit 32";s:6:"result";a:4:{i:0;s:46:"this is my test text to be ... ";i:1;s:43:"another test text to be ... ";i:2;s:39:"test number three, without ... ";i:3;s:51:" ... swapped phrase text test as well";}}i:114;a:2:{s:4:"opts";s:27:"phrase 1, force 0, limit 32";s:6:"result";a:4:{i:0;s:39:"this is my test text to be ... ";i:1;s:36:"another test text to be ... ";i:2;s:32:"test number three, without ... ";i:3;s:34:"final test, not only without ... ";}}i:115;a:2:{s:4:"opts";s:27:"phrase 1, force 1, limit 32";s:6:"result";a:4:{i:0;s:39:"this is my test text to be ... ";i:1;s:36:"another test text to be ... ";i:2;s:32:"test number three, without ... ";i:3;s:34:"final test, not only without ... ";}}i:116;a:2:{s:4:"opts";s:27:"phrase 0, force 0, limit 33";s:6:"result";a:4:{i:0;s:46:"this is my test text to be ... ";i:1;s:43:"another test text to be ... ";i:2;s:39:"test number three, without ... ";i:3;s:52:" ... swapped phrase text test as well";}}i:117;a:2:{s:4:"opts";s:27:"phrase 0, force 1, limit 33";s:6:"result";a:4:{i:0;s:46:"this is my test text to be ... ";i:1;s:43:"another test text to be ... ";i:2;s:39:"test number three, without ... ";i:3;s:52:" ... swapped phrase text test as well";}}i:118;a:2:{s:4:"opts";s:27:"phrase 1, force 0, limit 33";s:6:"result";a:4:{i:0;s:39:"this is my test text to be ... ";i:1;s:36:"another test text to be ... ";i:2;s:32:"test number three, without ... ";i:3;s:34:"final test, not only without ... ";}}i:119;a:2:{s:4:"opts";s:27:"phrase 1, force 1, limit 33";s:6:"result";a:4:{i:0;s:39:"this is my test text to be ... ";i:1;s:36:"another test text to be ... ";i:2;s:32:"test number three, without ... ";i:3;s:34:"final test, not only without ... 
";}}i:120;a:2:{s:4:"opts";s:27:"phrase 0, force 0, limit 34";s:6:"result";a:4:{i:0;s:46:"this is my test text to be ... ";i:1;s:43:"another test text to be ... ";i:2;s:39:"test number three, without ... ";i:3;s:52:" ... swapped phrase text test as well";}}i:121;a:2:{s:4:"opts";s:27:"phrase 0, force 1, limit 34";s:6:"result";a:4:{i:0;s:46:"this is my test text to be ... ";i:1;s:43:"another test text to be ... ";i:2;s:39:"test number three, without ... ";i:3;s:52:" ... swapped phrase text test as well";}}i:122;a:2:{s:4:"opts";s:27:"phrase 1, force 0, limit 34";s:6:"result";a:4:{i:0;s:39:"this is my test text to be ... ";i:1;s:36:"another test text to be ... ";i:2;s:38:"test number three, without phrase ... ";i:3;s:34:"final test, not only without ... ";}}i:123;a:2:{s:4:"opts";s:27:"phrase 1, force 1, limit 34";s:6:"result";a:4:{i:0;s:39:"this is my test text to be ... ";i:1;s:36:"another test text to be ... ";i:2;s:38:"test number three, without phrase ... ";i:3;s:34:"final test, not only without ... ";}}i:124;a:2:{s:4:"opts";s:27:"phrase 0, force 0, limit 35";s:6:"result";a:4:{i:0;s:46:"this is my test text to be ... ";i:1;s:54:"another test text to be highlighted ... ";i:2;s:39:"test number three, without ... ";i:3;s:52:" ... swapped phrase text test as well";}}i:125;a:2:{s:4:"opts";s:27:"phrase 0, force 1, limit 35";s:6:"result";a:4:{i:0;s:46:"this is my test text to be ... ";i:1;s:54:"another test text to be highlighted ... ";i:2;s:39:"test number three, without ... ";i:3;s:52:" ... swapped phrase text test as well";}}i:126;a:2:{s:4:"opts";s:27:"phrase 1, force 0, limit 35";s:6:"result";a:4:{i:0;s:39:"this is my test text to be ... ";i:1;s:47:"another test text to be highlighted ... ";i:2;s:39:"test number three, without phrase ... ";i:3;s:34:"final test, not only without ... ";}}i:127;a:2:{s:4:"opts";s:27:"phrase 1, force 1, limit 35";s:6:"result";a:4:{i:0;s:39:"this is my test text to be ... ";i:1;s:47:"another test text to be highlighted ... ";i:2;s:39:"test number three, without phrase ... ";i:3;s:34:"final test, not only without ... ";}}i:128;a:2:{s:4:"opts";s:27:"phrase 0, force 0, limit 36";s:6:"result";a:4:{i:0;s:46:"this is my test text to be ... ";i:1;s:55:"another test text to be highlighted, ... ";i:2;s:39:"test number three, without ... ";i:3;s:52:" ... swapped phrase text test as well";}}i:129;a:2:{s:4:"opts";s:27:"phrase 0, force 1, limit 36";s:6:"result";a:4:{i:0;s:46:"this is my test text to be ... ";i:1;s:55:"another test text to be highlighted, ... ";i:2;s:39:"test number three, without ... ";i:3;s:52:" ... swapped phrase text test as well";}}i:130;a:2:{s:4:"opts";s:27:"phrase 1, force 0, limit 36";s:6:"result";a:4:{i:0;s:39:"this is my test text to be ... ";i:1;s:48:"another test text to be highlighted, ... ";i:2;s:39:"test number three, without phrase ... ";i:3;s:40:"final test, not only without phrase ... ";}}i:131;a:2:{s:4:"opts";s:27:"phrase 1, force 1, limit 36";s:6:"result";a:4:{i:0;s:39:"this is my test text to be ... ";i:1;s:48:"another test text to be highlighted, ... ";i:2;s:39:"test number three, without phrase ... ";i:3;s:40:"final test, not only without phrase ... ";}}i:132;a:2:{s:4:"opts";s:27:"phrase 0, force 0, limit 37";s:6:"result";a:4:{i:0;s:46:"this is my test text to be ... ";i:1;s:56:"another test text to be highlighted, ... ";i:2;s:39:"test number three, without ... ";i:3;s:56:" ... with swapped phrase text test as well";}}i:133;a:2:{s:4:"opts";s:27:"phrase 0, force 1, limit 37";s:6:"result";a:4:{i:0;s:46:"this is my test text to be ... 
";i:1;s:56:"another test text to be highlighted, ... ";i:2;s:39:"test number three, without ... ";i:3;s:56:" ... with swapped phrase text test as well";}}i:134;a:2:{s:4:"opts";s:27:"phrase 1, force 0, limit 37";s:6:"result";a:4:{i:0;s:39:"this is my test text to be ... ";i:1;s:49:"another test text to be highlighted, ... ";i:2;s:39:"test number three, without phrase ... ";i:3;s:41:"final test, not only without phrase ... ";}}i:135;a:2:{s:4:"opts";s:27:"phrase 1, force 1, limit 37";s:6:"result";a:4:{i:0;s:39:"this is my test text to be ... ";i:1;s:49:"another test text to be highlighted, ... ";i:2;s:39:"test number three, without phrase ... ";i:3;s:41:"final test, not only without phrase ... ";}}i:136;a:2:{s:4:"opts";s:27:"phrase 0, force 0, limit 38";s:6:"result";a:4:{i:0;s:57:"this is my test text to be highlighted ... ";i:1;s:56:"another test text to be highlighted, ... ";i:2;s:39:"test number three, without ... ";i:3;s:57:" ... with swapped phrase text test as well";}}i:137;a:2:{s:4:"opts";s:27:"phrase 0, force 1, limit 38";s:6:"result";a:4:{i:0;s:57:"this is my test text to be highlighted ... ";i:1;s:56:"another test text to be highlighted, ... ";i:2;s:39:"test number three, without ... ";i:3;s:57:" ... with swapped phrase text test as well";}}i:138;a:2:{s:4:"opts";s:27:"phrase 1, force 0, limit 38";s:6:"result";a:4:{i:0;s:50:"this is my test text to be highlighted ... ";i:1;s:49:"another test text to be highlighted, ... ";i:2;s:39:"test number three, without phrase ... ";i:3;s:41:"final test, not only without phrase ... ";}}i:139;a:2:{s:4:"opts";s:27:"phrase 1, force 1, limit 38";s:6:"result";a:4:{i:0;s:50:"this is my test text to be highlighted ... ";i:1;s:49:"another test text to be highlighted, ... ";i:2;s:39:"test number three, without phrase ... ";i:3;s:41:"final test, not only without phrase ... ";}}i:140;a:2:{s:4:"opts";s:27:"phrase 0, force 0, limit 39";s:6:"result";a:4:{i:0;s:57:"this is my test text to be highlighted ... ";i:1;s:56:"another test text to be highlighted, ... ";i:2;s:39:"test number three, without ... ";i:3;s:57:" ... with swapped phrase text test as well";}}i:141;a:2:{s:4:"opts";s:27:"phrase 0, force 1, limit 39";s:6:"result";a:4:{i:0;s:57:"this is my test text to be highlighted ... ";i:1;s:56:"another test text to be highlighted, ... ";i:2;s:39:"test number three, without ... ";i:3;s:57:" ... with swapped phrase text test as well";}}i:142;a:2:{s:4:"opts";s:27:"phrase 1, force 0, limit 39";s:6:"result";a:4:{i:0;s:50:"this is my test text to be highlighted ... ";i:1;s:49:"another test text to be highlighted, ... ";i:2;s:39:"test number three, without phrase ... ";i:3;s:41:"final test, not only without phrase ... ";}}i:143;a:2:{s:4:"opts";s:27:"phrase 1, force 1, limit 39";s:6:"result";a:4:{i:0;s:50:"this is my test text to be highlighted ... ";i:1;s:49:"another test text to be highlighted, ... ";i:2;s:39:"test number three, without phrase ... ";i:3;s:41:"final test, not only without phrase ... ";}}i:144;a:2:{s:4:"opts";s:27:"phrase 0, force 0, limit 40";s:6:"result";a:4:{i:0;s:57:"this is my test text to be highlighted ... ";i:1;s:56:"another test text to be highlighted, ... ";i:2;s:46:"test number three, without phrase match";i:3;s:57:" ... with swapped phrase text test as well";}}i:145;a:2:{s:4:"opts";s:27:"phrase 0, force 1, limit 40";s:6:"result";a:4:{i:0;s:57:"this is my test text to be highlighted ... ";i:1;s:56:"another test text to be highlighted, ... ";i:2;s:46:"test number three, without phrase match";i:3;s:57:" ... 
with swapped phrase text test as well";}}i:146;a:2:{s:4:"opts";s:27:"phrase 1, force 0, limit 40";s:6:"result";a:4:{i:0;s:50:"this is my test text to be highlighted ... ";i:1;s:49:"another test text to be highlighted, ... ";i:2;s:39:"test number three, without phrase match";i:3;s:41:"final test, not only without phrase ... ";}}i:147;a:2:{s:4:"opts";s:27:"phrase 1, force 1, limit 40";s:6:"result";a:4:{i:0;s:50:"this is my test text to be highlighted ... ";i:1;s:49:"another test text to be highlighted, ... ";i:2;s:39:"test number three, without phrase match";i:3;s:41:"final test, not only without phrase ... ";}}i:148;a:2:{s:4:"opts";s:27:"phrase 0, force 0, limit 41";s:6:"result";a:4:{i:0;s:57:"this is my test text to be highlighted ... ";i:1;s:56:"another test text to be highlighted, ... ";i:2;s:46:"test number three, without phrase match";i:3;s:57:" ... with swapped phrase text test as well";}}i:149;a:2:{s:4:"opts";s:27:"phrase 0, force 1, limit 41";s:6:"result";a:4:{i:0;s:57:"this is my test text to be highlighted ... ";i:1;s:56:"another test text to be highlighted, ... ";i:2;s:46:"test number three, without phrase match";i:3;s:57:" ... with swapped phrase text test as well";}}i:150;a:2:{s:4:"opts";s:27:"phrase 1, force 0, limit 41";s:6:"result";a:4:{i:0;s:50:"this is my test text to be highlighted ... ";i:1;s:49:"another test text to be highlighted, ... ";i:2;s:39:"test number three, without phrase match";i:3;s:41:"final test, not only without phrase ... ";}}i:151;a:2:{s:4:"opts";s:27:"phrase 1, force 1, limit 41";s:6:"result";a:4:{i:0;s:50:"this is my test text to be highlighted ... ";i:1;s:49:"another test text to be highlighted, ... ";i:2;s:39:"test number three, without phrase match";i:3;s:41:"final test, not only without phrase ... ";}}i:152;a:2:{s:4:"opts";s:27:"phrase 0, force 0, limit 42";s:6:"result";a:4:{i:0;s:57:"this is my test text to be highlighted ... ";i:1;s:56:"another test text to be highlighted, ... ";i:2;s:46:"test number three, without phrase match";i:3;s:57:" ... with swapped phrase text test as well";}}i:153;a:2:{s:4:"opts";s:27:"phrase 0, force 1, limit 42";s:6:"result";a:4:{i:0;s:57:"this is my test text to be highlighted ... ";i:1;s:56:"another test text to be highlighted, ... ";i:2;s:46:"test number three, without phrase match";i:3;s:57:" ... with swapped phrase text test as well";}}i:154;a:2:{s:4:"opts";s:27:"phrase 1, force 0, limit 42";s:6:"result";a:4:{i:0;s:50:"this is my test text to be highlighted ... ";i:1;s:49:"another test text to be highlighted, ... ";i:2;s:39:"test number three, without phrase match";i:3;s:46:"final test, not only without phrase match ... ";}}i:155;a:2:{s:4:"opts";s:27:"phrase 1, force 1, limit 42";s:6:"result";a:4:{i:0;s:50:"this is my test text to be highlighted ... ";i:1;s:49:"another test text to be highlighted, ... ";i:2;s:39:"test number three, without phrase match";i:3;s:46:"final test, not only without phrase match ... ";}}i:156;a:2:{s:4:"opts";s:27:"phrase 0, force 0, limit 43";s:6:"result";a:4:{i:0;s:57:"this is my test text to be highlighted ... ";i:1;s:56:"another test text to be highlighted, ... ";i:2;s:46:"test number three, without phrase match";i:3;s:57:" ... with swapped phrase text test as well";}}i:157;a:2:{s:4:"opts";s:27:"phrase 0, force 1, limit 43";s:6:"result";a:4:{i:0;s:57:"this is my test text to be highlighted ... ";i:1;s:56:"another test text to be highlighted, ... ";i:2;s:46:"test number three, without phrase match";i:3;s:57:" ... 
with swapped phrase text test as well";}}i:158;a:2:{s:4:"opts";s:27:"phrase 1, force 0, limit 43";s:6:"result";a:4:{i:0;s:50:"this is my test text to be highlighted ... ";i:1;s:49:"another test text to be highlighted, ... ";i:2;s:39:"test number three, without phrase match";i:3;s:47:"final test, not only without phrase match, ... ";}}i:159;a:2:{s:4:"opts";s:27:"phrase 1, force 1, limit 43";s:6:"result";a:4:{i:0;s:50:"this is my test text to be highlighted ... ";i:1;s:49:"another test text to be highlighted, ... ";i:2;s:39:"test number three, without phrase match";i:3;s:47:"final test, not only without phrase match, ... ";}}i:160;a:2:{s:4:"opts";s:27:"phrase 0, force 0, limit 44";s:6:"result";a:4:{i:0;s:57:"this is my test text to be highlighted ... ";i:1;s:56:"another test text to be highlighted, ... ";i:2;s:46:"test number three, without phrase match";i:3;s:57:" ... with swapped phrase text test as well";}}i:161;a:2:{s:4:"opts";s:27:"phrase 0, force 1, limit 44";s:6:"result";a:4:{i:0;s:57:"this is my test text to be highlighted ... ";i:1;s:56:"another test text to be highlighted, ... ";i:2;s:46:"test number three, without phrase match";i:3;s:57:" ... with swapped phrase text test as well";}}i:162;a:2:{s:4:"opts";s:27:"phrase 1, force 0, limit 44";s:6:"result";a:4:{i:0;s:50:"this is my test text to be highlighted ... ";i:1;s:49:"another test text to be highlighted, ... ";i:2;s:39:"test number three, without phrase match";i:3;s:48:"final test, not only without phrase match, ... ";}}i:163;a:2:{s:4:"opts";s:27:"phrase 1, force 1, limit 44";s:6:"result";a:4:{i:0;s:50:"this is my test text to be highlighted ... ";i:1;s:49:"another test text to be highlighted, ... ";i:2;s:39:"test number three, without phrase match";i:3;s:48:"final test, not only without phrase match, ... ";}}i:164;a:2:{s:4:"opts";s:27:"phrase 0, force 0, limit 45";s:6:"result";a:4:{i:0;s:57:"this is my test text to be highlighted ... ";i:1;s:56:"another test text to be highlighted, ... ";i:2;s:46:"test number three, without phrase match";i:3;s:57:" ... with swapped phrase text test as well";}}i:165;a:2:{s:4:"opts";s:27:"phrase 0, force 1, limit 45";s:6:"result";a:4:{i:0;s:57:"this is my test text to be highlighted ... ";i:1;s:56:"another test text to be highlighted, ... ";i:2;s:46:"test number three, without phrase match";i:3;s:57:" ... with swapped phrase text test as well";}}i:166;a:2:{s:4:"opts";s:27:"phrase 1, force 0, limit 45";s:6:"result";a:4:{i:0;s:50:"this is my test text to be highlighted ... ";i:1;s:49:"another test text to be highlighted, ... ";i:2;s:39:"test number three, without phrase match";i:3;s:48:"final test, not only without phrase match, ... ";}}i:167;a:2:{s:4:"opts";s:27:"phrase 1, force 1, limit 45";s:6:"result";a:4:{i:0;s:50:"this is my test text to be highlighted ... ";i:1;s:49:"another test text to be highlighted, ... ";i:2;s:39:"test number three, without phrase match";i:3;s:48:"final test, not only without phrase match, ... ";}}i:168;a:2:{s:4:"opts";s:27:"phrase 0, force 0, limit 46";s:6:"result";a:4:{i:0;s:57:"this is my test text to be highlighted ... ";i:1;s:56:"another test text to be highlighted, ... ";i:2;s:46:"test number three, without phrase match";i:3;s:57:" ... with swapped phrase text test as well";}}i:169;a:2:{s:4:"opts";s:27:"phrase 0, force 1, limit 46";s:6:"result";a:4:{i:0;s:57:"this is my test text to be highlighted ... ";i:1;s:56:"another test text to be highlighted, ... ";i:2;s:46:"test number three, without phrase match";i:3;s:57:" ... 
with swapped phrase text test as well";}}i:170;a:2:{s:4:"opts";s:27:"phrase 1, force 0, limit 46";s:6:"result";a:4:{i:0;s:50:"this is my test text to be highlighted ... ";i:1;s:49:"another test text to be highlighted, ... ";i:2;s:39:"test number three, without phrase match";i:3;s:48:"final test, not only without phrase match, ... ";}}i:171;a:2:{s:4:"opts";s:27:"phrase 1, force 1, limit 46";s:6:"result";a:4:{i:0;s:50:"this is my test text to be highlighted ... ";i:1;s:49:"another test text to be highlighted, ... ";i:2;s:39:"test number three, without phrase match";i:3;s:48:"final test, not only without phrase match, ... ";}}i:172;a:2:{s:4:"opts";s:27:"phrase 0, force 0, limit 47";s:6:"result";a:4:{i:0;s:57:"this is my test text to be highlighted ... ";i:1;s:56:"another test text to be highlighted, ... ";i:2;s:46:"test number three, without phrase match";i:3;s:57:" ... with swapped phrase text test as well";}}i:173;a:2:{s:4:"opts";s:27:"phrase 0, force 1, limit 47";s:6:"result";a:4:{i:0;s:57:"this is my test text to be highlighted ... ";i:1;s:56:"another test text to be highlighted, ... ";i:2;s:46:"test number three, without phrase match";i:3;s:57:" ... with swapped phrase text test as well";}}i:174;a:2:{s:4:"opts";s:27:"phrase 1, force 0, limit 47";s:6:"result";a:4:{i:0;s:50:"this is my test text to be highlighted ... ";i:1;s:49:"another test text to be highlighted, ... ";i:2;s:39:"test number three, without phrase match";i:3;s:51:"final test, not only without phrase match, but ... ";}}i:175;a:2:{s:4:"opts";s:27:"phrase 1, force 1, limit 47";s:6:"result";a:4:{i:0;s:50:"this is my test text to be highlighted ... ";i:1;s:49:"another test text to be highlighted, ... ";i:2;s:39:"test number three, without phrase match";i:3;s:51:"final test, not only without phrase match, but ... ";}}i:176;a:2:{s:4:"opts";s:27:"phrase 0, force 0, limit 48";s:6:"result";a:4:{i:0;s:57:"this is my test text to be highlighted ... ";i:1;s:56:"another test text to be highlighted, ... ";i:2;s:46:"test number three, without phrase match";i:3;s:57:" ... with swapped phrase text test as well";}}i:177;a:2:{s:4:"opts";s:27:"phrase 0, force 1, limit 48";s:6:"result";a:4:{i:0;s:57:"this is my test text to be highlighted ... ";i:1;s:56:"another test text to be highlighted, ... ";i:2;s:46:"test number three, without phrase match";i:3;s:57:" ... with swapped phrase text test as well";}}i:178;a:2:{s:4:"opts";s:27:"phrase 1, force 0, limit 48";s:6:"result";a:4:{i:0;s:50:"this is my test text to be highlighted ... ";i:1;s:49:"another test text to be highlighted, ... ";i:2;s:39:"test number three, without phrase match";i:3;s:52:"final test, not only without phrase match, but ... ";}}i:179;a:2:{s:4:"opts";s:27:"phrase 1, force 1, limit 48";s:6:"result";a:4:{i:0;s:50:"this is my test text to be highlighted ... ";i:1;s:49:"another test text to be highlighted, ... ";i:2;s:39:"test number three, without phrase match";i:3;s:52:"final test, not only without phrase match, but ... ";}}i:180;a:2:{s:4:"opts";s:27:"phrase 0, force 0, limit 49";s:6:"result";a:4:{i:0;s:57:"this is my test text to be highlighted ... ";i:1;s:62:"another test text to be highlighted, below limit";i:2;s:46:"test number three, without phrase match";i:3;s:57:" ... with swapped phrase text test as well";}}i:181;a:2:{s:4:"opts";s:27:"phrase 0, force 1, limit 49";s:6:"result";a:4:{i:0;s:57:"this is my test text to be highlighted ... ";i:1;s:62:"another test text to be highlighted, below limit";i:2;s:46:"test number three, without phrase match";i:3;s:57:" ... 
with swapped phrase text test as well";}}i:182;a:2:{s:4:"opts";s:27:"phrase 1, force 0, limit 49";s:6:"result";a:4:{i:0;s:50:"this is my test text to be highlighted ... ";i:1;s:55:"another test text to be highlighted, below limit";i:2;s:39:"test number three, without phrase match";i:3;s:52:"final test, not only without phrase match, but ... ";}}i:183;a:2:{s:4:"opts";s:27:"phrase 1, force 1, limit 49";s:6:"result";a:4:{i:0;s:50:"this is my test text to be highlighted ... ";i:1;s:55:"another test text to be highlighted, below limit";i:2;s:39:"test number three, without phrase match";i:3;s:52:"final test, not only without phrase match, but ... ";}}i:184;a:2:{s:4:"opts";s:27:"phrase 0, force 0, limit 50";s:6:"result";a:4:{i:0;s:57:"this is my test text to be highlighted ... ";i:1;s:62:"another test text to be highlighted, below limit";i:2;s:46:"test number three, without phrase match";i:3;s:93:"final test, not only without ... with swapped phrase text test as well";}}i:185;a:2:{s:4:"opts";s:27:"phrase 0, force 1, limit 50";s:6:"result";a:4:{i:0;s:57:"this is my test text to be highlighted ... ";i:1;s:62:"another test text to be highlighted, below limit";i:2;s:46:"test number three, without phrase match";i:3;s:93:"final test, not only without ... with swapped phrase text test as well";}}i:186;a:2:{s:4:"opts";s:27:"phrase 1, force 0, limit 50";s:6:"result";a:4:{i:0;s:50:"this is my test text to be highlighted ... ";i:1;s:55:"another test text to be highlighted, below limit";i:2;s:39:"test number three, without phrase match";i:3;s:52:"final test, not only without phrase match, but ... ";}}i:187;a:2:{s:4:"opts";s:27:"phrase 1, force 1, limit 50";s:6:"result";a:4:{i:0;s:50:"this is my test text to be highlighted ... ";i:1;s:55:"another test text to be highlighted, below limit";i:2;s:39:"test number three, without phrase match";i:3;s:52:"final test, not only without phrase match, but ... ";}}i:188;a:2:{s:4:"opts";s:27:"phrase 0, force 0, limit 51";s:6:"result";a:4:{i:0;s:57:"this is my test text to be highlighted ... ";i:1;s:62:"another test text to be highlighted, below limit";i:2;s:46:"test number three, without phrase match";i:3;s:93:"final test, not only without ... with swapped phrase text test as well";}}i:189;a:2:{s:4:"opts";s:27:"phrase 0, force 1, limit 51";s:6:"result";a:4:{i:0;s:57:"this is my test text to be highlighted ... ";i:1;s:62:"another test text to be highlighted, below limit";i:2;s:46:"test number three, without phrase match";i:3;s:93:"final test, not only without ... with swapped phrase text test as well";}}i:190;a:2:{s:4:"opts";s:27:"phrase 1, force 0, limit 51";s:6:"result";a:4:{i:0;s:50:"this is my test text to be highlighted ... ";i:1;s:55:"another test text to be highlighted, below limit";i:2;s:39:"test number three, without phrase match";i:3;s:52:"final test, not only without phrase match, but ... ";}}i:191;a:2:{s:4:"opts";s:27:"phrase 1, force 1, limit 51";s:6:"result";a:4:{i:0;s:50:"this is my test text to be highlighted ... ";i:1;s:55:"another test text to be highlighted, below limit";i:2;s:39:"test number three, without phrase match";i:3;s:52:"final test, not only without phrase match, but ... ";}}i:192;a:2:{s:4:"opts";s:27:"phrase 0, force 0, limit 52";s:6:"result";a:4:{i:0;s:57:"this is my test text to be highlighted ... ";i:1;s:62:"another test text to be highlighted, below limit";i:2;s:46:"test number three, without phrase match";i:3;s:93:"final test, not only without ... 
with swapped phrase text test as well";}}i:193;a:2:{s:4:"opts";s:27:"phrase 0, force 1, limit 52";s:6:"result";a:4:{i:0;s:57:"this is my test text to be highlighted ... ";i:1;s:62:"another test text to be highlighted, below limit";i:2;s:46:"test number three, without phrase match";i:3;s:93:"final test, not only without ... with swapped phrase text test as well";}}i:194;a:2:{s:4:"opts";s:27:"phrase 1, force 0, limit 52";s:6:"result";a:4:{i:0;s:50:"this is my test text to be highlighted ... ";i:1;s:55:"another test text to be highlighted, below limit";i:2;s:39:"test number three, without phrase match";i:3;s:56:"final test, not only without phrase match, but also ... ";}}i:195;a:2:{s:4:"opts";s:27:"phrase 1, force 1, limit 52";s:6:"result";a:4:{i:0;s:50:"this is my test text to be highlighted ... ";i:1;s:55:"another test text to be highlighted, below limit";i:2;s:39:"test number three, without phrase match";i:3;s:56:"final test, not only without phrase match, but also ... ";}}i:196;a:2:{s:4:"opts";s:27:"phrase 0, force 0, limit 53";s:6:"result";a:4:{i:0;s:57:"this is my test text to be highlighted ... ";i:1;s:62:"another test text to be highlighted, below limit";i:2;s:46:"test number three, without phrase match";i:3;s:93:"final test, not only without ... with swapped phrase text test as well";}}i:197;a:2:{s:4:"opts";s:27:"phrase 0, force 1, limit 53";s:6:"result";a:4:{i:0;s:57:"this is my test text to be highlighted ... ";i:1;s:62:"another test text to be highlighted, below limit";i:2;s:46:"test number three, without phrase match";i:3;s:93:"final test, not only without ... with swapped phrase text test as well";}}i:198;a:2:{s:4:"opts";s:27:"phrase 1, force 0, limit 53";s:6:"result";a:4:{i:0;s:50:"this is my test text to be highlighted ... ";i:1;s:55:"another test text to be highlighted, below limit";i:2;s:39:"test number three, without phrase match";i:3;s:57:"final test, not only without phrase match, but also ... ";}}i:199;a:2:{s:4:"opts";s:27:"phrase 1, force 1, limit 53";s:6:"result";a:4:{i:0;s:50:"this is my test text to be highlighted ... ";i:1;s:55:"another test text to be highlighted, below limit";i:2;s:39:"test number three, without phrase match";i:3;s:57:"final test, not only without phrase match, but also ... ";}}i:200;a:2:{s:4:"opts";s:27:"phrase 0, force 0, limit 54";s:6:"result";a:4:{i:0;s:57:"this is my test text to be highlighted ... ";i:1;s:62:"another test text to be highlighted, below limit";i:2;s:46:"test number three, without phrase match";i:3;s:93:"final test, not only without ... with swapped phrase text test as well";}}i:201;a:2:{s:4:"opts";s:27:"phrase 0, force 1, limit 54";s:6:"result";a:4:{i:0;s:57:"this is my test text to be highlighted ... ";i:1;s:62:"another test text to be highlighted, below limit";i:2;s:46:"test number three, without phrase match";i:3;s:93:"final test, not only without ... with swapped phrase text test as well";}}i:202;a:2:{s:4:"opts";s:27:"phrase 1, force 0, limit 54";s:6:"result";a:4:{i:0;s:50:"this is my test text to be highlighted ... ";i:1;s:55:"another test text to be highlighted, below limit";i:2;s:39:"test number three, without phrase match";i:3;s:57:"final test, not only without phrase match, but also ... ";}}i:203;a:2:{s:4:"opts";s:27:"phrase 1, force 1, limit 54";s:6:"result";a:4:{i:0;s:50:"this is my test text to be highlighted ... ";i:1;s:55:"another test text to be highlighted, below limit";i:2;s:39:"test number three, without phrase match";i:3;s:57:"final test, not only without phrase match, but also ... 
";}}i:204;a:2:{s:4:"opts";s:27:"phrase 0, force 0, limit 55";s:6:"result";a:4:{i:0;s:57:"this is my test text to be highlighted ... ";i:1;s:62:"another test text to be highlighted, below limit";i:2;s:46:"test number three, without phrase match";i:3;s:93:"final test, not only without ... with swapped phrase text test as well";}}i:205;a:2:{s:4:"opts";s:27:"phrase 0, force 1, limit 55";s:6:"result";a:4:{i:0;s:57:"this is my test text to be highlighted ... ";i:1;s:62:"another test text to be highlighted, below limit";i:2;s:46:"test number three, without phrase match";i:3;s:93:"final test, not only without ... with swapped phrase text test as well";}}i:206;a:2:{s:4:"opts";s:27:"phrase 1, force 0, limit 55";s:6:"result";a:4:{i:0;s:50:"this is my test text to be highlighted ... ";i:1;s:55:"another test text to be highlighted, below limit";i:2;s:39:"test number three, without phrase match";i:3;s:57:"final test, not only without phrase match, but also ... ";}}i:207;a:2:{s:4:"opts";s:27:"phrase 1, force 1, limit 55";s:6:"result";a:4:{i:0;s:50:"this is my test text to be highlighted ... ";i:1;s:55:"another test text to be highlighted, below limit";i:2;s:39:"test number three, without phrase match";i:3;s:57:"final test, not only without phrase match, but also ... ";}}i:208;a:2:{s:4:"opts";s:27:"phrase 0, force 0, limit 56";s:6:"result";a:4:{i:0;s:57:"this is my test text to be highlighted ... ";i:1;s:62:"another test text to be highlighted, below limit";i:2;s:46:"test number three, without phrase match";i:3;s:93:"final test, not only without ... with swapped phrase text test as well";}}i:209;a:2:{s:4:"opts";s:27:"phrase 0, force 1, limit 56";s:6:"result";a:4:{i:0;s:57:"this is my test text to be highlighted ... ";i:1;s:62:"another test text to be highlighted, below limit";i:2;s:46:"test number three, without phrase match";i:3;s:93:"final test, not only without ... with swapped phrase text test as well";}}i:210;a:2:{s:4:"opts";s:27:"phrase 1, force 0, limit 56";s:6:"result";a:4:{i:0;s:50:"this is my test text to be highlighted ... ";i:1;s:55:"another test text to be highlighted, below limit";i:2;s:39:"test number three, without phrase match";i:3;s:57:"final test, not only without phrase match, but also ... ";}}i:211;a:2:{s:4:"opts";s:27:"phrase 1, force 1, limit 56";s:6:"result";a:4:{i:0;s:50:"this is my test text to be highlighted ... ";i:1;s:55:"another test text to be highlighted, below limit";i:2;s:39:"test number three, without phrase match";i:3;s:57:"final test, not only without phrase match, but also ... ";}}i:212;a:2:{s:4:"opts";s:27:"phrase 0, force 0, limit 57";s:6:"result";a:4:{i:0;s:57:"this is my test text to be highlighted ... ";i:1;s:62:"another test text to be highlighted, below limit";i:2;s:46:"test number three, without phrase match";i:3;s:93:"final test, not only without ... with swapped phrase text test as well";}}i:213;a:2:{s:4:"opts";s:27:"phrase 0, force 1, limit 57";s:6:"result";a:4:{i:0;s:57:"this is my test text to be highlighted ... ";i:1;s:62:"another test text to be highlighted, below limit";i:2;s:46:"test number three, without phrase match";i:3;s:93:"final test, not only without ... with swapped phrase text test as well";}}i:214;a:2:{s:4:"opts";s:27:"phrase 1, force 0, limit 57";s:6:"result";a:4:{i:0;s:50:"this is my test text to be highlighted ... ";i:1;s:55:"another test text to be highlighted, below limit";i:2;s:39:"test number three, without phrase match";i:3;s:57:"final test, not only without phrase match, but also ... 
";}}i:215;a:2:{s:4:"opts";s:27:"phrase 1, force 1, limit 57";s:6:"result";a:4:{i:0;s:50:"this is my test text to be highlighted ... ";i:1;s:55:"another test text to be highlighted, below limit";i:2;s:39:"test number three, without phrase match";i:3;s:57:"final test, not only without phrase match, but also ... ";}}i:216;a:2:{s:4:"opts";s:27:"phrase 0, force 0, limit 58";s:6:"result";a:4:{i:0;s:57:"this is my test text to be highlighted ... ";i:1;s:62:"another test text to be highlighted, below limit";i:2;s:46:"test number three, without phrase match";i:3;s:93:"final test, not only without ... with swapped phrase text test as well";}}i:217;a:2:{s:4:"opts";s:27:"phrase 0, force 1, limit 58";s:6:"result";a:4:{i:0;s:57:"this is my test text to be highlighted ... ";i:1;s:62:"another test text to be highlighted, below limit";i:2;s:46:"test number three, without phrase match";i:3;s:93:"final test, not only without ... with swapped phrase text test as well";}}i:218;a:2:{s:4:"opts";s:27:"phrase 1, force 0, limit 58";s:6:"result";a:4:{i:0;s:50:"this is my test text to be highlighted ... ";i:1;s:55:"another test text to be highlighted, below limit";i:2;s:39:"test number three, without phrase match";i:3;s:62:"final test, not only without phrase match, but also above ... ";}}i:219;a:2:{s:4:"opts";s:27:"phrase 1, force 1, limit 58";s:6:"result";a:4:{i:0;s:50:"this is my test text to be highlighted ... ";i:1;s:55:"another test text to be highlighted, below limit";i:2;s:39:"test number three, without phrase match";i:3;s:62:"final test, not only without phrase match, but also above ... ";}}i:220;a:2:{s:4:"opts";s:27:"phrase 0, force 0, limit 59";s:6:"result";a:4:{i:0;s:57:"this is my test text to be highlighted ... ";i:1;s:62:"another test text to be highlighted, below limit";i:2;s:46:"test number three, without phrase match";i:3;s:93:"final test, not only without ... with swapped phrase text test as well";}}i:221;a:2:{s:4:"opts";s:27:"phrase 0, force 1, limit 59";s:6:"result";a:4:{i:0;s:57:"this is my test text to be highlighted ... ";i:1;s:62:"another test text to be highlighted, below limit";i:2;s:46:"test number three, without phrase match";i:3;s:93:"final test, not only without ... with swapped phrase text test as well";}}i:222;a:2:{s:4:"opts";s:27:"phrase 1, force 0, limit 59";s:6:"result";a:4:{i:0;s:50:"this is my test text to be highlighted ... ";i:1;s:55:"another test text to be highlighted, below limit";i:2;s:39:"test number three, without phrase match";i:3;s:63:"final test, not only without phrase match, but also above ... ";}}i:223;a:2:{s:4:"opts";s:27:"phrase 1, force 1, limit 59";s:6:"result";a:4:{i:0;s:50:"this is my test text to be highlighted ... ";i:1;s:55:"another test text to be highlighted, below limit";i:2;s:39:"test number three, without phrase match";i:3;s:63:"final test, not only without phrase match, but also above ... 
";}}}}}sphinx-2.0.4-release/test/test_101/0000755000176700017710000000000011724063141016261 5ustar deogardeogarsphinx-2.0.4-release/test/test_101/test.xml0000644000176700017710000000327511421075337017775 0ustar deogardeogar RT: basic transactional syntax indexer { mem_limit = 16M } searchd { workers = threads binlog_path = } index test { type = rt path = data/test rt_attr_uint = idd rt_field = content } index test1 { type = rt path = data/test1 rt_attr_uint = idd rt_field = content } insert into test (id,content) values (1,'content') insert into test1 (id,content) values (1,'content') select * from test select * from test1 set autocommit=0 insert into test (id,content) values (2,'content') insert into test1 (id,content) values (2,'content') delete from test1 where id=1 delete from test where id=1 set autocommit=1 select * from test select * from test1 begin insert into test1 (id,content) values (2,'content') rollback select * from test1 start transaction insert into test1 (id,content) values (2,'content') commit select * from test1 set autocommit=0 insert into test (id,content) values (3,'content') select * from test set autocommit=1 select * from test select * from test1 sphinx-2.0.4-release/test/test_101/model.bin0000644000176700017710000000631311455516446020072 0ustar deogardeogara:1:{i:0;a:26:{i:0;a:2:{s:8:"sphinxql";s:50:"insert into test (id,content) values (1,'content')";s:14:"total_affected";i:1;}i:1;a:2:{s:8:"sphinxql";s:51:"insert into test1 (id,content) values (1,'content')";s:14:"total_affected";i:1;}i:2;a:3:{s:8:"sphinxql";s:18:"select * from test";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"idd";s:1:"0";}}}i:3;a:3:{s:8:"sphinxql";s:19:"select * from test1";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"idd";s:1:"0";}}}i:4;a:2:{s:8:"sphinxql";s:16:"set autocommit=0";s:14:"total_affected";i:0;}i:5;a:2:{s:8:"sphinxql";s:50:"insert into test (id,content) values (2,'content')";s:14:"total_affected";i:1;}i:6;a:3:{s:8:"sphinxql";s:51:"insert into test1 (id,content) values (2,'content')";s:5:"error";s:50:"current txn is working with another index ('test')";s:5:"errno";i:1064;}i:7;a:3:{s:8:"sphinxql";s:28:"delete from test1 where id=1";s:5:"error";s:50:"current txn is working with another index ('test')";s:5:"errno";i:1064;}i:8;a:2:{s:8:"sphinxql";s:27:"delete from test where id=1";s:14:"total_affected";i:0;}i:9;a:2:{s:8:"sphinxql";s:16:"set autocommit=1";s:14:"total_affected";i:0;}i:10;a:3:{s:8:"sphinxql";s:18:"select * from test";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:3:"idd";s:1:"0";}}}i:11;a:3:{s:8:"sphinxql";s:19:"select * from test1";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"idd";s:1:"0";}}}i:12;a:2:{s:8:"sphinxql";s:5:"begin";s:14:"total_affected";i:0;}i:13;a:2:{s:8:"sphinxql";s:51:"insert into test1 (id,content) values (2,'content')";s:14:"total_affected";i:1;}i:14;a:2:{s:8:"sphinxql";s:8:"rollback";s:14:"total_affected";i:0;}i:15;a:3:{s:8:"sphinxql";s:19:"select * from test1";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"idd";s:1:"0";}}}i:16;a:2:{s:8:"sphinxql";s:17:"start transaction";s:14:"total_affected";i:0;}i:17;a:2:{s:8:"sphinxql";s:51:"insert into test1 (id,content) values (2,'content')";s:14:"total_affected";i:1;}i:18;a:2:{s:8:"sphinxql";s:6:"commit";s:14:"total_affected";i:0;}i:19;a:3:{s:8:"sphinxql";s:19:"select * from 
test1";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"idd";s:1:"0";}i:1;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:3:"idd";s:1:"0";}}}i:20;a:2:{s:8:"sphinxql";s:16:"set autocommit=0";s:14:"total_affected";i:0;}i:21;a:2:{s:8:"sphinxql";s:50:"insert into test (id,content) values (3,'content')";s:14:"total_affected";i:1;}i:22;a:3:{s:8:"sphinxql";s:18:"select * from test";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:3:"idd";s:1:"0";}}}i:23;a:2:{s:8:"sphinxql";s:16:"set autocommit=1";s:14:"total_affected";i:0;}i:24;a:3:{s:8:"sphinxql";s:18:"select * from test";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:3:"idd";s:1:"0";}i:1;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"idd";s:1:"0";}}}i:25;a:3:{s:8:"sphinxql";s:19:"select * from test1";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"idd";s:1:"0";}i:1;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:3:"idd";s:1:"0";}}}}}sphinx-2.0.4-release/test/test_107/0000755000176700017710000000000011724063141016267 5ustar deogardeogarsphinx-2.0.4-release/test/test_107/test.xml0000644000176700017710000000475711421075337020011 0ustar deogardeogar RT: memory vs disk kill-list searchd { workers = threads binlog_path = } index test { type = rt path = /testrt123 rt_mem_limit = 128K rt_attr_uint = group_id rt_field = title rt_field = content } indexer { mem_limit = 16M } source dummysrc { type = mysql sql_query = SELECT * FROM test_table where id=1 } index dummysrcmain { source = dummysrc path = /dummysrcmain } CREATE TABLE test_table ( id INTEGER PRIMARY KEY NOT NULL AUTO_INCREMENT, title varchar(255) NOT NULL ); DROP TABLE IF EXISTS test_table; $value) $foo[$key] = $value; $results[] = $foo; } } } @mysql_close($sock); ]]> sphinx-2.0.4-release/test/test_107/model.bin0000644000176700017710000000075111455516446020100 0ustar deogardeogara:1:{i:0;a:1:{i:0;a:9:{i:0;s:18:"total inserted=469";i:1;s:21:"query 0: total_rows=1";i:2;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:8:"group_id";s:4:"5001";}i:3;s:21:"query 1: total_rows=1";i:4;a:3:{s:2:"id";s:2:"50";s:6:"weight";s:1:"1";s:8:"group_id";s:4:"5050";}i:5;s:21:"query 2: total_rows=1";i:6;a:3:{s:2:"id";s:3:"450";s:6:"weight";s:1:"1";s:8:"group_id";s:4:"5450";}i:7;s:21:"query 3: total_rows=1";i:8;a:3:{s:2:"id";s:3:"469";s:6:"weight";s:1:"1";s:8:"group_id";s:4:"5469";}}}}sphinx-2.0.4-release/test/test_134/0000755000176700017710000000000011724063141016267 5ustar deogardeogarsphinx-2.0.4-release/test/test_134/test.xml0000644000176700017710000001573611662472433020015 0ustar deogardeogar snippets vs SPZ searchd { dist_threads = 2 } source test { type = mysql sql_query = SELECT 1, 'text'; } index test { source = test path = /test charset_type = utf-8 html_strip = 1 index_sp = 1 index_zones = zone_* dict = keywords } index test1 { source = test path = /test1 html_strip = 1 index_sp = 1 index_zones = zone_* dict = crc charset_type = utf-8 charset_table = 0..9, a..z, A..Z->a..z } index test2 { source = test path = /test2 html_strip = 1 index_sp = 1 index_zones = zone_* dict = crc charset_type = utf-8 charset_table = 0..9, a..z, A..Z->a..z blend_chars = (, ) } index test3 { source = test path = /test3 html_strip = 1 html_remove_elements = style, script, head index_sp = 1 dict = crc charset_type = utf-8 } select 1; array ( "

    The institutional investment manager it. Is Filing this report and. It is signed hereby represent. That it is all information. Statements are considered integral parts of this form.

    " ), "opt"=>array ( "limit"=>150 , "limit_words"=>60 , "limit_passages"=>4 , "around"=>7 , "html_strip_mode"=>"strip" , 'chunk_separator'=>"\n<--->\n" , "passage_boundary"=>"sentence" , "emit_zones"=>false , "exact_phrase"=>false ) ); // 1 test $refs[] = array( "doc"=>array ( "

    The institutional investment manager it. Is Filing this report and. It is signed hereby represent. That it is all information. Statements are considered integral parts of this form.

    " ), "opt"=>array ( "limit"=>150 , "limit_words"=>60 , "limit_passages"=>4 , "around"=>7 , "html_strip_mode"=>"strip" , 'chunk_separator'=>"\n<--->\n" , "passage_boundary"=>"sentence" , "emit_zones"=>false , "exact_phrase"=>true ) ); // 2 test $refs[] = array( "doc"=>array ( ' The institutional investment manager it. Is Filing this report and. It is signed hereby represent. That it is all information. are It or is cool It is cooler It is another place! ' ), "opt"=>array ( "limit"=>170 , "limit_words"=>60 , "limit_passages"=>8 , "around"=>8 , "html_strip_mode"=>"strip" , 'chunk_separator'=>"\n<--->\n" , "passage_boundary"=>"zone" , "emit_zones"=>true , "exact_phrase"=>false ) ); // 3 test $refs[] = array( "doc"=>array ( ' The institutional investment manager it. Is Filing this report and. It is signed hereby represent. That it is all information. are It or is cool It is cooler It is another place! ' ), "opt"=>array ( "limit"=>170 , "limit_words"=>60 , "limit_passages"=>8 , "around"=>8 , "html_strip_mode"=>"strip" , 'chunk_separator'=>"\n<--->\n" , "passage_boundary"=>"zone" , "emit_zones"=>true , "exact_phrase"=>true ) ); // 4 test $refs[] = array( "doc"=>array ( ' The institutional investment manager it. Is Filing this report and. It is signed hereby represent. That it is all information. are It or is cool It is cooler It is another place! ' ), "opt"=>array ( "limit"=>170 , "limit_words"=>60 , "limit_passages"=>8 , "around"=>8 , "html_strip_mode"=>"strip" , 'chunk_separator'=>"\n<--->\n" , "passage_boundary"=>"zone" , "emit_zones"=>false , "exact_phrase"=>false ) ); // 5 test - HighlightAll path - no passages $refs[] = array( "doc"=>array ( "The institutional investment manager it. Is Filing this report and. It is signed hereby represent. That it is all information. Statements are considered integral parts of this form." ), "opt"=>array ( "limit"=>0 , "limit_passages"=>4 , "around"=>7 , "html_strip_mode"=>"strip" , 'chunk_separator'=>"\n<--->\n" , "passage_boundary"=>"sentence" , "emit_zones"=>false , "exact_phrase"=>false ) ); foreach ( $refs as $ref ) { $results[] = $client->BuildExcerpts($ref["doc"], 'test', $query, $ref["opt"] ); } // last test - nothing found - HighlightStart path $refs[3]['opt']['limit'] = 150; $results[] = $client->BuildExcerpts($refs[3]["doc"], 'test', "nothing found", $refs[3]["opt"] ); // 7 test $docs7 = array ( ' The manager it. Is Filing this report and.Is a signed hereby represent.Is another place! ' ); $opts7 = array ( 'query_mode'=>1 ,'limit'=>70 , "limit_passages"=>4 , "around"=>8 , "html_strip_mode"=>"strip" , 'chunk_separator'=>"\n<--->\n" , "passage_boundary"=>"zone" , "emit_zones"=>false); $results[] = $client->BuildExcerpts($docs7, 'test1', 'it is', $opts7 ); // 8 test $opts7['query_mode'] = 0; $results[] = $client->BuildExcerpts($docs7, 'test1', 'it is', $opts7 ); // 9 test $opts7['passage_boundary'] = 'sentence'; $opts7['query_mode'] = 1; $results[] = $client->BuildExcerpts($docs7, 'test1', 'it is', $opts7 ); // 10 test $opts7['query_mode'] = 0; $results[] = $client->BuildExcerpts($docs7, 'test1', 'it is', $opts7 ); // 11 test $opts7['html_strip_mode'] = 'index'; $opts7['passage_boundary'] = 'paragraph'; $opts7['query_mode'] = 1; $opts7['limit_passages'] = 8; $results[] = $client->BuildExcerpts($docs7, 'test1', 'it | is | ma1 | ma2', $opts7 ); $docs12 = array ('Leverage gains (if any). May fallback or go insolvent. 
Credit risk of the deal.'); $words12 = 'fallback insolvent'; // 12 test $opts12 = array ( 'limit'=>255, 'limit_words'=>12, 'limit_passages'=>5, 'passage_boundary'=>'paragraph', 'around'=>10 ); $results[] = $client->BuildExcerpts($docs12, 'test2', $words12, $opts12 ); // 13 test $opts12 = array ( 'limit'=>0, 'limit_words'=>0, 'limit_passages'=>0, 'passage_boundary'=>'paragraph', 'around'=>10 ); $results[] = $client->BuildExcerpts($docs12, 'test2', $words12, $opts12 ); // 14 test - different setup with dist_threads $docs14 = array ( ' this match should be removed and this match pass is

    split as well as this match

    tail of documnet ', ' this match should be removed and this match pass is

    split as well as this match

    tail of documnet ' ); $opts14 = array ( 'limit'=>40, 'limit_words'=>0, 'limit_passages'=>0, 'passage_boundary'=>'paragraph', 'html_strip_mode'=>'index', 'around'=>25 , 'query_mode'=>1 ); $words14 = 'match | pass'; $results[] = $client->BuildExcerpts($docs14, 'test3', $words14, $opts14 ); ]]> sphinx-2.0.4-release/test/test_134/model.bin0000644000176700017710000000563411570647130020076 0ustar deogardeogara:1:{i:0;a:1:{i:0;a:15:{i:0;a:1:{i:0;s:200:" <---> The institutional investment manager it. <---> Is Filing this report and. <---> It is signed hereby represent. <---> That it is all information. <---> ";}i:1;a:1:{i:0;s:92:" <---> It is signed hereby represent. <---> That it is all information. <---> ";}i:2;a:1:{i:0;s:367:" <---> The institutional investment manager it. Is Filing this report and. <---> It is signed hereby represent. That it is all information. are It or is <---> cool It is cooler <---> It is another place! <---> ";}i:3;a:1:{i:0;s:318:" <---> The institutional investment manager it. Is Filing this report and. <---> It is signed hereby represent. That it is all information. are It or is <---> cool It is cooler <---> It is another place! <---> ";}i:4;a:1:{i:0;s:298:" <---> The institutional investment manager it. Is Filing this report and. <---> It is signed hereby represent. That it is all information. are It or is <---> cool It is cooler <---> It is another place! <---> ";}i:5;a:1:{i:0;s:223:"The institutional investment manager it. Is Filing this report and. It is signed hereby represent. That it is all information. Statements are considered integral parts of this form.";}i:6;a:1:{i:0;s:156:" The institutional investment manager it. Is Filing this report and. It is signed hereby represent. That it is all information. are It or is cool It <---> ";}i:7;a:1:{i:0;s:113:" <---> The manager it. Is Filing this report and. <---> Is a signed hereby represent. <---> ";}i:8;a:1:{i:0;s:113:" <---> The manager it. Is Filing this report and. <---> Is a signed hereby represent. <---> ";}i:9;a:1:{i:0;s:119:" <---> The manager it. <---> Is Filing this report and. <---> Is a signed hereby represent. <---> ";}i:10;a:1:{i:0;s:119:" <---> The manager it. <---> Is Filing this report and. <---> Is a signed hereby represent. <---> ";}i:11;a:1:{i:0;s:113:" <---> The manager it. Is Filing this report and. <---> Is a signed hereby represent. <---> ";}i:12;a:1:{i:0;s:89:" ... gains (if any). May fallback or go insolvent. Credit risk of the ... ";}i:13;a:1:{i:0;s:93:"Leverage gains (if any). May fallback or go insolvent. Credit risk of the deal.";}i:14;a:2:{i:0;s:87:" ... and this match pass is ... split as well as this match ... ";i:1;s:87:" ... and this match pass is ... split as well as this match ... 
";}}}}sphinx-2.0.4-release/test/test_002/0000755000176700017710000000000011724063141016261 5ustar deogardeogarsphinx-2.0.4-release/test/test_002/test.xml0000644000176700017710000000373610744717627020013 0ustar deogardeogar prefix/infix indexing (part 2) indexer { mem_limit = 16M } searchd { } source srclj { type = mysql sql_query = SELECT id, subject, body, author FROM test_table } index lj { source = srclj path = /lj charset_type = utf-8 min_word_len = 1 min_prefix_len = 0 min_prefix_len = 1 min_prefix_len = 3 min_infix_len = 0 min_infix_len = 1 min_infix_len = 3 enable_star = 0 enable_star = 1 } admin *earc* up* dmin rep pda I CREATE TABLE `test_table` ( `id` int(11) NOT NULL default '0', `document_id` int(5) NOT NULL default '0', `subject` varchar(255) NOT NULL default '', `body` varchar(255) NOT NULL default '', `author` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES (1,1,'Problem with enable_star searches','Having star searches is great! Just what we needed','Maurice Makaay'), (2,2,'Problem with enable_star searches',' But the thing is, that I cannot search for authors anymore','admin'), (3,3,'Problem with enable_star searches','I will try to repro it here and update you','shodan') sphinx-2.0.4-release/test/test_002/model.bin0000644000176700017710000006311410723664234020070 0ustar deogardeogara:18:{i:0;a:7:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"admin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"admin";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"earc";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*earc*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"up";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"up*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dmin";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"dmin";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"rep";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"rep";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"pda";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"pda";}i:6;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:
2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:1:"i";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:5:"query";s:1:"I";}}i:1;a:7:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"admin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"admin";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"earc";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*earc*";}i:2;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"up";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"up*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dmin";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"dmin";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"rep";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"rep";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"pda";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"pda";}i:6;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:1:"i";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"5";}}s:5:"query";s:1:"I";}}i:2;a:7:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"admin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"admin";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"earc";a:2:{s:4:"docs";s:1:"
0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*earc*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"up";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"up*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dmin";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"dmin";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"rep";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"rep";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"pda";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"pda";}i:6;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:1:"i";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:5:"query";s:1:"I";}}i:3;a:7:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"admin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"admin";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"earc";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"5";}}s:5:"query";s:6:"*earc*";}i:2;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"up";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"up*";}i:3;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dmin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"dmin";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"st
atus";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"rep";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"rep";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"pda";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"pda";}i:6;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:1:"i";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:2:"13";}}s:5:"query";s:1:"I";}}i:4;a:1:{i:0;s:6:"failed";}i:5;a:1:{i:0;s:6:"failed";}i:6;a:7:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"admin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"admin";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"earc";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"5";}}s:5:"query";s:6:"*earc*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"up";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"up*";}i:3;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dmin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"dmin";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"rep";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"rep";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s
:3:"pda";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"pda";}i:6;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:1:"i";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:5:"query";s:1:"I";}}i:7;a:1:{i:0;s:6:"failed";}i:8;a:1:{i:0;s:6:"failed";}i:9;a:7:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"admin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"admin";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"earc";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*earc*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"up";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"up*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dmin";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"dmin";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"rep";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"rep";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"pda";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"pda";}i:6;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:1:"i";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:5:"query";s:1:"I";}}i:10;a:7:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"admin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"admin";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{
}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"*earc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*earc*";}i:2;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"up*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"up*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dmin";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"dmin";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"rep";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"rep";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"pda";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"pda";}i:6;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:1:"i";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:5:"query";s:1:"I";}}i:11;a:7:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"admin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"admin";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"*earc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*earc*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"up*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"up*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dmin";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"dmin";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words"
;a:1:{s:3:"rep";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"rep";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"pda";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"pda";}i:6;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:1:"i";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:5:"query";s:1:"I";}}i:12;a:7:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"admin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"admin";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"*earc*";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"5";}}s:5:"query";s:6:"*earc*";}i:2;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"up*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"up*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dmin";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"dmin";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"rep";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"rep";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"pda";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"pda";}i:6;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:1:"i";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:5:"query";s:1:"I";}}i:13;a:1:{i:0;s:6:"failed";}i:14;a:1:{i:0;s
:6:"failed";}i:15;a:7:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"admin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"admin";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"*earc*";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"5";}}s:5:"query";s:6:"*earc*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"up*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"up*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dmin";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"dmin";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"rep";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"rep";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"pda";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"pda";}i:6;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:1:"i";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:5:"query";s:1:"I";}}i:16;a:1:{i:0;s:6:"failed";}i:17;a:1:{i:0;s:6:"failed";}}sphinx-2.0.4-release/test/test_019/0000755000176700017710000000000011724063141016271 5ustar deogardeogarsphinx-2.0.4-release/test/test_019/test.xml0000644000176700017710000001372511573755265020023 0ustar deogardeogar extended queries indexer { mem_limit = 16M } searchd { } source srctest { type = mysql sql_query = SELECT * FROM test_table sql_query_pre = SET NAMES utf8 } index test { source = srctest path = /test min_word_len = 2 charset_type = utf-8 ngram_chars = U+4E00..U+9FBF, U+3400..U+4DBF, U+20000..U+2A6DF, U+F900..U+FAFF,U+2F800..U+2FA1F, U+2E80..U+2EFF, U+2F00..U+2FDF, U+3100..U+312F, U+31A0..U+31BF,U+3040..U+309F, U+30A0..U+30FF, U+31F0..U+31FF, U+AC00..U+D7AF, U+1100..U+11FF,U+3130..U+318F, U+A000..U+A48F, U+A490..U+A4CF ngram_len = 1 charset_table = U+FF10..U+FF19->0..9, 0..9, U+FF41..U+FF5A->a..z, U+FF21..U+FF3A->a..z,A..Z->a..z, a..z, U+0149, U+017F, 
U+0138, U+00DF, U+00FF, U+00C0..U+00D6->U+00E0..U+00F6,U+00E0..U+00F6, U+00D8..U+00DE->U+00F8..U+00FE, U+00F8..U+00FE, U+0100->U+0101, U+0101,U+0102->U+0103, U+0103, U+0104->U+0105, U+0105, U+0106->U+0107, U+0107, U+0108->U+0109,U+0109, U+010A->U+010B, U+010B, U+010C->U+010D, U+010D, U+010E->U+010F, U+010F,U+0110->U+0111, U+0111, U+0112->U+0113, U+0113, U+0114->U+0115, U+0115, U+0116->U+0117,U+0117, U+0118->U+0119, U+0119, U+011A->U+011B, U+011B, U+011C->U+011D, U+011D,U+011E->U+011F, U+011F, U+0130->U+0131, U+0131, U+0132->U+0133, U+0133, U+0134->U+0135,U+0135, U+0136->U+0137, U+0137, U+0139->U+013A, U+013A, U+013B->U+013C, U+013C,U+013D->U+013E, U+013E, U+013F->U+0140, U+0140, U+0141->U+0142, U+0142, U+0143->U+0144,U+0144, U+0145->U+0146, U+0146, U+0147->U+0148, U+0148, U+014A->U+014B, U+014B,U+014C->U+014D, U+014D, U+014E->U+014F, U+014F, U+0150->U+0151, U+0151, U+0152->U+0153,U+0153, U+0154->U+0155, U+0155, U+0156->U+0157, U+0157, U+0158->U+0159, U+0159,U+015A->U+015B, U+015B, U+015C->U+015D, U+015D, U+015E->U+015F, U+015F, U+0160->U+0161,U+0161, U+0162->U+0163, U+0163, U+0164->U+0165, U+0165, U+0166->U+0167, U+0167,U+0168->U+0169, U+0169, U+016A->U+016B, U+016B, U+016C->U+016D, U+016D, U+016E->U+016F,U+016F, U+0170->U+0171, U+0171, U+0172->U+0173, U+0173, U+0174->U+0175, U+0175,U+0176->U+0177, U+0177, U+0178->U+00FF, U+00FF, U+0179->U+017A, U+017A, U+017B->U+017C,U+017C, U+017D->U+017E, U+017E, U+0410..U+042F->U+0430..U+044F, U+0430..U+044F,U+05D0..U+05EA, U+0531..U+0556->U+0561..U+0586, U+0561..U+0587, U+0621..U+063A, U+01B9,U+01BF, U+0640..U+064A, U+0660..U+0669, U+066E, U+066F, U+0671..U+06D3, U+06F0..U+06FF,U+0904..U+0939, U+0958..U+095F, U+0960..U+0963, U+0966..U+096F, U+097B..U+097F,U+0985..U+09B9, U+09CE, U+09DC..U+09E3, U+09E6..U+09EF, U+0A05..U+0A39, U+0A59..U+0A5E,U+0A66..U+0A6F, U+0A85..U+0AB9, U+0AE0..U+0AE3, U+0AE6..U+0AEF, U+0B05..U+0B39,U+0B5C..U+0B61, U+0B66..U+0B6F, U+0B71, U+0B85..U+0BB9, U+0BE6..U+0BF2, U+0C05..U+0C39,U+0C66..U+0C6F, U+0C85..U+0CB9, U+0CDE..U+0CE3, U+0CE6..U+0CEF, U+0D05..U+0D39, U+0D60,U+0D61, U+0D66..U+0D6F, U+0D85..U+0DC6, U+1900..U+1938, U+1946..U+194F, U+A800..U+A805,U+A807..U+A822, U+0386->U+03B1, U+03AC->U+03B1, U+0388->U+03B5, U+03AD->U+03B5,U+0389->U+03B7, U+03AE->U+03B7, U+038A->U+03B9, U+0390->U+03B9, U+03AA->U+03B9,U+03AF->U+03B9, U+03CA->U+03B9, U+038C->U+03BF, U+03CC->U+03BF, U+038E->U+03C5,U+03AB->U+03C5, U+03B0->U+03C5, U+03CB->U+03C5, U+03CD->U+03C5, U+038F->U+03C9,U+03CE->U+03C9, U+03C2->U+03C3, U+0391..U+03A1->U+03B1..U+03C1,U+03A3..U+03A9->U+03C3..U+03C9, U+03B1..U+03C1, U+03C3..U+03C9, U+0E01..U+0E2E,U+0E30..U+0E3A, U+0E40..U+0E45, U+0E47, U+0E50..U+0E59, U+A000..U+A48F, U+4E00..U+9FBF,U+3400..U+4DBF, U+20000..U+2A6DF, U+F900..U+FAFF, U+2F800..U+2FA1F, U+2E80..U+2EFF,U+2F00..U+2FDF, U+3100..U+312F, U+31A0..U+31BF, U+3040..U+309F, U+30A0..U+30FF,U+31F0..U+31FF, U+AC00..U+D7AF, U+1100..U+11FF, U+3130..U+318F, U+A000..U+A48F,U+A490..U+A4CF } basic query "phrase query" "phrase (query)/3 ~on steroids" @title sample @body world "quorum query test"/1 "quorum query test"/4 "hello program"~3 "hello program"~4 å æˆ‘ basic | china "test program" | basic "test that"~3 | basic "test that"~3|basic @title sample @body -basic -basic|perl sample -basic|perl sample 77 0077 @title test @ttitle test @@title test @@relaxed @ttitle test "" @title "" ("") "phrase (!query)/ ~on @steroids" 1234567812345678 CREATE TABLE `test_table` ( `document_id` int(11) NOT NULL default '0', `title` varchar(255) NOT NULL default '', `body` varchar(255) NOT NULL 
default '' ) CHARACTER SET utf8 DROP TABLE IF EXISTS `test_table` SET NAMES utf8 INSERT INTO `test_table` VALUES ( 111, '', 'basic query' ), ( 222, '', 'phrase query on steroids' ), ( 333, 'sample program', 'this is a test program that prints out "hello world" to the console' ), ( 444, '', 'china åæˆ‘' ), ( 555, 'sample program two', 'something written in basic' ), ( 666, 'sample program three', 'something written in perl' ), ( 777, '', '77 lies multiplied by 77' ), ( 888, '', 'agent 0077' ), ( 999, '', '1234567812345678' ) sphinx-2.0.4-release/test/test_019/model.bin0000644000176700017710000003120311573755265020103 0ustar deogardeogara:1:{i:0;a:28:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:111;a:2:{s:6:"weight";s:4:"2636";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:5:"basic";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:5:"query";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:11:"basic query";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:222;a:2:{s:6:"weight";s:4:"2676";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:6:"phrase";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"query";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:14:""phrase query"";}i:2;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:5:"words";a:4:{s:6:"phrase";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"query";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:2:"on";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:8:"steroids";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:31:""phrase (query)/3 ~on steroids"";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:333;a:2:{s:6:"weight";s:4:"2650";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:6:"sample";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}s:5:"world";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:25:"@title sample @body world";}i:4;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:333;a:2:{s:6:"weight";s:4:"1572";s:5:"attrs";a:0:{}}i:111;a:2:{s:6:"weight";s:4:"1545";s:5:"attrs";a:0:{}}i:222;a:2:{s:6:"weight";s:4:"1545";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:6:"quorum";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"query";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:4:"test";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:21:""quorum query test"/1";}i:5;a:12:{s:5:"error";s:0:"";s:7:"warning";s:90:"quorum threshold too high (words=3, thresh=4); replacing quorum operator with AND 
operator";s:6:"status";i:3;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:6:"quorum";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"query";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:4:"test";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:21:""quorum query test"/4";}i:6;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:7:"program";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"4";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:17:""hello program"~3";}i:7;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:333;a:2:{s:6:"weight";s:4:"1665";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:5:"hello";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:7:"program";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"4";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:17:""hello program"~4";}i:8;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:444;a:2:{s:6:"weight";s:4:"1716";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:3:"å";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:3:"å";}i:9;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:444;a:2:{s:6:"weight";s:4:"1716";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:3:"我";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:3:"我";}i:10;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:444;a:2:{s:6:"weight";s:4:"1608";s:5:"attrs";a:0:{}}i:111;a:2:{s:6:"weight";s:4:"1568";s:5:"attrs";a:0:{}}i:555;a:2:{s:6:"weight";s:4:"1568";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:5:"basic";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:5:"china";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:13:"basic | china";}i:11;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:333;a:2:{s:6:"weight";s:4:"2610";s:5:"attrs";a:0:{}}i:111;a:2:{s:6:"weight";s:4:"1545";s:5:"attrs";a:0:{}}i:555;a:2:{s:6:"weight";s:4:"1545";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:4:"test";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:7:"program";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"4";}s:5:"basic";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:22:""test program" | 
basic";}i:12;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:333;a:2:{s:6:"weight";s:4:"1644";s:5:"attrs";a:0:{}}i:111;a:2:{s:6:"weight";s:4:"1545";s:5:"attrs";a:0:{}}i:555;a:2:{s:6:"weight";s:4:"1545";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:4:"test";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:4:"that";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"basic";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:21:""test that"~3 | basic";}i:13;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:333;a:2:{s:6:"weight";s:4:"1644";s:5:"attrs";a:0:{}}i:111;a:2:{s:6:"weight";s:4:"1545";s:5:"attrs";a:0:{}}i:555;a:2:{s:6:"weight";s:4:"1545";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:4:"test";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:4:"that";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"basic";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:19:""test that"~3|basic";}i:14;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:333;a:2:{s:6:"weight";s:4:"1541";s:5:"attrs";a:0:{}}i:666;a:2:{s:6:"weight";s:4:"1541";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:6:"sample";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}s:5:"basic";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:26:"@title sample @body -basic";}i:15;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:333;a:2:{s:6:"weight";s:4:"1527";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:5:"basic";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:4:"perl";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:6:"sample";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:18:"-basic|perl sample";}i:16;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:333;a:2:{s:6:"weight";s:4:"1527";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:5:"basic";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:4:"perl";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:6:"sample";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:18:"-basic|perl 
sample";}i:17;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:777;a:2:{s:6:"weight";s:4:"1798";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{i:77;a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:2:"77";}i:18;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:888;a:2:{s:6:"weight";s:4:"1716";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"0077";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"0077";}i:19;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"test";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:11:"@title test";}i:20;a:6:{s:5:"query";s:12:"@ttitle test";s:5:"error";s:58:"index test: query error: no field 'ttitle' found in schema";s:7:"warning";s:0:"";s:5:"total";i:0;s:11:"total_found";i:0;s:4:"time";i:0;}i:21;a:6:{s:5:"query";s:12:"@@title test";s:5:"error";s:70:"index test: syntax error, unexpected TOK_FIELDLIMIT near '@title test'";s:7:"warning";s:0:"";s:5:"total";i:0;s:11:"total_found";i:0;s:4:"time";i:0;}i:22;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"test";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:22:"@@relaxed @ttitle test";}i:23;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:2:"""";}i:24;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:9:"@title """;}i:25;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"("")";}i:26;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:222;a:2:{s:6:"weight";s:4:"4696";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:4:{s:6:"phrase";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:5:"query";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:2:"on";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:8:"steroids";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:32:""phrase (!query)/ ~on 
@steroids"";}i:27;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:999;a:2:{s:6:"weight";s:4:"1716";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:16:"1234567812345678";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:16:"1234567812345678";}}}sphinx-2.0.4-release/test/test_050/0000755000176700017710000000000011724063141016264 5ustar deogardeogarsphinx-2.0.4-release/test/test_050/test.xml0000644000176700017710000000367611102563613020000 0ustar deogardeogar bigint attrs indexer { mem_limit = 16M } searchd { } source test { type = mysql sql_query = SELECT id, a, n, text FROM test_table; sql_attr_uint = a sql_attr_bigint = n } index test { source = test path = /test } CREATE TABLE test_table ( id INT NOT NULL, a INT UNSIGNED NOT NULL, n BIGINT NOT NULL, text VARCHAR(255) NOT NULL DEFAULT 'text' ); DROP TABLE IF EXISTS test_table; INSERT INTO test_table (id, a, n) VALUES ( 1, 10, -70000000000 ), ( 2, 20, -60000000000 ), ( 3, 30, -50000000000 ), ( 4, 40, 50000000000 ), ( 5, 50, 60000000000 ), ( 6, 60, 70000000000 ), ( 7, 70, -4611686018427387903 ), ( 8, 80, -9223372036854775807 ), ( 9, 90, 4611686018427387903 ), ( 10, 100, 9223372036854775807 ), ( 11, 110, 250000000000000000 ), ( 101, 0, -1 ), ( 102, 0, -2 ); a n sel @groupby @count sphinx-2.0.4-release/test/test_050/model.bin0000644000176700017710000003477311455516446020110 0ustar deogardeogara:1:{i:0;a:14:{i:0;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:2:{s:1:"a";i:1;s:1:"n";i:6;}s:7:"matches";a:13:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"a";s:2:"10";s:1:"n";s:12:"-70000000000";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"a";s:2:"20";s:1:"n";s:12:"-60000000000";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"a";s:2:"30";s:1:"n";s:12:"-50000000000";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"a";s:2:"40";s:1:"n";s:11:"50000000000";}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"a";s:2:"50";s:1:"n";s:11:"60000000000";}}i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"a";s:2:"60";s:1:"n";s:11:"70000000000";}}i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"a";s:2:"70";s:1:"n";s:20:"-4611686018427387903";}}i:8;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"a";s:2:"80";s:1:"n";s:20:"-9223372036854775807";}}i:9;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"a";s:2:"90";s:1:"n";s:19:"4611686018427387903";}}i:10;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"a";s:3:"100";s:1:"n";s:19:"9223372036854775807";}}i:11;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"a";s:3:"110";s:1:"n";s:18:"250000000000000000";}}i:101;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"a";s:1:"0";s:1:"n";i:-1;}}i:102;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"a";s:1:"0";s:1:"n";i:-2;}}}s:5:"total";s:2:"13";s:11:"total_found";s:2:"13";s:4:"time";s:5:"0.000";s:5:"query";s:0:"";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:3:{s:1:"a";i:1;s:1:"n";i:6;s:5:"@expr";i:5;}s:7:"matches";a:13:{i:8;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"80";s:1:"n";s:20:"-9223372036854775807";s:5:"@expr";d:9223372036854775808;}}i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"70";s:1:"n";s:20:"-4611686018427387903";s:5:"@expr";d:4611686018427387904;}}i:1;a:2:{s:6:"weight";s:1:"1"
;s:5:"attrs";a:3:{s:1:"a";s:2:"10";s:1:"n";s:12:"-70000000000";s:5:"@expr";d:70000001024;}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"20";s:1:"n";s:12:"-60000000000";s:5:"@expr";d:60000002048;}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"30";s:1:"n";s:12:"-50000000000";s:5:"@expr";d:49999998976;}}i:102;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:1:"0";s:1:"n";i:-2;s:5:"@expr";d:2;}}i:101;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:1:"0";s:1:"n";i:-1;s:5:"@expr";d:1;}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"40";s:1:"n";s:11:"50000000000";s:5:"@expr";d:-49999998976;}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"50";s:1:"n";s:11:"60000000000";s:5:"@expr";d:-60000002048;}}i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"60";s:1:"n";s:11:"70000000000";s:5:"@expr";d:-70000001024;}}i:11;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:3:"110";s:1:"n";s:18:"250000000000000000";s:5:"@expr";d:-249999996076687360;}}i:9;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"90";s:1:"n";s:19:"4611686018427387903";s:5:"@expr";d:-4611686018427387904;}}i:10;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:3:"100";s:1:"n";s:19:"9223372036854775807";s:5:"@expr";d:-9223372036854775808;}}}s:5:"total";s:2:"13";s:11:"total_found";s:2:"13";s:4:"time";s:5:"0.000";s:5:"query";s:0:"";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:3:{s:1:"a";i:1;s:1:"n";i:6;s:3:"sel";i:6;}s:7:"matches";a:13:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"10";s:1:"n";s:12:"-70000000000";s:3:"sel";s:12:"-69999999990";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"20";s:1:"n";s:12:"-60000000000";s:3:"sel";s:12:"-59999999980";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"30";s:1:"n";s:12:"-50000000000";s:3:"sel";s:12:"-49999999970";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"40";s:1:"n";s:11:"50000000000";s:3:"sel";s:11:"50000000040";}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"50";s:1:"n";s:11:"60000000000";s:3:"sel";s:11:"60000000050";}}i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"60";s:1:"n";s:11:"70000000000";s:3:"sel";s:11:"70000000060";}}i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"70";s:1:"n";s:20:"-4611686018427387903";s:3:"sel";s:20:"-4611686018427387833";}}i:8;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"80";s:1:"n";s:20:"-9223372036854775807";s:3:"sel";s:20:"-9223372036854775727";}}i:9;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"90";s:1:"n";s:19:"4611686018427387903";s:3:"sel";s:19:"4611686018427387993";}}i:10;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:3:"100";s:1:"n";s:19:"9223372036854775807";s:3:"sel";s:20:"-9223372036854775709";}}i:11;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:3:"110";s:1:"n";s:18:"250000000000000000";s:3:"sel";s:18:"250000000000000110";}}i:101;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:1:"0";s:1:"n";i:-1;s:3:"sel";i:-1;}}i:102;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:1:"0";s:1:"n";i:-2;s:3:"sel";i:-2;}}}s:5:"total";s:2:"13";s:11:"total_found";s:2:"13";s:4:"time";s:5:"0.000";s:5:"query";s:0:"";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:4:{s:1:"a";i:1;s:1:"n";i:6;s:8:"@groupby";i:6;s:6:"@count";i:1;}s:7:"matches";a:13:{i:10;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:1:"a";s:3:"100";s:1:"n";s
:19:"9223372036854775807";s:8:"@groupby";s:19:"9223372036854775807";s:6:"@count";s:1:"1";}}i:9;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:1:"a";s:2:"90";s:1:"n";s:19:"4611686018427387903";s:8:"@groupby";s:19:"4611686018427387903";s:6:"@count";s:1:"1";}}i:11;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:1:"a";s:3:"110";s:1:"n";s:18:"250000000000000000";s:8:"@groupby";s:18:"250000000000000000";s:6:"@count";s:1:"1";}}i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:1:"a";s:2:"60";s:1:"n";s:11:"70000000000";s:8:"@groupby";s:11:"70000000000";s:6:"@count";s:1:"1";}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:1:"a";s:2:"50";s:1:"n";s:11:"60000000000";s:8:"@groupby";s:11:"60000000000";s:6:"@count";s:1:"1";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:1:"a";s:2:"40";s:1:"n";s:11:"50000000000";s:8:"@groupby";s:11:"50000000000";s:6:"@count";s:1:"1";}}i:101;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:1:"a";s:1:"0";s:1:"n";i:-1;s:8:"@groupby";i:-1;s:6:"@count";s:1:"1";}}i:102;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:1:"a";s:1:"0";s:1:"n";i:-2;s:8:"@groupby";i:-2;s:6:"@count";s:1:"1";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:1:"a";s:2:"30";s:1:"n";s:12:"-50000000000";s:8:"@groupby";s:12:"-50000000000";s:6:"@count";s:1:"1";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:1:"a";s:2:"20";s:1:"n";s:12:"-60000000000";s:8:"@groupby";s:12:"-60000000000";s:6:"@count";s:1:"1";}}i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:1:"a";s:2:"10";s:1:"n";s:12:"-70000000000";s:8:"@groupby";s:12:"-70000000000";s:6:"@count";s:1:"1";}}i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:1:"a";s:2:"70";s:1:"n";s:20:"-4611686018427387903";s:8:"@groupby";s:20:"-4611686018427387903";s:6:"@count";s:1:"1";}}i:8;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:1:"a";s:2:"80";s:1:"n";s:20:"-9223372036854775807";s:8:"@groupby";s:20:"-9223372036854775807";s:6:"@count";s:1:"1";}}}s:5:"total";s:2:"13";s:11:"total_found";s:2:"13";s:4:"time";s:5:"0.001";s:5:"query";s:0:"";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:5:{s:1:"a";i:1;s:1:"n";i:6;s:3:"sel";i:6;s:8:"@groupby";i:6;s:6:"@count";i:1;}s:7:"matches";a:8:{i:8;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:1:"a";s:2:"80";s:1:"n";s:20:"-9223372036854775807";s:3:"sel";s:19:"9223372036854775807";s:8:"@groupby";s:19:"9223372036854775807";s:6:"@count";s:1:"2";}}i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:1:"a";s:2:"70";s:1:"n";s:20:"-4611686018427387903";s:3:"sel";s:19:"4611686018427387903";s:8:"@groupby";s:19:"4611686018427387903";s:6:"@count";s:1:"2";}}i:11;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:1:"a";s:3:"110";s:1:"n";s:18:"250000000000000000";s:3:"sel";s:18:"250000000000000000";s:8:"@groupby";s:18:"250000000000000000";s:6:"@count";s:1:"1";}}i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:1:"a";s:2:"10";s:1:"n";s:12:"-70000000000";s:3:"sel";s:11:"70000000000";s:8:"@groupby";s:11:"70000000000";s:6:"@count";s:1:"2";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:1:"a";s:2:"20";s:1:"n";s:12:"-60000000000";s:3:"sel";s:11:"60000000000";s:8:"@groupby";s:11:"60000000000";s:6:"@count";s:1:"2";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:1:"a";s:2:"30";s:1:"n";s:12:"-50000000000";s:3:"sel";s:11:"50000000000";s:8:"@groupby";s:11:"50000000000";s:6:"@count";s:1:"2";}}i:102;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:1:"a";s:1:"0";s:1:"n";i:-2;s:3:"sel";i:2;s:8:"@groupby";i:2;s:6:"@count";s:1:"1";}}i:101;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:1:"a";s:1:"0";s:1:"n";i:-1;s:3:"sel";i:1
;s:8:"@groupby";i:1;s:6:"@count";s:1:"1";}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.001";s:5:"query";s:0:"";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:5:{s:1:"a";i:1;s:1:"n";i:6;s:3:"sel";i:6;s:8:"@groupby";i:6;s:6:"@count";i:1;}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:1:"a";s:2:"10";s:1:"n";s:12:"-70000000000";s:3:"sel";i:7;s:8:"@groupby";i:7;s:6:"@count";s:1:"2";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:1:"a";s:2:"20";s:1:"n";s:12:"-60000000000";s:3:"sel";i:6;s:8:"@groupby";i:6;s:6:"@count";s:1:"2";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:1:"a";s:2:"30";s:1:"n";s:12:"-50000000000";s:3:"sel";i:5;s:8:"@groupby";i:5;s:6:"@count";s:1:"2";}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.001";s:5:"query";s:0:"";}i:6;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:5:{s:1:"a";i:1;s:1:"n";i:6;s:3:"sel";i:6;s:8:"@groupby";i:6;s:6:"@count";i:1;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:1:"a";s:2:"10";s:1:"n";s:12:"-70000000000";s:3:"sel";i:7;s:8:"@groupby";i:7;s:6:"@count";s:1:"6";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"query";s:0:"";}i:7;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:3:{s:1:"a";i:1;s:1:"n";i:6;s:3:"sel";i:6;}s:7:"matches";a:13:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"10";s:1:"n";s:12:"-70000000000";s:3:"sel";s:12:"-70000000000";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"20";s:1:"n";s:12:"-60000000000";s:3:"sel";s:12:"-60000000000";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"30";s:1:"n";s:12:"-50000000000";s:3:"sel";s:12:"-50000000000";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"40";s:1:"n";s:11:"50000000000";s:3:"sel";i:40;}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"50";s:1:"n";s:11:"60000000000";s:3:"sel";i:50;}}i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"60";s:1:"n";s:11:"70000000000";s:3:"sel";i:60;}}i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"70";s:1:"n";s:20:"-4611686018427387903";s:3:"sel";s:20:"-4611686018427387903";}}i:8;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"80";s:1:"n";s:20:"-9223372036854775807";s:3:"sel";s:20:"-9223372036854775807";}}i:9;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:2:"90";s:1:"n";s:19:"4611686018427387903";s:3:"sel";i:90;}}i:10;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:3:"100";s:1:"n";s:19:"9223372036854775807";s:3:"sel";i:100;}}i:11;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:3:"110";s:1:"n";s:18:"250000000000000000";s:3:"sel";i:110;}}i:101;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:1:"0";s:1:"n";i:-1;s:3:"sel";i:-1;}}i:102;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:1:"a";s:1:"0";s:1:"n";i:-2;s:3:"sel";i:-2;}}}s:5:"total";s:2:"13";s:11:"total_found";s:2:"13";s:4:"time";s:5:"0.000";s:5:"query";s:0:"";}i:8;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:2:{s:1:"a";i:1;s:1:"n";i:6;}s:7:"matches";a:1:{i:101;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"a";s:1:"0";s:1:"n";i:-1;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"query";s:0:"";}i:9;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}
s:5:"attrs";a:2:{s:1:"a";i:1;s:1:"n";i:6;}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"a";s:2:"70";s:1:"n";s:20:"-4611686018427387903";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"query";s:0:"";}i:10;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:2:{s:1:"a";i:1;s:1:"n";i:6;}s:7:"matches";a:1:{i:9;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"a";s:2:"90";s:1:"n";s:19:"4611686018427387903";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"query";s:0:"";}i:11;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:2:{s:1:"a";i:1;s:1:"n";i:6;}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"a";s:2:"10";s:1:"n";s:12:"-70000000000";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"a";s:2:"20";s:1:"n";s:12:"-60000000000";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"a";s:2:"30";s:1:"n";s:12:"-50000000000";}}i:101;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"a";s:1:"0";s:1:"n";i:-1;}}i:102;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"a";s:1:"0";s:1:"n";i:-2;}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:4:"time";s:5:"0.000";s:5:"query";s:0:"";}i:12;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:2:{s:1:"a";i:1;s:1:"n";i:6;}s:7:"matches";a:3:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"a";s:2:"40";s:1:"n";s:11:"50000000000";}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"a";s:2:"50";s:1:"n";s:11:"60000000000";}}i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"a";s:2:"60";s:1:"n";s:11:"70000000000";}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"query";s:0:"";}i:13;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:2:{s:1:"a";i:1;s:1:"n";i:6;}s:7:"matches";a:1:{i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"a";s:2:"70";s:1:"n";s:20:"-4611686018427387903";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"query";s:0:"";}}}sphinx-2.0.4-release/test/data/0000755000176700017710000000000011724063141015632 5ustar deogardeogarsphinx-2.0.4-release/test/data/stub.txt0000644000176700017710000000011411220101253017330 0ustar deogardeogarThis is stub file to force Mercurial to keep this folder in it's repository.sphinx-2.0.4-release/test/test_150/0000755000176700017710000000000011724063141016265 5ustar deogardeogarsphinx-2.0.4-release/test/test_150/test.xml0000644000176700017710000000277011470740627020005 0ustar deogardeogar keywords dictionary vs expansion limit indexer { mem_limit = 16M } searchd { expansion_limit = 0 expansion_limit = 3 expansion_limit = 2 } source test { type = mysql sql_query = select * from test_table sql_attr_uint = gid } index test { source = test path = /test dict = keywords min_prefix_len = 2 enable_star = 1 } create table test_table ( id int not null, gid int not null, title varchar(255) not null ); drop table if exists test_table; insert into test_table values ( 1, 1, 'funny place.' ); insert into test_table values ( 2, 1, 'is it function?' ); insert into test_table values ( 3, 1, 'functional lang. quite funny.' ); insert into test_table values ( 4, 1, 'functic sequence.' ); insert into test_table values ( 5, 1, 'fun place. funny place.' ); insert into test_table values ( 6, 1, 'its function.' 
); insert into test_table values ( 7, 1, 'functional is not place.' ); insert into test_table values ( 8, 1, 'functic is not place.' ); fu* func* sphinx-2.0.4-release/test/test_150/model.bin0000644000176700017710000001226711470740627020100 0ustar deogardeogara:3:{i:0;a:2:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:8:{i:5;a:2:{s:6:"weight";s:4:"1557";s:5:"attrs";a:1:{s:3:"gid";s:1:"1";}}i:3;a:2:{s:6:"weight";s:4:"1540";s:5:"attrs";a:1:{s:3:"gid";s:1:"1";}}i:2;a:2:{s:6:"weight";s:4:"1525";s:5:"attrs";a:1:{s:3:"gid";s:1:"1";}}i:4;a:2:{s:6:"weight";s:4:"1525";s:5:"attrs";a:1:{s:3:"gid";s:1:"1";}}i:6;a:2:{s:6:"weight";s:4:"1525";s:5:"attrs";a:1:{s:3:"gid";s:1:"1";}}i:7;a:2:{s:6:"weight";s:4:"1525";s:5:"attrs";a:1:{s:3:"gid";s:1:"1";}}i:8;a:2:{s:6:"weight";s:4:"1525";s:5:"attrs";a:1:{s:3:"gid";s:1:"1";}}i:1;a:2:{s:6:"weight";s:4:"1514";s:5:"attrs";a:1:{s:3:"gid";s:1:"1";}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.004";s:5:"words";a:5:{s:8:"function";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:5:"funny";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}s:10:"functional";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:7:"functic";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:3:"fun";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:3:"fu*";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:6:{i:2;a:2:{s:6:"weight";s:4:"1543";s:5:"attrs";a:1:{s:3:"gid";s:1:"1";}}i:3;a:2:{s:6:"weight";s:4:"1543";s:5:"attrs";a:1:{s:3:"gid";s:1:"1";}}i:4;a:2:{s:6:"weight";s:4:"1543";s:5:"attrs";a:1:{s:3:"gid";s:1:"1";}}i:6;a:2:{s:6:"weight";s:4:"1543";s:5:"attrs";a:1:{s:3:"gid";s:1:"1";}}i:7;a:2:{s:6:"weight";s:4:"1543";s:5:"attrs";a:1:{s:3:"gid";s:1:"1";}}i:8;a:2:{s:6:"weight";s:4:"1543";s:5:"attrs";a:1:{s:3:"gid";s:1:"1";}}}s:5:"total";s:1:"6";s:11:"total_found";s:1:"6";s:4:"time";s:5:"0.003";s:5:"words";a:3:{s:8:"function";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:10:"functional";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:7:"functic";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"func*";}}i:1;a:2:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:6:{i:3;a:2:{s:6:"weight";s:4:"1567";s:5:"attrs";a:1:{s:3:"gid";s:1:"1";}}i:2;a:2:{s:6:"weight";s:4:"1543";s:5:"attrs";a:1:{s:3:"gid";s:1:"1";}}i:6;a:2:{s:6:"weight";s:4:"1543";s:5:"attrs";a:1:{s:3:"gid";s:1:"1";}}i:7;a:2:{s:6:"weight";s:4:"1543";s:5:"attrs";a:1:{s:3:"gid";s:1:"1";}}i:1;a:2:{s:6:"weight";s:4:"1523";s:5:"attrs";a:1:{s:3:"gid";s:1:"1";}}i:5;a:2:{s:6:"weight";s:4:"1523";s:5:"attrs";a:1:{s:3:"gid";s:1:"1";}}}s:5:"total";s:1:"6";s:11:"total_found";s:1:"6";s:4:"time";s:5:"0.003";s:5:"words";a:3:{s:10:"functional";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:5:"funny";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}s:8:"function";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:3:"fu*";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:6:{i:2;a:2:{s:6:"weight";s:4:"1543";s:5:"attrs";a:1:{s:3:"gid";s:1:"1";}}i:3;a:2:{s:6:"weight";s:4:"1543";s:5:"attrs";a:1:{s:3:"gid";s:1:"1";}}i:4;a:2:{s:6:"weight";s
:4:"1543";s:5:"attrs";a:1:{s:3:"gid";s:1:"1";}}i:6;a:2:{s:6:"weight";s:4:"1543";s:5:"attrs";a:1:{s:3:"gid";s:1:"1";}}i:7;a:2:{s:6:"weight";s:4:"1543";s:5:"attrs";a:1:{s:3:"gid";s:1:"1";}}i:8;a:2:{s:6:"weight";s:4:"1543";s:5:"attrs";a:1:{s:3:"gid";s:1:"1";}}}s:5:"total";s:1:"6";s:11:"total_found";s:1:"6";s:4:"time";s:5:"0.003";s:5:"words";a:3:{s:8:"function";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:10:"functional";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:7:"functic";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"func*";}}i:2;a:2:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:4:{i:3;a:2:{s:6:"weight";s:4:"1600";s:5:"attrs";a:1:{s:3:"gid";s:1:"1";}}i:7;a:2:{s:6:"weight";s:4:"1564";s:5:"attrs";a:1:{s:3:"gid";s:1:"1";}}i:1;a:2:{s:6:"weight";s:4:"1535";s:5:"attrs";a:1:{s:3:"gid";s:1:"1";}}i:5;a:2:{s:6:"weight";s:4:"1535";s:5:"attrs";a:1:{s:3:"gid";s:1:"1";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.003";s:5:"words";a:2:{s:5:"funny";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}s:10:"functional";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:3:"fu*";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:4:{i:2;a:2:{s:6:"weight";s:4:"1564";s:5:"attrs";a:1:{s:3:"gid";s:1:"1";}}i:3;a:2:{s:6:"weight";s:4:"1564";s:5:"attrs";a:1:{s:3:"gid";s:1:"1";}}i:6;a:2:{s:6:"weight";s:4:"1564";s:5:"attrs";a:1:{s:3:"gid";s:1:"1";}}i:7;a:2:{s:6:"weight";s:4:"1564";s:5:"attrs";a:1:{s:3:"gid";s:1:"1";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:10:"functional";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:8:"function";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"func*";}}}sphinx-2.0.4-release/test/test_037/0000755000176700017710000000000011724063141016271 5ustar deogardeogarsphinx-2.0.4-release/test/test_037/test.xml0000644000176700017710000000434411601427776020013 0ustar deogardeogar rankers indexer { mem_limit = 16M } searchd { } source srctest { type = mysql sql_query = SELECT * FROM test_table WHERE id BETWEEN 1 AND 10 sql_query_pre = SET NAMES utf8 } source srctest2 : srctest { sql_query = SELECT * FROM test_table WHERE id BETWEEN 11 AND 20 } index test { source = srctest path = /test charset_type = utf-8 morphology = stem_ru, stem_en charset_table = 0..9, A..Z->a..z, _, a..z, U+410..U+42F->U+430..U+44F, U+430..U+44F } index test2 { source = srctest2 path = /test2 charset_type = utf-8 } "зимние шины" "зимние шины" "зимние шины" "зимние шины" @title test market street CREATE TABLE `test_table` ( `id` int(11) NOT NULL default '0', `title` varchar(255) NOT NULL default '', `body` varchar(255) NOT NULL default '' ) CHARACTER SET utf8 DROP TABLE IF EXISTS `test_table` SET NAMES utf8 INSERT INTO `test_table` VALUES ( 1, 'зимние шины диÑки чего то тут зимние шины', '' ), ( 2, 'test doc two', 'second stupid test document with random content' ), ( 3, 'filler', 'filler' ), ( 4, 'filler', 'filler' ), ( 5, 'filler', 'filler' ), ( 6, 'filler', 'filler' ), ( 7, 'filler', 'filler' ), ( 8, 'filler', 'filler' ), ( 9, 'filler', 'filler' ), ( 10, 'filler', 'filler' ), ( 11, 'market street', '' ), ( 12, 'market street west', '' ), ( 13, 'north market street', '' ), ( 14, 'farmers market street north', '' 
), ( 15, 'flower street market', '' ), ( 16, 'market street is so very market street', '' ) sphinx-2.0.4-release/test/test_037/model.bin0000644000176700017710000000612011601427776020076 0ustar deogardeogara:1:{i:0;a:6:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"2800";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:8:"зимн";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"2";}s:6:"шин";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:23:""зимние шины"";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1800";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:8:"зимн";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"2";}s:6:"шин";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:23:""зимние шины"";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:8:"зимн";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"2";}s:6:"шин";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:23:""зимние шины"";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:8:"зимн";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"2";}s:6:"шин";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:23:""зимние шины"";}i:4;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:4:"1800";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"test";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:11:"@title test";}i:5;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:6:{i:11;a:2:{s:6:"weight";s:5:"11290";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:5:"10290";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:5:"10212";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:4:"8290";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:4:"8290";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:4:"4290";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"6";s:11:"total_found";s:1:"6";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:6:"market";a:2:{s:4:"docs";s:1:"6";s:4:"hits";s:1:"7";}s:6:"street";a:2:{s:4:"docs";s:1:"6";s:4:"hits";s:1:"7";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:13:"market street";}}}sphinx-2.0.4-release/test/test_017/0000755000176700017710000000000011724063141016267 5ustar deogardeogarsphinx-2.0.4-release/test/test_017/test.xml0000644000176700017710000000236411323636205017777 0ustar deogardeogar phrase matching vs stop words and short words 
indexer { mem_limit = 16M } searchd { } source srctest { type = mysql sql_query = SELECT document_id, body FROM test_table } index test { source = srctest path = /test stopwords = stopwords.txt min_word_len = 3 } walking shoes walking in my shoes microsoft office microsoft a office "walking shoes" "walking in my shoes" "microsoft office" "microsoft a office" CREATE TABLE `test_table` ( `document_id` int(11) NOT NULL default '0', `body` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 1, 'walking shoes' ), ( 2, 'try walking in my shoes' ), ( 3, 'Microsoft. The Office.' ), ( 4, 'Microsoft Office' ) sphinx-2.0.4-release/test/test_017/model.bin0000644000176700017710000000661411033254243020066 0ustar deogardeogara:1:{i:0;a:8:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:7:"walking";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:5:"shoes";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:5:"query";s:13:"walking shoes";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:7:"walking";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:5:"shoes";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:5:"query";s:19:"walking in my shoes";}i:2;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:9:"microsoft";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:6:"office";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:5:"query";s:16:"microsoft office";}i:3;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:9:"microsoft";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:6:"office";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:5:"query";s:18:"microsoft a office";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"2557";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:7:"walking";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:5:"shoes";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:5:"query";s:15:""walking shoes"";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:4:"2557";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:7:"walking";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:5:"shoes";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:5:"query";s:21:""walking in my 
shoes"";}i:6;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:4:"2557";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:9:"microsoft";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:6:"office";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:5:"query";s:18:""microsoft office"";}i:7;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:4:"2557";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:9:"microsoft";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:6:"office";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:5:"query";s:20:""microsoft a office"";}}}sphinx-2.0.4-release/test/test_155/0000755000176700017710000000000011724063141016272 5ustar deogardeogarsphinx-2.0.4-release/test/test_155/test.xml0000644000176700017710000000431011542431773020001 0ustar deogardeogar SQL-compliant result set vs max_matches indexer { mem_limit = 16M } searchd { compat_sphinxql_magics = 0 } source test1 { type = mysql sql_query = select id, gid, gid as a, title from test_table sql_attr_uint = gid sql_attr_uint = a } source test2 : test1 { sql_query = select id+10, gid, gid as b, title from test_table sql_attr_uint = gid sql_attr_uint = b } source test3 : test1 { sql_query = select id+20, gid, gid+1 as c, gid+2 as b, title from test_table sql_attr_uint = gid sql_attr_uint = c sql_attr_uint = b } source test4 : test1 { sql_query = select id+30, gid, gid+3 as d, gid+5 as b, title from test_table sql_attr_uint = gid sql_attr_uint = d sql_attr_uint = b } index test1 { source = test1 path = /test1 } index test2 { source = test2 path = /test2 } index test3 { source = test3 path = /test3 } index test4 { source = test4 path = /test4 } index dist1 { type = distributed local = test1 local = test2 } index dist2 { type = distributed local = test2 local = test3 } index dist3 { type = distributed local = test2 agent = :idx52 local = test4 agent_connect_timeout = 1000 agent_query_timeout = 3000 } create table test_table ( id int not null, gid int not null, title varchar(255) not null ); drop table if exists test_table; insert into test_table values ( 1, 123, 'hello world' ); insert into test_table values ( 2, 123, 'hello world' ); insert into test_table values ( 3, 123, 'hello world' ); select gid from dist1 where match('hello') option max_matches=2 select b from dist2 where match('hello') option max_matches=2 select b from dist3 where match('hello') option max_matches=2 select id, gid from dist1 where match('hello') option field_weights=(none1=1000, title=1) sphinx-2.0.4-release/test/test_155/model.bin0000644000176700017710000000201311542354636020072 0ustar deogardeogara:1:{i:0;a:4:{i:0;a:3:{s:8:"sphinxql";s:63:"select gid from dist1 where match('hello') option max_matches=2";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:1:{s:3:"gid";s:3:"123";}i:1;a:1:{s:3:"gid";s:3:"123";}}}i:1;a:3:{s:8:"sphinxql";s:61:"select b from dist2 where match('hello') option max_matches=2";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:1:{s:1:"b";s:3:"123";}i:1;a:1:{s:1:"b";s:3:"123";}}}i:2;a:3:{s:8:"sphinxql";s:61:"select b from dist3 where match('hello') option max_matches=2";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:1:{s:1:"b";s:3:"123";}i:1;a:1:{s:1:"b";s:3:"123";}}}i:3;a:3:{s:8:"sphinxql";s:89:"select id, gid from dist1 where 
match('hello') option field_weights=(none1=1000, title=1)";s:10:"total_rows";i:6;s:4:"rows";a:6:{i:0;a:2:{s:2:"id";s:1:"1";s:3:"gid";s:3:"123";}i:1;a:2:{s:2:"id";s:1:"2";s:3:"gid";s:3:"123";}i:2;a:2:{s:2:"id";s:1:"3";s:3:"gid";s:3:"123";}i:3;a:2:{s:2:"id";s:2:"11";s:3:"gid";s:3:"123";}i:4;a:2:{s:2:"id";s:2:"12";s:3:"gid";s:3:"123";}i:5;a:2:{s:2:"id";s:2:"13";s:3:"gid";s:3:"123";}}}}}sphinx-2.0.4-release/test/test_075/0000755000176700017710000000000011724063141016273 5ustar deogardeogarsphinx-2.0.4-release/test/test_075/test.xml0000644000176700017710000000215711323636205020003 0ustar deogardeogar subtree cache warmup indexer { mem_limit = 16M } searchd { subtree_docs_cache = 0 subtree_docs_cache = 100 subtree_hits_cache = 1000 } source srctest { type = mysql sql_query = SELECT * FROM test_table } index test_idx { source = srctest path = /test charset_type = utf-8 } CREATE TABLE `test_table` ( `id` int(11) NOT NULL default '0', `body` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 1, 'one two three one' ), ( 2, 'one two three one two' ) SetMatchMode ( SPH_MATCH_EXTENDED2 ); $client->AddQuery ("(one two three) | four"); $client->AddQuery ("(one two three) | five"); $results = $client->RunQueries (); for ( $i=0; $i<2; $i++ ) if ( is_array($results) && is_array($results[$i]) ) unset ( $results[$i]["time"] ); ]]> sphinx-2.0.4-release/test/test_075/model.bin0000644000176700017710000000415211232040563020064 0ustar deogardeogara:2:{i:0;a:1:{i:0;a:2:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:1;a:2:{s:6:"weight";s:4:"3379";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"3365";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"4";}s:3:"two";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"3";}s:5:"three";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:4:"four";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:1;a:2:{s:6:"weight";s:4:"3379";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"3365";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"4";}s:3:"two";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"3";}s:5:"three";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:4:"five";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}}}}i:1;a:1:{i:0;a:2:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:1;a:2:{s:6:"weight";s:4:"3379";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"3365";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"4";}s:3:"two";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"3";}s:5:"three";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:4:"four";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:1;a:2:{s:6:"weight";s:4:"3379";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"3365";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"4";}s:3:"two";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"3";}s:5:"three";a:2:{s:
4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:4:"five";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}}}}}sphinx-2.0.4-release/test/test_125/0000755000176700017710000000000011724063141016267 5ustar deogardeogarsphinx-2.0.4-release/test/test_125/test.xml0000644000176700017710000000227311655632037020005 0ustar deogardeogar select expressions, functions, operators indexer { mem_limit = 16M } searchd { compat_sphinxql_magics = 0 } source test { type = mysql sql_query = SELECT * FROM test_table sql_attr_uint = aa sql_attr_multi = uint mm from field } index test { source = test path = /test } CREATE TABLE test_table ( id INT NOT NULL, aa INT NOT NULL, title VARCHAR(255) NOT NULL, mm VARCHAR(255) NOT NULL ); DROP TABLE IF EXISTS test_table; INSERT INTO test_table ( id, aa, title, mm ) VALUES ( 1, 2, 'dummy', '' ), ( 10, 3, 'ohai', '7 40') SELECT 0 AND 0 OR 1 AS a, 0 AND 1 OR 1 AS b, 1 OR 0 AND 0 AS c, 1 OR 1 AND 0 AS d FROM test WHERE id=1 SELECT id, CRC32('test') FROM test WHERE id=10 SELECT id, ABS(CRC32('test')) FROM test WHERE id=10 SELECT id, IF(IN(mm,7),111,222) FROM test sphinx-2.0.4-release/test/test_125/model.bin0000644000176700017710000000156711655632037020103 0ustar deogardeogara:1:{i:0;a:4:{i:0;a:3:{s:8:"sphinxql";s:102:"SELECT 0 AND 0 OR 1 AS a, 0 AND 1 OR 1 AS b, 1 OR 0 AND 0 AS c, 1 OR 1 AND 0 AS d FROM test WHERE id=1";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:4:{s:1:"a";s:1:"1";s:1:"b";s:1:"1";s:1:"c";s:1:"1";s:1:"d";s:1:"1";}}}i:1;a:3:{s:8:"sphinxql";s:46:"SELECT id, CRC32('test') FROM test WHERE id=10";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:2:{s:2:"id";s:2:"10";s:13:"crc32('test')";s:10:"3632233996";}}}i:2;a:3:{s:8:"sphinxql";s:51:"SELECT id, ABS(CRC32('test')) FROM test WHERE id=10";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:2:{s:2:"id";s:2:"10";s:18:"abs(crc32('test'))";s:9:"662733300";}}}i:3;a:3:{s:8:"sphinxql";s:41:"SELECT id, IF(IN(mm,7),111,222) FROM test";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:2:{s:2:"id";s:1:"1";s:20:"if(in(mm,7),111,222)";s:3:"222";}i:1;a:2:{s:2:"id";s:2:"10";s:20:"if(in(mm,7),111,222)";s:3:"111";}}}}}sphinx-2.0.4-release/test/test_172/0000755000176700017710000000000011724063141016271 5ustar deogardeogarsphinx-2.0.4-release/test/test_172/test.xml0000644000176700017710000000267011541725604020005 0ustar deogardeogar AVG vs group sorting indexer { mem_limit = 16M } searchd { } source srctest { type = mysql sql_query = SELECT * FROM test_table sql_attr_uint = gid sql_attr_uint = price } index test { source = srctest path = /test docinfo = extern } CREATE TABLE test_table ( document_id INTEGER NOT NULL PRIMARY KEY, body VARCHAR(255) NOT NULL, gid INTEGER NOT NULL, price INTEGER NOT NULL ) DROP TABLE IF EXISTS test_table INSERT INTO test_table VALUES ( 1, 'dummy', 1, 5 ), ( 2, 'dummy', 1, 5 ), ( 3, 'dummy', 2, 15 ), ( 4, 'dummy', 3, 30 ), ( 5, 'dummy', 4, 10 ), ( 6, 'dummy', 4, 10 ), ( 7, 'dummy', 4, 10 ), ( 8, 'dummy', 4, 10 ), ( 9, 'dummy', 4, 10 ), ( 10, 'dummy', 5, 1 ), ( 11, 'dummy', 6, 2 ), ( 12, 'dummy', 7, 3 ), ( 13, 'dummy', 8, 4 ), ( 14, 'dummy', 9, 5 ), ( 15, 'dummy', 10, 6 ) SELECT *, AVG(price) AS p FROM test GROUP BY gid ORDER BY p DESC option max_matches=2 SELECT *, AVG(price) AS p FROM test GROUP BY gid ORDER BY p ASC option max_matches=2 SELECT *, AVG(price) AS p FROM test WHERE price>4 GROUP BY gid ORDER BY p ASC option max_matches=2 sphinx-2.0.4-release/test/test_172/model.bin0000644000176700017710000000262011541725604020071 0ustar deogardeogara:1:{i:0;a:3:{i:0;a:3:{s:8:"sphinxql";s:85:"SELECT *, AVG(price) AS p FROM test GROUP BY gid 
ORDER BY p DESC option max_matches=2";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:7:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:3:"gid";s:1:"3";s:5:"price";s:2:"30";s:1:"p";s:9:"30.000000";s:8:"@groupby";s:1:"3";s:6:"@count";s:1:"1";}i:1;a:7:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"gid";s:1:"2";s:5:"price";s:2:"15";s:1:"p";s:9:"15.000000";s:8:"@groupby";s:1:"2";s:6:"@count";s:1:"1";}}}i:1;a:3:{s:8:"sphinxql";s:84:"SELECT *, AVG(price) AS p FROM test GROUP BY gid ORDER BY p ASC option max_matches=2";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:7:{s:2:"id";s:2:"10";s:6:"weight";s:1:"1";s:3:"gid";s:1:"5";s:5:"price";s:1:"1";s:1:"p";s:8:"1.000000";s:8:"@groupby";s:1:"5";s:6:"@count";s:1:"1";}i:1;a:7:{s:2:"id";s:2:"11";s:6:"weight";s:1:"1";s:3:"gid";s:1:"6";s:5:"price";s:1:"2";s:1:"p";s:8:"2.000000";s:8:"@groupby";s:1:"6";s:6:"@count";s:1:"1";}}}i:2;a:3:{s:8:"sphinxql";s:98:"SELECT *, AVG(price) AS p FROM test WHERE price>4 GROUP BY gid ORDER BY p ASC option max_matches=2";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:7:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"gid";s:1:"1";s:5:"price";s:1:"5";s:1:"p";s:8:"5.000000";s:8:"@groupby";s:1:"1";s:6:"@count";s:1:"2";}i:1;a:7:{s:2:"id";s:2:"14";s:6:"weight";s:1:"1";s:3:"gid";s:1:"9";s:5:"price";s:1:"5";s:1:"p";s:8:"5.000000";s:8:"@groupby";s:1:"9";s:6:"@count";s:1:"1";}}}}}sphinx-2.0.4-release/test/ubertest.php0000644000176700017710000002021611547604224017276 0ustar deogardeogar [OPTIONS] [TESTDIRS ...]\n" ); print ( "\nModes are:\n" ); print ( "g, gen\t\t\tgenerate reference ('model') test results\n" ); print ( "t, test\t\t\trun tests and compare results to reference\n" ); print ( "qt\t\t\tsame as test, but skips user-configured slow tests\n" ); print ( "\nOptions are:\n" ); print ( "-u, --user \tuse 'USER' as MySQL user\n" ); print ( "-p, --password \tuse 'PASS' as MySQL password\n" ); print ( "-i, --indexer \tpath to indexer\n" ); print ( "-s, --searchd \tpath to searchd\n" ); print ( "--strict\t\tterminate on the first failure (for automatic runs)\n" ); print ( "--strict-verbose\tterminate on the first failure and copy the last report to report.txt (for automatic runs)\n" ); print ( "--managed\t\tdon't run searchd during test (for debugging)\n" ); print ( "--skip-indexer\t\tskip DB creation and indexer stages and go directly to queries/custom tests\n"); print ( "--rt\t\t\ttest RT backend (auto-convert all local indexes)\n" ); print ( "--no-drop-db\t\tKeep test db tables after the test (for debugging)\n"); print ( "--no-demo\t\tJust skip all tests without models. 
Else - run them, but never fail (for debugging)\n"); print ( "--no-marks\t\tDon't mark the output of every test in the logs.\n"); print ( "\nEnvironment variables are:\n" ); print ( "DBUSER\t\t\tuse 'USER' as MySQL user\n" ); print ( "DBPASS\t\t\tuse 'PASS' as MySQL password\n" ); print ( "\nTests can be specified by full name, or list of IDs, or range of IDs.\n" ); print ( "\nUsage examples:\n" ); print ( "php ubertest.php gen\n" ); print ( "php ubertest.php t --user test --password test\n" ); print ( "php ubertest.php t test_015\n" ); print ( "php ubertest.php t 31 37 41 53-64\n" ); print ( "DBPASS=test make check\n" ); exit ( 0 ); } $locals = array(); $locals['rt_mode'] = false; if ( array_key_exists ( "DBUSER", $_ENV ) && $_ENV["DBUSER"] ) $locals['db-user'] = $_ENV["DBUSER"]; if ( array_key_exists ( "DBPASS", $_ENV ) && $_ENV["DBPASS"] ) $locals['db-password'] = $_ENV["DBPASS"]; $run = false; $test_dirs = array(); $test_range = array(); $user_skip = false; for ( $i=0; $i=$test_range[0] && $test_id<=$test_range[1] ) ) { $tests[] = $entry; } } sort ( $tests ); // full name to short alias function ShortTestName ( $full ) { if ( substr ( $full,0,5 )=="test_" ) return substr ( $full, 5 ); return $full; } // run tests $total_tests = 0; $total_tests_failed = 0; $total_subtests = 0; $total_subtests_failed = 0; $total_skipped = $user_skipped; $failed_tests = array(); foreach ( $tests as $test ) { if ( $windows && !$sd_managed_searchd ) { // avoid an issue with daemons stuck in exit(0) for some seconds $sd_port += 10; $agent_port += 10; $agent_port_sql += 10; $agents = array ( array ( "address" => $sd_address, "port" => $sd_port, "sqlport" => $sd_sphinxql_port ), array ( "address" => $agent_address, "port" => $agent_port, "sqlport" => $agent_port_sql ), array ( "address" => $agent_address, "port" => $agent_port+1, "sqlport" => $agent_port_sql+1 ) ); } if ( file_exists ( $test."/test.xml" ) ) { $total_tests++; $res = RunTest ( $test, $g_skipdemo, $g_usemarks ); if ( !is_array($res) ) { // failed to run that test at all $total_tests_failed++; $failed_tests[] = ShortTestName ( $test ); continue; } $total_subtests += $res["tests_total"]; $total_skipped += $res["tests_skipped"]; if ( $res["tests_failed"] ) { $total_tests_failed++; $total_subtests_failed += $res["tests_failed"]; $failed_tests[] = ShortTestName ( $test ); if ( $g_strict ) { if ( $g_strictverbose ) { $report = file_get_contents ( "$test/report.txt" ); $report.= "\n Test $test failed\n"; file_put_contents("report.txt",$report); $report = ""; } break; } } } elseif ( file_exists ( $test."/test.inc" ) ) { $run_func = create_function ( '$test_path', file_get_contents ( $test."/test.inc" ) ); $total_tests++; $total_subtests++; if ( !$run_func ( $test ) ) { $total_tests_failed++; $total_subtests_failed++; $failed_tests[] = ShortTestName ( $test ); } } } // cleanup @unlink ( "config.conf" ); @unlink ( "error.txt" ); $nfile = 1; while ( file_exists ( "config_$nfile.conf" ) ) { @unlink ( "config_$nfile.conf" ); $nfile++; } $nfile = 1; while ( file_exists ( "error_$nfile.txt" ) ) { @unlink ( "error_$nfile.txt" ); $nfile++; } // summarize if ( $total_tests_failed ) { printf ( "\nTo re-run failed tests only:\nphp ubertest.php t %s\n", join ( " ", $failed_tests ) ); printf ( "\n%d of %d tests and %d of %d subtests failed, %d tests skipped, %.2f sec elapsed\nTHERE WERE FAILURES!\n", $total_tests_failed, $total_tests, $total_subtests_failed, $total_subtests,$total_skipped, MyMicrotime()-$t ); exit ( 1 ); } else { printf ( "\n%d tests and %d subtests 
succesful, %d tests skipped, %.2f sec elapsed\nALL OK\n", $total_tests, $total_subtests, $total_skipped, MyMicrotime()-$t ); exit ( 0 ); } // // $Id: ubertest.php 2765 2011-04-08 13:07:32Z klirichek $ // ?> sphinx-2.0.4-release/test/test_108/0000755000176700017710000000000011724063141016270 5ustar deogardeogarsphinx-2.0.4-release/test/test_108/data2.xml0000644000176700017710000000173611326341312020011 0ustar deogardeogar test one here is string attribute ord1 two three test1 is attr ordd field this is my test document number one. also checking search within phrases. test one here is the second string attribute ord2 bka ss cc test2 is attr orddf field this is my test document number two. also checking search within phrases. sphinx-2.0.4-release/test/test_108/data.xml0000644000176700017710000000121311326341312017715 0ustar deogardeogar test one here is string attribute ord1 two three test1 is attr ordd field this is my test document number one. also checking search within phrases. test one here is the second string attribute ord2 bka ss cc test2 is attr orddf field this is my test document number two. also checking search within phrases. sphinx-2.0.4-release/test/test_108/test.xml0000644000176700017710000000177411326341312017777 0ustar deogardeogar xmlpipe2 string and wordcount attributes, defined in schema or in config indexer { mem_limit = 16M } searchd { } source src { type = xmlpipe2 xmlpipe_command = cat /data.xml xmlpipe_field = title xmlpipe_field = content xmlpipe_attr_uint = gid xmlpipe_attr_string = sgid xmlpipe_attr_wordcount = ordd xmlpipe_field_string = sgidf xmlpipe_field_wordcount = orddf } source src2 { type = xmlpipe2 xmlpipe_command = cat /data2.xml } index idx { source = src path = /idx charset_type = utf-8 } index idx2 { source = src2 path = /idx2 charset_type = utf-8 } test ord2 orddf test2 test ord2 orddf test2 sphinx-2.0.4-release/test/test_108/model.bin0000644000176700017710000001207211326341312020061 0ustar deogardeogara:1:{i:0;a:8:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:4:{i:0;s:5:"title";i:1;s:7:"content";i:2;s:5:"sgidf";i:3;s:5:"orddf";}s:5:"attrs";a:5:{s:3:"gid";i:1;s:4:"sgid";i:7;s:4:"ordd";i:1;s:5:"sgidf";i:7;s:5:"orddf";i:1;}s:7:"matches";a:2:{i:1;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:5:{s:3:"gid";i:0;s:4:"sgid";s:24:"here is string attribute";s:4:"ordd";i:3;s:5:"sgidf";s:13:"test1 is attr";s:5:"orddf";i:2;}}i:2;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:5:{s:3:"gid";i:0;s:4:"sgid";s:35:"here is the second string attribute";s:4:"ordd";i:4;s:5:"sgidf";s:13:"test2 is 
attr";s:5:"orddf";i:2;}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"test";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"4";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"test";}i:1;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:4:{i:0;s:5:"title";i:1;s:7:"content";i:2;s:5:"sgidf";i:3;s:5:"orddf";}s:5:"attrs";a:5:{s:3:"gid";i:1;s:4:"sgid";i:7;s:4:"ordd";i:1;s:5:"sgidf";i:7;s:5:"orddf";i:1;}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"ord2";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"ord2";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:4:{i:0;s:5:"title";i:1;s:7:"content";i:2;s:5:"sgidf";i:3;s:5:"orddf";}s:5:"attrs";a:5:{s:3:"gid";i:1;s:4:"sgid";i:7;s:4:"ordd";i:1;s:5:"sgidf";i:7;s:5:"orddf";i:1;}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"gid";i:0;s:4:"sgid";s:35:"here is the second string attribute";s:4:"ordd";i:4;s:5:"sgidf";s:13:"test2 is attr";s:5:"orddf";i:2;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"orddf";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"orddf";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:4:{i:0;s:5:"title";i:1;s:7:"content";i:2;s:5:"sgidf";i:3;s:5:"orddf";}s:5:"attrs";a:5:{s:3:"gid";i:1;s:4:"sgid";i:7;s:4:"ordd";i:1;s:5:"sgidf";i:7;s:5:"orddf";i:1;}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"gid";i:0;s:4:"sgid";s:35:"here is the second string attribute";s:4:"ordd";i:4;s:5:"sgidf";s:13:"test2 is attr";s:5:"orddf";i:2;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"test2";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"test2";}i:4;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:4:{i:0;s:5:"title";i:1;s:7:"content";i:2;s:5:"sgidf";i:3;s:5:"orddf";}s:5:"attrs";a:5:{s:3:"gid";i:1;s:4:"sgid";i:7;s:4:"ordd";i:1;s:5:"sgidf";i:7;s:5:"orddf";i:1;}s:7:"matches";a:2:{i:1;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:5:{s:3:"gid";i:1;s:4:"sgid";s:24:"here is string attribute";s:4:"ordd";i:3;s:5:"sgidf";s:13:"test1 is attr";s:5:"orddf";i:2;}}i:2;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:5:{s:3:"gid";i:1;s:4:"sgid";s:35:"here is the second string attribute";s:4:"ordd";i:4;s:5:"sgidf";s:13:"test2 is 
attr";s:5:"orddf";i:2;}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"test";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"4";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"test";}i:5;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:4:{i:0;s:5:"title";i:1;s:7:"content";i:2;s:5:"sgidf";i:3;s:5:"orddf";}s:5:"attrs";a:5:{s:3:"gid";i:1;s:4:"sgid";i:7;s:4:"ordd";i:1;s:5:"sgidf";i:7;s:5:"orddf";i:1;}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"ord2";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"ord2";}i:6;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:4:{i:0;s:5:"title";i:1;s:7:"content";i:2;s:5:"sgidf";i:3;s:5:"orddf";}s:5:"attrs";a:5:{s:3:"gid";i:1;s:4:"sgid";i:7;s:4:"ordd";i:1;s:5:"sgidf";i:7;s:5:"orddf";i:1;}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"gid";i:1;s:4:"sgid";s:35:"here is the second string attribute";s:4:"ordd";i:4;s:5:"sgidf";s:13:"test2 is attr";s:5:"orddf";i:2;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"orddf";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"orddf";}i:7;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:4:{i:0;s:5:"title";i:1;s:7:"content";i:2;s:5:"sgidf";i:3;s:5:"orddf";}s:5:"attrs";a:5:{s:3:"gid";i:1;s:4:"sgid";i:7;s:4:"ordd";i:1;s:5:"sgidf";i:7;s:5:"orddf";i:1;}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"gid";i:1;s:4:"sgid";s:35:"here is the second string attribute";s:4:"ordd";i:4;s:5:"sgidf";s:13:"test2 is attr";s:5:"orddf";i:2;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"test2";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"test2";}}}sphinx-2.0.4-release/test/Makefile0000644000176700017710000000026010727543542016371 0ustar deogardeogarall: install: install-strip: uninstall: TAGS: info: dist: check: php ubertest.php t -u test clean: sh clean.sh distclean: clean mostlyclean: clean maintainer-clean: clean sphinx-2.0.4-release/test/test_028/0000755000176700017710000000000011724063141016271 5ustar deogardeogarsphinx-2.0.4-release/test/test_028/test.inc0000644000176700017710000000233211015271606017743 0ustar deogardeogarprintf ( "testing $test_path, spelldump... 
" ); $windows = isset($_SERVER["WINDIR"]) || isset($_SERVER["windir"]) || isset($_SERVER["HOMEDRIVE"]); if ( $windows ) $spelldump_path = "..\\bin\\debug\\spelldump"; else $spelldump_path = "../src/spelldump"; global $g_model; if ( $g_model ) { exec ( "$spelldump_path $test_path/model.dict $test_path/model.aff $test_path/model.spell", $error, $retval ); if ( !file_exists ( "$test_path/model.spell" ) ) return false; printf ( "done; 1/1 subtests OK\n" ); return true; } exec ( "$spelldump_path $test_path/model.dict $test_path/model.aff $test_path/current.spell", $error, $retval ); if ( !file_exists ( "$test_path/current.spell" ) ) return false; $model = file_get_contents ( "$test_path/model.spell" ); $result = file_get_contents ( "$test_path/current.spell" ); if ( $model != $result ) { if ( $windows ) system ( "diff -u3 $test_path/model.spell $test_path/current.spell > $test_path/report.txt" ); else system ( "diff $test_path/model.spell $test_path/current.spell > $test_path/report.txt" ); printf ( "FAILED\n" ); return false; } unlink ( "$test_path/current.spell" ); printf ( "done; 1/1 subtests OK\n" ); return true;sphinx-2.0.4-release/test/test_028/model.dict0000644000176700017710000000057111015271606020241 0ustar deogardeogarANSI enter/DGRS disposed/I natural/PSY create/ADGNSVX prevent/DGRSV multiply/DGNRSXZ fall/GMNRS weak/NPRTXY twenty/HS hundred/HS messy/PRTY quick/NPRTXY file/DGJMRSZ cross/DGJRSYZ imply/DGNSX convey/DGRSZ late/DPRTY dirty/DGPRSTY gray/DGPRSTY small/PRT skate/DGRSZ build/DGJRSZ lash/DGJRS cough/DGRS fix/DGJRSZ bat/DGMRS cloudy/PRTY dog/MS anorexiasphinx-2.0.4-release/test/test_028/model.aff0000644000176700017710000000457311015271606020060 0ustar deogardeogarwordchars a A wordchars [bc] [BC] wordchars [de] [DE] wordchars [f-i] [F-I] wordchars [j-n] [J-N] wordchars o O wordchars [p-s] [P-S] wordchars [tu] [TU] wordchars [v-y] [V-Y] wordchars z Z prefixes flag *A: . > RE # As in enter > reenter flag *I: . > IN # As in disposed > indisposed flag *U: . > UN # As in natural > unnatural suffixes flag V: E > -E,IVE # As in create > creative [^E] > IVE # As in prevent > preventive flag *N: E > -E,ION # As in create > creation Y > -Y,ICATION # As in multiply > multiplication [^EY] > EN # As in fall > fallen flag *X: E > -E,IONS # As in create > creations Y > -Y,ICATIONS # As in multiply > multiplications [^EY] > ENS # As in weak > weakens flag H: Y > -Y,IETH # As in twenty > twentieth [^Y] > TH # As in hundred > hundredth flag *Y: Y > -Y,ILY # As in messy > messily [^Y] > LY # As in quick > quickly flag *G: E > -E,ING # As in file > filing [^E] > ING # As in cross > crossing flag *J: E > -E,INGS # As in file > filings [^E] > INGS # As in cross > crossings flag *D: E > D # As in create > created [^AEIOU]Y > -Y,IED # As in imply > implied [^EY] > ED # As in cross > crossed [AEIOU]Y > ED # As in convey > conveyed flag T: E > ST # As in late > latest [^AEIOU]Y > -Y,IEST # As in dirty > dirtiest [AEIOU]Y > EST # As in gray > grayest [^EY] > EST # As in small > smallest flag *R: E > R # As in skate > skater [^AEIOU]Y > -Y,IER # As in multiply > multiplier [AEIOU]Y > ER # As in convey > conveyer [^EY] > ER # As in build > builder flag *Z: E > RS # As in skate > skaters [^AEIOU]Y > -Y,IERS # As in multiply > multipliers [AEIOU]Y > ERS # As in convey > conveyers [^EY] > ERS # As in build > builders flag *S: [^AEIOU]Y > -Y,IES # As in imply > implies [AEIOU]Y > S # As in convey > conveys [CST]H > ES # As in lash > lashes (some TH's...) 
[^CST]H > S # As in cough > coughs [SXZ] > ES # As in fix > fixes [^SXZHY] > S # As in bat > bats flag *P: [^AEIOU]Y > -Y,INESS # As in cloudy > cloudiness [AEIOU]Y > NESS # As in gray > grayness [^Y] > NESS # As in late > lateness flag *M: . > 'S # As in dog > dog'ssphinx-2.0.4-release/test/test_028/model.spell0000644000176700017710000000527411146310732020441 0ustar deogardeogarANSI > ANSI anorexia > anorexia bat > bat bat's > bat bated > bat bater > bat bating > bat bats > bat build > build builded > build builder > build builders > build building > build buildings > build builds > build cloudier > cloudy cloudiest > cloudy cloudily > cloudy cloudiness > cloudy cloudy > cloudy convey > convey conveyed > convey conveyer > convey conveyers > convey conveying > convey conveys > convey cough > cough coughed > cough cougher > cough coughing > cough coughs > cough create > create created > create creates > create creating > create creation > create creations > create creative > create cross > cross crossed > cross crosser > cross crossers > cross crosses > cross crossing > cross crossings > cross crossly > cross dirtied > dirty dirtier > dirty dirties > dirty dirtiest > dirty dirtily > dirty dirtiness > dirty dirty > dirty dirtying > dirty disposed > disposed dog > dog dog's > dog dogs > dog enter > enter entered > enter enterer > enter entering > enter enters > enter fall > fall fall's > fall fallen > fall faller > fall falling > fall falls > fall file > file file's > file filed > file filer > file filers > file files > file filing > file filings > file fix > fix fixed > fix fixer > fix fixers > fix fixes > fix fixing > fix fixings > fix graily > gray gray > gray grayed > gray grayer > gray grayest > gray graying > gray grayness > gray grays > gray hundred > hundred hundreds > hundred hundredth > hundred implication > imply implications > imply implied > imply implies > imply imply > imply implying > imply indisposed > disposed lash > lash lashed > lash lasher > lash lashes > lash lashing > lash lashings > lash late > late lated > late lately > late lateness > late later > late latest > late messier > messy messiest > messy messily > messy messiness > messy messy > messy multiplication > multiply multiplications > multiply multiplied > multiply multiplier > multiply multipliers > multiply multiplies > multiply multiply > multiply multiplying > multiply natural > natural naturally > natural naturalness > natural naturals > natural prevent > prevent prevented > prevent preventer > prevent preventing > prevent preventive > prevent prevents > prevent quick > quick quicken > quick quickens > quick quicker > quick quickest > quick quickly > quick quickness > quick recreate > create recreated > create recreates > create recreating > create recreation > create recreations > create skate > skate skated > skate skater > skate skaters > skate skates > skate skating > skate small > small smaller > small smallest > small smallness > small twenties > twenty twentieth > twenty twenty > twenty weak > weak weaken > weak weakens > weak weaker > weak weakest > weak weakly > weak weakness > weak sphinx-2.0.4-release/test/test_066/0000755000176700017710000000000011724063141016273 5ustar deogardeogarsphinx-2.0.4-release/test/test_066/dbgen.php0000644000176700017710000000113511274562433020073 0ustar 
deogardeogar>âãòJ??âãòJ@@âãòJAAâãòJBBâãòJCCâãòJDDâãòJEEâãòJFFâãòJGGâãòJHHâãòJIIâãòJJJâãòJKKâãòJLLâãòJMMâãòJNNâãòJOOâãòJPPâãòJQQâãòJRRâãòJSSâãòJTTâãòJUUâãòJVVâãòJWWâãòJXXâãòJYYâãòJZZâãòJ[[âãòJ\\âãòJ]]âãòJ^^âãòJ__âãòJ``âãòJaaâãòJbbâãòJccâãòJddâãòJeeâãòJffâãòJggâãòJhhâãòJiiâãòJjjâãòJkkâãòJllâãòJmmâãòJnnâãòJooâãòJppâãòJqqâãòJrrâãòJssâãòJttâãòJuuâãòJvvâãòJwwâãòJxxâãòJyyâãòJzzâãòJ{{âãòJ||âãòJ}}âãòJ~~âãòJâãòJ€€âãòJâãòJ‚‚âãòJƒƒâãòJ„„âãòJ……âãòJ††âãòJ‡‡âãòJˆˆâãòJ‰‰âãòJŠŠâãòJ‹‹âãòJŒŒâãòJâãòJŽŽâãòJâãòJâãòJ‘‘âãòJ’’âãòJ““âãòJ””âãòJ••âãòJ––âãòJ——âãòJ˜˜âãòJ™™âãòJššâãòJ››âãòJœœâãòJâãòJžžâãòJŸŸâãòJ  âãòJ¡¡âãòJ¢¢âãòJ££âãòJ¤¤âãòJ¥¥âãòJ¦¦âãòJ§§âãòJ¨¨âãòJ©©âãòJªªâãòJ««âãòJ¬¬âãòJ­­âãòJ®®âãòJ¯¯âãòJ°°âãòJ±±âãòJ²²âãòJ³³âãòJ´´âãòJµµâãòJ¶¶âãòJ··âãòJ¸¸âãòJ¹¹âãòJººâãòJ»»âãòJ¼¼âãòJ½½âãòJ¾¾âãòJ¿¿âãòJÀÀâãòJÁÁâãòJÂÂâãòJÃÃâãòJÄÄâãòJÅÅâãòJÆÆâãòJÇÇâãòJsphinx-2.0.4-release/test/test_066/refdata/v19noattrs.spi0000644000176700017710000000023411300043774022447 0ustar deogardeogarŽª”f‚Àü`‚¿ÅÅ|–úíUƒ†ÕÄ.屄,ÞÅ©GÃ,ɦõ ŠÖÜy®ºñW´ä©d ¾žÎo›‹åfùЈ7 ‡†ï2×ç ¿¶öd ô…‘7fŠÊsphinx-2.0.4-release/test/test_066/refdata/v19noattrs.spp0000644000176700017710000000024611300043774022461 0ustar deogardeogar„€€ˆ€€Œ€€ ˆ€€ˆ€€ˆ€€ˆ€€ˆ€€Œ€€„€€„€€ˆ€€ „€€ˆ€€ „€€ˆ€€ˆ€€ˆ€€ˆ€€ ˆ€€ˆ€€ˆ€€ˆ€€ˆ€€ˆ€€ˆ€€ˆ€€Œ€€ˆ€€ˆ€€ˆ€€ˆ€€sphinx-2.0.4-release/test/test_066/refdata/v19noattrs.sph0000644000176700017710000000035411300043774022451 0ustar deogardeogarSPHXtitleÿÿÿÿÿÿÿÿÿÿÿÿcontentÿÿÿÿÿÿÿÿÿÿÿÿŒÁ@sphinx-2.0.4-release/test/test_066/refdata/v8.sph0000644000176700017710000000043011551264745020762 0ustar deogardeogarSPHXadtextÿÿÿÿÿÿÿÿÿÿÿÿheadingÿÿÿÿÿÿÿÿÿÿÿÿpostcodeÿÿÿÿÿÿÿÿÿÿÿÿlng lat section@ make_id` transmission_id€ rJsphinx-2.0.4-release/test/test_066/refdata/v8.spa0000644000176700017710000000011011551264745020746 0ustar deogardeogar|Œ£½9p?wÖ£½yXd?29Á“½5@m?sphinx-2.0.4-release/test/test_066/refdata/v8.spi0000644000176700017710000000017211551264745020766 0ustar deogardeogar²ØŠ®±ÖJ°•Ó1ɱ×[ƒ”·ª=Œ´ölÝÅÙaµûÂTÃê·'¦¿Êƒ¯ÅÚ’ÌXÀˆ’sÅÆê}»ÇòwVsphinx-2.0.4-release/test/test_066/refdata/v20.spm0000644000176700017710000000000011274562433021027 0ustar deogardeogarsphinx-2.0.4-release/test/test_066/refdata/v19noattrs.sps0000644000176700017710000000000111300043774022451 0ustar deogardeogarsphinx-2.0.4-release/test/test_066/refdata/v14.sph0000644000176700017710000000050211206522304021020 0ustar deogardeogarSPHXtitleÿÿÿÿÿÿÿÿÿÿÿÿcontentÿÿÿÿÿÿÿÿÿÿÿÿgroup_id group_id2 date_added@ ŒÁsphinx-2.0.4-release/test/test_066/refdata/v19.spp0000644000176700017710000000000111274562433021043 0ustar deogardeogarsphinx-2.0.4-release/test/test_066/refdata/v14.spm0000644000176700017710000000000011206522304021016 0ustar deogardeogarsphinx-2.0.4-release/test/test_066/refdata/v8.spd0000644000176700017710000000013011551264745020753 0ustar deogardeogarŒò2Œò2 Œò2Œò2"',Œò2.Œò235:?sphinx-2.0.4-release/test/test_066/refdata/v8.spm0000644000176700017710000000000011551264745020760 0ustar deogardeogarsphinx-2.0.4-release/test/test_066/refdata/v19.spd0000644000176700017710000000351111274562433021040 0ustar deogardeogarH_(L[ $7c ;!Y.A*E%}2>UBQym *ie.4so8w 9nr5 v l/dh+$+D/@X xCP?T3|MZ)I^!:b%6 ~=V FAR&z1>E-B"'4#8OG`K\<;3tp7)jf- 1,gk(0 :6qu2N"9&5=J]Fa {'@SG<W#,CD0?sphinx-2.0.4-release/test/test_066/refdata/v20.spa0000644000176700017710000000463411274562433021034 0ustar deogardeogarâãòJâãòJâãòJâãòJâãòJâãòJâãòJâãòJ âãòJ âãòJ âãòJ âãòJ âãòJâãòJâãòJâãòJâãòJâãòJâãòJâãòJâãòJâãòJâãòJâãòJâãòJâãòJâãòJâãòJâãòJâãòJâãòJ 
âãòJ!!âãòJ""âãòJ##âãòJ$$âãòJ%%âãòJ&&âãòJ''âãòJ((âãòJ))âãòJ**âãòJ++âãòJ,,âãòJ--âãòJ..âãòJ//âãòJ00âãòJ11âãòJ22âãòJ33âãòJ44âãòJ55âãòJ66âãòJ77âãòJ88âãòJ99âãòJ::âãòJ;;âãòJ<<âãòJ==âãòJ>>âãòJ??âãòJ@@âãòJAAâãòJBBâãòJCCâãòJDDâãòJEEâãòJFFâãòJGGâãòJHHâãòJIIâãòJJJâãòJKKâãòJLLâãòJMMâãòJNNâãòJOOâãòJPPâãòJQQâãòJRRâãòJSSâãòJTTâãòJUUâãòJVVâãòJWWâãòJXXâãòJYYâãòJZZâãòJ[[âãòJ\\âãòJ]]âãòJ^^âãòJ__âãòJ``âãòJaaâãòJbbâãòJccâãòJddâãòJeeâãòJffâãòJggâãòJhhâãòJiiâãòJjjâãòJkkâãòJllâãòJmmâãòJnnâãòJooâãòJppâãòJqqâãòJrrâãòJssâãòJttâãòJuuâãòJvvâãòJwwâãòJxxâãòJyyâãòJzzâãòJ{{âãòJ||âãòJ}}âãòJ~~âãòJâãòJ€€âãòJâãòJ‚‚âãòJƒƒâãòJ„„âãòJ……âãòJ††âãòJ‡‡âãòJˆˆâãòJ‰‰âãòJŠŠâãòJ‹‹âãòJŒŒâãòJâãòJŽŽâãòJâãòJâãòJ‘‘âãòJ’’âãòJ““âãòJ””âãòJ••âãòJ––âãòJ——âãòJ˜˜âãòJ™™âãòJššâãòJ››âãòJœœâãòJâãòJžžâãòJŸŸâãòJ  âãòJ¡¡âãòJ¢¢âãòJ££âãòJ¤¤âãòJ¥¥âãòJ¦¦âãòJ§§âãòJ¨¨âãòJ©©âãòJªªâãòJ««âãòJ¬¬âãòJ­­âãòJ®®âãòJ¯¯âãòJ°°âãòJ±±âãòJ²²âãòJ³³âãòJ´´âãòJµµâãòJ¶¶âãòJ··âãòJ¸¸âãòJ¹¹âãòJººâãòJ»»âãòJ¼¼âãòJ½½âãòJ¾¾âãòJ¿¿âãòJÀÀâãòJÁÁâãòJÂÂâãòJÃÃâãòJÄÄâãòJÅÅâãòJÆÆâãòJÇÇâãòJâãòJ€€âãòJâãòJÇÇâãòJâãòJÇÇâãòJsphinx-2.0.4-release/test/test_066/refdata/v14.spd0000644000176700017710000000022611206522304021017 0ustar deogardeogar (-6;BGLX^cmr| sphinx-2.0.4-release/test/test_066/refdata/v19.sps0000644000176700017710000000000111274562433021046 0ustar deogardeogarsphinx-2.0.4-release/test/test_066/refdata/v14.spa0000644000176700017710000000010011206522304021003 0ustar deogardeogar VˆJ VˆJ VˆJ VˆJsphinx-2.0.4-release/test/test_066/refdata/v8.spp0000644000176700017710000000010411551264745020770 0ustar deogardeogar€€€€ˆ€€ˆ€€ˆ€€€€€€ˆ€€ˆ€€€€€€ˆ€€sphinx-2.0.4-release/test/test_066/refdata/v19noattrs.spa0000644000176700017710000000000011300043774022426 0ustar deogardeogarsphinx-2.0.4-release/test/test_066/refdata/v19noattrs.spm0000644000176700017710000000000011300043774022442 0ustar deogardeogarsphinx-2.0.4-release/test/test_066/refdata/v14.spi0000644000176700017710000000023411206522304021023 0ustar deogardeogarŽª”f‚Àü`‚¿ÅÅ|–úíUƒ†ÕÄ.屄,ÞÅ©GÃ,ɦõ ŠÖÜy®ºñW´ä©d ¾žÎo›‹åfùЈ7 ‡†ï2×ç ¿¶öd ô…‘7fŠÊsphinx-2.0.4-release/test/test_066/refdata/v19.spi0000644000176700017710000000257711274562433021060 0ustar deogardeogar°íOŽÓâ4‡•Ð+ˆÜ ŒˆÛcùÐÚ½‰¿óz‰¢œ†‰î8‚ù°„Ö£T´…•°#Š–µ!‹”Å\‡¦£l‡„Ÿ3‡ï¬‚âðì£hƒ¹ól…Žð%¢“X‰Ã G™¿Üêh¢“L‡Ž-ƒè÷l⎩¼!˜Û¼HŠ–ŽH…–«l†Ò÷w«Õ<•®.ƒ’›LŽÓÒL‹¬ø ŽÔ½DÖ•(–¤4õïšôø‹üDˆÀ„š†Hšìø!ŠÙÓËÅ4•³ˆúŽH‚Ò›t†¿Óz¥«‰_–‰î¶êHëìL‰•‡iŠÒ”,†™ï–¿ü‘”„4„눑“ë<ŒÖ¤ ‰ý—<‘´4„õ°-›ÓXƒ½ótƒ™°Õãh„ÀÀ‰„ž5ˆâœ,ŽÝïNËõL™Ð„—§t†õÐ%‡ðÛgåî8Ú–(‘”´L‚“„Ôâ“t†î#Š¥Ë!‡À¤ëÜ4„îð+Œ˜ÛUˆ«wƒ‚ð‡ƒŸK†æ£T†ónŽÓåD‡†Ï]‹ÑÇ{¥œ(ˆ¯ÓiêÏkˆûžM„¿ó~¬<‚ê¯c¦ä×Çh‚ê¬4…†¯UèÔ(‰¼Þ7“©ê8ŽóÌ<‚ñïe„™˜4…ïS‡Ð£y‚†h˜¿ÜŒ¤D‡[ƒ×ø ñm‡žœ,†üŸ3Šê£lŽ‚—L¥©ˆŽÞ›LŠâ2ŽÓÕ<‹®÷wŽÔº4ŽÅ–XŒ”<”˜ãe†Þ”‹û4‰À„ŽóüD—ˆøŽ™ïÚ¤4‰êÔyËÂD‰š«TŒ–ÜÌ­L‡¿Ó~ž›t‚–ŽH˜Ïˆ‘³Å4‹‹Å,š¤ ªîøê˜ZGG†Õã(†‘”ƒD„é‡i‘“ìL–ä¼8މî8ƒª¬ „æ¯[ƒæÈ,ƒŠ¯mŽ©Å_‘‘ïR…®œ,ˆüž5‡ÏÔŠÏe„¨¨†æÏS‰åÍa‘É•X‹ª£T…êh‘c†Á¿lœÜ(Œû̆ýUŠ–Ì_†À¤ëÛD„ýï]Ñ”(‡ßo‹æäƒ‘ïkÚÄaÏ6,sphinx-2.0.4-release/test/test_066/refdata/v14.spk0000644000176700017710000000000011206522304021014 0ustar deogardeogarsphinx-2.0.4-release/test/test_066/refdata/v19.spm0000644000176700017710000000000011274562433021037 0ustar deogardeogarsphinx-2.0.4-release/test/test_066/refdata/v20.spp0000644000176700017710000000000111274562433021033 0ustar deogardeogarsphinx-2.0.4-release/test/test_066/refdata/v19noattrs.spk0000644000176700017710000000000011300043774022440 0ustar deogardeogarsphinx-2.0.4-release/test/test_066/refdata/v19noattrs.spd0000644000176700017710000000022611300043774022443 0ustar deogardeogar (-6;BGLX^cmr| sphinx-2.0.4-release/test/test_066/refdata/v19.sph0000644000176700017710000000047411274562433021051 0ustar 
deogardeogarSPHXtitleÿÿÿÿÿÿÿÿÿÿÿÿcontentÿÿÿÿÿÿÿÿÿÿÿÿgroup_id date_added oÇ!@, U+23sphinx-2.0.4-release/test/test_066/refdata/v20.sph0000644000176700017710000000050011274562433021027 0ustar deogardeogarSPHXtitleÿÿÿÿÿÿÿÿÿÿÿÿcontentÿÿÿÿÿÿÿÿÿÿÿÿgroup_id date_added oÇ!@, U+23Usphinx-2.0.4-release/test/test_066/refdata/v20.spd0000644000176700017710000000351111274562433021030 0ustar deogardeogarH_(L[ $7c ;!Y.A*E%}2>UBQym *ie.4so8w 9nr5 v l/dh+$+D/@X xCP?T3|MZ)I^!:b%6 ~=V FAR&z1>E-B"'4#8OG`K\<;3tp7)jf- 1,gk(0 :6qu2N"9&5=J]Fa {'@SG<W#,CD0?sphinx-2.0.4-release/test/test_066/refdata/v20.spk0000644000176700017710000000000011274562433021025 0ustar deogardeogarsphinx-2.0.4-release/test/test_066/refdata/v20.sps0000644000176700017710000000000111274562433021036 0ustar deogardeogarsphinx-2.0.4-release/test/test_066/refdata/v19.spk0000644000176700017710000000000011274562433021035 0ustar deogardeogarsphinx-2.0.4-release/test/test_066/refdata/v20.spi0000644000176700017710000000257711274562433021050 0ustar deogardeogar°íOŽÓâ4‡•Ð+ˆÜ ŒˆÛcùÐÚ½‰¿óz‰¢œ†‰î8‚ù°„Ö£T´…•°#Š–µ!‹”Å\‡¦£l‡„Ÿ3‡ï¬‚âðì£hƒ¹ól…Žð%¢“X‰Ã G™¿Üêh¢“L‡Ž-ƒè÷l⎩¼!˜Û¼HŠ–ŽH…–«l†Ò÷w«Õ<•®.ƒ’›LŽÓÒL‹¬ø ŽÔ½DÖ•(–¤4õïšôø‹üDˆÀ„š†Hšìø!ŠÙÓËÅ4•³ˆúŽH‚Ò›t†¿Óz¥«‰_–‰î¶êHëìL‰•‡iŠÒ”,†™ï–¿ü‘”„4„눑“ë<ŒÖ¤ ‰ý—<‘´4„õ°-›ÓXƒ½ótƒ™°Õãh„ÀÀ‰„ž5ˆâœ,ŽÝïNËõL™Ð„—§t†õÐ%‡ðÛgåî8Ú–(‘”´L‚“„Ôâ“t†î#Š¥Ë!‡À¤ëÜ4„îð+Œ˜ÛUˆ«wƒ‚ð‡ƒŸK†æ£T†ónŽÓåD‡†Ï]‹ÑÇ{¥œ(ˆ¯ÓiêÏkˆûžM„¿ó~¬<‚ê¯c¦ä×Çh‚ê¬4…†¯UèÔ(‰¼Þ7“©ê8ŽóÌ<‚ñïe„™˜4…ïS‡Ð£y‚†h˜¿ÜŒ¤D‡[ƒ×ø ñm‡žœ,†üŸ3Šê£lŽ‚—L¥©ˆŽÞ›LŠâ2ŽÓÕ<‹®÷wŽÔº4ŽÅ–XŒ”<”˜ãe†Þ”‹û4‰À„ŽóüD—ˆøŽ™ïÚ¤4‰êÔyËÂD‰š«TŒ–ÜÌ­L‡¿Ó~ž›t‚–ŽH˜Ïˆ‘³Å4‹‹Å,š¤ ªîøê˜ZGG†Õã(†‘”ƒD„é‡i‘“ìL–ä¼8މî8ƒª¬ „æ¯[ƒæÈ,ƒŠ¯mŽ©Å_‘‘ïR…®œ,ˆüž5‡ÏÔŠÏe„¨¨†æÏS‰åÍa‘É•X‹ª£T…êh‘c†Á¿lœÜ(Œû̆ýUŠ–Ì_†À¤ëÛD„ýï]Ñ”(‡ßo‹æäƒ‘ïkÚÄaÏ6,sphinx-2.0.4-release/test/test_066/test.xml0000644000176700017710000000165311551264745020014 0ustar deogardeogar index format backwards compatibility indexer { mem_limit = 16M } searchd { } source test { type = mysql sql_query = select 1, 'dummy' } index v14 { source = test path = /refdata/v14 } index v19 { source = test path = /refdata/v19 } index v19noattrs { source = test path = /refdata/v19noattrs } index v20 { source = test path = /refdata/v20 } index v8 { source = test path = /refdata/v8 } test test test test ford | corsa | tt sphinx-2.0.4-release/test/test_066/model.bin0000644000176700017710000001734111551264745020106 0ustar 
deogardeogara:1:{i:0;a:5:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:7:"content";}s:5:"attrs";a:3:{s:8:"group_id";i:1;s:9:"group_id2";i:1;s:10:"date_added";i:2;}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:4:"2421";s:5:"attrs";a:3:{s:8:"group_id";s:1:"1";s:9:"group_id2";s:2:"11";s:10:"date_added";s:10:"1243252822";}}i:2;a:2:{s:6:"weight";s:4:"2421";s:5:"attrs";a:3:{s:8:"group_id";s:1:"1";s:9:"group_id2";s:2:"11";s:10:"date_added";s:10:"1243252822";}}i:4;a:2:{s:6:"weight";s:4:"1442";s:5:"attrs";a:3:{s:8:"group_id";s:1:"2";s:9:"group_id2";s:2:"12";s:10:"date_added";s:10:"1243252822";}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"test";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"5";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"test";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:7:"content";}s:5:"attrs";a:2:{s:8:"group_id";i:1;s:10:"date_added";i:2;}s:7:"matches";a:20:{i:1;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:2:{s:8:"group_id";s:1:"1";s:10:"date_added";s:10:"1257432034";}}i:2;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:2:{s:8:"group_id";s:1:"2";s:10:"date_added";s:10:"1257432034";}}i:3;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:2:{s:8:"group_id";s:1:"3";s:10:"date_added";s:10:"1257432034";}}i:4;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:2:{s:8:"group_id";s:1:"4";s:10:"date_added";s:10:"1257432034";}}i:5;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:2:{s:8:"group_id";s:1:"5";s:10:"date_added";s:10:"1257432034";}}i:6;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:2:{s:8:"group_id";s:1:"6";s:10:"date_added";s:10:"1257432034";}}i:7;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:2:{s:8:"group_id";s:1:"7";s:10:"date_added";s:10:"1257432034";}}i:8;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:2:{s:8:"group_id";s:1:"8";s:10:"date_added";s:10:"1257432034";}}i:9;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:2:{s:8:"group_id";s:1:"9";s:10:"date_added";s:10:"1257432034";}}i:10;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:2:{s:8:"group_id";s:2:"10";s:10:"date_added";s:10:"1257432034";}}i:11;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:2:{s:8:"group_id";s:2:"11";s:10:"date_added";s:10:"1257432034";}}i:12;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:2:{s:8:"group_id";s:2:"12";s:10:"date_added";s:10:"1257432034";}}i:13;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:2:{s:8:"group_id";s:2:"13";s:10:"date_added";s:10:"1257432034";}}i:14;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:2:{s:8:"group_id";s:2:"14";s:10:"date_added";s:10:"1257432034";}}i:15;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:2:{s:8:"group_id";s:2:"15";s:10:"date_added";s:10:"1257432034";}}i:16;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:2:{s:8:"group_id";s:2:"16";s:10:"date_added";s:10:"1257432034";}}i:17;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:2:{s:8:"group_id";s:2:"17";s:10:"date_added";s:10:"1257432034";}}i:18;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:2:{s:8:"group_id";s:2:"18";s:10:"date_added";s:10:"1257432034";}}i:19;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:2:{s:8:"group_id";s:2:"19";s:10:"date_added";s:10:"1257432034";}}i:20;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:2:{s:8:"group_id";s:2:"20";s:10:"date_added";s:10:"1257432034";}}}s:5:"total";s:3:"199";s:11:"total_found";s:3:"199";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:4:"test";a:2:{s:4:"docs";s:3:"199";s:4:"hits";s:3:"199";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"test";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning"
;s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:7:"content";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:4:"2421";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"2421";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:4:"1442";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"test";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"5";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"test";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:7:"content";}s:5:"attrs";a:2:{s:8:"group_id";i:1;s:10:"date_added";i:2;}s:7:"matches";a:20:{i:1;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:2:{s:8:"group_id";s:1:"1";s:10:"date_added";s:10:"1257432034";}}i:2;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:2:{s:8:"group_id";s:1:"2";s:10:"date_added";s:10:"1257432034";}}i:3;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:2:{s:8:"group_id";s:1:"3";s:10:"date_added";s:10:"1257432034";}}i:4;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:2:{s:8:"group_id";s:1:"4";s:10:"date_added";s:10:"1257432034";}}i:5;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:2:{s:8:"group_id";s:1:"5";s:10:"date_added";s:10:"1257432034";}}i:6;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:2:{s:8:"group_id";s:1:"6";s:10:"date_added";s:10:"1257432034";}}i:7;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:2:{s:8:"group_id";s:1:"7";s:10:"date_added";s:10:"1257432034";}}i:8;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:2:{s:8:"group_id";s:1:"8";s:10:"date_added";s:10:"1257432034";}}i:9;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:2:{s:8:"group_id";s:1:"9";s:10:"date_added";s:10:"1257432034";}}i:10;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:2:{s:8:"group_id";s:2:"10";s:10:"date_added";s:10:"1257432034";}}i:11;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:2:{s:8:"group_id";s:2:"11";s:10:"date_added";s:10:"1257432034";}}i:12;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:2:{s:8:"group_id";s:2:"12";s:10:"date_added";s:10:"1257432034";}}i:13;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:2:{s:8:"group_id";s:2:"13";s:10:"date_added";s:10:"1257432034";}}i:14;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:2:{s:8:"group_id";s:2:"14";s:10:"date_added";s:10:"1257432034";}}i:15;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:2:{s:8:"group_id";s:2:"15";s:10:"date_added";s:10:"1257432034";}}i:16;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:2:{s:8:"group_id";s:2:"16";s:10:"date_added";s:10:"1257432034";}}i:17;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:2:{s:8:"group_id";s:2:"17";s:10:"date_added";s:10:"1257432034";}}i:18;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:2:{s:8:"group_id";s:2:"18";s:10:"date_added";s:10:"1257432034";}}i:19;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:2:{s:8:"group_id";s:2:"19";s:10:"date_added";s:10:"1257432034";}}i:20;a:2:{s:6:"weight";s:4:"1272";s:5:"attrs";a:2:{s:8:"group_id";s:2:"20";s:10:"date_added";s:10:"1257432034";}}}s:5:"total";s:3:"199";s:11:"total_found";s:3:"199";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:4:"test";a:2:{s:4:"docs";s:3:"199";s:4:"hits";s:3:"199";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"test";}i:4;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:6:"adtext";i:1;s:7:"heading";i:2;s:8:"postcode";}s:5:"attrs";a:5:{s:3:"lng";i:5;s:3:"lat";i:5;s:7:"section";i:1;s:7:"make_id";i:1;s:15:"transmission_id";i:1;}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:4:"2582";s:5:"attrs";a:5:{s:3:"lng";d:-0.0798577964305877685546875;s:3:"lat";d:0.937717020511627197265625;s:7:"section";s:1:"1";s:7
:"make_id";s:1:"8";s:15:"transmission_id";s:1:"1";}}i:2;a:2:{s:6:"weight";s:4:"1560";s:5:"attrs";a:5:{s:3:"lng";d:-0.079998902976512908935546875;s:3:"lat";d:0.891974985599517822265625;s:7:"section";s:1:"1";s:7:"make_id";s:1:"5";s:15:"transmission_id";s:1:"0";}}i:211250;a:2:{s:6:"weight";s:4:"1560";s:5:"attrs";a:5:{s:3:"lng";d:-0.072145499289035797119140625;s:3:"lat";d:0.926760971546173095703125;s:7:"section";s:1:"0";s:7:"make_id";s:2:"29";s:15:"transmission_id";s:1:"1";}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.002";s:5:"words";a:3:{s:4:"ford";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"2";}s:5:"corsa";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:2:"tt";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:17:"ford | corsa | tt";}}}sphinx-2.0.4-release/test/test_189/0000755000176700017710000000000011724063141016301 5ustar deogardeogarsphinx-2.0.4-release/test/test_189/test.xml0000644000176700017710000000376311703321240020005 0ustar deogardeogar master vs agent compat mode 3 searchd { compat_sphinxql_magics = 0 compat_sphinxql_magics = 1 compat_sphinxql_magics = 1 workers = threads } index dist { type = distributed agent = :loc1 agent = :loc2 } source src_a1 { type = mysql sql_query = SELECT id, idd, 1 as agent, body FROM test_table where id in ( 1,2,3,4,5 ) sql_attr_uint = agent sql_attr_uint = idd } index loc1 { source = src_a1 docinfo = extern charset_type = utf-8 path = /a1 } source src_a2 { type = mysql sql_query = SELECT id, idd, 2 as agent, body FROM test_table where id in ( 5,6,7, 8, 9 ) sql_attr_uint = agent sql_attr_uint = idd } index loc2 { source = src_a2 docinfo = extern charset_type = utf-8 path = /a2 } CREATE TABLE `test_table` ( `id` int(11) NOT NULL default '0', `idd` int(11) NOT NULL default '0', `body` varchar(1024) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 1, 1, 'the dog' ), ( 2, 10, 'the cat' ), ( 3, 2, 'the bird' ), ( 4, 11, 'cat eats bird' ), ( 5, 3, 'dog eats cat' ), ( 6, 12, 'bird' ), ( 7, 4, 'the cat' ), ( 8, 5, 'eats' ), ( 9, 13, 'the' ) select * from dist select idd, agent from dist group by idd select *, idd, agent from dist group by idd select idd, @count, agent from dist group by idd select idd, count(*), agent from dist group by idd sphinx-2.0.4-release/test/test_189/model.bin0000644000176700017710000000660111703321240020070 0ustar deogardeogara:1:{i:0;a:5:{i:0;a:3:{s:8:"sphinxql";s:18:"select * from dist";s:10:"total_rows";i:9;s:4:"rows";a:9:{i:0;a:3:{s:2:"id";s:1:"1";s:3:"idd";s:1:"1";s:5:"agent";s:1:"1";}i:1;a:3:{s:2:"id";s:1:"2";s:3:"idd";s:2:"10";s:5:"agent";s:1:"1";}i:2;a:3:{s:2:"id";s:1:"3";s:3:"idd";s:1:"2";s:5:"agent";s:1:"1";}i:3;a:3:{s:2:"id";s:1:"4";s:3:"idd";s:2:"11";s:5:"agent";s:1:"1";}i:4;a:3:{s:2:"id";s:1:"5";s:3:"idd";s:1:"3";s:5:"agent";s:1:"2";}i:5;a:3:{s:2:"id";s:1:"6";s:3:"idd";s:2:"12";s:5:"agent";s:1:"2";}i:6;a:3:{s:2:"id";s:1:"7";s:3:"idd";s:1:"4";s:5:"agent";s:1:"2";}i:7;a:3:{s:2:"id";s:1:"8";s:3:"idd";s:1:"5";s:5:"agent";s:1:"2";}i:8;a:3:{s:2:"id";s:1:"9";s:3:"idd";s:2:"13";s:5:"agent";s:1:"2";}}}i:1;a:3:{s:8:"sphinxql";s:40:"select idd, agent from dist group by 
idd";s:10:"total_rows";i:9;s:4:"rows";a:9:{i:0;a:2:{s:3:"idd";s:1:"1";s:5:"agent";s:1:"1";}i:1;a:2:{s:3:"idd";s:2:"10";s:5:"agent";s:1:"1";}i:2;a:2:{s:3:"idd";s:1:"2";s:5:"agent";s:1:"1";}i:3;a:2:{s:3:"idd";s:2:"11";s:5:"agent";s:1:"1";}i:4;a:2:{s:3:"idd";s:1:"3";s:5:"agent";s:1:"1";}i:5;a:2:{s:3:"idd";s:2:"12";s:5:"agent";s:1:"2";}i:6;a:2:{s:3:"idd";s:1:"4";s:5:"agent";s:1:"2";}i:7;a:2:{s:3:"idd";s:1:"5";s:5:"agent";s:1:"2";}i:8;a:2:{s:3:"idd";s:2:"13";s:5:"agent";s:1:"2";}}}i:2;a:3:{s:8:"sphinxql";s:43:"select *, idd, agent from dist group by idd";s:10:"total_rows";i:9;s:4:"rows";a:9:{i:0;a:3:{s:2:"id";s:1:"1";s:3:"idd";s:1:"1";s:5:"agent";s:1:"1";}i:1;a:3:{s:2:"id";s:1:"2";s:3:"idd";s:2:"10";s:5:"agent";s:1:"1";}i:2;a:3:{s:2:"id";s:1:"3";s:3:"idd";s:1:"2";s:5:"agent";s:1:"1";}i:3;a:3:{s:2:"id";s:1:"4";s:3:"idd";s:2:"11";s:5:"agent";s:1:"1";}i:4;a:3:{s:2:"id";s:1:"5";s:3:"idd";s:1:"3";s:5:"agent";s:1:"1";}i:5;a:3:{s:2:"id";s:1:"6";s:3:"idd";s:2:"12";s:5:"agent";s:1:"2";}i:6;a:3:{s:2:"id";s:1:"7";s:3:"idd";s:1:"4";s:5:"agent";s:1:"2";}i:7;a:3:{s:2:"id";s:1:"8";s:3:"idd";s:1:"5";s:5:"agent";s:1:"2";}i:8;a:3:{s:2:"id";s:1:"9";s:3:"idd";s:2:"13";s:5:"agent";s:1:"2";}}}i:3;a:3:{s:8:"sphinxql";s:48:"select idd, @count, agent from dist group by idd";s:10:"total_rows";i:9;s:4:"rows";a:9:{i:0;a:3:{s:3:"idd";s:1:"1";s:6:"@count";s:1:"1";s:5:"agent";s:1:"1";}i:1;a:3:{s:3:"idd";s:2:"10";s:6:"@count";s:1:"1";s:5:"agent";s:1:"1";}i:2;a:3:{s:3:"idd";s:1:"2";s:6:"@count";s:1:"1";s:5:"agent";s:1:"1";}i:3;a:3:{s:3:"idd";s:2:"11";s:6:"@count";s:1:"1";s:5:"agent";s:1:"1";}i:4;a:3:{s:3:"idd";s:1:"3";s:6:"@count";s:1:"2";s:5:"agent";s:1:"1";}i:5;a:3:{s:3:"idd";s:2:"12";s:6:"@count";s:1:"1";s:5:"agent";s:1:"2";}i:6;a:3:{s:3:"idd";s:1:"4";s:6:"@count";s:1:"1";s:5:"agent";s:1:"2";}i:7;a:3:{s:3:"idd";s:1:"5";s:6:"@count";s:1:"1";s:5:"agent";s:1:"2";}i:8;a:3:{s:3:"idd";s:2:"13";s:6:"@count";s:1:"1";s:5:"agent";s:1:"2";}}}i:4;a:3:{s:8:"sphinxql";s:50:"select idd, count(*), agent from dist group by idd";s:10:"total_rows";i:9;s:4:"rows";a:9:{i:0;a:3:{s:3:"idd";s:1:"1";s:8:"count(*)";s:1:"1";s:5:"agent";s:1:"1";}i:1;a:3:{s:3:"idd";s:2:"10";s:8:"count(*)";s:1:"1";s:5:"agent";s:1:"1";}i:2;a:3:{s:3:"idd";s:1:"2";s:8:"count(*)";s:1:"1";s:5:"agent";s:1:"1";}i:3;a:3:{s:3:"idd";s:2:"11";s:8:"count(*)";s:1:"1";s:5:"agent";s:1:"1";}i:4;a:3:{s:3:"idd";s:1:"3";s:8:"count(*)";s:1:"2";s:5:"agent";s:1:"1";}i:5;a:3:{s:3:"idd";s:2:"12";s:8:"count(*)";s:1:"1";s:5:"agent";s:1:"2";}i:6;a:3:{s:3:"idd";s:1:"4";s:8:"count(*)";s:1:"1";s:5:"agent";s:1:"2";}i:7;a:3:{s:3:"idd";s:1:"5";s:8:"count(*)";s:1:"1";s:5:"agent";s:1:"2";}i:8;a:3:{s:3:"idd";s:2:"13";s:8:"count(*)";s:1:"1";s:5:"agent";s:1:"2";}}}}}sphinx-2.0.4-release/test/test_044/0000755000176700017710000000000011724063141016267 5ustar deogardeogarsphinx-2.0.4-release/test/test_044/test.xml0000644000176700017710000000376611323711623020004 0ustar deogardeogar kill-list (part 1) indexer { mem_limit = 16M } searchd { } source src_main { type = mysql sql_query = SELECT * FROM test_table } source src_delta1 { type = mysql sql_query = SELECT * FROM test_table WHERE document_id=6 sql_query_killlist = SELECT 1 } source src_delta2 { type = mysql sql_query = SELECT * FROM test_table WHERE document_id=7 sql_query_killlist = SELECT 2 } source src_delta3 { type = mysql sql_query = SELECT * FROM test_table WHERE document_id=8 sql_query_killlist = SELECT 3 } index idx_main { source = src_main path = /test_main } index idx_delta1 { source = src_delta1 path = /test_delta1 } index idx_delta2 { source = 
src_delta2 path = /test_delta2 } index idx_delta3 { source = src_delta3 path = /test_delta3 } CREATE TABLE test_table ( `document_id` int(11) NOT NULL default '0', `body` varchar(255) NOT NULL default '' ); DROP TABLE IF EXISTS test_table; INSERT INTO test_table VALUES ( 1, 'test one' ), ( 2, 'test two' ), ( 3, 'test three' ), ( 4, 'test four' ), ( 5, 'test five' ), ( 6, 'test delta1' ), ( 7, 'test delta2' ), ( 8, 'test delta3' ) test test test test test test test test sphinx-2.0.4-release/test/test_044/model.bin0000644000176700017710000001155311070750004020061 0ustar deogardeogara:1:{i:0;a:8:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:8:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:8;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"8";s:11:"total_found";s:1:"8";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"test";a:2:{s:4:"docs";s:1:"8";s:4:"hits";s:1:"8";}}s:5:"query";s:4:"test";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:7:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:8;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"7";s:11:"total_found";s:1:"7";s:4:"time";s:5:"0.003";s:5:"words";a:1:{s:4:"test";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:4:"test";}i:2;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:7:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:8;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"7";s:11:"total_found";s:1:"7";s:4:"time";s:5:"0.003";s:5:"words";a:1:{s:4:"test";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:4:"test";}i:3;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:7:{i:8;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"7";s:11:"total_found";s:1:"7";s:4:"time";s:5:"0.003";s:5:"words";a:1:{s:4:"test";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:4:"test";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:6:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:6;a:2:{s:6:"weight";s:
1:"1";s:5:"attrs";a:0:{}}i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:8;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"6";s:11:"total_found";s:1:"6";s:4:"time";s:5:"0.005";s:5:"words";a:1:{s:4:"test";a:2:{s:4:"docs";s:2:"10";s:4:"hits";s:2:"10";}}s:5:"query";s:4:"test";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:6:{i:8;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"6";s:11:"total_found";s:1:"6";s:4:"time";s:5:"0.007";s:5:"words";a:1:{s:4:"test";a:2:{s:4:"docs";s:2:"10";s:4:"hits";s:2:"10";}}s:5:"query";s:4:"test";}i:6;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:5:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:8;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:4:"time";s:5:"0.006";s:5:"words";a:1:{s:4:"test";a:2:{s:4:"docs";s:2:"11";s:4:"hits";s:2:"11";}}s:5:"query";s:4:"test";}i:7;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:5:{i:8;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:4:"time";s:5:"0.007";s:5:"words";a:1:{s:4:"test";a:2:{s:4:"docs";s:2:"11";s:4:"hits";s:2:"11";}}s:5:"query";s:4:"test";}}}sphinx-2.0.4-release/test/test_164/0000755000176700017710000000000011724063141016272 5ustar deogardeogarsphinx-2.0.4-release/test/test_164/test.xml0000644000176700017710000000271111515406655020005 0ustar deogardeogar blend_mode indexer { mem_limit = 16M } searchd { } source test { type = mysql sql_query = SELECT * FROM test_table } index test { source = test path = /test blend_chars = @, &, ., !, - blend_mode = trim_none blend_mode = trim_none, skip_pure blend_mode = trim_tail blend_mode = trim_head, trim_tail charset_type = sbcs charset_type = utf-8 } CREATE TABLE test_table ( id INT NOT NULL, text VARCHAR(255) NOT NULL ); DROP TABLE IF EXISTS test_table INSERT INTO test_table VALUES ( 1, '@simple!' ), ( 2, 'hey @dude! a-team was awesome!!'), ( 3, 'hey @dude... a-team was @@!!! awesome'); simple \@simple \@simple\! simple\! "hey" "@dude!" "dude a-team" "a-team was" "@@!!!" "was @@!!! 
awesome" sphinx-2.0.4-release/test/test_164/model.bin0000644000176700017710000007400211712020702020060 0ustar deogardeogara:8:{i:0;a:9:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1680";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"simple";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:6:"simple";}i:1;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"@simple";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:8:"\@simple";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1680";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:8:"@simple!";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:10:"\@simple\!";}i:3;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"simple!";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:8:"simple\!";}i:4;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:4:"2590";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:3:"hey";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:6:"@dude!";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:14:""hey" "@dude!"";}i:5;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:2;a:2:{s:6:"weight";s:4:"2500";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"2500";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:4:"dude";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:6:"a-team";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:13:""dude a-team"";}i:6;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:2;a:2:{s:6:"weight";s:4:"2500";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"2500";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:6:"a-team";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:3:"was";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:12:""a-team 
was"";}i:7;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:4:"1680";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"@@!!!";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:""@@!!!"";}i:8;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:4:"3560";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:3:"was";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:5:"@@!!!";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:7:"awesome";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:19:""was @@!!! awesome"";}}i:1;a:9:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1680";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"simple";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:6:"simple";}i:1;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"@simple";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:8:"\@simple";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1680";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:8:"@simple!";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:10:"\@simple\!";}i:3;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"simple!";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:8:"simple\!";}i:4;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:4:"2590";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:3:"hey";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:6:"@dude!";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:14:""hey" "@dude!"";}i:5;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:2;a:2:{s:6:"weight";s:4:"2500";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"2500";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:4:"dude";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:6:"a-team";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:13:""dude 
a-team"";}i:6;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:2;a:2:{s:6:"weight";s:4:"2500";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"2500";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:6:"a-team";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:3:"was";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:12:""a-team was"";}i:7;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:""@@!!!"";}i:8;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:2;a:2:{s:6:"weight";s:4:"2500";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"2500";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:3:"was";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:7:"awesome";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:19:""was @@!!! awesome"";}}i:2;a:9:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1680";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"simple";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:6:"simple";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1680";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.012";s:5:"words";a:1:{s:7:"@simple";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:8:"\@simple";}i:2;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:8:"@simple!";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:10:"\@simple\!";}i:3;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"simple!";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:8:"simple\!";}i:4;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:2:{s:3:"hey";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:6:"@dude!";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:14:""hey" 
"@dude!"";}i:5;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:2;a:2:{s:6:"weight";s:4:"2500";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"2500";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:4:"dude";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:6:"a-team";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:13:""dude a-team"";}i:6;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:2;a:2:{s:6:"weight";s:4:"2500";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"2500";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:6:"a-team";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:3:"was";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:12:""a-team was"";}i:7;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:4:"1680";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"@@!!!";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:""@@!!!"";}i:8;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:4:"3560";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:3:"was";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:5:"@@!!!";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:7:"awesome";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:19:""was @@!!! 
awesome"";}}i:3;a:9:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1680";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"simple";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:6:"simple";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1680";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"@simple";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:8:"\@simple";}i:2;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:8:"@simple!";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:10:"\@simple\!";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1680";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"simple!";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:8:"simple\!";}i:4;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:2:{s:3:"hey";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:6:"@dude!";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:14:""hey" "@dude!"";}i:5;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:2;a:2:{s:6:"weight";s:4:"2500";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"2500";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:4:"dude";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:6:"a-team";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:13:""dude a-team"";}i:6;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:2;a:2:{s:6:"weight";s:4:"2500";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"2500";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:6:"a-team";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:3:"was";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:12:""a-team 
was"";}i:7;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:4:"1680";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"@@!!!";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:""@@!!!"";}i:8;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:4:"3560";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:3:"was";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:5:"@@!!!";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:7:"awesome";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:19:""was @@!!! awesome"";}}i:4;a:9:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1680";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"simple";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:6:"simple";}i:1;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"@simple";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:8:"\@simple";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1680";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:8:"@simple!";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:10:"\@simple\!";}i:3;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"simple!";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:8:"simple\!";}i:4;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:4:"2590";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:3:"hey";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:6:"@dude!";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:14:""hey" "@dude!"";}i:5;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:2;a:2:{s:6:"weight";s:4:"2500";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"2500";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:4:"dude";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:6:"a-team";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:13:""dude 
a-team"";}i:6;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:2;a:2:{s:6:"weight";s:4:"2500";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"2500";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:6:"a-team";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:3:"was";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:12:""a-team was"";}i:7;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:4:"1680";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"@@!!!";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:""@@!!!"";}i:8;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:4:"3560";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:3:"was";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:5:"@@!!!";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:7:"awesome";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:19:""was @@!!! awesome"";}}i:5;a:9:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1680";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"simple";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:6:"simple";}i:1;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"@simple";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:8:"\@simple";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1680";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:8:"@simple!";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:10:"\@simple\!";}i:3;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"simple!";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:8:"simple\!";}i:4;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:4:"2590";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:3:"hey";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:6:"@dude!";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:14:""hey" 
"@dude!"";}i:5;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:2;a:2:{s:6:"weight";s:4:"2500";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"2500";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:4:"dude";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:6:"a-team";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:13:""dude a-team"";}i:6;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:2;a:2:{s:6:"weight";s:4:"2500";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"2500";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:6:"a-team";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:3:"was";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:12:""a-team was"";}i:7;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:""@@!!!"";}i:8;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:2;a:2:{s:6:"weight";s:4:"2500";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"2500";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:3:"was";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:7:"awesome";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:19:""was @@!!! 
awesome"";}}i:6;a:9:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1680";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"simple";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:6:"simple";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1680";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"@simple";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:8:"\@simple";}i:2;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:8:"@simple!";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:10:"\@simple\!";}i:3;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"simple!";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:8:"simple\!";}i:4;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:2:{s:3:"hey";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:6:"@dude!";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:14:""hey" "@dude!"";}i:5;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:2;a:2:{s:6:"weight";s:4:"2500";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"2500";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:4:"dude";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:6:"a-team";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:13:""dude a-team"";}i:6;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:2;a:2:{s:6:"weight";s:4:"2500";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"2500";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:6:"a-team";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:3:"was";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:12:""a-team 
was"";}i:7;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:4:"1680";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"@@!!!";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:""@@!!!"";}i:8;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:4:"3560";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:3:"was";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:5:"@@!!!";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:7:"awesome";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:19:""was @@!!! awesome"";}}i:7;a:9:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1680";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"simple";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:6:"simple";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1680";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"@simple";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:8:"\@simple";}i:2;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:8:"@simple!";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:10:"\@simple\!";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1680";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"simple!";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:8:"simple\!";}i:4;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:2:{s:3:"hey";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:6:"@dude!";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:14:""hey" "@dude!"";}i:5;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:2;a:2:{s:6:"weight";s:4:"2500";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"2500";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:4:"dude";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:6:"a-team";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:13:""dude 
a-team"";}i:6;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:2;a:2:{s:6:"weight";s:4:"2500";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"2500";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:6:"a-team";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:3:"was";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:12:""a-team was"";}i:7;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:4:"1680";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"@@!!!";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:""@@!!!"";}i:8;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:4:"3560";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:3:"was";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:5:"@@!!!";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:7:"awesome";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:19:""was @@!!! awesome"";}}}sphinx-2.0.4-release/test/test_095/0000755000176700017710000000000011724063141016275 5ustar deogardeogarsphinx-2.0.4-release/test/test_095/test.xml0000644000176700017710000000430511606637772020020 0ustar deogardeogar merge vs killlists indexer { mem_limit = 16M } searchd { } source srcmain { type = mysql sql_query = SELECT document_id, body, document_id*10 AS xid FROM test_table WHERE document_id in (3,4,5,6) sql_attr_uint = xid } source srcdelta : srcmain { sql_query = SELECT document_id, body, document_id*100 AS xid FROM test_table WHERE document_id in (1,2) sql_query_killlist = select 4 union select 6 } index main { source = srcmain path = /main charset_type = utf-8 } index delta { source = srcdelta path = /delta charset_type = utf-8 } source srcm1 { type = mysql sql_query = SELECT document_id, body, 1 xid FROM test_table WHERE document_id in (3,4,5,6) sql_attr_uint = xid sql_query_killlist = select 1 union select 2 union select 3 } source srcd1 : srcmain { sql_query = SELECT document_id, body, 2 AS xid FROM test_table WHERE document_id in (1,2) sql_query_killlist = select 4 union select 6 } index m1 { source = srcm1 path = /m1 charset_type = utf-8 } index d1 { source = srcd1 path = /d1 charset_type = utf-8 } index mmk { source = srcm1 path = /mmk charset_type = utf-8 } --merge main delta --merge m1 d1 --merge mmk d1 --merge-killlists one three four five six CREATE TABLE `test_table` ( `document_id` int(11) NOT NULL default '0', `body` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 1, 'one' ), ( 2, 'two' ), ( 3, 'three blind mice, how they run' ), ( 4, 'four became from three' ), ( 5, 'five' ), ( 6, 'six' ) sphinx-2.0.4-release/test/test_095/model.bin0000644000176700017710000000577311606637772020123 0ustar 
deogardeogara:1:{i:0;a:7:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:1:{s:3:"xid";i:1;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"xid";s:3:"100";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"one";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:3:"one";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:1:{s:3:"xid";i:1;}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"xid";s:2:"30";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"three";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"three";}i:2;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:1:{s:3:"xid";i:1;}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"four";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"four";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:1:{s:3:"xid";i:1;}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"xid";s:2:"50";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"five";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"five";}i:4;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:1:{s:3:"xid";i:1;}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"six";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:3:"six";}i:5;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:1:{s:3:"xid";i:1;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"xid";s:1:"2";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"xid";s:1:"2";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"xid";s:1:"1";}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"xid";s:1:"1";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:6;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:1:{s:3:"xid";i:1;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"xid";s:1:"2";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"xid";s:1:"2";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"xid";s:1:"1";}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"xid";s:1:"1";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}}}sphinx-2.0.4-release/test/test_038/0000755000176700017710000000000011724063141016272 5ustar deogardeogarsphinx-2.0.4-release/test/test_038/test.xml0000644000176700017710000000622311566532060020003 0ustar deogardeogar stemming indexer { mem_limit = 16M } searchd { } source srctest { type = mysql sql_query = SELECT * FROM test_table } index test { source = srctest path = /test charset_type = utf-8 charset_table = -, 0..9, A..Z->a..z, a..z, U+410..U+42F->U+430..U+44F, 
U+430..U+44F morphology = stem_ru, stem_en } index morph0 { source = srctest path = /morph0 dict = keywords min_prefix_len = 1 enable_star = 1 } index morph1 { source = srctest path = /morph1 dict = keywords min_prefix_len = 1 enable_star = 1 morphology = stem_en } "one two"~3 three =running =run running* runnin* run* ru* =runnin* runnings* runnin running run =running =run running* runnin* run* ru* =runnin* runnings* runnin running run CALL KEYWORDS ('то-тичеÑкий', 'test') CALL KEYWORDS ('тоЫтичеÑкий', 'test') CREATE TABLE `test_table` ( `document_id` int(11) NOT NULL default '0', `body` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 1, 'and nothing else matters' ), ( 2, 'running into trouble' ) sphinx-2.0.4-release/test/test_038/model.bin0000644000176700017710000002256011566532060020076 0ustar deogardeogara:1:{i:0;a:25:{i:0;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.002";s:5:"words";a:3:{s:3:"one";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:3:"two";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}s:5:"three";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:380:""one two"~3 three";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:4:"1643";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:8:"=running";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:8:"=running";}i:2;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"=run";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"=run";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:4:"1643";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"running";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:8:"running*";}i:4;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:4:"1643";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"running";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"runnin*";}i:5;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:4:"1643";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"running";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"run*";}i:6;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:4:"1643";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"run
ning";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:3:"ru*";}i:7;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:4:"1643";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"running";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:8:"=runnin*";}i:8;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:9:"runnings*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:9:"runnings*";}i:9;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"runnin";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:6:"runnin";}i:10;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:4:"1643";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"run";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"running";}i:11;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:4:"1643";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"run";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:3:"run";}i:12;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:4:"1643";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"running";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:8:"=running";}i:13;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"run";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"=run";}i:14;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:4:"1643";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"running";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:8:"running*";}i:15;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:4:"1643";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"running";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"runnin*";}i:16;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"
";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:4:"1643";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"running";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"run*";}i:17;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:4:"1643";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"running";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:3:"ru*";}i:18;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:4:"1643";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"running";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:8:"=runnin*";}i:19;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:9:"runnings*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:9:"runnings*";}i:20;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"runnin";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:6:"runnin";}i:21;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:4:"1643";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"running";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"running";}i:22;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"run";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:3:"run";}i:23;a:3:{s:8:"sphinxql";s:47:"CALL KEYWORDS ('то-тичеÑкий', 'test')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:2:{s:9:"tokenized";s:21:"то-тичеÑкий";s:10:"normalized";s:17:"то-тичеÑк";}}}i:24;a:3:{s:8:"sphinxql";s:48:"CALL KEYWORDS ('тоЫтичеÑкий', 'test')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:2:{s:9:"tokenized";s:22:"тоытичеÑкий";s:10:"normalized";s:18:"тоытичеÑк";}}}}}sphinx-2.0.4-release/test/bench.php0000644000176700017710000003436011413352753016524 0ustar deogardeogar array(), 'sources' => $sources, 'versions' => array ( $data[0]['version'], $data[1]['version'] ), 'checksums' => $data[0]['hash'] == $data[1]['hash'], ); if ( count($data[0]['results']) != count($data[1]['results']) ) Fatal ( "unable to produce the report, result set sizes mismatch" ); $combined = array(); $skip = array(); foreach ( $data as &$set ) { $out = array(); for ( $i=0; $i $tag, 'time' => 0, 'total' => 0, 'total_found' => 0 ); } $row['time'] = (float)$row['time']; $out[$tag]['time'] += $row['time']; 
$out[$tag]['total'] += $row['total']; $out[$tag]['total_found'] += $row['total_found']; } $combined[] = $out; } if ( array_keys($combined[0]) != array_keys($combined[1]) ) Fatal ( "unable to produce the report, tag sets mismatch" ); $report['aggregate'] = sphCompareSets ( array ( array_values ( $combined[0] ), array_values ( $combined[1] ) ) ); $report['detailed'] = sphCompareSets ( array ( $data[0]['results'], $data[1]['results'] ) ); return $report; } //////////////////////////////////////////////////////////////////////////////// function sphTextReport ( $report ) { global $g_locals; $mode = $g_locals['mode']; $width = array (); $header = array ( 'query', 'time', 'total', 'total found' ); foreach ( $header as $title ) $width[] = strlen($title); foreach ( $report[$mode] as $row ) { if ( count($row)==1 ) continue; for ( $i=0; $i<4; $i++ ) { $len = min ( strlen($row[$i]), 40 ); $width[$i] = max ( $width[$i], $len ); } } printf ( "COMPARING: %s - %s\n", $report['versions'][0], $report['sources'][0] ); printf ( " %s - %s\n", $report['versions'][1], $report['sources'][1] ); if ( !$report['checksums'] ) printf ( "WARNING: checksum mismatch, results might be incorrect\n\n" ); if ( $mode=='aggregate' ) printf("\n"); $restart = true; foreach ( $report[$mode] as $row ) { if ( count($row)==1 ) { printf("\n"); $w = str_repeat ( '=', ( array_sum($width) + 9 - strlen($row[0]) ) / 2 ); printf ( " %s %s %s\n", $w, $row[0], $w ); $restart = true; } if ( $restart ) { for ( $i=0; $i<4; $i++ ) printf ( ' %s', str_pad ( $header[$i], $width[$i] + 2, ' ', STR_PAD_BOTH ) ); printf("\n"); for ( $i=0; $i<4; $i++ ) printf ( ' %s', str_repeat ( '-', $width[$i] + 2 ) ); $restart = false; printf("\n"); } if ( count($row)==4 ) for ( $i=0; $i<4; $i++ ) { $text = $row[$i]; if ( strlen($text) > 40 ) $text = substr ( $text, 0, 37 ) . '...'; if ( $i>1 ) $text = number_format ( $text, 0, '', ' ' ); printf ( ' %s', str_pad ( $text, $width[$i] + 2, ' ', STR_PAD_LEFT ) ); } printf("\n"); } if ( !count ( $report[$mode] ) ) printf ( "empty report.\n" ); } //////////////////////////////////////////////////////////////////////////////// /// erases rt index function EraseRtIndex ( $path, $name ) { $fp = opendir ( $path ); if ( $fp ) { $name .= '.'; while ( ( $file = readdir ( $fp ) ) !== false ) { if ( $file != "." && $file != ".." 
&& !is_dir ( $file ) && strripos ( $file, $name ) !==false ) unlink ( "$file" ); } closedir ( $fp ); } } //////////////////////////////////////////////////////////////////////////////// /// returns results file name on success; false on failure function sphBenchmark ( $name, $locals, $force_reindex ) { // load config $config = new SphinxConfig ( $locals ); if ( !( $config->Load ( "bench/$name.xml" ) && CheckConfig ( $config, $name ) ) ) return false; global $g_locals; $g_locals['rt_mode']= $config->Requires('force-rt'); // temporary limitations assert ( $config->SubtestCount()==1 ); assert ( $config->IsQueryTest() ); // find unused output prefix $i = 0; for ( ; file_exists("bench-results/$name.$i.bin"); $i++ ); $output = "bench-results/$name.$i"; printf ( "benchmarking: %s\n", $config->Name() ); // grab index names and paths $msg = ''; if ( !$config->IsRt() ) // enable only in non rt-mode $config->EnableCompat098 (); $config->WriteConfig ( 'config.conf', 'all', $msg ); $indexes = array(); $text = file_get_contents('config.conf'); preg_match_all ( '/index\s+(\S+)\s+{[^}]+path\s*=\s*(.*)[^}]+}/m', $text, $matches ); for ( $i=0; $i $path ) { printf ( "index: %s - ", $indexName ); if ( $config->IsRt() && $force_reindex ) EraseRtIndex ( $locals['data'], $path ); if ( !$config->IsRt() && ( !is_readable ( "$path.spa" ) || !is_readable ( "$path.spi" ) || $force_reindex ) ) { printf ( "indexing... " ); $tm = MyMicrotime(); $result = RunIndexer ( $error, $indexName ); $tm = MyMicrotime() - $tm; if ( $result==1 ) { printf ( "\nerror running the indexer:\n%s\n", $error ); return false; } else if ( $result==2 ) printf ( "done in %s, there were warnings:\n%s\n", sphFormatTime($tm), $error ); else printf ( "done in %s - ", sphFormatTime($tm) ); } if ( !$config->IsRt() ) { $hash = array ( 'spi' => md5_file ( "$path.spi" ), 'spa' => md5_file ( "$path.spa" ) ); printf ( "%s\n", $hash['spi'] ); } else { $hash = array ( 'xml'=>md5_file ("bench/$name.xml") ); printf ( "%s\n", $hash['xml'] ); } } // start searchd if ( !$locals['skip-searchd'] ) { $result = StartSearchd ( 'config.conf', "$output.searchd.txt", 'searchd.pid', $error ); if ( $result==1 ) { printf ( "error starting searchd:\n%s\n", $error ); return false; } else if ( $result==2 ) printf ( "searchd warning: %s\n", $error ); } // run the benchmark $isOK = false; if ( $config->IsSphinxqlTest () ) $isOK = $config->RunQuerySphinxQL ( $error, true ); else $isOK = $config->RunQuery ( '*', $error, 'warming-up:' ) && $config->RunQuery ( '*', $error, 'profiling:' ); if ( $isOK ) { $report = array ( 'results' => array(), 'time' => time(), 'hash' => $hash, 'version' => GetVersion() ); $i = 0; $q = null; $last = ''; foreach ( $config->Results() as $result ) { if ( $config->IsSphinxqlTest () ) { if ( $result['sphinxql']=='show meta' ) { $report['results'][] = array ( 'total' => $result['rows'][0]['Value'], 'total_found' => $result['rows'][1]['Value'], 'time' => $result['rows'][2]['Value'], 'query' => $last, 'tag' => $last ); } $last = $result['sphinxql']; } else { if ( $result[0] !== $q ) { $i = 0; $q = $result[0]; } $query = $config->GetQuery ( $q ); $report['results'][] = array ( 'total' => $result[1], 'total_found' => $result[2], 'time' => $result[3], 'query' => $query['query'][$i++], 'tag' => $query['tag'] ); } } file_put_contents ( "$output.bin", serialize ( $report ) ); printf ( "results saved to: $output.bin\n" ); } else printf ( "\nfailed to run queries:\n%s\n", $error ); // shutdown StopSearchd ( 'config.conf', 'searchd.pid' ); // all good return 
$output; } //////////////////////////////////////////////////////////////////////////////// /// pick $count freshest run files for $bench benchmark function PickFreshest ( $bench, $count ) { // traverse results dir for $bench.RUNID.bin $found = array(); $dh = opendir ( "bench-results" ); if ( $dh ) { while ( $entry = readdir ( $dh ) ) { if ( substr ( $entry, 0, 1+strlen($bench) )!==$bench."." ) continue; if ( substr ( $entry, -4 )!==".bin" ) continue; $index = (int)substr ( $entry, 1+strlen($bench) ); $found[$index] = "bench-results/$entry"; } closedir ( $dh ); } if ( !$found ) return null; ksort ( $found ); return array_slice ( array_values ( $found ), -$count ); } /// lookup run file by name, doing some common guesses function LookupRun ( $name ) { // try full BENCH.RUNID guesses first $found = null; foreach ( array ( $name, "$name.bin", "bench-results/$name", "bench-results/$name.bin" ) as $guess ) { if ( is_readable($guess) ) { $found = $guess; break; } } // try pick the freshest one by bench name next if ( !$found ) list($found) = PickFreshest ( $name, 1 ); // still not found? too bad if ( !$found ) Fatal ( "no run files for '$name' found" ); return $found; } function BenchPrintHelp ( $path ) { print << [OPTIONS] Commands are: b, benchmark BENCH benchmark (and store run result) bv BENCH benchmark, view run result bb BENCH (forcibly) build index, benchmark bbv BENCH (forcibly) build index, benchmark, view run result c, compare BENCH compare two latest run results of given benchmark c, compare BENCH RUNID1 RUNID2 c, compare BENCH.RUNID1 BENCH.RUNID2 compare two given run results t, try BENCH benchmark, view, but do not store run result v, view BENCH.RUNID view given run result Examples: bench.php b mytest run 'mytest' benchmark bench.php v mytest view latest 'mytest' run result bench.php v mytest.4 view 4th 'mytest' run result bench.php c mytest 3 7 bench.php c mytest.3 mytest.7 compare runs 3 and 7 of 'mytest' benchmark EOT; } function AvailableBenchmarks () { $dh = opendir ( "bench" ); if ( !$dh ) return; $avail = array (); while ( $entry = readdir ( $dh ) ) if ( substr ( $entry, -4 )===".xml" ) $avail[] = substr ( $entry, 0, -4 ); closedir ( $dh ); return join ( ", or ", $avail ); } function Main ( $argv ) { if ( count($argv)==1 ) { BenchPrintHelp ( $argv[0] ); return 0; } $mode = null; $files = array(); $locals = array(); // parse arguments $force_reindex = false; $view_results = false; $unlink_run = false; for ( $i=1; $i position anchors indexer { mem_limit = 16M } searchd { } source srctest { type = mysql sql_query = SELECT * FROM test_table } index test { source = srctest path = /test } ^one two ^other three three$ ^badger CREATE TABLE `test_table` ( `document_id` int(11) NOT NULL default '0', `title` varchar(5120) NOT NULL default '', `body` varchar(256) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 1, '', 'one' ), ( 2, '', 'one and two' ), ( 3, '', 'one but not the other one' ), ( 4, '', 'two and one' ), ( 9, '', 'other three' ), ( 10, '', 'three' ), ( 11, '', 'three' ), ( 12, '', 'three' ), ( 13, '', 'three' ), ( 14, '', 'three' ), ( 15, '', 'three' ), ( 16, '', 'three' ), ( 17, '', 'three' ), ( 18, '', 'three' ), ( 19, '', 'three' ) INSERT INTO test_table SELECT document_id+10, title, body FROM test_table WHERE document_id>=10 INSERT INTO test_table SELECT document_id+20, title, body FROM test_table WHERE document_id>=10 INSERT INTO test_table SELECT document_id+40, title, body FROM test_table WHERE document_id>=10 INSERT INTO 
test_table SELECT document_id+80, title, body FROM test_table WHERE document_id>=10 INSERT INTO test_table SELECT document_id+160, title, body FROM test_table WHERE document_id>=10 INSERT INTO test_table SELECT document_id+320, title, body FROM test_table WHERE document_id>=10 INSERT INTO test_table VALUES ( 2000, REPEAT('badger ',600), 'badger badger mushroom' ) INSERT INTO `test_table` VALUES ( 1000, '', 'other' ), ( 1001, '', 'other three blind mice' ); sphinx-2.0.4-release/test/test_055/model.bin0000644000176700017710000000547311406464303020076 0ustar deogardeogara:1:{i:0;a:4:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:4:"1690";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:3:"one";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"5";}s:3:"two";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:8:"^one two";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:9;a:2:{s:6:"weight";s:4:"2509";s:5:"attrs";a:0:{}}i:1001;a:2:{s:6:"weight";s:4:"2509";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:5:"other";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:5:"three";a:2:{s:4:"docs";s:3:"642";s:4:"hits";s:3:"642";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:12:"^other three";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:20:{i:9;a:2:{s:6:"weight";s:4:"1341";s:5:"attrs";a:0:{}}i:10;a:2:{s:6:"weight";s:4:"1341";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:4:"1341";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:4:"1341";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:4:"1341";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:4:"1341";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:4:"1341";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:4:"1341";s:5:"attrs";a:0:{}}i:17;a:2:{s:6:"weight";s:4:"1341";s:5:"attrs";a:0:{}}i:18;a:2:{s:6:"weight";s:4:"1341";s:5:"attrs";a:0:{}}i:19;a:2:{s:6:"weight";s:4:"1341";s:5:"attrs";a:0:{}}i:20;a:2:{s:6:"weight";s:4:"1341";s:5:"attrs";a:0:{}}i:21;a:2:{s:6:"weight";s:4:"1341";s:5:"attrs";a:0:{}}i:22;a:2:{s:6:"weight";s:4:"1341";s:5:"attrs";a:0:{}}i:23;a:2:{s:6:"weight";s:4:"1341";s:5:"attrs";a:0:{}}i:24;a:2:{s:6:"weight";s:4:"1341";s:5:"attrs";a:0:{}}i:25;a:2:{s:6:"weight";s:4:"1341";s:5:"attrs";a:0:{}}i:26;a:2:{s:6:"weight";s:4:"1341";s:5:"attrs";a:0:{}}i:27;a:2:{s:6:"weight";s:4:"1341";s:5:"attrs";a:0:{}}i:28;a:2:{s:6:"weight";s:4:"1341";s:5:"attrs";a:0:{}}}s:5:"total";s:3:"641";s:11:"total_found";s:3:"641";s:4:"time";s:5:"0.003";s:5:"words";a:1:{s:5:"three";a:2:{s:4:"docs";s:3:"642";s:4:"hits";s:3:"642";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:6:"three$";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:2:{i:0;s:5:"title";i:1;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2000;a:2:{s:6:"weight";s:4:"2998";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"badger";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:3:"602";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"^badger";}}}sphinx-2.0.4-release/test/test_116/0000755000176700017710000000000011724063141016267 5ustar 
deogardeogarsphinx-2.0.4-release/test/test_116/test.xml0000644000176700017710000000271411503513132017767 0ustar deogardeogar Bound cases of the proximity node indexer { mem_limit = 16M } searchd { } source src { type = mysql sql_query = SELECT * FROM test_table } index idx { source = src path = /main charset_type = utf-8 } "a b"~3 "e f"~2 "i j"~2 CREATE TABLE test_table ( id INTEGER AUTO_INCREMENT PRIMARY KEY NOT NULL, title TEXT NOT NULL ) ENGINE=MYISAM DROP TABLE IF EXISTS test_table sphinx-2.0.4-release/test/test_116/model.bin0000644000176700017710000000457511344225227020077 0ustar deogardeogara:1:{i:0;a:3:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:1:"a";a:2:{s:4:"docs";s:2:"10";s:4:"hits";s:2:"10";}s:1:"b";a:2:{s:4:"docs";s:2:"10";s:4:"hits";s:2:"10";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:""a b"~3";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:0:{}s:7:"matches";a:20:{i:521;a:2:{s:6:"weight";s:1:"3";s:5:"attrs";a:0:{}}i:11;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:12;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:13;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:14;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:15;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:16;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:17;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:18;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:19;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:20;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:21;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:22;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:23;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:24;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:25;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:26;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:27;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:28;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:29;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:3:"511";s:11:"total_found";s:3:"511";s:4:"time";s:5:"0.004";s:5:"words";a:2:{s:1:"e";a:2:{s:4:"docs";s:3:"511";s:4:"hits";s:3:"513";}s:1:"f";a:2:{s:4:"docs";s:3:"511";s:4:"hits";s:3:"513";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:""e f"~2";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:522;a:2:{s:6:"weight";s:3:"532";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:2:{s:1:"i";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:3:"532";}s:1:"j";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:3:"532";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:""i j"~2";}}}sphinx-2.0.4-release/test/test_034/0000755000176700017710000000000011724063141016266 5ustar deogardeogarsphinx-2.0.4-release/test/test_034/test.xml0000644000176700017710000000244611605620330017772 0ustar deogardeogar distibuted index vs MVA indexer { mem_limit = 16M } searchd { } source src { type = mysql sql_query = SELECT document_id, text, mva, tag FROM test_table sql_attr_multi = uint mva from field sql_attr_multi = bigint mva from field sql_attr_uint = tag } index idx { source = src path = /idx docinfo = extern 
charset_type = utf-8 min_word_len = 1 } index dist { type = distributed agent = :idx agent_connect_timeout = 1000 agent_query_timeout = 3000 } 2 CREATE TABLE `test_table` ( `document_id` int(11), `text` varchar(255) NOT NULL, `mva` varchar(255) NOT NULL, `tag` int(11) ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 1, 'test1', '1', 11 ), ( 2, 'test2', '2,3', 22 ), ( 3, 'test3', '4,5,6', 33 ), ( 4, 'test4', '7,8,9,10', 44 ), ( 5, 'test5', '', 55 ) sphinx-2.0.4-release/test/test_034/model.bin0000644000176700017710000000326411605620330020062 0ustar deogardeogara:2:{i:0;a:1:{i:0;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:2:{s:3:"mva";i:1073741825;s:3:"tag";i:1;}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"mva";a:1:{i:0;s:1:"1";}s:3:"tag";s:2:"11";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"mva";a:2:{i:0;s:1:"2";i:1;s:1:"3";}s:3:"tag";s:2:"22";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"mva";a:3:{i:0;s:1:"4";i:1;s:1:"5";i:2;s:1:"6";}s:3:"tag";s:2:"33";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"mva";a:4:{i:0;s:1:"7";i:1;s:1:"8";i:2;s:1:"9";i:3;s:2:"10";}s:3:"tag";s:2:"44";}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"mva";a:0:{}s:3:"tag";s:2:"55";}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:4:"time";s:5:"0.001";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}}i:1;a:1:{i:0;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:2:{s:3:"mva";i:1073741825;s:3:"tag";i:1;}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"mva";a:1:{i:0;s:1:"1";}s:3:"tag";s:2:"11";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"mva";a:2:{i:0;s:1:"2";i:1;s:1:"3";}s:3:"tag";s:2:"22";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"mva";a:3:{i:0;s:1:"4";i:1;s:1:"5";i:2;s:1:"6";}s:3:"tag";s:2:"33";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"mva";a:4:{i:0;s:1:"7";i:1;s:1:"8";i:2;s:1:"9";i:3;s:2:"10";}s:3:"tag";s:2:"44";}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"mva";a:0:{}s:3:"tag";s:2:"55";}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:4:"time";s:5:"0.001";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}}}sphinx-2.0.4-release/test/test_036/0000755000176700017710000000000011724063141016270 5ustar deogardeogarsphinx-2.0.4-release/test/test_036/test.xml0000644000176700017710000000505711605620330017775 0ustar deogardeogar merge vs --merge-dst-range indexer { mem_limit = 16M } searchd { } source src_base { type = mysql } source srcmain : src_base { sql_query = SELECT * FROM test_table WHERE document_id in (1,2,3,4,5,6,7,8) sql_attr_uint = tag1 sql_attr_uint = tag2 sql_attr_multi = uint mva1 from field sql_attr_multi = uint mva2 from field } source srcdelta : src_base { sql_query = SELECT * FROM test_table WHERE document_id in (9,10,11) sql_attr_uint = tag1 sql_attr_uint = tag2 sql_attr_multi = uint mva1 from field sql_attr_multi = uint mva2 from field } source srcmain : src_base { sql_query = SELECT * FROM test_table WHERE document_id in (1,2,3,4,5,6,7,8) sql_attr_uint = tag1 sql_attr_uint = tag2 sql_attr_multi = bigint mva1 from field sql_attr_multi = bigint mva2 from field } source srcdelta : src_base { sql_query = SELECT * FROM test_table WHERE document_id in (9,10,11) sql_attr_uint = tag1 sql_attr_uint = tag2 sql_attr_multi = bigint mva1 from field sql_attr_multi = bigint mva2 from field } index main { source = srcmain path = /main 
charset_type = utf-8 } index delta { source = srcdelta path = /delta charset_type = utf-8 } --merge main delta --merge-dst-range tag2 0 0 main delta test1 test2 CREATE TABLE `test_table` ( `document_id` int(11) NOT NULL default '0', `tag1` int(11) NOT NULL default '0', `tag2` int(11) NOT NULL default '0', `mva1` varchar(255) NOT NULL default '', `mva2` varchar(255) NOT NULL default '', `body` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 1, 0, 1, '', '1 2 3','main' ), ( 2, 0, 0, '2 22 222', '', 'main' ), ( 3, 0, 0, '3 33 333', '5 6 7','main' ), ( 4, 0, 1, '', '', 'test1' ), ( 5, 0, 1, '', '', 'test1' ), ( 6, 0, 0, '6', '', 'test1' ), ( 7, 0, 1, '7', '', 'test2' ), ( 8, 0, 1, '8', '', 'test2' ), ( 9, 0, 0, '1 10 100', '9 0 1','delta' ), (10, 0, 0, '2 20 200', '', 'delta' ), (11, 0, 0, '', '', 'delta' ) sphinx-2.0.4-release/test/test_036/model.bin0000644000176700017710000001227611605620330020067 0ustar deogardeogara:2:{i:0;a:4:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:4:{s:4:"tag1";i:1;s:4:"tag2";i:1;s:4:"mva1";i:1073741825;s:4:"mva2";i:1073741825;}s:7:"matches";a:2:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:4:"tag1";s:1:"0";s:4:"tag2";s:1:"0";s:4:"mva1";a:3:{i:0;s:1:"2";i:1;s:2:"22";i:2;s:3:"222";}s:4:"mva2";a:0:{}}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:4:"tag1";s:1:"0";s:4:"tag2";s:1:"0";s:4:"mva1";a:3:{i:0;s:1:"3";i:1;s:2:"33";i:2;s:3:"333";}s:4:"mva2";a:3:{i:0;s:1:"5";i:1;s:1:"6";i:2;s:1:"7";}}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"main";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"main";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:4:{s:4:"tag1";i:1;s:4:"tag2";i:1;s:4:"mva1";i:1073741825;s:4:"mva2";i:1073741825;}s:7:"matches";a:3:{i:9;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:4:"tag1";s:1:"0";s:4:"tag2";s:1:"0";s:4:"mva1";a:3:{i:0;s:1:"1";i:1;s:2:"10";i:2;s:3:"100";}s:4:"mva2";a:3:{i:0;s:1:"0";i:1;s:1:"1";i:2;s:1:"9";}}}i:10;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:4:"tag1";s:1:"0";s:4:"tag2";s:1:"0";s:4:"mva1";a:3:{i:0;s:1:"2";i:1;s:2:"20";i:2;s:3:"200";}s:4:"mva2";a:0:{}}}i:11;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:4:"tag1";s:1:"0";s:4:"tag2";s:1:"0";s:4:"mva1";a:0:{}s:4:"mva2";a:0:{}}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"delta";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"delta";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:4:{s:4:"tag1";i:1;s:4:"tag2";i:1;s:4:"mva1";i:1073741825;s:4:"mva2";i:1073741825;}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:4:"tag1";s:1:"0";s:4:"tag2";s:1:"0";s:4:"mva1";a:1:{i:0;s:1:"6";}s:4:"mva2";a:0:{}}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:5:"test1";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"test1";}i:3;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:4:{s:4:"tag1";i:1;s:4:"tag2";i:1;s:4:"mva1";i:1073741825;s:4:"mva2";i:1073741825;}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"test2";a:2:{s:4:"docs";s:1:"0";
s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"test2";}}i:1;a:4:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:4:{s:4:"tag1";i:1;s:4:"tag2";i:1;s:4:"mva1";i:1073741825;s:4:"mva2";i:1073741825;}s:7:"matches";a:2:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:4:"tag1";s:1:"0";s:4:"tag2";s:1:"0";s:4:"mva1";a:3:{i:0;s:1:"2";i:1;s:2:"22";i:2;s:3:"222";}s:4:"mva2";a:0:{}}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:4:"tag1";s:1:"0";s:4:"tag2";s:1:"0";s:4:"mva1";a:3:{i:0;s:1:"3";i:1;s:2:"33";i:2;s:3:"333";}s:4:"mva2";a:3:{i:0;s:1:"5";i:1;s:1:"6";i:2;s:1:"7";}}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"main";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"main";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:4:{s:4:"tag1";i:1;s:4:"tag2";i:1;s:4:"mva1";i:1073741825;s:4:"mva2";i:1073741825;}s:7:"matches";a:3:{i:9;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:4:"tag1";s:1:"0";s:4:"tag2";s:1:"0";s:4:"mva1";a:3:{i:0;s:1:"1";i:1;s:2:"10";i:2;s:3:"100";}s:4:"mva2";a:3:{i:0;s:1:"0";i:1;s:1:"1";i:2;s:1:"9";}}}i:10;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:4:"tag1";s:1:"0";s:4:"tag2";s:1:"0";s:4:"mva1";a:3:{i:0;s:1:"2";i:1;s:2:"20";i:2;s:3:"200";}s:4:"mva2";a:0:{}}}i:11;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:4:"tag1";s:1:"0";s:4:"tag2";s:1:"0";s:4:"mva1";a:0:{}s:4:"mva2";a:0:{}}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"delta";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"delta";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:4:{s:4:"tag1";i:1;s:4:"tag2";i:1;s:4:"mva1";i:1073741825;s:4:"mva2";i:1073741825;}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:4:"tag1";s:1:"0";s:4:"tag2";s:1:"0";s:4:"mva1";a:1:{i:0;s:1:"6";}s:4:"mva2";a:0:{}}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"test1";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"test1";}i:3;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:4:{s:4:"tag1";i:1;s:4:"tag2";i:1;s:4:"mva1";i:1073741825;s:4:"mva2";i:1073741825;}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"test2";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"test2";}}}sphinx-2.0.4-release/test/test_157/0000755000176700017710000000000011724063141016274 5ustar deogardeogarsphinx-2.0.4-release/test/test_157/test.xml0000644000176700017710000000165711475670625020024 0ustar deogardeogar quorum vs ranker Reset indexer { mem_limit = 16M } searchd { workers = threads } index rt { type = rt docinfo = extern charset_type = utf-8 path = /rt rt_attr_uint = idd rt_field = body rt_mem_limit = 8M } insert into rt (id, idd, body) values ( 1, 11, 'this is cool place' ) insert into rt (id, idd, body) values ( 2, 11, 'cool place is like no other' ) insert into rt (id, idd, body) values ( 3, 11, 'place is cool becouse there is no things like this' ) select * from rt where match ('"is cool place"/3') order by @id asc select * from rt where match ('"there things is cool place"/3') order by 
@id asc sphinx-2.0.4-release/test/test_157/model.bin0000644000176700017710000000220111475670625020077 0ustar deogardeogara:1:{i:0;a:5:{i:0;a:2:{s:8:"sphinxql";s:69:"insert into rt (id, idd, body) values ( 1, 11, 'this is cool place' )";s:14:"total_affected";i:1;}i:1;a:2:{s:8:"sphinxql";s:78:"insert into rt (id, idd, body) values ( 2, 11, 'cool place is like no other' )";s:14:"total_affected";i:1;}i:2;a:2:{s:8:"sphinxql";s:101:"insert into rt (id, idd, body) values ( 3, 11, 'place is cool becouse there is no things like this' )";s:14:"total_affected";i:1;}i:3;a:3:{s:8:"sphinxql";s:67:"select * from rt where match ('"is cool place"/3') order by @id asc";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"3319";s:3:"idd";s:2:"11";}i:1;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:4:"2319";s:3:"idd";s:2:"11";}i:2;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:4:"2297";s:3:"idd";s:2:"11";}}}i:4;a:3:{s:8:"sphinxql";s:80:"select * from rt where match ('"there things is cool place"/3') order by @id asc";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"3391";s:3:"idd";s:2:"11";}i:1;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:4:"2391";s:3:"idd";s:2:"11";}i:2;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:4:"2450";s:3:"idd";s:2:"11";}}}}}sphinx-2.0.4-release/test/test_027/0000755000176700017710000000000011724063141016270 5ustar deogardeogarsphinx-2.0.4-release/test/test_027/test.xml0000644000176700017710000000447211605620330017775 0ustar deogardeogar merge vs docinfo, range, mva indexer { mem_limit = 16M } searchd { } source srcmain { type = mysql sql_query = SELECT * FROM test_table WHERE document_id in (1,2,3) sql_attr_uint = tag sql_attr_multi = uint mva from field sql_attr_multi = bigint mva from field } source srcdelta { type = mysql sql_query = SELECT * FROM test_table WHERE document_id in (4,5,6) sql_attr_uint = tag sql_attr_multi = uint mva from field sql_attr_multi = bigint mva from field } index main { source = srcmain path = /main charset_type = utf-8 docinfo = inline docinfo = extern } index delta { source = srcdelta path = /delta charset_type = utf-8 docinfo = inline docinfo = extern } --merge main delta --merge-dst-range tag 1 2 main1 main2 main3 delta1 delta2 delta3 gamma0 CREATE TABLE `test_table` ( `document_id` int(11) NOT NULL default '0', `tag` int(11) NOT NULL default '0', `mva` varchar(255) NOT NULL default '', `body` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 1, 1, '1 11 111', 'main1' ), ( 2, 2, '2 22 222', 'main2' ), ( 3, 3, '3 33 333', 'main3' ), ( 4, 11,'1 10 100', 'delta1' ), ( 5, 22,'2 20 200', 'delta2' ), ( 6, 33,'3 30 300', 'delta3' ) sphinx-2.0.4-release/test/test_027/model.bin0000644000176700017710000002234711605620330020067 0ustar 
deogardeogara:36:{i:0;a:1:{i:0;s:6:"failed";}i:1;a:1:{i:0;s:6:"failed";}i:2;a:1:{i:0;s:6:"failed";}i:3;a:1:{i:0;s:6:"failed";}i:4;a:1:{i:0;s:6:"failed";}i:5;a:1:{i:0;s:6:"failed";}i:6;a:1:{i:0;s:6:"failed";}i:7;a:1:{i:0;s:6:"failed";}i:8;a:1:{i:0;s:6:"failed";}i:9;a:1:{i:0;s:6:"failed";}i:10;a:1:{i:0;s:6:"failed";}i:11;a:1:{i:0;s:6:"failed";}i:12;a:1:{i:0;s:6:"failed";}i:13;a:1:{i:0;s:6:"failed";}i:14;a:1:{i:0;s:6:"failed";}i:15;a:1:{i:0;s:6:"failed";}i:16;a:1:{i:0;s:6:"failed";}i:17;a:1:{i:0;s:6:"failed";}i:18;a:1:{i:0;s:6:"failed";}i:19;a:1:{i:0;s:6:"failed";}i:20;a:1:{i:0;s:6:"failed";}i:21;a:1:{i:0;s:6:"failed";}i:22;a:1:{i:0;s:6:"failed";}i:23;a:1:{i:0;s:6:"failed";}i:24;a:1:{i:0;s:6:"failed";}i:25;a:1:{i:0;s:6:"failed";}i:26;a:1:{i:0;s:6:"failed";}i:27;a:9:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:2:{s:3:"tag";i:1;s:3:"mva";i:1073741825;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"tag";s:1:"1";s:3:"mva";a:3:{i:0;s:1:"1";i:1;s:2:"11";i:2;s:3:"111";}}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"main1";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"main1";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:2:{s:3:"tag";i:1;s:3:"mva";i:1073741825;}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"tag";s:1:"2";s:3:"mva";a:3:{i:0;s:1:"2";i:1;s:2:"22";i:2;s:3:"222";}}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"main2";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"main2";}i:2;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:2:{s:3:"tag";i:1;s:3:"mva";i:1073741825;}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"main3";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"main3";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:2:{s:3:"tag";i:1;s:3:"mva";i:1073741825;}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"tag";s:2:"11";s:3:"mva";a:3:{i:0;s:1:"1";i:1;s:2:"10";i:2;s:3:"100";}}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"delta1";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:6:"delta1";}i:4;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:2:{s:3:"tag";i:1;s:3:"mva";i:1073741825;}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"tag";s:2:"22";s:3:"mva";a:3:{i:0;s:1:"2";i:1;s:2:"20";i:2;s:3:"200";}}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"delta2";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:6:"delta2";}i:5;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:2:{s:3:"tag";i:1;s:3:"mva";i:1073741825;}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"tag";s:2:"33";s:3:"mva";a:3:{i:0;s:1:"3";i:1;s:2:"30";i:2;s:3:"300";}}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"d
elta3";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:6:"delta3";}i:6;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:2:{s:3:"tag";i:1;s:3:"mva";i:1073741825;}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"gamma0";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:6:"gamma0";}i:7;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:2:{s:3:"tag";i:1;s:3:"mva";i:1073741825;}s:7:"matches";a:2:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"tag";s:1:"1";s:3:"mva";a:3:{i:0;s:1:"1";i:1;s:2:"11";i:2;s:3:"111";}}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"tag";s:2:"11";s:3:"mva";a:3:{i:0;s:1:"1";i:1;s:2:"10";i:2;s:3:"100";}}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:8;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:2:{s:3:"tag";i:1;s:3:"mva";i:1073741825;}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"tag";s:2:"11";s:3:"mva";a:3:{i:0;s:1:"1";i:1;s:2:"10";i:2;s:3:"100";}}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}}i:28;a:1:{i:0;s:6:"failed";}i:29;a:1:{i:0;s:6:"failed";}i:30;a:1:{i:0;s:6:"failed";}i:31;a:1:{i:0;s:6:"failed";}i:32;a:1:{i:0;s:6:"failed";}i:33;a:1:{i:0;s:6:"failed";}i:34;a:1:{i:0;s:6:"failed";}i:35;a:9:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:2:{s:3:"tag";i:1;s:3:"mva";i:1073741825;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"tag";s:1:"1";s:3:"mva";a:3:{i:0;s:1:"1";i:1;s:2:"11";i:2;s:3:"111";}}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"main1";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"main1";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:2:{s:3:"tag";i:1;s:3:"mva";i:1073741825;}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"tag";s:1:"2";s:3:"mva";a:3:{i:0;s:1:"2";i:1;s:2:"22";i:2;s:3:"222";}}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"main2";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"main2";}i:2;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:2:{s:3:"tag";i:1;s:3:"mva";i:1073741825;}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"main3";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"main3";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:2:{s:3:"tag";i:1;s:3:"mva";i:1073741825;}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"tag";s:2:"11";s:3:"mva";a:3:{i:0;s:1:"1";i:1;s:2:"10";i:2;s:3:"100";}}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"delta1";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:6:"delta1";}i:4;a:13:{
s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:2:{s:3:"tag";i:1;s:3:"mva";i:1073741825;}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"tag";s:2:"22";s:3:"mva";a:3:{i:0;s:1:"2";i:1;s:2:"20";i:2;s:3:"200";}}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"delta2";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:6:"delta2";}i:5;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:2:{s:3:"tag";i:1;s:3:"mva";i:1073741825;}s:7:"matches";a:1:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"tag";s:2:"33";s:3:"mva";a:3:{i:0;s:1:"3";i:1;s:2:"30";i:2;s:3:"300";}}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"delta3";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:6:"delta3";}i:6;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:2:{s:3:"tag";i:1;s:3:"mva";i:1073741825;}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"gamma0";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:6:"gamma0";}i:7;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:2:{s:3:"tag";i:1;s:3:"mva";i:1073741825;}s:7:"matches";a:2:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"tag";s:1:"1";s:3:"mva";a:3:{i:0;s:1:"1";i:1;s:2:"11";i:2;s:3:"111";}}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"tag";s:2:"11";s:3:"mva";a:3:{i:0;s:1:"1";i:1;s:2:"10";i:2;s:3:"100";}}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:8;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:2:{s:3:"tag";i:1;s:3:"mva";i:1073741825;}s:7:"matches";a:1:{i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"tag";s:2:"11";s:3:"mva";a:3:{i:0;s:1:"1";i:1;s:2:"10";i:2;s:3:"100";}}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}}}sphinx-2.0.4-release/test/stopwords.txt0000644000176700017710000000001511033237056017523 0ustar deogardeogara the and of sphinx-2.0.4-release/test/test_030/0000755000176700017710000000000011724063141016262 5ustar deogardeogarsphinx-2.0.4-release/test/test_030/test.xml0000644000176700017710000000177611025543651020001 0ustar deogardeogar ext2 ranking indexer { mem_limit = 16M } searchd { } source srctest { type = mysql sql_query = SELECT document_id, body FROM test_table } index test { source = srctest path = /test stopwords = stopwords.txt min_word_len = 3 } one two three senior pastor of riverside church CREATE TABLE `test_table` ( `document_id` int(11) NOT NULL default '0', `body` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 1, 'one two three' ), ( 2, 'one two three four' ), ( 3, 'one then two then three then four' ), ( 4, 'senior pastor of Riverside church' ), ( 5, 'senior pastor and the Riverside church' ) sphinx-2.0.4-release/test/test_030/model.bin0000644000176700017710000000227511025543651020065 0ustar 
deogardeogara:1:{i:0;a:2:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:4:"3500";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"3500";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"1500";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.033";s:5:"words";a:3:{s:3:"one";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}s:3:"two";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}s:5:"three";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:5:"query";s:13:"one two three";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:2:{i:4;a:2:{s:6:"weight";s:4:"4587";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:4:"2587";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.004";s:5:"words";a:4:{s:6:"senior";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:6:"pastor";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:9:"riverside";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:6:"church";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:5:"query";s:33:"senior pastor of riverside church";}}}sphinx-2.0.4-release/test/test_007/0000755000176700017710000000000011724063141016266 5ustar deogardeogarsphinx-2.0.4-release/test/test_007/test.xml0000644000176700017710000000400310744717627020004 0ustar deogardeogar prefix_fields/infix_fields (part 4) indexer { mem_limit = 16M } searchd { } source srclj { type = mysql sql_query = SELECT id, subject, body, author FROM test_table } index lj { source = srclj path = /lj charset_type = utf-8 min_word_len = 0 enable_star = 1 min_infix_len = 3 min_prefix_len = 3 prefix_fields = prefix_fields = subject prefix_fields = body prefix_fields = body, author infix_fields = infix_fields = SUBJECT infix_fields = body infix_fields = subject, AUTHOR } enab* grea* Mak* *ble* *thor* *oda* CREATE TABLE `test_table` ( `id` int(11) NOT NULL default '0', `document_id` int(5) NOT NULL default '0', `subject` varchar(255) NOT NULL default '', `body` varchar(255) NOT NULL default '', `author` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES (1,1,'Problem with enable_star searches','Having star searches is great! 
Just what we needed','Maurice Makaay'), (2,2,'Problem with enable_star searches',' But the thing is, that I cannot search for authors anymore','admin'), (3,3,'Problem with enable_star searches','I will try to repro it here and update you','shodan') sphinx-2.0.4-release/test/test_007/model.bin0000644000176700017710000002317210725372550020074 0ustar deogardeogara:16:{i:0;a:1:{i:0;s:6:"failed";}i:1;a:1:{i:0;s:6:"failed";}i:2;a:1:{i:0;s:6:"failed";}i:3;a:1:{i:0;s:6:"failed";}i:4;a:1:{i:0;s:6:"failed";}i:5;a:1:{i:0;s:6:"failed";}i:6;a:6:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"enab*";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:5:"query";s:5:"enab*";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"grea*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"grea*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"mak*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"Mak*";}i:3;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*ble*";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"6";}}s:5:"query";s:5:"*ble*";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"*thor*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*thor*";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*oda*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*oda*";}}i:7;a:6:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"enab*";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:5:"query";s:5:"enab*";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:
1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"grea*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"grea*";}i:2;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"mak*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"Mak*";}i:3;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*ble*";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"6";}}s:5:"query";s:5:"*ble*";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"*thor*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*thor*";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*oda*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*oda*";}}i:8;a:1:{i:0;s:6:"failed";}i:9;a:6:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"enab*";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:5:"query";s:5:"enab*";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"grea*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"grea*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"mak*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"Mak*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*ble*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*ble*";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11
:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"*thor*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"*thor*";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*oda*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:5:"*oda*";}}i:10;a:1:{i:0;s:6:"failed";}i:11;a:1:{i:0;s:6:"failed";}i:12;a:1:{i:0;s:6:"failed";}i:13;a:1:{i:0;s:6:"failed";}i:14;a:6:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"enab*";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:5:"query";s:5:"enab*";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"grea*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"grea*";}i:2;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"mak*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"Mak*";}i:3;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"*ble*";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"6";}}s:5:"query";s:5:"*ble*";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"*thor*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*thor*";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:5:"*oda*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"*oda*";}}i:15;a:1:{i:0;s:6:"failed";}}sphinx-2.0.4-release/test/test_083/0000755000176700017710000000000011724063141016272 5ustar deogardeogarsphinx-2.0.4-release/test/test_083/test.xml0000644000176700017710000000260511325631662020004 0ustar deogardeogar subtree cache vs AND node reset indexer { mem_limit = 16M } searchd { subtree_docs_cache = 0 subtree_docs_cache = 40 subtree_docs_cache = 10M subtree_hits_cache = 0 subtree_hits_cache = 40 subtree_hits_cache = 10M } source srctest { 
type = mysql sql_query = SELECT * FROM test_table } index test_idx { source = srctest path = /test charset_type = utf-8 } CREATE TABLE `test_table` ( `id` int(11) NOT NULL default '0', `body` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 1, 'one two three one' ), ( 2, 'one two three one two' ), ( 3, 'one two three one two three' ), ( 4, 'one two three four six' ), ( 5, 'one two three four five seven' ) SetMatchMode (SPH_MATCH_EXTENDED2); $client->AddQuery ("(one two three) |one"); $client->AddQuery ("(one two three) |six"); $results = $client->RunQueries (); ]]> sphinx-2.0.4-release/test/test_083/model.bin0000644000176700017710000003056411325631662020102 0ustar deogardeogara:9:{i:0;a:2:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:4:"4270";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"4244";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"4219";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:4:"3295";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:4:"3295";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}}s:5:"query";s:0:"";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:5:{i:4;a:2:{s:6:"weight";s:4:"3397";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:4:"3346";s:5:"attrs";a:0:{}}i:1;a:2:{s:6:"weight";s:4:"3327";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"3308";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"3289";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:4:"time";s:5:"0.000";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}s:3:"six";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:0:"";}}i:1;a:2:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:4:"4270";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"4244";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"4219";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:4:"3295";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:4:"3295";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}}s:5:"query";s:0:"";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:5:{i:4;a:2:{s:6:"weight";s:4:"3397";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:4:"3346";s:5:"attrs";a:0:{}}i:1;a:2:{s:6:"weight";s:4:"3327";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"3308";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"3289";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:4:"time";s:5:"0.001";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}s:3:"six";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:0:"";}
}i:2;a:2:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:4:"4270";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"4244";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"4219";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:4:"3295";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:4:"3295";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}}s:5:"query";s:0:"";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:5:{i:4;a:2:{s:6:"weight";s:4:"3397";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:4:"3346";s:5:"attrs";a:0:{}}i:1;a:2:{s:6:"weight";s:4:"3327";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"3308";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"3289";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:4:"time";s:5:"0.001";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}s:3:"six";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:0:"";}}i:3;a:2:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:4:"4270";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"4244";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"4219";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:4:"3295";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:4:"3295";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}}s:5:"query";s:0:"";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:5:{i:4;a:2:{s:6:"weight";s:4:"3397";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:4:"3346";s:5:"attrs";a:0:{}}i:1;a:2:{s:6:"weight";s:4:"3327";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"3308";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"3289";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:4:"time";s:5:"0.001";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}s:3:"six";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:0:"";}}i:4;a:2:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:4:"4270";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"4244";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"4219";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:4:"3295";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:4:"3295";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}}s:5:"query";s:0:"";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status"
;i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:5:{i:4;a:2:{s:6:"weight";s:4:"3397";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:4:"3346";s:5:"attrs";a:0:{}}i:1;a:2:{s:6:"weight";s:4:"3327";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"3308";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"3289";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:4:"time";s:5:"0.001";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}s:3:"six";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:0:"";}}i:5;a:2:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:4:"4270";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"4244";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"4219";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:4:"3295";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:4:"3295";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}}s:5:"query";s:0:"";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:5:{i:4;a:2:{s:6:"weight";s:4:"3397";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:4:"3346";s:5:"attrs";a:0:{}}i:1;a:2:{s:6:"weight";s:4:"3327";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"3308";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"3289";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:4:"time";s:5:"0.001";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}s:3:"six";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:0:"";}}i:6;a:2:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:4:"4270";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"4244";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"4219";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:4:"3295";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:4:"3295";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}}s:5:"query";s:0:"";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:5:{i:4;a:2:{s:6:"weight";s:4:"3397";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:4:"3346";s:5:"attrs";a:0:{}}i:1;a:2:{s:6:"weight";s:4:"3327";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"3308";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"3289";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:4:"time";s:5:"0.001";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}s:3:"six";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:0:"";}}i:7;a:2:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6
:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:4:"4270";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"4244";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"4219";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:4:"3295";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:4:"3295";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}}s:5:"query";s:0:"";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:5:{i:4;a:2:{s:6:"weight";s:4:"3397";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:4:"3346";s:5:"attrs";a:0:{}}i:1;a:2:{s:6:"weight";s:4:"3327";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"3308";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"3289";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:4:"time";s:5:"0.001";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}s:3:"six";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:0:"";}}i:8;a:2:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:5:{i:1;a:2:{s:6:"weight";s:4:"4270";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"4244";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"4219";s:5:"attrs";a:0:{}}i:4;a:2:{s:6:"weight";s:4:"3295";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:4:"3295";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}}s:5:"query";s:0:"";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:5:{i:4;a:2:{s:6:"weight";s:4:"3397";s:5:"attrs";a:0:{}}i:5;a:2:{s:6:"weight";s:4:"3346";s:5:"attrs";a:0:{}}i:1;a:2:{s:6:"weight";s:4:"3327";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"3308";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"3289";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"5";s:11:"total_found";s:1:"5";s:4:"time";s:5:"0.001";s:5:"words";a:4:{s:3:"one";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"8";}s:3:"two";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"7";}s:5:"three";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"6";}s:3:"six";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:0:"";}}}sphinx-2.0.4-release/test/test_133/0000755000176700017710000000000011724063141016266 5ustar deogardeogarsphinx-2.0.4-release/test/test_133/test.xml0000644000176700017710000001166611720460762020007 0ustar deogardeogar SENTENCE, PARAGRAPH, and ZONE operators indexer { mem_limit = 16M } searchd { } source test { type = mysql sql_query = select * from test_table sql_attr_uint = gid } index test { source = test path = /test html_strip = 1 index_sp = 1 index_zones = zone* dict = crc dict = keywords exceptions = synonyms.txt } create table test_table ( id int not null, gid int not null, title varchar(16384) not null ); drop table if exists test_table; insert into test_table values ( 1, 1, 'One and one and one. And two. And three, all separate.' 
); insert into test_table values ( 2, 1, 'And then we\'ll have him, one two three! Kidnap the Sandy Clawz...' ); insert into test_table values ( 3, 1, 'One two something. But not three.' ); insert into test_table values ( 4, 1, 'Two says hello to one more three.' ); insert into test_table values ( 5, 1, CONCAT(REPEAT('A ram zam zam. ', 171), 'Zam ram!') ); insert into test_table values ( 6, 1, 'A ram zam zam, a ram zam zam, guli guli guli guli ram zam zam.' ); insert into test_table values ( 100, 2, 'Quick brown fox<p>jumps over a lazy dog.' ); insert into test_table values ( 101, 2, 'In paragraph, yes. Not in sentence, no.' ); The time has come, the walrus said, to talk of many things.' );]]> and ships, and sealing wax, of cabbages, , and kings.' );]]> , and kings.' );]]> Sweet dreams are made of this.Who am I to disagree?' );]]> Sweet of this.Who am I to disagree?' );]]> I like this. Sweet of  this.' );]]> I like this. Sweet of  this.' );]]> one SENTENCE two one SENTENCE two three one SENTENCE two SENTENCE three "one two" SENTENCE three zam SENTENCE ram fox PARAGRAPH dog sentence SENTENCE paragraph sentence PARAGRAPH paragraph ZONE:zoneA walrus ZONE:zoneB walrus ZONE:zoneA walrus time ZONE:zoneB walrus time ZONE:zoneC cabbages and kings ZONE:zoneC kings and ships ZONE:(zoneA,zoneB) sweet disagree "Who am I" "of this" ZONE:zoneA "like this" strikes sphinx-2.0.4-release/test/test_133/model.bin0000644000176700017710000011661611720460762020101 0ustar deogardeogara:4:{i:0;a:19:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:3:{i:2;a:2:{s:6:"weight";s:4:"2594";s:5:"attrs";a:1:{s:3:"gid";i:1;}}i:3;a:2:{s:6:"weight";s:4:"2594";s:5:"attrs";a:1:{s:3:"gid";i:1;}}i:4;a:2:{s:6:"weight";s:4:"1594";s:5:"attrs";a:1:{s:3:"gid";i:1;}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:3:"one";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"6";}s:3:"two";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:16:"one SENTENCE two";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:3:{i:2;a:2:{s:6:"weight";s:4:"3594";s:5:"attrs";a:1:{s:3:"gid";i:1;}}i:3;a:2:{s:6:"weight";s:4:"2594";s:5:"attrs";a:1:{s:3:"gid";i:1;}}i:4;a:2:{s:6:"weight";s:4:"2594";s:5:"attrs";a:1:{s:3:"gid";i:1;}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:3:"one";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"6";}s:3:"two";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:5:"three";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:22:"one SENTENCE two three";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:2:{i:2;a:2:{s:6:"weight";s:4:"3594";s:5:"attrs";a:1:{s:3:"gid";i:1;}}i:4;a:2:{s:6:"weight";s:4:"2594";s:5:"attrs";a:1:{s:3:"gid";i:1;}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:3:"one";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"6";}s:3:"two";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:5:"three";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:31:"one SENTENCE two SENTENCE 
three";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:4:"2594";s:5:"attrs";a:1:{s:3:"gid";i:1;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:3:"one";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"6";}s:3:"two";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:5:"three";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:24:""one two" SENTENCE three";}i:4;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:2:{i:5;a:2:{s:6:"weight";s:4:"2853";s:5:"attrs";a:1:{s:3:"gid";i:1;}}i:6;a:2:{s:6:"weight";s:4:"1775";s:5:"attrs";a:1:{s:3:"gid";i:1;}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:3:"zam";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:3:"349";}s:3:"ram";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:3:"175";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:16:"zam SENTENCE ram";}i:5;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:3:"fox";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"dog";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:17:"fox PARAGRAPH dog";}i:6;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:8:"sentence";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:9:"paragraph";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:27:"sentence SENTENCE paragraph";}i:7;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:101;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:1:{s:3:"gid";i:2;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:8:"sentence";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:9:"paragraph";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:28:"sentence PARAGRAPH paragraph";}i:8;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:200;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:1:{s:3:"gid";i:3;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"walrus";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:17:"ZONE:zoneA walrus";}i:9;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:200;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:1:{s:3:"gid";i:3;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"walrus";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:17:"ZONE:zoneB 
walrus";}i:10;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:200;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:1:{s:3:"gid";i:3;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:6:"walrus";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:4:"time";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:22:"ZONE:zoneA walrus time";}i:11;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:6:"walrus";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:4:"time";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:22:"ZONE:zoneB walrus time";}i:12;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:8:"cabbages";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:3:"and";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:2:"14";}s:5:"kings";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:29:"ZONE:zoneC cabbages and kings";}i:13;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:201;a:2:{s:6:"weight";s:4:"2644";s:5:"attrs";a:1:{s:3:"gid";i:3;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:5:"kings";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:3:"and";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:2:"14";}s:5:"ships";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:26:"ZONE:zoneC kings and ships";}i:14;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:202;a:2:{s:6:"weight";s:4:"1628";s:5:"attrs";a:1:{s:3:"gid";i:3;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:5:"sweet";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:8:"disagree";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:33:"ZONE:(zoneA,zoneB) sweet disagree";}i:15;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:2:{i:202;a:2:{s:6:"weight";s:4:"3639";s:5:"attrs";a:1:{s:3:"gid";i:3;}}i:300;a:2:{s:6:"weight";s:4:"3639";s:5:"attrs";a:1:{s:3:"gid";i:4;}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:3:"who";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:2:"am";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:1:"i";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:10:""Who am 
I"";}i:16;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:4:{i:310;a:2:{s:6:"weight";s:4:"2569";s:5:"attrs";a:1:{s:3:"gid";i:4;}}i:311;a:2:{s:6:"weight";s:4:"2569";s:5:"attrs";a:1:{s:3:"gid";i:4;}}i:202;a:2:{s:6:"weight";s:4:"2551";s:5:"attrs";a:1:{s:3:"gid";i:3;}}i:300;a:2:{s:6:"weight";s:4:"2551";s:5:"attrs";a:1:{s:3:"gid";i:4;}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:2:"of";a:2:{s:4:"docs";s:1:"8";s:4:"hits";s:2:"10";}s:4:"this";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"6";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:9:""of this"";}i:17;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:2:{i:310;a:2:{s:6:"weight";s:4:"2626";s:5:"attrs";a:1:{s:3:"gid";i:4;}}i:311;a:2:{s:6:"weight";s:4:"2626";s:5:"attrs";a:1:{s:3:"gid";i:4;}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:4:"like";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"5";}s:4:"this";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"6";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:22:"ZONE:zoneA "like this"";}i:18;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:400;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:1:{s:3:"gid";i:5;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"strikes";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"strikes";}}i:1;a:19:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:3:{i:2;a:2:{s:6:"weight";s:4:"2594";s:5:"attrs";a:1:{s:3:"gid";i:1;}}i:3;a:2:{s:6:"weight";s:4:"2594";s:5:"attrs";a:1:{s:3:"gid";i:1;}}i:4;a:2:{s:6:"weight";s:4:"1594";s:5:"attrs";a:1:{s:3:"gid";i:1;}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:3:"one";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"6";}s:3:"two";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:16:"one SENTENCE two";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:3:{i:2;a:2:{s:6:"weight";s:4:"3594";s:5:"attrs";a:1:{s:3:"gid";i:1;}}i:3;a:2:{s:6:"weight";s:4:"2594";s:5:"attrs";a:1:{s:3:"gid";i:1;}}i:4;a:2:{s:6:"weight";s:4:"2594";s:5:"attrs";a:1:{s:3:"gid";i:1;}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:3:"one";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"6";}s:3:"two";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:5:"three";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:22:"one SENTENCE two 
three";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:2:{i:2;a:2:{s:6:"weight";s:4:"3594";s:5:"attrs";a:1:{s:3:"gid";i:1;}}i:4;a:2:{s:6:"weight";s:4:"2594";s:5:"attrs";a:1:{s:3:"gid";i:1;}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:3:"one";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"6";}s:3:"two";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:5:"three";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:31:"one SENTENCE two SENTENCE three";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:4:"2594";s:5:"attrs";a:1:{s:3:"gid";i:1;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:3:"one";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"6";}s:3:"two";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:5:"three";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:24:""one two" SENTENCE three";}i:4;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:2:{i:5;a:2:{s:6:"weight";s:4:"2853";s:5:"attrs";a:1:{s:3:"gid";i:1;}}i:6;a:2:{s:6:"weight";s:4:"1775";s:5:"attrs";a:1:{s:3:"gid";i:1;}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:3:"zam";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:3:"349";}s:3:"ram";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:3:"175";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:16:"zam SENTENCE ram";}i:5;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:3:"fox";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"dog";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:17:"fox PARAGRAPH dog";}i:6;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:8:"sentence";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:9:"paragraph";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:27:"sentence SENTENCE paragraph";}i:7;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:101;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:1:{s:3:"gid";i:2;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:8:"sentence";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:9:"paragraph";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:28:"sentence PARAGRAPH paragraph";}i:8;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:200;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:1:{s:3:"gid";i:3;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"walrus";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:17:"ZONE:zoneA 
walrus";}i:9;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:200;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:1:{s:3:"gid";i:3;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"walrus";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:17:"ZONE:zoneB walrus";}i:10;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:200;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:1:{s:3:"gid";i:3;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:6:"walrus";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:4:"time";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:22:"ZONE:zoneA walrus time";}i:11;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:6:"walrus";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:4:"time";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:22:"ZONE:zoneB walrus time";}i:12;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:8:"cabbages";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:3:"and";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:2:"14";}s:5:"kings";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:29:"ZONE:zoneC cabbages and kings";}i:13;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:201;a:2:{s:6:"weight";s:4:"2644";s:5:"attrs";a:1:{s:3:"gid";i:3;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:5:"kings";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:3:"and";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:2:"14";}s:5:"ships";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:26:"ZONE:zoneC kings and ships";}i:14;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:202;a:2:{s:6:"weight";s:4:"1628";s:5:"attrs";a:1:{s:3:"gid";i:3;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:5:"sweet";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:8:"disagree";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:33:"ZONE:(zoneA,zoneB) sweet disagree";}i:15;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:2:{i:202;a:2:{s:6:"weight";s:4:"3639";s:5:"attrs";a:1:{s:3:"gid";i:3;}}i:300;a:2:{s:6:"weight";s:4:"3639";s:5:"attrs";a:1:{s:3:"gid";i:4;}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:3:"who";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:2:"am";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:1:"i";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:10:""Who am 
I"";}i:16;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:4:{i:310;a:2:{s:6:"weight";s:4:"2569";s:5:"attrs";a:1:{s:3:"gid";i:4;}}i:311;a:2:{s:6:"weight";s:4:"2569";s:5:"attrs";a:1:{s:3:"gid";i:4;}}i:202;a:2:{s:6:"weight";s:4:"2551";s:5:"attrs";a:1:{s:3:"gid";i:3;}}i:300;a:2:{s:6:"weight";s:4:"2551";s:5:"attrs";a:1:{s:3:"gid";i:4;}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:2:"of";a:2:{s:4:"docs";s:1:"8";s:4:"hits";s:2:"10";}s:4:"this";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"6";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:9:""of this"";}i:17;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:2:{i:310;a:2:{s:6:"weight";s:4:"2626";s:5:"attrs";a:1:{s:3:"gid";i:4;}}i:311;a:2:{s:6:"weight";s:4:"2626";s:5:"attrs";a:1:{s:3:"gid";i:4;}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:4:"like";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"5";}s:4:"this";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"6";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:22:"ZONE:zoneA "like this"";}i:18;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:400;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:1:{s:3:"gid";i:5;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"strikes";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"strikes";}}i:2;a:19:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:3:{i:2;a:2:{s:6:"weight";s:4:"2594";s:5:"attrs";a:1:{s:3:"gid";i:1;}}i:3;a:2:{s:6:"weight";s:4:"2594";s:5:"attrs";a:1:{s:3:"gid";i:1;}}i:4;a:2:{s:6:"weight";s:4:"1594";s:5:"attrs";a:1:{s:3:"gid";i:1;}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:3:"one";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"6";}s:3:"two";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:16:"one SENTENCE two";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:3:{i:2;a:2:{s:6:"weight";s:4:"3594";s:5:"attrs";a:1:{s:3:"gid";i:1;}}i:3;a:2:{s:6:"weight";s:4:"2594";s:5:"attrs";a:1:{s:3:"gid";i:1;}}i:4;a:2:{s:6:"weight";s:4:"2594";s:5:"attrs";a:1:{s:3:"gid";i:1;}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:3:"one";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"6";}s:3:"two";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:5:"three";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:22:"one SENTENCE two 
three";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:2:{i:2;a:2:{s:6:"weight";s:4:"3594";s:5:"attrs";a:1:{s:3:"gid";i:1;}}i:4;a:2:{s:6:"weight";s:4:"2594";s:5:"attrs";a:1:{s:3:"gid";i:1;}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:3:"one";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"6";}s:3:"two";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:5:"three";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:31:"one SENTENCE two SENTENCE three";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:4:"2594";s:5:"attrs";a:1:{s:3:"gid";i:1;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:3:"one";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"6";}s:3:"two";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:5:"three";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:24:""one two" SENTENCE three";}i:4;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:2:{i:5;a:2:{s:6:"weight";s:4:"2853";s:5:"attrs";a:1:{s:3:"gid";i:1;}}i:6;a:2:{s:6:"weight";s:4:"1775";s:5:"attrs";a:1:{s:3:"gid";i:1;}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:3:"zam";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:3:"349";}s:3:"ram";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:3:"175";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:16:"zam SENTENCE ram";}i:5;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:3:"fox";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"dog";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:17:"fox PARAGRAPH dog";}i:6;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:8:"sentence";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:9:"paragraph";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:27:"sentence SENTENCE paragraph";}i:7;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:101;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:1:{s:3:"gid";i:2;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:8:"sentence";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:9:"paragraph";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:28:"sentence PARAGRAPH paragraph";}i:8;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:200;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:1:{s:3:"gid";i:3;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"walrus";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:17:"ZONE:zoneA 
walrus";}i:9;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:200;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:1:{s:3:"gid";i:3;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"walrus";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:17:"ZONE:zoneB walrus";}i:10;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:200;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:1:{s:3:"gid";i:3;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:6:"walrus";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:4:"time";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:22:"ZONE:zoneA walrus time";}i:11;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:6:"walrus";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:4:"time";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:22:"ZONE:zoneB walrus time";}i:12;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:8:"cabbages";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:3:"and";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:2:"14";}s:5:"kings";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:29:"ZONE:zoneC cabbages and kings";}i:13;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:201;a:2:{s:6:"weight";s:4:"2644";s:5:"attrs";a:1:{s:3:"gid";i:3;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:5:"kings";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:3:"and";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:2:"14";}s:5:"ships";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:26:"ZONE:zoneC kings and ships";}i:14;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:202;a:2:{s:6:"weight";s:4:"1628";s:5:"attrs";a:1:{s:3:"gid";i:3;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:5:"sweet";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:8:"disagree";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:33:"ZONE:(zoneA,zoneB) sweet disagree";}i:15;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:2:{i:202;a:2:{s:6:"weight";s:4:"3639";s:5:"attrs";a:1:{s:3:"gid";i:3;}}i:300;a:2:{s:6:"weight";s:4:"3639";s:5:"attrs";a:1:{s:3:"gid";i:4;}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:3:"who";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:2:"am";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:1:"i";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:10:""Who am 
I"";}i:16;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:4:{i:310;a:2:{s:6:"weight";s:4:"2569";s:5:"attrs";a:1:{s:3:"gid";i:4;}}i:311;a:2:{s:6:"weight";s:4:"2569";s:5:"attrs";a:1:{s:3:"gid";i:4;}}i:202;a:2:{s:6:"weight";s:4:"2551";s:5:"attrs";a:1:{s:3:"gid";i:3;}}i:300;a:2:{s:6:"weight";s:4:"2551";s:5:"attrs";a:1:{s:3:"gid";i:4;}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:2:"of";a:2:{s:4:"docs";s:1:"8";s:4:"hits";s:2:"10";}s:4:"this";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"6";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:9:""of this"";}i:17;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:2:{i:310;a:2:{s:6:"weight";s:4:"2626";s:5:"attrs";a:1:{s:3:"gid";i:4;}}i:311;a:2:{s:6:"weight";s:4:"2626";s:5:"attrs";a:1:{s:3:"gid";i:4;}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:4:"like";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"5";}s:4:"this";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"6";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:22:"ZONE:zoneA "like this"";}i:18;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:400;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:1:{s:3:"gid";i:5;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"strikes";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"strikes";}}i:3;a:19:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:3:{i:2;a:2:{s:6:"weight";s:4:"2594";s:5:"attrs";a:1:{s:3:"gid";i:1;}}i:3;a:2:{s:6:"weight";s:4:"2594";s:5:"attrs";a:1:{s:3:"gid";i:1;}}i:4;a:2:{s:6:"weight";s:4:"1594";s:5:"attrs";a:1:{s:3:"gid";i:1;}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:2:{s:3:"one";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"6";}s:3:"two";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:16:"one SENTENCE two";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:3:{i:2;a:2:{s:6:"weight";s:4:"3594";s:5:"attrs";a:1:{s:3:"gid";i:1;}}i:3;a:2:{s:6:"weight";s:4:"2594";s:5:"attrs";a:1:{s:3:"gid";i:1;}}i:4;a:2:{s:6:"weight";s:4:"2594";s:5:"attrs";a:1:{s:3:"gid";i:1;}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:3:"one";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"6";}s:3:"two";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:5:"three";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:22:"one SENTENCE two 
three";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:2:{i:2;a:2:{s:6:"weight";s:4:"3594";s:5:"attrs";a:1:{s:3:"gid";i:1;}}i:4;a:2:{s:6:"weight";s:4:"2594";s:5:"attrs";a:1:{s:3:"gid";i:1;}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:3:"one";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"6";}s:3:"two";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:5:"three";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:31:"one SENTENCE two SENTENCE three";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:4:"2594";s:5:"attrs";a:1:{s:3:"gid";i:1;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:3:"one";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"6";}s:3:"two";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:5:"three";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:24:""one two" SENTENCE three";}i:4;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:2:{i:5;a:2:{s:6:"weight";s:4:"2853";s:5:"attrs";a:1:{s:3:"gid";i:1;}}i:6;a:2:{s:6:"weight";s:4:"1775";s:5:"attrs";a:1:{s:3:"gid";i:1;}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:3:"zam";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:3:"349";}s:3:"ram";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:3:"175";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:16:"zam SENTENCE ram";}i:5;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:3:"fox";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"dog";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:17:"fox PARAGRAPH dog";}i:6;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:2:{s:8:"sentence";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:9:"paragraph";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:27:"sentence SENTENCE paragraph";}i:7;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:101;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:1:{s:3:"gid";i:2;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:8:"sentence";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:9:"paragraph";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:28:"sentence PARAGRAPH paragraph";}i:8;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:200;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:1:{s:3:"gid";i:3;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:6:"walrus";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:17:"ZONE:zoneA 
walrus";}i:9;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:200;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:1:{s:3:"gid";i:3;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"walrus";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:17:"ZONE:zoneB walrus";}i:10;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:200;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:1:{s:3:"gid";i:3;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:6:"walrus";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:4:"time";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:22:"ZONE:zoneA walrus time";}i:11;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:6:"walrus";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:4:"time";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:22:"ZONE:zoneB walrus time";}i:12;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:8:"cabbages";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:3:"and";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:2:"14";}s:5:"kings";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:29:"ZONE:zoneC cabbages and kings";}i:13;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:201;a:2:{s:6:"weight";s:4:"2644";s:5:"attrs";a:1:{s:3:"gid";i:3;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:5:"kings";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:3:"and";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:2:"14";}s:5:"ships";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:26:"ZONE:zoneC kings and ships";}i:14;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:202;a:2:{s:6:"weight";s:4:"1628";s:5:"attrs";a:1:{s:3:"gid";i:3;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:5:"sweet";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}s:8:"disagree";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:33:"ZONE:(zoneA,zoneB) sweet disagree";}i:15;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:2:{i:202;a:2:{s:6:"weight";s:4:"3639";s:5:"attrs";a:1:{s:3:"gid";i:3;}}i:300;a:2:{s:6:"weight";s:4:"3639";s:5:"attrs";a:1:{s:3:"gid";i:4;}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:3:{s:3:"who";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:2:"am";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:1:"i";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:10:""Who am 
I"";}i:16;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:4:{i:310;a:2:{s:6:"weight";s:4:"2569";s:5:"attrs";a:1:{s:3:"gid";i:4;}}i:311;a:2:{s:6:"weight";s:4:"2569";s:5:"attrs";a:1:{s:3:"gid";i:4;}}i:202;a:2:{s:6:"weight";s:4:"2551";s:5:"attrs";a:1:{s:3:"gid";i:3;}}i:300;a:2:{s:6:"weight";s:4:"2551";s:5:"attrs";a:1:{s:3:"gid";i:4;}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:2:"of";a:2:{s:4:"docs";s:1:"8";s:4:"hits";s:2:"10";}s:4:"this";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"6";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:9:""of this"";}i:17;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:2:{i:310;a:2:{s:6:"weight";s:4:"2626";s:5:"attrs";a:1:{s:3:"gid";i:4;}}i:311;a:2:{s:6:"weight";s:4:"2626";s:5:"attrs";a:1:{s:3:"gid";i:4;}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:4:"like";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"5";}s:4:"this";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"6";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:22:"ZONE:zoneA "like this"";}i:18;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:400;a:2:{s:6:"weight";s:4:"1722";s:5:"attrs";a:1:{s:3:"gid";i:5;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:7:"strikes";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"strikes";}}}sphinx-2.0.4-release/test/test_032/0000755000176700017710000000000011724063141016264 5ustar deogardeogarsphinx-2.0.4-release/test/test_032/test.xml0000644000176700017710000000501711405706037017774 0ustar deogardeogar block index vs cutoff, updates, ranges indexer { mem_limit = 16M } searchd { } source src { type = mysql sql_query = SELECT * FROM test_table sql_attr_uint = tag } index idx { source = src path = /main charset_type = utf-8 } SetFilter ( "tag", array(150) ); $results[] = $client->Query ( "" ); if ( !$results[0] ) { $results = false; // maybe its temporary; lets retry return; } // update that block $results[] = $client->UpdateAttributes ( "idx", array("tag"), array(137=>array(1137)) ); // find the updated match // checks that it works after the update $client->ResetFilters (); $client->SetFilter ( "tag", array(1137) ); $results[] = $client->Query ( "" ); // find the matches by range // checks that range queries vs fullscan block work $client->ResetFilters (); $client->SetFilterRange ( "tag", 198, 1100 ); $results[] = $client->Query ( "" ); $client->ResetFilters (); $client->SetFilterRange ( "tag", 127, 128 ); $results[] = $client->Query ( "" ); // final fixup; we don't want to compare times for ( $i=0; $i CREATE TABLE `test_table` ( `document_id` int(11) NOT NULL default '0', `tag` int(11) NOT NULL default '0', `body` varchar(255) NOT NULL default '' ) INSERT INTO test_table VALUES ( 1,1,'one' ), ( 2,2,'one' ), ( 3,3,'one' ), ( 4,4,'one' ) INSERT INTO test_table SELECT document_id+4, tag+4, body FROM test_table INSERT INTO test_table SELECT document_id+8, tag+8, body FROM test_table INSERT INTO test_table SELECT document_id+16, tag+16, body FROM test_table INSERT INTO test_table SELECT document_id+32, tag+32, body FROM test_table INSERT INTO test_table SELECT document_id+64, tag+64, body FROM test_table INSERT 
INTO test_table SELECT document_id+128, tag+128, body FROM test_table DELETE FROM test_table WHERE document_id>200 UPDATE test_table SET tag=200 WHERE document_id=1 DROP TABLE IF EXISTS test_table sphinx-2.0.4-release/test/test_032/model.bin0000644000176700017710000000257111102721764020065 0ustar deogardeogara:1:{i:0;a:1:{i:0;a:5:{i:0;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:1:{s:3:"tag";i:1;}s:7:"matches";a:1:{i:150;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"tag";s:3:"150";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";}i:1;i:1;i:2;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:1:{s:3:"tag";i:1;}s:7:"matches";a:1:{i:137;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"tag";s:4:"1137";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";}i:3;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:1:{s:3:"tag";i:1;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"tag";s:3:"200";}}i:198;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"tag";s:3:"198";}}i:199;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"tag";s:3:"199";}}i:200;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"tag";s:3:"200";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";}i:4;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:1:{s:3:"tag";i:1;}s:7:"matches";a:2:{i:127;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"tag";s:3:"127";}}i:128;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"tag";s:3:"128";}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";}}}}sphinx-2.0.4-release/test/test_085/0000755000176700017710000000000011724063141016274 5ustar deogardeogarsphinx-2.0.4-release/test/test_085/test.xml0000644000176700017710000000230111271157525020000 0ustar deogardeogar merge vs dupecounter indexer { mem_limit = 16M } searchd { } source srcmain { type = mysql sql_query = SELECT id, body, id*10 AS xid FROM test_table WHERE id in (1,2,3) sql_attr_uint = xid } source srcdelta : srcmain { sql_query = SELECT id, body, id*100 AS xid FROM test_table WHERE id in (3,4,5) } index main { source = srcmain path = /main charset_type = utf-8 } index delta { source = srcdelta path = /delta charset_type = utf-8 } --merge main delta one three five CREATE TABLE `test_table` ( `id` int(11) NOT NULL default '0', `body` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 1, 'one' ), ( 2, 'two' ), ( 3, 'three blind mice, how they run' ), ( 4, 'four' ), ( 5, 'five' ) sphinx-2.0.4-release/test/test_085/model.bin0000644000176700017710000000242011271157525020073 0ustar 
deogardeogara:1:{i:0;a:3:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:1:{s:3:"xid";i:1;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"xid";s:2:"10";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"one";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:3:"one";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:1:{s:3:"xid";i:1;}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"xid";s:3:"300";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"three";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:5:"three";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:1:{s:3:"xid";i:1;}s:7:"matches";a:1:{i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:3:"xid";s:3:"500";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"five";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"five";}}}sphinx-2.0.4-release/test/test_173/0000755000176700017710000000000011724063141016272 5ustar deogardeogarsphinx-2.0.4-release/test/test_173/test.xml0000644000176700017710000000221311542137511017772 0ustar deogardeogar processing error messages from local agents indexer { mem_limit = 16M } searchd { dist_threads = 3 } source src { type = mysql sql_attr_uint = attru sql_attr_string = attrs sql_query = SELECT * FROM test_table WHERE id BETWEEN $start AND $end sql_query_range = SELECT 1,1 } source src1: src { sql_query_range = SELECT 2,2 } index idx { source = src path = /main charset_type = utf-8 } index idx1 { source = src1 path = /main1 } index idxx { type = distributed local = idx local = idx1 } select attru, attru+attrs ss from idx select attru, attru+attrs ss from idxx CREATE TABLE test_table ( id INTEGER PRIMARY KEY NOT NULL, attru INTEGER NOT NULL, attrs VARCHAR(10) NOT NULL, text VARCHAR(10) NOT NULL ) ENGINE=MYISAM insert into test_table values ( 1, 1, 'test', 'test' ), (2, 2, 'test', 'test') DROP TABLE IF EXISTS test_table sphinx-2.0.4-release/test/test_173/model.bin0000644000176700017710000000064411542137511020071 0ustar deogardeogara:1:{i:0;a:2:{i:0;a:3:{s:8:"sphinxql";s:37:"select attru, attru+attrs ss from idx";s:5:"error";s:90:"index idx: parse error: Sphinx expr: syntax error, unexpected TOK_ATTR_STRING near 'attrs'";s:5:"errno";i:1064;}i:1;a:3:{s:8:"sphinxql";s:38:"select attru, attru+attrs ss from idxx";s:5:"error";s:95:"index idx,idx1: parse error: Sphinx expr: syntax error, unexpected TOK_ATTR_STRING near 'attrs'";s:5:"errno";i:1064;}}}sphinx-2.0.4-release/test/test_186/0000755000176700017710000000000011724063141016276 5ustar deogardeogarsphinx-2.0.4-release/test/test_186/test.xml0000644000176700017710000000735711626725463020030 0ustar deogardeogar Bigint and float attribute updates via sphinxql indexer { mem_limit = 16M } searchd { workers = threads binlog_path = # } source src { type = mysql sql_query = SELECT * FROM test_table WHERE id<4 sql_attr_uint = section sql_attr_bigint = bint sql_attr_float = bfloat } source src1 : src { sql_query = SELECT * FROM test_table WHERE id<3 } source src2 : src { sql_query = SELECT * FROM test_table WHERE id=3 } index dist_no { source = src path = /idx 
docinfo = extern charset_type = utf-8 min_word_len = 1 } index idx1 : dist_no { source = src1 path = /idx1 } index idx2 : dist_no { source = src2 path = /idx2 } index dist0 { type = distributed local = dist_no agent_connect_timeout = 1000 agent_query_timeout = 3000 } index dist1 { type = distributed local = idx1 local = idx2 agent_connect_timeout = 1000 agent_query_timeout = 3000 } index dist2 { type = distributed agent = :idx1 local = idx2 agent_connect_timeout = 1000 agent_query_timeout = 3000 } index dist3 { type = distributed local = idx1 agent = :idx2 agent_connect_timeout = 1000 agent_query_timeout = 3000 } index dist4 { type = distributed agent = :idx1 agent = :idx2 agent_connect_timeout = 1000 agent_query_timeout = 3000 } index dist5 { type = distributed agent = :dist_no agent_connect_timeout = 1000 agent_query_timeout = 3000 } index rt { type = rt path = /rt charset_type = utf-8 docinfo = extern rt_field = body rt_attr_uint = gid rt_attr_bigint = bint rt_attr_float = bfloat } insert into rt (id, gid, bint, bfloat, body) values (1, 1, 1, 1.0, 'dummy'), (3, 33, 44, 55.0, 'dummy1') select * from rt select * from dist_no update dist_no set bint=-4560000000000, bfloat=55.5, section=111 where match ('test1') select * from dist_no update dist0 set bint=40000000000, bfloat=55.5, section=111 where match ('test1') select * from dist0 update dist1 set bint=410000000000, bfloat=56.6, section=112 where match ('test1') select * from dist1 update dist2 set bint=420000000000, bfloat=57.7, section=113 where match ('test1') select * from dist2 update dist3 set bint=-430000000000, bfloat=58.8, section=114 where match ('test1') select * from dist3 update dist4 set bint=440000000000, bfloat=59.9, section=115 where match ('test1') select * from dist4 update dist5 set bint=-450000000000, bfloat=60.4, section=116 where match ('test1') select * from dist5 update rt set gid=3212, bfloat=55.5, bint=2340000000000 where id=1 select * from rt CREATE TABLE `test_table` ( `id` int(11) DEFAULT NULL, `text` varchar (255) NOT NULL, `section` int(11) DEFAULT NULL, `bint` BIGINT NOT NULL, `bfloat` float DEFAULT NULL ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` (`id`, `text`, `section`, `bint`, `bfloat`) VALUES (1, 'test test1', 101, 7000000000, 3.14), (2, 'testt test2', 102, 8000000000, 1.44), (3, 'test test3', 103, -7000000000, 3.42), (4, 'testt test4', 104, -10000000000, 246.2); sphinx-2.0.4-release/test/test_186/model.bin0000644000176700017710000001327211626725463020112 0ustar deogardeogara:1:{i:0;a:19:{i:0;a:2:{s:8:"sphinxql";s:104:"insert into rt (id, gid, bint, bfloat, body) values (1, 1, 1, 1.0, 'dummy'), (3, 33, 44, 55.0, 'dummy1')";s:14:"total_affected";i:2;}i:1;a:3:{s:8:"sphinxql";s:16:"select * from rt";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:5:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"gid";s:1:"1";s:4:"bint";s:1:"1";s:6:"bfloat";s:8:"1.000000";}i:1;a:5:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"gid";s:2:"33";s:4:"bint";s:2:"44";s:6:"bfloat";s:9:"55.000000";}}}i:2;a:3:{s:8:"sphinxql";s:21:"select * from dist_no";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:5:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:7:"section";s:3:"101";s:4:"bint";s:10:"7000000000";s:6:"bfloat";s:8:"3.140000";}i:1;a:5:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:7:"section";s:3:"102";s:4:"bint";s:10:"8000000000";s:6:"bfloat";s:8:"1.440000";}i:2;a:5:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:7:"section";s:3:"103";s:4:"bint";s:11:"-7000000000";s:6:"bfloat";s:8:"3.420000";}}}i:3;a:2:{s:8:"sphinxql";s:86:"update dist_no set 
bint=-4560000000000, bfloat=55.5, section=111 where match ('test1')";s:14:"total_affected";i:1;}i:4;a:3:{s:8:"sphinxql";s:21:"select * from dist_no";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:5:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:7:"section";s:3:"111";s:4:"bint";s:14:"-4560000000000";s:6:"bfloat";s:9:"55.500000";}i:1;a:5:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:7:"section";s:3:"102";s:4:"bint";s:10:"8000000000";s:6:"bfloat";s:8:"1.440000";}i:2;a:5:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:7:"section";s:3:"103";s:4:"bint";s:11:"-7000000000";s:6:"bfloat";s:8:"3.420000";}}}i:5;a:2:{s:8:"sphinxql";s:81:"update dist0 set bint=40000000000, bfloat=55.5, section=111 where match ('test1')";s:14:"total_affected";i:1;}i:6;a:3:{s:8:"sphinxql";s:19:"select * from dist0";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:5:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:7:"section";s:3:"111";s:4:"bint";s:11:"40000000000";s:6:"bfloat";s:9:"55.500000";}i:1;a:5:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:7:"section";s:3:"102";s:4:"bint";s:10:"8000000000";s:6:"bfloat";s:8:"1.440000";}i:2;a:5:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:7:"section";s:3:"103";s:4:"bint";s:11:"-7000000000";s:6:"bfloat";s:8:"3.420000";}}}i:7;a:2:{s:8:"sphinxql";s:82:"update dist1 set bint=410000000000, bfloat=56.6, section=112 where match ('test1')";s:14:"total_affected";i:1;}i:8;a:3:{s:8:"sphinxql";s:19:"select * from dist1";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:5:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:7:"section";s:3:"112";s:4:"bint";s:12:"410000000000";s:6:"bfloat";s:9:"56.599998";}i:1;a:5:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:7:"section";s:3:"102";s:4:"bint";s:10:"8000000000";s:6:"bfloat";s:8:"1.440000";}i:2;a:5:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:7:"section";s:3:"103";s:4:"bint";s:11:"-7000000000";s:6:"bfloat";s:8:"3.420000";}}}i:9;a:2:{s:8:"sphinxql";s:82:"update dist2 set bint=420000000000, bfloat=57.7, section=113 where match ('test1')";s:14:"total_affected";i:1;}i:10;a:3:{s:8:"sphinxql";s:19:"select * from dist2";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:5:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:7:"section";s:3:"113";s:4:"bint";s:12:"420000000000";s:6:"bfloat";s:9:"57.700001";}i:1;a:5:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:7:"section";s:3:"102";s:4:"bint";s:10:"8000000000";s:6:"bfloat";s:8:"1.440000";}i:2;a:5:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:7:"section";s:3:"103";s:4:"bint";s:11:"-7000000000";s:6:"bfloat";s:8:"3.420000";}}}i:11;a:2:{s:8:"sphinxql";s:83:"update dist3 set bint=-430000000000, bfloat=58.8, section=114 where match ('test1')";s:14:"total_affected";i:1;}i:12;a:3:{s:8:"sphinxql";s:19:"select * from dist3";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:5:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:7:"section";s:3:"114";s:4:"bint";s:13:"-430000000000";s:6:"bfloat";s:9:"58.799999";}i:1;a:5:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:7:"section";s:3:"102";s:4:"bint";s:10:"8000000000";s:6:"bfloat";s:8:"1.440000";}i:2;a:5:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:7:"section";s:3:"103";s:4:"bint";s:11:"-7000000000";s:6:"bfloat";s:8:"3.420000";}}}i:13;a:2:{s:8:"sphinxql";s:82:"update dist4 set bint=440000000000, bfloat=59.9, section=115 where match ('test1')";s:14:"total_affected";i:1;}i:14;a:3:{s:8:"sphinxql";s:19:"select * from 
dist4";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:5:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:7:"section";s:3:"115";s:4:"bint";s:12:"440000000000";s:6:"bfloat";s:9:"59.900002";}i:1;a:5:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:7:"section";s:3:"102";s:4:"bint";s:10:"8000000000";s:6:"bfloat";s:8:"1.440000";}i:2;a:5:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:7:"section";s:3:"103";s:4:"bint";s:11:"-7000000000";s:6:"bfloat";s:8:"3.420000";}}}i:15;a:2:{s:8:"sphinxql";s:83:"update dist5 set bint=-450000000000, bfloat=60.4, section=116 where match ('test1')";s:14:"total_affected";i:1;}i:16;a:3:{s:8:"sphinxql";s:19:"select * from dist5";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:5:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:7:"section";s:3:"116";s:4:"bint";s:13:"-450000000000";s:6:"bfloat";s:9:"60.400002";}i:1;a:5:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:7:"section";s:3:"102";s:4:"bint";s:10:"8000000000";s:6:"bfloat";s:8:"1.440000";}i:2;a:5:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:7:"section";s:3:"103";s:4:"bint";s:11:"-7000000000";s:6:"bfloat";s:8:"3.420000";}}}i:17;a:2:{s:8:"sphinxql";s:66:"update rt set gid=3212, bfloat=55.5, bint=2340000000000 where id=1";s:14:"total_affected";i:1;}i:18;a:3:{s:8:"sphinxql";s:16:"select * from rt";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:5:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"gid";s:4:"3212";s:4:"bint";s:13:"2340000000000";s:6:"bfloat";s:9:"55.500000";}i:1;a:5:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"gid";s:2:"33";s:4:"bint";s:2:"44";s:6:"bfloat";s:9:"55.000000";}}}}}sphinx-2.0.4-release/test/test_014/0000755000176700017710000000000011724063141016264 5ustar deogardeogarsphinx-2.0.4-release/test/test_014/test.xml0000644000176700017710000000265111503636205017773 0ustar deogardeogar n-grams indexer { mem_limit = 16M } searchd { } source srclj { type = mysql sql_query = SELECT id, body FROM test_table sql_query_pre = SET NAMES utf8 } index lj { source = srclj path = /lj charset_type = utf-8 charset_table = 0..9, A..Z->a..z, _, a..z ngram_chars = U+410..U+42F->U+430..U+44F, U+430..U+44F ngram_len = 1 } ABCDEF A B C D E F ÐБВГДЕЖ РБ Ð’ Г Д Е Ж CREATE TABLE `test_table` ( `id` int(11) NOT NULL default '0', `document_id` int(5) NOT NULL default '0', `body` varchar(255) NOT NULL default '' ) CHARACTER SET utf8 DROP TABLE IF EXISTS `test_table` SET NAMES utf8 INSERT INTO `test_table` VALUES (1,1,'ABCDEF ÐБВГДЕЖ') sphinx-2.0.4-release/test/test_014/model.bin0000644000176700017710000001212010734530041020050 0ustar 
deogardeogara:1:{i:0;a:15:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"abcdef";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:6:"ABCDEF";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:1:"a";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:1:"A";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:1:"b";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:1:"B";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:1:"c";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:1:"C";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:1:"d";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:1:"D";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:1:"e";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:1:"E";}i:6;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:1:"f";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:1:"F";}i:7;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"7";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.007";s:5:"words";a:7:{s:2:"а";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:2:"б";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:2:"в";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:2:"г";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:2:"д";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:2:"е";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:2:"ж";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:14:"ÐБВГДЕЖ";}i:8;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"а";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:2:"Ð";}i:9;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"б";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:2:"Б";}i:10;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7
:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:2:"в";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:2:"Ð’";}i:11;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"г";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:2:"Г";}i:12;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"д";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:2:"Д";}i:13;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"е";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:2:"Е";}i:14;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"ж";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:2:"Ж";}}}sphinx-2.0.4-release/test/test_136/0000755000176700017710000000000011724063141016271 5ustar deogardeogarsphinx-2.0.4-release/test/test_136/test.xml0000644000176700017710000000433311634664540020007 0ustar deogardeogar persistent MVA updates surviving after hard killing part 2 - *nix only indexer { mem_limit = 16M } searchd { binlog_flush = 1 binlog_path = workers = threads } source src { type = mysql sql_query = SELECT id, text, section, mva1 FROM test_table sql_attr_uint = section sql_attr_multi = uint mva1 from field mva1 sql_attr_multi = bigint mva1 from field mva1 } index idx_131 { source = src path = /idx_131 charset_type = utf-8 docinfo = extern } UpdateAttributes ( "idx_131", array("mva1"), array(1=>array(array(2,3,4)), 3=>array(array(6,7,8))),true); if ( $up >= 0 ) $results[] = sprintf("up.ok=%d", $up); else $results[] = sprintf("up.err=%s", $client->GetLastError()); $stop = StopWaitSearchd ('config.conf', 'searchd.pid'); $results[] = 'stoped='.($stop==0 ? 'ok' : 'err' ) . '(code=' . $stop . 
')'; $startSta = StartSearchd ( 'config.conf', 'error.txt', 'searchd.pid', $error, true ); if ( $startSta == 0 || $startSta == 2 ) { $results[] = "started=ok"; } else $results[] = sprintf("start.err=%d local=%s client=%s", $startSta, $error, $client->GetLastError()); } // find the updated match $results[] = $client->Query ( "test3"); // final fixup; we don't want to compare times for ( $i=0; $i CREATE TABLE `test_table` ( `id` int(11) DEFAULT NULL, `text` varchar (255) NOT NULL, `section` int(11) DEFAULT NULL, `mva1` varchar(255) NOT NULL ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` (`id`, `text`, `section`, `mva1`) VALUES (1, 'test1', 101, '1001'), (2, 'test2', 102, '1002 1023 4456'), (3, 'test3', 103, '1003 1008 1010'), (4, 'test4', 104, '1004 1005 1006'); sphinx-2.0.4-release/test/test_136/model.bin0000644000176700017710000000272211605620330020063 0ustar deogardeogara:2:{i:0;a:1:{i:0;a:17:{i:0;s:11:"iteration=0";i:1;s:7:"up.ok=2";i:2;s:17:"stoped=ok(code=0)";i:3;s:10:"started=ok";i:4;s:11:"iteration=1";i:5;s:7:"up.ok=2";i:6;s:17:"stoped=ok(code=0)";i:7;s:10:"started=ok";i:8;s:11:"iteration=2";i:9;s:7:"up.ok=2";i:10;s:17:"stoped=ok(code=0)";i:11;s:10:"started=ok";i:12;s:11:"iteration=3";i:13;s:7:"up.ok=2";i:14;s:17:"stoped=ok(code=0)";i:15;s:10:"started=ok";i:16;a:6:{s:5:"error";s:0:"";s:5:"attrs";a:2:{s:7:"section";i:1;s:4:"mva1";i:1073741825;}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:7:"section";i:103;s:4:"mva1";a:3:{i:0;i:6;i:1;i:7;i:2;i:8;}}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:1:{s:5:"test3";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}}}i:1;a:1:{i:0;a:17:{i:0;s:11:"iteration=0";i:1;s:7:"up.ok=2";i:2;s:17:"stoped=ok(code=0)";i:3;s:10:"started=ok";i:4;s:11:"iteration=1";i:5;s:7:"up.ok=2";i:6;s:17:"stoped=ok(code=0)";i:7;s:10:"started=ok";i:8;s:11:"iteration=2";i:9;s:7:"up.ok=2";i:10;s:17:"stoped=ok(code=0)";i:11;s:10:"started=ok";i:12;s:11:"iteration=3";i:13;s:7:"up.ok=2";i:14;s:17:"stoped=ok(code=0)";i:15;s:10:"started=ok";i:16;a:6:{s:5:"error";s:0:"";s:5:"attrs";a:2:{s:7:"section";i:1;s:4:"mva1";i:1073741825;}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:7:"section";i:103;s:4:"mva1";a:3:{i:0;s:1:"6";i:1;s:1:"7";i:2;s:1:"8";}}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:1:{s:5:"test3";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}}}}sphinx-2.0.4-release/test/test_153/0000755000176700017710000000000011724063141016270 5ustar deogardeogarsphinx-2.0.4-release/test/test_153/test.xml0000644000176700017710000002452111607501346020001 0ustar deogardeogar aggregate, aliases, different sorting, grouping and schemas vs different type of distributed indexer { mem_limit = 28M } searchd { compat_sphinxql_magics = 0 workers = threads } source auth { type = mysql } source src : auth { sql_query = SELECT id, tag, gr, text FROM test_table WHERE id<4 sql_query_pre = SET NAMES UTF8 sql_attr_uint = tag sql_attr_uint = gr } source src1 : src { sql_query = SELECT id, tag, gr, text FROM test_table WHERE id<3 } source src2 : src { sql_query = SELECT id, tag, gr, text FROM test_table WHERE id=3 } source src51 : src { sql_query = SELECT id, tag, gr, text FROM test_table WHERE id<6 } source src52 : src { sql_query = SELECT id, tag, gr, text FROM test_table WHERE id>5 } source fake: auth { sql_query = SELECT id, gr, text FROM test_table WHERE id<4 sql_attr_uint = gr } index dist_no { source = src path = /idx docinfo = extern charset_type = utf-8 min_word_len = 1 } index idx1 : dist_no { source = src1 path 
= /idx1 } index idx2 : dist_no { source = src2 path = /idx2 } index idx51 : dist_no { source = src51 path = /idx51 } index idx52 : dist_no { source = src52 path = /idx52 } index idxfake : dist_no { source = fake path = /otheridx } index dist50 { type = distributed local = idx51 agent = :idx52 agent_connect_timeout = 1000 agent_query_timeout = 3000 } index dist52 { type = distributed local = idx51 local = idx52 agent_connect_timeout = 1000 agent_query_timeout = 3000 } index dist0 { type = distributed local = dist_no agent_connect_timeout = 1000 agent_query_timeout = 3000 } index dist1 { type = distributed local = idx1 local = idx2 agent_connect_timeout = 1000 agent_query_timeout = 3000 } index dist2 { type = distributed agent = :idx1 local = idx2 agent_connect_timeout = 1000 agent_query_timeout = 3000 } index dist3 { type = distributed local = idx1 agent = :idx2 agent_connect_timeout = 1000 agent_query_timeout = 3000 } index dist4 { type = distributed agent = :idx1 agent = :idx2 agent_connect_timeout = 1000 agent_query_timeout = 3000 } index dist5 { type = distributed agent = :dist_no agent_connect_timeout = 1000 agent_query_timeout = 3000 } index distfake { type = distributed agent = :idx51 agent = :idxfake agent_connect_timeout = 1000 agent_query_timeout = 3000 } index disthfake { type = distributed local = idx51 agent = :idxfake agent_connect_timeout = 1000 agent_query_timeout = 3000 } index disthfakeb { type = distributed agent = :idx51 local = idxfake agent_connect_timeout = 1000 agent_query_timeout = 3000 } index distlfake { type = distributed local = idx51 local = idxfake agent_connect_timeout = 1000 agent_query_timeout = 3000 } test test --> select * from dist_no select * from dist0 select * from dist1 select * from dist2 select * from dist3 select * from dist4 select * from dist5 select gr, sum(tag) as t from dist_no group by gr select gr, sum(tag) as t from dist0 group by gr select gr, sum(tag) as t from dist1 group by gr select gr, sum(tag) as t from dist2 group by gr select gr, sum(tag) as t from dist3 group by gr select tag,gr from dist4 group by tag order by gr asc select gr, sum(tag) as t from dist4 group by gr select gr, sum(tag) as t from dist5 group by gr select sum(tag) as t from dist0 group by gr select sum(tag) as t from dist5 group by gr select gr, tag+1 as t from dist0 group by gr select gr, tag+1 as t from dist5 group by gr select tag+1 as t from dist0 where match('test') ORDER BY gr ASC select tag+1 as t from dist5 where match('test') ORDER BY gr ASC select tag+1 as t, gr from dist0 where match('test') ORDER BY gr ASC select tag+1 as t, gr from dist5 where match('test') ORDER BY gr ASC select tag+1 as t, gr, tag, gr from dist0 where match('test') ORDER BY gr ASC select tag+1 as t, gr, tag, gr from dist5 where match('test') ORDER BY gr ASC select tag+1 as t, gr, gr+1 as g from dist0 where match('test') ORDER BY gr ASC select tag+1 as t, gr, gr+1 as g from dist5 where match('test') ORDER BY gr ASC select tag+1 as t, gr, tag as tt, tag, tag from dist0 where match('test') ORDER BY gr ASC select tag+1 as t, gr, tag as tt, tag, tag from dist5 where match('test') ORDER BY gr ASC select tag+1 as t from dist0 ORDER BY gr ASC select tag+1 as t from dist5 ORDER BY gr ASC select * from dist0 where match('test') select * from dist5 where match('test') select @id as idd, gr,tag+1 as t from dist50 order by gr asc select @id as idd, tag+1 as t from dist50 order by gr asc select @id as idd, gr,tag+1 as t from dist52 order by gr asc select @id as idd, tag+1 as t from dist52 order by 
gr asc select @id as idd, gr,tag+1 as t from idx51 order by gr asc select @id as idd, tag+1 as t from idx51 order by gr asc select @id as idd, gr+1 as t from distfake group by t order by tag asc select @id as idd, gr+1 as t from disthfakeb group by t order by tag asc select @id as idd, gr+1 as t from disthfake group by t order by tag asc select @id as idd, gr+1 as t from distlfake group by t order by tag asc select * from dist50 where match ( 'broken merge' ) CREATE TABLE `test_table` ( `id` int(11), `tag` int(11), `gr` int(11), `text` varchar(255) NOT NULL ) DROP TABLE IF EXISTS `test_table` SET NAMES utf8 INSERT INTO `test_table` VALUES ( 1, 2, 3, 'test'), ( 2, 2, 2, 'test'), ( 3, 2, 3, 'test'), ( 4, 2, 10, 'test'), ( 5, 2, 9, 'test'), ( 6, 2, 8, 'test'), ( 7, 2, 7, 'test'), ( 8, 2, 6, 'test'), ( 9, 2, 5, 'test'), ( 10, 2, 4, 'test') sphinx-2.0.4-release/test/test_153/model.bin0000644000176700017710000006365511607501346020105 0ustar deogardeogara:1:{i:0;a:64:{i:0;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:4:{s:2:"gr";i:1;s:3:"tag";i:1;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:2:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:2:"gr";s:1:"3";s:3:"tag";s:1:"2";s:8:"@groupby";s:1:"3";s:6:"@count";s:1:"2";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:2:"gr";s:1:"2";s:3:"tag";s:1:"2";s:8:"@groupby";s:1:"2";s:6:"@count";s:1:"1";}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:1;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:4:{s:3:"tag";i:1;s:2:"gr";i:1;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:2:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:3:"tag";s:1:"2";s:2:"gr";s:1:"3";s:8:"@groupby";s:1:"3";s:6:"@count";s:1:"2";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:3:"tag";s:1:"2";s:2:"gr";s:1:"2";s:8:"@groupby";s:1:"2";s:6:"@count";s:1:"1";}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:2;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:4:{s:2:"gr";i:1;s:1:"t";i:1;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:2:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:2:"gr";s:1:"3";s:1:"t";s:1:"4";s:8:"@groupby";s:1:"3";s:6:"@count";s:1:"2";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:2:"gr";s:1:"2";s:1:"t";s:1:"2";s:8:"@groupby";s:1:"2";s:6:"@count";s:1:"1";}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:3;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:4:{s:2:"gr";i:1;s:1:"t";i:1;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:2:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:2:"gr";s:1:"3";s:1:"t";s:1:"4";s:8:"@groupby";s:1:"3";s:6:"@count";s:1:"2";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:2:"gr";s:1:"2";s:1:"t";s:1:"2";s:8:"@groupby";s:1:"2";s:6:"@count";s:1:"1";}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:4;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:4:{s:2:"gr";i:1;s:1:"t";i:1;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:2:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s
:2:"gr";s:1:"3";s:1:"t";s:1:"4";s:8:"@groupby";s:1:"3";s:6:"@count";s:1:"2";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:2:"gr";s:1:"2";s:1:"t";s:1:"2";s:8:"@groupby";s:1:"2";s:6:"@count";s:1:"1";}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.001";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:5;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:4:{s:2:"gr";i:1;s:1:"t";i:1;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:2:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:2:"gr";s:1:"3";s:1:"t";s:1:"4";s:8:"@groupby";s:1:"3";s:6:"@count";s:1:"2";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:2:"gr";s:1:"2";s:1:"t";s:1:"2";s:8:"@groupby";s:1:"2";s:6:"@count";s:1:"1";}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.003";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:6;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:4:{s:2:"gr";i:1;s:1:"t";i:1;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:2:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:2:"gr";s:1:"3";s:1:"t";s:1:"4";s:8:"@groupby";s:1:"3";s:6:"@count";s:1:"2";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:2:"gr";s:1:"2";s:1:"t";s:1:"2";s:8:"@groupby";s:1:"2";s:6:"@count";s:1:"1";}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.003";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:7;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:4:{s:2:"gr";i:1;s:1:"t";i:1;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:2:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:2:"gr";s:1:"3";s:1:"t";s:1:"4";s:8:"@groupby";s:1:"3";s:6:"@count";s:1:"2";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:2:"gr";s:1:"2";s:1:"t";s:1:"2";s:8:"@groupby";s:1:"2";s:6:"@count";s:1:"1";}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.013";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:8;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:4:{s:2:"gr";i:1;s:1:"t";i:1;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:2:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:2:"gr";s:1:"3";s:1:"t";s:1:"4";s:8:"@groupby";s:1:"3";s:6:"@count";s:1:"2";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:2:"gr";s:1:"2";s:1:"t";s:1:"2";s:8:"@groupby";s:1:"2";s:6:"@count";s:1:"1";}}}s:5:"total";s:1:"2";s:11:"total_found";s:1:"2";s:4:"time";s:5:"0.002";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:9;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:5:{s:1:"t";i:1;s:3:"tag";i:1;s:2:"gr";i:1;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:1:"t";s:1:"3";s:3:"tag";s:1:"2";s:2:"gr";s:1:"3";s:8:"@groupby";s:1:"2";s:6:"@count";s:1:"3";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:10;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:5:{s:1:"t";i:1;s:3:"tag";i:1;s:2:"gr";i:1;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:1:"t";s:1:"3";s:3:"tag";s:1:"2";s:2:"gr";s:1:"3";s:8:"@groupby";s:1:"2";s:6:"@count";s:1:"3";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time"
;s:5:"0.003";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:11;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:4:{s:3:"tag";i:1;s:2:"gr";i:1;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:3:"tag";s:1:"2";s:2:"gr";s:1:"3";s:8:"@groupby";s:1:"2";s:6:"@count";s:1:"3";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:12;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:2:{s:3:"tag";i:1;s:2:"gr";i:1;}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"tag";s:1:"2";s:2:"gr";s:1:"3";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"tag";s:1:"2";s:2:"gr";s:1:"2";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"tag";s:1:"2";s:2:"gr";s:1:"3";}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"test";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"test";}i:13;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:2:{s:3:"tag";i:1;s:2:"gr";i:1;}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"tag";s:1:"2";s:2:"gr";s:1:"3";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"tag";s:1:"2";s:2:"gr";s:1:"2";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:3:"tag";s:1:"2";s:2:"gr";s:1:"3";}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:4:"test";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"test";}i:14;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:4:{s:1:"t";i:1;s:2:"gr";i:1;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:1:"t";s:1:"3";s:2:"gr";s:1:"3";s:8:"@groupby";s:1:"3";s:6:"@count";s:2:"10";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:15;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:2:{s:1:"t";i:1;s:2:"gr";i:1;}s:7:"matches";a:10:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"t";s:1:"3";s:2:"gr";s:1:"3";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"t";s:1:"3";s:2:"gr";s:1:"2";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"t";s:1:"3";s:2:"gr";s:1:"3";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"t";s:1:"3";s:2:"gr";s:2:"10";}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"t";s:1:"3";s:2:"gr";s:1:"9";}}i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"t";s:1:"3";s:2:"gr";s:1:"8";}}i:7;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"t";s:1:"3";s:2:"gr";s:1:"7";}}i:8;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"t";s:1:"3";s:2:"gr";s:1:"6";}}i:9;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"t";s:1:"3";s:2:"gr";s:1:"5";}}i:10;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:1:"t";s:1:"3";s:2:"gr";s:1:"4";}}}s:5:"total";s:2:"10";s:11:"total_found";s:2:"10";s:4:"time";s:5:"0.001";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:16;a:12:{s:5:"error";s:0:"";s:7:"warning";s:145:"index distfake: agent 127.0.0.1:6722: remote query error: index idxfake: parse error: unknown identifier 'tag' (not an attribute, not a 
function)";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:4:{s:1:"t";i:1;s:3:"tag";i:1;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:1:"t";s:1:"4";s:3:"tag";s:1:"2";s:8:"@groupby";s:1:"4";s:6:"@count";s:1:"2";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:1:"t";s:1:"3";s:3:"tag";s:1:"2";s:8:"@groupby";s:1:"3";s:6:"@count";s:1:"1";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:1:"t";s:2:"11";s:3:"tag";s:1:"2";s:8:"@groupby";s:2:"11";s:6:"@count";s:1:"1";}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:1:"t";s:2:"10";s:3:"tag";s:1:"2";s:8:"@groupby";s:2:"10";s:6:"@count";s:1:"1";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.011";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:17;a:12:{s:5:"error";s:0:"";s:7:"warning";s:87:"index idxfake: parse error: unknown identifier 'tag' (not an attribute, not a function)";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:4:{s:1:"t";i:1;s:3:"tag";i:1;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:1:"t";s:1:"4";s:3:"tag";s:1:"2";s:8:"@groupby";s:1:"4";s:6:"@count";s:1:"2";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:1:"t";s:1:"3";s:3:"tag";s:1:"2";s:8:"@groupby";s:1:"3";s:6:"@count";s:1:"1";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:1:"t";s:2:"11";s:3:"tag";s:1:"2";s:8:"@groupby";s:2:"11";s:6:"@count";s:1:"1";}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:1:"t";s:2:"10";s:3:"tag";s:1:"2";s:8:"@groupby";s:2:"10";s:6:"@count";s:1:"1";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.002";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:18;a:12:{s:5:"error";s:0:"";s:7:"warning";s:146:"index disthfake: agent 127.0.0.1:6722: remote query error: index idxfake: parse error: unknown identifier 'tag' (not an attribute, not a function)";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:4:{s:1:"t";i:1;s:3:"tag";i:1;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:1:"t";s:1:"4";s:3:"tag";s:1:"2";s:8:"@groupby";s:1:"4";s:6:"@count";s:1:"2";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:1:"t";s:1:"3";s:3:"tag";s:1:"2";s:8:"@groupby";s:1:"3";s:6:"@count";s:1:"1";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:1:"t";s:2:"11";s:3:"tag";s:1:"2";s:8:"@groupby";s:2:"11";s:6:"@count";s:1:"1";}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:1:"t";s:2:"10";s:3:"tag";s:1:"2";s:8:"@groupby";s:2:"10";s:6:"@count";s:1:"1";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.002";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:19;a:12:{s:5:"error";s:0:"";s:7:"warning";s:87:"index idxfake: parse error: unknown identifier 'tag' (not an attribute, not a 
function)";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:4:{s:1:"t";i:1;s:3:"tag";i:1;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:1:"t";s:1:"4";s:3:"tag";s:1:"2";s:8:"@groupby";s:1:"4";s:6:"@count";s:1:"2";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:1:"t";s:1:"3";s:3:"tag";s:1:"2";s:8:"@groupby";s:1:"3";s:6:"@count";s:1:"1";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:1:"t";s:2:"11";s:3:"tag";s:1:"2";s:8:"@groupby";s:2:"11";s:6:"@count";s:1:"1";}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:1:"t";s:2:"10";s:3:"tag";s:1:"2";s:8:"@groupby";s:2:"10";s:6:"@count";s:1:"1";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.001";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:20;a:3:{s:8:"sphinxql";s:21:"select * from dist_no";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:3:{s:2:"id";s:1:"1";s:3:"tag";s:1:"2";s:2:"gr";s:1:"3";}i:1;a:3:{s:2:"id";s:1:"2";s:3:"tag";s:1:"2";s:2:"gr";s:1:"2";}i:2;a:3:{s:2:"id";s:1:"3";s:3:"tag";s:1:"2";s:2:"gr";s:1:"3";}}}i:21;a:3:{s:8:"sphinxql";s:19:"select * from dist0";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:3:{s:2:"id";s:1:"1";s:3:"tag";s:1:"2";s:2:"gr";s:1:"3";}i:1;a:3:{s:2:"id";s:1:"2";s:3:"tag";s:1:"2";s:2:"gr";s:1:"2";}i:2;a:3:{s:2:"id";s:1:"3";s:3:"tag";s:1:"2";s:2:"gr";s:1:"3";}}}i:22;a:3:{s:8:"sphinxql";s:19:"select * from dist1";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:3:{s:2:"id";s:1:"1";s:3:"tag";s:1:"2";s:2:"gr";s:1:"3";}i:1;a:3:{s:2:"id";s:1:"2";s:3:"tag";s:1:"2";s:2:"gr";s:1:"2";}i:2;a:3:{s:2:"id";s:1:"3";s:3:"tag";s:1:"2";s:2:"gr";s:1:"3";}}}i:23;a:3:{s:8:"sphinxql";s:19:"select * from dist2";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:3:{s:2:"id";s:1:"1";s:3:"tag";s:1:"2";s:2:"gr";s:1:"3";}i:1;a:3:{s:2:"id";s:1:"2";s:3:"tag";s:1:"2";s:2:"gr";s:1:"2";}i:2;a:3:{s:2:"id";s:1:"3";s:3:"tag";s:1:"2";s:2:"gr";s:1:"3";}}}i:24;a:3:{s:8:"sphinxql";s:19:"select * from dist3";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:3:{s:2:"id";s:1:"1";s:3:"tag";s:1:"2";s:2:"gr";s:1:"3";}i:1;a:3:{s:2:"id";s:1:"2";s:3:"tag";s:1:"2";s:2:"gr";s:1:"2";}i:2;a:3:{s:2:"id";s:1:"3";s:3:"tag";s:1:"2";s:2:"gr";s:1:"3";}}}i:25;a:3:{s:8:"sphinxql";s:19:"select * from dist4";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:3:{s:2:"id";s:1:"1";s:3:"tag";s:1:"2";s:2:"gr";s:1:"3";}i:1;a:3:{s:2:"id";s:1:"2";s:3:"tag";s:1:"2";s:2:"gr";s:1:"2";}i:2;a:3:{s:2:"id";s:1:"3";s:3:"tag";s:1:"2";s:2:"gr";s:1:"3";}}}i:26;a:3:{s:8:"sphinxql";s:19:"select * from dist5";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:3:{s:2:"id";s:1:"1";s:3:"tag";s:1:"2";s:2:"gr";s:1:"3";}i:1;a:3:{s:2:"id";s:1:"2";s:3:"tag";s:1:"2";s:2:"gr";s:1:"2";}i:2;a:3:{s:2:"id";s:1:"3";s:3:"tag";s:1:"2";s:2:"gr";s:1:"3";}}}i:27;a:3:{s:8:"sphinxql";s:49:"select gr, sum(tag) as t from dist_no group by gr";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:2:{s:2:"gr";s:1:"3";s:1:"t";s:1:"4";}i:1;a:2:{s:2:"gr";s:1:"2";s:1:"t";s:1:"2";}}}i:28;a:3:{s:8:"sphinxql";s:47:"select gr, sum(tag) as t from dist0 group by gr";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:2:{s:2:"gr";s:1:"3";s:1:"t";s:1:"4";}i:1;a:2:{s:2:"gr";s:1:"2";s:1:"t";s:1:"2";}}}i:29;a:3:{s:8:"sphinxql";s:47:"select gr, sum(tag) as t from dist1 group by gr";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:2:{s:2:"gr";s:1:"3";s:1:"t";s:1:"4";}i:1;a:2:{s:2:"gr";s:1:"2";s:1:"t";s:1:"2";}}}i:30;a:3:{s:8:"sphinxql";s:47:"select gr, sum(tag) as t from dist2 group by 
gr";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:2:{s:2:"gr";s:1:"3";s:1:"t";s:1:"4";}i:1;a:2:{s:2:"gr";s:1:"2";s:1:"t";s:1:"2";}}}i:31;a:3:{s:8:"sphinxql";s:47:"select gr, sum(tag) as t from dist3 group by gr";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:2:{s:2:"gr";s:1:"3";s:1:"t";s:1:"4";}i:1;a:2:{s:2:"gr";s:1:"2";s:1:"t";s:1:"2";}}}i:32;a:3:{s:8:"sphinxql";s:53:"select tag,gr from dist4 group by tag order by gr asc";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:2:{s:3:"tag";s:1:"2";s:2:"gr";s:1:"3";}}}i:33;a:3:{s:8:"sphinxql";s:47:"select gr, sum(tag) as t from dist4 group by gr";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:2:{s:2:"gr";s:1:"3";s:1:"t";s:1:"4";}i:1;a:2:{s:2:"gr";s:1:"2";s:1:"t";s:1:"2";}}}i:34;a:3:{s:8:"sphinxql";s:47:"select gr, sum(tag) as t from dist5 group by gr";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:2:{s:2:"gr";s:1:"3";s:1:"t";s:1:"4";}i:1;a:2:{s:2:"gr";s:1:"2";s:1:"t";s:1:"2";}}}i:35;a:3:{s:8:"sphinxql";s:43:"select sum(tag) as t from dist0 group by gr";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:1:{s:1:"t";s:1:"4";}i:1;a:1:{s:1:"t";s:1:"2";}}}i:36;a:3:{s:8:"sphinxql";s:43:"select sum(tag) as t from dist5 group by gr";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:1:{s:1:"t";s:1:"4";}i:1;a:1:{s:1:"t";s:1:"2";}}}i:37;a:3:{s:8:"sphinxql";s:44:"select gr, tag+1 as t from dist0 group by gr";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:2:{s:2:"gr";s:1:"3";s:1:"t";s:1:"3";}i:1;a:2:{s:2:"gr";s:1:"2";s:1:"t";s:1:"3";}}}i:38;a:3:{s:8:"sphinxql";s:44:"select gr, tag+1 as t from dist5 group by gr";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:2:{s:2:"gr";s:1:"3";s:1:"t";s:1:"3";}i:1;a:2:{s:2:"gr";s:1:"2";s:1:"t";s:1:"3";}}}i:39;a:3:{s:8:"sphinxql";s:64:"select tag+1 as t from dist0 where match('test') ORDER BY gr ASC";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:1:{s:1:"t";s:1:"3";}i:1;a:1:{s:1:"t";s:1:"3";}i:2;a:1:{s:1:"t";s:1:"3";}}}i:40;a:3:{s:8:"sphinxql";s:64:"select tag+1 as t from dist5 where match('test') ORDER BY gr ASC";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:1:{s:1:"t";s:1:"3";}i:1;a:1:{s:1:"t";s:1:"3";}i:2;a:1:{s:1:"t";s:1:"3";}}}i:41;a:3:{s:8:"sphinxql";s:68:"select tag+1 as t, gr from dist0 where match('test') ORDER BY gr ASC";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:2:{s:1:"t";s:1:"3";s:2:"gr";s:1:"2";}i:1;a:2:{s:1:"t";s:1:"3";s:2:"gr";s:1:"3";}i:2;a:2:{s:1:"t";s:1:"3";s:2:"gr";s:1:"3";}}}i:42;a:3:{s:8:"sphinxql";s:68:"select tag+1 as t, gr from dist5 where match('test') ORDER BY gr ASC";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:2:{s:1:"t";s:1:"3";s:2:"gr";s:1:"2";}i:1;a:2:{s:1:"t";s:1:"3";s:2:"gr";s:1:"3";}i:2;a:2:{s:1:"t";s:1:"3";s:2:"gr";s:1:"3";}}}i:43;a:3:{s:8:"sphinxql";s:77:"select tag+1 as t, gr, tag, gr from dist0 where match('test') ORDER BY gr ASC";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:3:{s:1:"t";s:1:"3";s:2:"gr";s:1:"2";s:3:"tag";s:1:"2";}i:1;a:3:{s:1:"t";s:1:"3";s:2:"gr";s:1:"3";s:3:"tag";s:1:"2";}i:2;a:3:{s:1:"t";s:1:"3";s:2:"gr";s:1:"3";s:3:"tag";s:1:"2";}}}i:44;a:3:{s:8:"sphinxql";s:77:"select tag+1 as t, gr, tag, gr from dist5 where match('test') ORDER BY gr ASC";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:3:{s:1:"t";s:1:"3";s:2:"gr";s:1:"2";s:3:"tag";s:1:"2";}i:1;a:3:{s:1:"t";s:1:"3";s:2:"gr";s:1:"3";s:3:"tag";s:1:"2";}i:2;a:3:{s:1:"t";s:1:"3";s:2:"gr";s:1:"3";s:3:"tag";s:1:"2";}}}i:45;a:3:{s:8:"sphinxql";s:79:"select tag+1 as t, gr, gr+1 as g from dist0 where match('test') ORDER BY gr 
ASC";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:3:{s:1:"t";s:1:"3";s:2:"gr";s:1:"2";s:1:"g";s:1:"3";}i:1;a:3:{s:1:"t";s:1:"3";s:2:"gr";s:1:"3";s:1:"g";s:1:"4";}i:2;a:3:{s:1:"t";s:1:"3";s:2:"gr";s:1:"3";s:1:"g";s:1:"4";}}}i:46;a:3:{s:8:"sphinxql";s:79:"select tag+1 as t, gr, gr+1 as g from dist5 where match('test') ORDER BY gr ASC";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:3:{s:1:"t";s:1:"3";s:2:"gr";s:1:"2";s:1:"g";s:1:"3";}i:1;a:3:{s:1:"t";s:1:"3";s:2:"gr";s:1:"3";s:1:"g";s:1:"4";}i:2;a:3:{s:1:"t";s:1:"3";s:2:"gr";s:1:"3";s:1:"g";s:1:"4";}}}i:47;a:3:{s:8:"sphinxql";s:89:"select tag+1 as t, gr, tag as tt, tag, tag from dist0 where match('test') ORDER BY gr ASC";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:4:{s:1:"t";s:1:"3";s:2:"gr";s:1:"2";s:2:"tt";s:1:"2";s:3:"tag";s:1:"2";}i:1;a:4:{s:1:"t";s:1:"3";s:2:"gr";s:1:"3";s:2:"tt";s:1:"2";s:3:"tag";s:1:"2";}i:2;a:4:{s:1:"t";s:1:"3";s:2:"gr";s:1:"3";s:2:"tt";s:1:"2";s:3:"tag";s:1:"2";}}}i:48;a:3:{s:8:"sphinxql";s:89:"select tag+1 as t, gr, tag as tt, tag, tag from dist5 where match('test') ORDER BY gr ASC";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:4:{s:1:"t";s:1:"3";s:2:"gr";s:1:"2";s:2:"tt";s:1:"2";s:3:"tag";s:1:"2";}i:1;a:4:{s:1:"t";s:1:"3";s:2:"gr";s:1:"3";s:2:"tt";s:1:"2";s:3:"tag";s:1:"2";}i:2;a:4:{s:1:"t";s:1:"3";s:2:"gr";s:1:"3";s:2:"tt";s:1:"2";s:3:"tag";s:1:"2";}}}i:49;a:3:{s:8:"sphinxql";s:44:"select tag+1 as t from dist0 ORDER BY gr ASC";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:1:{s:1:"t";s:1:"3";}i:1;a:1:{s:1:"t";s:1:"3";}i:2;a:1:{s:1:"t";s:1:"3";}}}i:50;a:3:{s:8:"sphinxql";s:44:"select tag+1 as t from dist5 ORDER BY gr ASC";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:1:{s:1:"t";s:1:"3";}i:1;a:1:{s:1:"t";s:1:"3";}i:2;a:1:{s:1:"t";s:1:"3";}}}i:51;a:3:{s:8:"sphinxql";s:39:"select * from dist0 where match('test')";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:3:{s:2:"id";s:1:"1";s:3:"tag";s:1:"2";s:2:"gr";s:1:"3";}i:1;a:3:{s:2:"id";s:1:"2";s:3:"tag";s:1:"2";s:2:"gr";s:1:"2";}i:2;a:3:{s:2:"id";s:1:"3";s:3:"tag";s:1:"2";s:2:"gr";s:1:"3";}}}i:52;a:3:{s:8:"sphinxql";s:39:"select * from dist5 where match('test')";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:3:{s:2:"id";s:1:"1";s:3:"tag";s:1:"2";s:2:"gr";s:1:"3";}i:1;a:3:{s:2:"id";s:1:"2";s:3:"tag";s:1:"2";s:2:"gr";s:1:"2";}i:2;a:3:{s:2:"id";s:1:"3";s:3:"tag";s:1:"2";s:2:"gr";s:1:"3";}}}i:53;a:3:{s:8:"sphinxql";s:60:"select @id as idd, gr,tag+1 as t from dist50 order by gr asc";s:10:"total_rows";i:10;s:4:"rows";a:10:{i:0;a:3:{s:3:"idd";s:1:"2";s:2:"gr";s:1:"2";s:1:"t";s:1:"3";}i:1;a:3:{s:3:"idd";s:1:"1";s:2:"gr";s:1:"3";s:1:"t";s:1:"3";}i:2;a:3:{s:3:"idd";s:1:"3";s:2:"gr";s:1:"3";s:1:"t";s:1:"3";}i:3;a:3:{s:3:"idd";s:2:"10";s:2:"gr";s:1:"4";s:1:"t";s:1:"3";}i:4;a:3:{s:3:"idd";s:1:"9";s:2:"gr";s:1:"5";s:1:"t";s:1:"3";}i:5;a:3:{s:3:"idd";s:1:"8";s:2:"gr";s:1:"6";s:1:"t";s:1:"3";}i:6;a:3:{s:3:"idd";s:1:"7";s:2:"gr";s:1:"7";s:1:"t";s:1:"3";}i:7;a:3:{s:3:"idd";s:1:"6";s:2:"gr";s:1:"8";s:1:"t";s:1:"3";}i:8;a:3:{s:3:"idd";s:1:"5";s:2:"gr";s:1:"9";s:1:"t";s:1:"3";}i:9;a:3:{s:3:"idd";s:1:"4";s:2:"gr";s:2:"10";s:1:"t";s:1:"3";}}}i:54;a:3:{s:8:"sphinxql";s:57:"select @id as idd, tag+1 as t from dist50 order by gr 
asc";s:10:"total_rows";i:10;s:4:"rows";a:10:{i:0;a:2:{s:3:"idd";s:1:"2";s:1:"t";s:1:"3";}i:1;a:2:{s:3:"idd";s:1:"1";s:1:"t";s:1:"3";}i:2;a:2:{s:3:"idd";s:1:"3";s:1:"t";s:1:"3";}i:3;a:2:{s:3:"idd";s:2:"10";s:1:"t";s:1:"3";}i:4;a:2:{s:3:"idd";s:1:"9";s:1:"t";s:1:"3";}i:5;a:2:{s:3:"idd";s:1:"8";s:1:"t";s:1:"3";}i:6;a:2:{s:3:"idd";s:1:"7";s:1:"t";s:1:"3";}i:7;a:2:{s:3:"idd";s:1:"6";s:1:"t";s:1:"3";}i:8;a:2:{s:3:"idd";s:1:"5";s:1:"t";s:1:"3";}i:9;a:2:{s:3:"idd";s:1:"4";s:1:"t";s:1:"3";}}}i:55;a:3:{s:8:"sphinxql";s:60:"select @id as idd, gr,tag+1 as t from dist52 order by gr asc";s:10:"total_rows";i:10;s:4:"rows";a:10:{i:0;a:3:{s:3:"idd";s:1:"2";s:2:"gr";s:1:"2";s:1:"t";s:1:"3";}i:1;a:3:{s:3:"idd";s:1:"1";s:2:"gr";s:1:"3";s:1:"t";s:1:"3";}i:2;a:3:{s:3:"idd";s:1:"3";s:2:"gr";s:1:"3";s:1:"t";s:1:"3";}i:3;a:3:{s:3:"idd";s:2:"10";s:2:"gr";s:1:"4";s:1:"t";s:1:"3";}i:4;a:3:{s:3:"idd";s:1:"9";s:2:"gr";s:1:"5";s:1:"t";s:1:"3";}i:5;a:3:{s:3:"idd";s:1:"8";s:2:"gr";s:1:"6";s:1:"t";s:1:"3";}i:6;a:3:{s:3:"idd";s:1:"7";s:2:"gr";s:1:"7";s:1:"t";s:1:"3";}i:7;a:3:{s:3:"idd";s:1:"6";s:2:"gr";s:1:"8";s:1:"t";s:1:"3";}i:8;a:3:{s:3:"idd";s:1:"5";s:2:"gr";s:1:"9";s:1:"t";s:1:"3";}i:9;a:3:{s:3:"idd";s:1:"4";s:2:"gr";s:2:"10";s:1:"t";s:1:"3";}}}i:56;a:3:{s:8:"sphinxql";s:57:"select @id as idd, tag+1 as t from dist52 order by gr asc";s:10:"total_rows";i:10;s:4:"rows";a:10:{i:0;a:2:{s:3:"idd";s:1:"2";s:1:"t";s:1:"3";}i:1;a:2:{s:3:"idd";s:1:"1";s:1:"t";s:1:"3";}i:2;a:2:{s:3:"idd";s:1:"3";s:1:"t";s:1:"3";}i:3;a:2:{s:3:"idd";s:2:"10";s:1:"t";s:1:"3";}i:4;a:2:{s:3:"idd";s:1:"9";s:1:"t";s:1:"3";}i:5;a:2:{s:3:"idd";s:1:"8";s:1:"t";s:1:"3";}i:6;a:2:{s:3:"idd";s:1:"7";s:1:"t";s:1:"3";}i:7;a:2:{s:3:"idd";s:1:"6";s:1:"t";s:1:"3";}i:8;a:2:{s:3:"idd";s:1:"5";s:1:"t";s:1:"3";}i:9;a:2:{s:3:"idd";s:1:"4";s:1:"t";s:1:"3";}}}i:57;a:3:{s:8:"sphinxql";s:59:"select @id as idd, gr,tag+1 as t from idx51 order by gr asc";s:10:"total_rows";i:5;s:4:"rows";a:5:{i:0;a:3:{s:3:"idd";s:1:"2";s:2:"gr";s:1:"2";s:1:"t";s:1:"3";}i:1;a:3:{s:3:"idd";s:1:"1";s:2:"gr";s:1:"3";s:1:"t";s:1:"3";}i:2;a:3:{s:3:"idd";s:1:"3";s:2:"gr";s:1:"3";s:1:"t";s:1:"3";}i:3;a:3:{s:3:"idd";s:1:"5";s:2:"gr";s:1:"9";s:1:"t";s:1:"3";}i:4;a:3:{s:3:"idd";s:1:"4";s:2:"gr";s:2:"10";s:1:"t";s:1:"3";}}}i:58;a:3:{s:8:"sphinxql";s:56:"select @id as idd, tag+1 as t from idx51 order by gr asc";s:10:"total_rows";i:5;s:4:"rows";a:5:{i:0;a:2:{s:3:"idd";s:1:"2";s:1:"t";s:1:"3";}i:1;a:2:{s:3:"idd";s:1:"1";s:1:"t";s:1:"3";}i:2;a:2:{s:3:"idd";s:1:"3";s:1:"t";s:1:"3";}i:3;a:2:{s:3:"idd";s:1:"5";s:1:"t";s:1:"3";}i:4;a:2:{s:3:"idd";s:1:"4";s:1:"t";s:1:"3";}}}i:59;a:3:{s:8:"sphinxql";s:70:"select @id as idd, gr+1 as t from distfake group by t order by tag asc";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:2:{s:3:"idd";s:1:"1";s:1:"t";s:1:"4";}i:1;a:2:{s:3:"idd";s:1:"2";s:1:"t";s:1:"3";}i:2;a:2:{s:3:"idd";s:1:"4";s:1:"t";s:2:"11";}i:3;a:2:{s:3:"idd";s:1:"5";s:1:"t";s:2:"10";}}}i:60;a:3:{s:8:"sphinxql";s:72:"select @id as idd, gr+1 as t from disthfakeb group by t order by tag asc";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:2:{s:3:"idd";s:1:"1";s:1:"t";s:1:"4";}i:1;a:2:{s:3:"idd";s:1:"2";s:1:"t";s:1:"3";}i:2;a:2:{s:3:"idd";s:1:"4";s:1:"t";s:2:"11";}i:3;a:2:{s:3:"idd";s:1:"5";s:1:"t";s:2:"10";}}}i:61;a:3:{s:8:"sphinxql";s:71:"select @id as idd, gr+1 as t from disthfake group by t order by tag 
asc";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:2:{s:3:"idd";s:1:"1";s:1:"t";s:1:"4";}i:1;a:2:{s:3:"idd";s:1:"2";s:1:"t";s:1:"3";}i:2;a:2:{s:3:"idd";s:1:"4";s:1:"t";s:2:"11";}i:3;a:2:{s:3:"idd";s:1:"5";s:1:"t";s:2:"10";}}}i:62;a:3:{s:8:"sphinxql";s:71:"select @id as idd, gr+1 as t from distlfake group by t order by tag asc";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:2:{s:3:"idd";s:1:"1";s:1:"t";s:1:"4";}i:1;a:2:{s:3:"idd";s:1:"2";s:1:"t";s:1:"3";}i:2;a:2:{s:3:"idd";s:1:"4";s:1:"t";s:2:"11";}i:3;a:2:{s:3:"idd";s:1:"5";s:1:"t";s:2:"10";}}}i:63;a:2:{s:8:"sphinxql";s:51:"select * from dist50 where match ( 'broken merge' )";s:10:"total_rows";i:0;}}}sphinx-2.0.4-release/test/test_067/0000755000176700017710000000000011724063141016274 5ustar deogardeogarsphinx-2.0.4-release/test/test_067/test.xml0000644000176700017710000000604011514064650020000 0ustar deogardeogar aggregate functions 3 indexer { mem_limit = 16M } searchd { dist_threads = 2 } source srctest { type = mysql sql_query = SELECT * FROM test_table sql_attr_uint = gid sql_attr_float = price sql_attr_uint = views } source srctest2 : srctest { sql_query = SELECT document_id+10, body, gid, price, views FROM test_table } source srctest3 : srctest { sql_query = SELECT document_id+20, body, gid, price*2 price, views FROM test_table } index test { source = srctest path = /test docinfo = extern } index test3 { source = srctest3 path = /test3 docinfo = extern } index dist { type = distributed local = test agent = :test2 } index dist2 { type = distributed agent = :test2 agent = :testr } index dist3 { type = distributed local = test agent = :test2,test4 } index test2 { source = srctest2 path = /test2 docinfo = extern } index test4 : test2 { path = /test4 } index testr { source = srctest path = /testr docinfo = extern } CREATE TABLE test_table ( document_id INTEGER NOT NULL PRIMARY KEY, body VARCHAR(255) NOT NULL, gid INTEGER NOT NULL, price FLOAT NOT NULL, views INTEGER NOT NULL ) DROP TABLE IF EXISTS test_table INSERT INTO test_table VALUES ( 1, 'dummy', 1, 20.00, 3 ), ( 2, 'dummy', 1, 15.00, 15 ), ( 3, 'dummy', 1, 10.00, 7 ), ( 4, 'dummy', 1, 10.00, 40 ) sphinx-2.0.4-release/test/test_067/model.bin0000644000176700017710000003036611514064650020101 0ustar 
deogardeogara:2:{i:0;a:12:{i:0;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:0:{}s:5:"attrs";a:3:{s:5:"price";i:5;s:4:"xgid";i:1;s:8:"avgprice";i:5;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:5:"price";d:20;s:4:"xgid";i:10;s:8:"avgprice";d:13.75;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:1;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:6:{s:3:"gid";i:1;s:5:"price";i:5;s:5:"views";i:1;s:8:"avgprice";i:5;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:6:{s:3:"gid";i:1;s:5:"price";d:20;s:5:"views";i:3;s:8:"avgprice";d:13.75;s:8:"@groupby";i:1;s:6:"@count";i:4;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:2;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:7:{s:3:"gid";i:1;s:5:"price";i:5;s:5:"views";i:1;s:8:"sumviews";i:1;s:8:"avgprice";i:5;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:7:{s:3:"gid";i:1;s:5:"price";d:20;s:5:"views";i:3;s:8:"sumviews";i:65;s:8:"avgprice";d:13.75;s:8:"@groupby";i:1;s:6:"@count";i:4;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:3;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:6:{s:3:"gid";i:1;s:5:"price";i:5;s:5:"views";i:1;s:8:"avgprice";i:5;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:6:{s:3:"gid";i:1;s:5:"price";d:20;s:5:"views";i:3;s:8:"avgprice";d:13.75;s:8:"@groupby";i:1;s:6:"@count";i:8;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.005";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:4;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:7:{s:3:"gid";i:1;s:5:"price";i:5;s:5:"views";i:1;s:8:"sumviews";i:1;s:8:"avgprice";i:5;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:7:{s:3:"gid";i:1;s:5:"price";d:20;s:5:"views";i:3;s:8:"sumviews";i:130;s:8:"avgprice";d:13.75;s:8:"@groupby";i:1;s:6:"@count";i:8;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.005";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:5;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:6:{s:3:"gid";i:1;s:5:"price";i:5;s:5:"views";i:1;s:8:"avgprice";i:5;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:6:{s:3:"gid";i:1;s:5:"price";d:20;s:5:"views";i:3;s:8:"avgprice";d:13.75;s:8:"@groupby";i:1;s:6:"@count";i:8;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.005";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:6;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:7:{s:3:"gid";i:1;s:5:"price";i:5;s:5:"views";i:1;s:8:"sumviews";i:1;s:8:"avgprice";i:5;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:7:{s:3:"gid";i:1;s:5:"price";d:20;s:5:"views";i:3;s:8:"sumviews";i:130;s:8:"avgprice";d:13.75;s:8:"@groupby";i:1;s:6:"@count"
;i:8;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.005";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:7;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:6:{s:3:"gid";i:1;s:5:"price";i:5;s:5:"views";i:1;s:8:"avgprice";i:5;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:6:{s:3:"gid";i:1;s:5:"price";d:20;s:5:"views";i:3;s:8:"avgprice";d:13.75;s:8:"@groupby";i:1;s:6:"@count";i:12;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.009";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:8;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:7:{s:3:"gid";i:1;s:5:"price";i:5;s:5:"views";i:1;s:8:"sumviews";i:1;s:8:"avgprice";i:5;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:7:{s:3:"gid";i:1;s:5:"price";d:20;s:5:"views";i:3;s:8:"sumviews";i:195;s:8:"avgprice";d:13.75;s:8:"@groupby";i:1;s:6:"@count";i:12;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.009";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:9;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:0:{}s:5:"attrs";a:5:{s:3:"gid";i:1;s:5:"price";i:5;s:5:"views";i:1;s:8:"sumviews";i:1;s:8:"avgprice";i:5;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"gid";i:1;s:5:"price";d:20;s:5:"views";i:3;s:8:"sumviews";i:65;s:8:"avgprice";d:13.75;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:10;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:0:{}s:5:"attrs";a:5:{s:3:"gid";i:1;s:5:"price";i:5;s:5:"views";i:1;s:8:"sumviews";i:1;s:8:"avgprice";i:5;}s:7:"matches";a:1:{i:21;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"gid";i:1;s:5:"price";d:40;s:5:"views";i:3;s:8:"sumviews";i:65;s:8:"avgprice";d:27.5;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:11;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:0:{}s:5:"attrs";a:7:{s:3:"gid";i:1;s:5:"price";i:5;s:5:"views";i:1;s:8:"sumviews";i:1;s:8:"avgprice";i:5;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:7:{s:3:"gid";i:1;s:5:"price";d:20;s:5:"views";i:3;s:8:"sumviews";i:130;s:8:"avgprice";d:20.625;s:8:"@groupby";i:1;s:6:"@count";i:8;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.006";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}}i:1;a:12:{i:0;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:0:{}s:5:"attrs";a:3:{s:5:"price";i:5;s:4:"xgid";i:1;s:8:"avgprice";i:5;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:3:{s:5:"price";d:20;s:4:"xgid";i:10;s:8:"avgprice";d:13.75;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:1;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:6:{s:3:"gid";i:1;s:5:"price";i:5;s:5:"views";i:1;s:8:"avgprice";i:5;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:6:{s:3:"gid";i:1;s:5:"price";d:20;s:5:"views";i:3;s:8:"avgprice";d:13.75;s:8:"@groupby";i:1;s:6:"@count";i:4;}}}s:5:"total";s:1:"
1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:2;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:7:{s:3:"gid";i:1;s:5:"price";i:5;s:5:"views";i:1;s:8:"sumviews";i:1;s:8:"avgprice";i:5;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:7:{s:3:"gid";i:1;s:5:"price";d:20;s:5:"views";i:3;s:8:"sumviews";i:65;s:8:"avgprice";d:13.75;s:8:"@groupby";i:1;s:6:"@count";i:4;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:3;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:6:{s:3:"gid";i:1;s:5:"price";i:5;s:5:"views";i:1;s:8:"avgprice";i:5;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:6:{s:3:"gid";i:1;s:5:"price";d:20;s:5:"views";i:3;s:8:"avgprice";d:13.75;s:8:"@groupby";i:1;s:6:"@count";i:8;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.005";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:4;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:7:{s:3:"gid";i:1;s:5:"price";i:5;s:5:"views";i:1;s:8:"sumviews";i:1;s:8:"avgprice";i:5;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:7:{s:3:"gid";i:1;s:5:"price";d:20;s:5:"views";i:3;s:8:"sumviews";i:130;s:8:"avgprice";d:13.75;s:8:"@groupby";i:1;s:6:"@count";i:8;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.005";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:5;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:6:{s:3:"gid";i:1;s:5:"price";i:5;s:5:"views";i:1;s:8:"avgprice";i:5;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:6:{s:3:"gid";i:1;s:5:"price";d:20;s:5:"views";i:3;s:8:"avgprice";d:13.75;s:8:"@groupby";i:1;s:6:"@count";i:8;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.006";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:6;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:7:{s:3:"gid";i:1;s:5:"price";i:5;s:5:"views";i:1;s:8:"sumviews";i:1;s:8:"avgprice";i:5;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:7:{s:3:"gid";i:1;s:5:"price";d:20;s:5:"views";i:3;s:8:"sumviews";i:130;s:8:"avgprice";d:13.75;s:8:"@groupby";i:1;s:6:"@count";i:8;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.006";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:7;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:6:{s:3:"gid";i:1;s:5:"price";i:5;s:5:"views";i:1;s:8:"avgprice";i:5;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:6:{s:3:"gid";i:1;s:5:"price";d:20;s:5:"views";i:3;s:8:"avgprice";d:13.75;s:8:"@groupby";i:1;s:6:"@count";i:12;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.006";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:8;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:7:{s:3:"gid";i:1;s:5:"price";i:5;s:5:"views";i:1;s:8:"sumviews";i:1;s:8
:"avgprice";i:5;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:7:{s:3:"gid";i:1;s:5:"price";d:20;s:5:"views";i:3;s:8:"sumviews";i:195;s:8:"avgprice";d:13.75;s:8:"@groupby";i:1;s:6:"@count";i:12;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.006";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:9;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:0:{}s:5:"attrs";a:5:{s:3:"gid";i:1;s:5:"price";i:5;s:5:"views";i:1;s:8:"sumviews";i:1;s:8:"avgprice";i:5;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"gid";i:1;s:5:"price";d:20;s:5:"views";i:3;s:8:"sumviews";i:65;s:8:"avgprice";d:13.75;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:10;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:0:{}s:5:"attrs";a:5:{s:3:"gid";i:1;s:5:"price";i:5;s:5:"views";i:1;s:8:"sumviews";i:1;s:8:"avgprice";i:5;}s:7:"matches";a:1:{i:21;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"gid";i:1;s:5:"price";d:40;s:5:"views";i:3;s:8:"sumviews";i:65;s:8:"avgprice";d:27.5;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.002";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:11;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:0:{}s:5:"attrs";a:7:{s:3:"gid";i:1;s:5:"price";i:5;s:5:"views";i:1;s:8:"sumviews";i:1;s:8:"avgprice";i:5;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:7:{s:3:"gid";i:1;s:5:"price";d:20;s:5:"views";i:3;s:8:"sumviews";i:130;s:8:"avgprice";d:20.625;s:8:"@groupby";i:1;s:6:"@count";i:8;}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.003";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}}}sphinx-2.0.4-release/test/test_105/0000755000176700017710000000000011724063141016265 5ustar deogardeogarsphinx-2.0.4-release/test/test_105/test.xml0000644000176700017710000000417111615773654020011 0ustar deogardeogar rotation - *nix only searchd { workers = fork workers = prefork } source sql { type = mysql sql_query = select id, text from test_table where mode = (select mode from test_table where id = 1000); } index index { source = sql path = /index } source sql1 : sql { sql_query = select id, text, 1 as idd1 from test_table; sql_attr_uint = idd1 } source sql2 : sql { sql_query = select id, text, 1 as idd1, 2 as idd2 from test_table; sql_attr_uint = idd1 sql_attr_uint = idd2 } index i1 { source = sql1 path = /index1 docinfo = inline } index i2 { source = sql2 path = /index2 docinfo = inline } drop table if exists test_table create table test_table ( id int not null, text varchar(255) not null, mode int ); insert into test_table values ( 1, 'first', 1 ), ( 2, 'second', 1 ), ( 3, 'third', 1 ), ( 4, 'fourth', 1 ), ( 5, 'fifth', 1 ), ( 1, 'one', 2 ), ( 2, 'two', 2 ), ( 3, 'three', 2 ), ( 1000, '', 1 ); Query ( $words, "index" ); if ( $result ) { unset ( $result["time"] ); return $result; } else return $client->GetLastError(); '); $results = array(); $errors = ''; $results[] = $query ( $client, 'first' ); $results[] = $query ( $client, 'one' ); mysql_query ( 'update test_table set mode = 3 where id = 1000' ); RunIndexer ( $errors, '--all --rotate' ); sleep(2); $results[] = $query ( $client, 'first' ); $results[] = $query ( $client, 'one' ); mysql_query ( 'update test_table set mode = 2 where id = 1000' ); RunIndexer ( $errors, '--all --rotate' ); 
sleep(2); $results[] = $query ( $client, 'first' ); $results[] = $query ( $client, 'one' ); ]]> sphinx-2.0.4-release/test/test_105/model.bin0000644000176700017710000000616611327565006020076 0ustar deogardeogara:2:{i:0;a:1:{i:0;a:6:{i:0;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:1:{s:5:"first";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:1;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:1:{s:3:"one";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}}i:2;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:1:{s:5:"first";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}}i:3;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:1:{s:3:"one";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}}i:4;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:1:{s:5:"first";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}}i:5;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:1:{s:3:"one";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}}}i:1;a:1:{i:0;a:6:{i:0;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:1:{s:5:"first";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}}i:1;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:1:{s:3:"one";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}i:2;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:1:{s:5:"first";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}}i:3;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:1:{s:3:"one";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}}i:4;a:8:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:5:"words";a:1:{s:5:"first";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}}i:5;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:5:"words";a:1:{s:3:"one";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}}}}}sphinx-2.0.4-release/test/test_021/0000755000176700017710000000000011724063141016262 5ustar 
deogardeogarsphinx-2.0.4-release/test/test_021/test.xml0000644000176700017710000000462111111073424017761 0ustar deogardeogar sorting indexer { mem_limit = 16M } searchd { } source srctest { type = mysql sql_query = SELECT document_id,tag1,tag2,tag3,tag4,body FROM test_table sql_attr_uint = tag1 sql_attr_uint = tag2:8 sql_attr_float = tag3 sql_attr_float = tag4 } index test { source = srctest path = /test min_word_len = 1 min_prefix_len = 1 enable_star = 1 charset_type = utf-8 } test* test* test* test* test* test* test* test* test* test* test* test* test* test* CREATE TABLE `test_table` ( `document_id` int(11) NOT NULL default '0', `tag1` int(11) NOT NULL default '0', `tag2` int(11) NOT NULL default '0', `tag3` FLOAT, `tag4` FLOAT, `body` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 1, 1, 5, -4.0, 1.5, 'test1' ), ( 2, 2, 7, 12.0, 3.14, 'test2' ), ( 3, 2, 4, -8.0, 5.2, 'test3' ), ( 4, 3, 3, 5.0, 7.6, 'test4' ), ( 5, 3, 8, 1.0, 45.2, 'test5' ), ( 6, 3, 9, 4.0, 17.2, 'test6' ), ( 7, 4, 4, 6.0, -0.8, 'test7' ), ( 8, 4, 1, 7.0, 99.0, 'test8' ), ( 9, 4, 3, 2.0, -16.1, 'test9' ), ( 10,4, 2, 12.0, 0.0, 'test10' ) sphinx-2.0.4-release/test/test_021/model.bin0000644000176700017710000006503611455516446020102 0ustar deogardeogara:1:{i:0;a:14:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:5:{s:4:"tag1";i:1;s:4:"tag2";i:1;s:4:"tag3";i:5;s:4:"tag4";i:5;s:5:"@expr";i:5;}s:7:"matches";a:10:{i:3;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"2";s:4:"tag2";s:1:"4";s:4:"tag3";d:-8;s:4:"tag4";d:5.19999980926513671875;s:5:"@expr";d:8.5;}}i:1;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"1";s:4:"tag2";s:1:"5";s:4:"tag3";d:-4;s:4:"tag4";d:1.5;s:5:"@expr";d:4.19999980926513671875;}}i:5;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"3";s:4:"tag2";s:1:"8";s:4:"tag3";d:1;s:4:"tag4";d:45.200000762939453125;s:5:"@expr";d:-0.625;}}i:9;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"3";s:4:"tag3";d:2;s:4:"tag4";d:-16.1000003814697265625;s:5:"@expr";d:-0.66666662693023681640625;}}i:8;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"1";s:4:"tag3";d:7;s:4:"tag4";d:99;s:5:"@expr";d:-3;}}i:6;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"3";s:4:"tag2";s:1:"9";s:4:"tag3";d:4;s:4:"tag4";d:17.200000762939453125;s:5:"@expr";d:-3.6666667461395263671875;}}i:4;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"3";s:4:"tag2";s:1:"3";s:4:"tag3";d:5;s:4:"tag4";d:7.599999904632568359375;s:5:"@expr";d:-4;}}i:7;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"4";s:4:"tag3";d:6;s:4:"tag4";d:-0.800000011920928955078125;s:5:"@expr";d:-5;}}i:10;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"2";s:4:"tag3";d:12;s:4:"tag4";d:0;s:5:"@expr";d:-10;}}i:2;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"2";s:4:"tag2";s:1:"7";s:4:"tag3";d:12;s:4:"tag4";d:3.1400001049041748046875;s:5:"@expr";d:-11.71428585052490234375;}}}s:5:"total";s:2:"10";s:11:"total_found";s:2:"10";s:4:"time";s:5:"0.058";s:5:"words";a:1:{s:5:"test*";a:2:{s:4:"docs";s:2:"10";s:4:"hits";s:2:"10";}}s:5:"query";s:5:"test*";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:5:{s:4:"tag1";i:1;s:4:"tag2";i:1;s:4:"tag3";i:5;s:4:"tag4";i:5;s:5:"@expr";i:5;}s:7:"matches";a:10:{i:2;a:2:{s:6:"we
ight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"2";s:4:"tag2";s:1:"7";s:4:"tag3";d:12;s:4:"tag4";d:3.1400001049041748046875;s:5:"@expr";d:86;}}i:6;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"3";s:4:"tag2";s:1:"9";s:4:"tag3";d:4;s:4:"tag4";d:17.200000762939453125;s:5:"@expr";d:39;}}i:7;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"4";s:4:"tag3";d:6;s:4:"tag4";d:-0.800000011920928955078125;s:5:"@expr";d:28;}}i:10;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"2";s:4:"tag3";d:12;s:4:"tag4";d:0;s:5:"@expr";d:28;}}i:4;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"3";s:4:"tag2";s:1:"3";s:4:"tag3";d:5;s:4:"tag4";d:7.599999904632568359375;s:5:"@expr";d:18;}}i:5;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"3";s:4:"tag2";s:1:"8";s:4:"tag3";d:1;s:4:"tag4";d:45.200000762939453125;s:5:"@expr";d:11;}}i:8;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"1";s:4:"tag3";d:7;s:4:"tag4";d:99;s:5:"@expr";d:11;}}i:9;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"3";s:4:"tag3";d:2;s:4:"tag4";d:-16.1000003814697265625;s:5:"@expr";d:10;}}i:1;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"1";s:4:"tag2";s:1:"5";s:4:"tag3";d:-4;s:4:"tag4";d:1.5;s:5:"@expr";d:-19;}}i:3;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"2";s:4:"tag2";s:1:"4";s:4:"tag3";d:-8;s:4:"tag4";d:5.19999980926513671875;s:5:"@expr";d:-30;}}}s:5:"total";s:2:"10";s:11:"total_found";s:2:"10";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:5:"test*";a:2:{s:4:"docs";s:2:"10";s:4:"hits";s:2:"10";}}s:5:"query";s:5:"test*";}i:2;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:5:{s:4:"tag1";i:1;s:4:"tag2";i:1;s:4:"tag3";i:5;s:4:"tag4";i:5;s:5:"@expr";i:5;}s:7:"matches";a:10:{i:2;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"2";s:4:"tag2";s:1:"7";s:4:"tag3";d:12;s:4:"tag4";d:3.1400001049041748046875;s:5:"@expr";d:12;}}i:10;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"2";s:4:"tag3";d:12;s:4:"tag4";d:0;s:5:"@expr";d:12;}}i:3;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"2";s:4:"tag2";s:1:"4";s:4:"tag3";d:-8;s:4:"tag4";d:5.19999980926513671875;s:5:"@expr";d:8;}}i:8;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"1";s:4:"tag3";d:7;s:4:"tag4";d:99;s:5:"@expr";d:7;}}i:7;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"4";s:4:"tag3";d:6;s:4:"tag4";d:-0.800000011920928955078125;s:5:"@expr";d:6;}}i:4;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"3";s:4:"tag2";s:1:"3";s:4:"tag3";d:5;s:4:"tag4";d:7.599999904632568359375;s:5:"@expr";d:5;}}i:1;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"1";s:4:"tag2";s:1:"5";s:4:"tag3";d:-4;s:4:"tag4";d:1.5;s:5:"@expr";d:4;}}i:6;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"3";s:4:"tag2";s:1:"9";s:4:"tag3";d:4;s:4:"tag4";d:17.200000762939453125;s:5:"@expr";d:4;}}i:9;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"3";s:4:"tag3";d:2;s:4:"tag4";d:-16.1000003814697265625;s:5:"@expr";d:2;}}i:5;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"3";s:4:"tag2";s:1:"8";s:4:"tag3";d:1;s:4:"tag4";d:45.200000762939453125;s:5:"@expr";d:1;}}}s:5:"total";s:2:"10";s:11:"total_found";s:2:"10";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:5:"test*";a:2:{s:4:"docs";s:2:"1
0";s:4:"hits";s:2:"10";}}s:5:"query";s:5:"test*";}i:3;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:5:{s:4:"tag1";i:1;s:4:"tag2";i:1;s:4:"tag3";i:5;s:4:"tag4";i:5;s:5:"@expr";i:5;}s:7:"matches";a:10:{i:7;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"4";s:4:"tag3";d:6;s:4:"tag4";d:-0.800000011920928955078125;s:5:"@expr";d:8;}}i:4;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"3";s:4:"tag2";s:1:"3";s:4:"tag3";d:5;s:4:"tag4";d:7.599999904632568359375;s:5:"@expr";d:6;}}i:5;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"3";s:4:"tag2";s:1:"8";s:4:"tag3";d:1;s:4:"tag4";d:45.200000762939453125;s:5:"@expr";d:6;}}i:6;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"3";s:4:"tag2";s:1:"9";s:4:"tag3";d:4;s:4:"tag4";d:17.200000762939453125;s:5:"@expr";d:6;}}i:9;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"3";s:4:"tag3";d:2;s:4:"tag4";d:-16.1000003814697265625;s:5:"@expr";d:6;}}i:2;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"2";s:4:"tag2";s:1:"7";s:4:"tag3";d:12;s:4:"tag4";d:3.1400001049041748046875;s:5:"@expr";d:4;}}i:3;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"2";s:4:"tag2";s:1:"4";s:4:"tag3";d:-8;s:4:"tag4";d:5.19999980926513671875;s:5:"@expr";d:4;}}i:10;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"2";s:4:"tag3";d:12;s:4:"tag4";d:0;s:5:"@expr";d:4;}}i:1;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"1";s:4:"tag2";s:1:"5";s:4:"tag3";d:-4;s:4:"tag4";d:1.5;s:5:"@expr";d:2;}}i:8;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"1";s:4:"tag3";d:7;s:4:"tag4";d:99;s:5:"@expr";d:2;}}}s:5:"total";s:2:"10";s:11:"total_found";s:2:"10";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:5:"test*";a:2:{s:4:"docs";s:2:"10";s:4:"hits";s:2:"10";}}s:5:"query";s:5:"test*";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:5:{s:4:"tag1";i:1;s:4:"tag2";i:1;s:4:"tag3";i:5;s:4:"tag4";i:5;s:5:"@expr";i:5;}s:7:"matches";a:10:{i:8;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"1";s:4:"tag3";d:7;s:4:"tag4";d:99;s:5:"@expr";d:99;}}i:5;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"3";s:4:"tag2";s:1:"8";s:4:"tag3";d:1;s:4:"tag4";d:45.200000762939453125;s:5:"@expr";d:46;}}i:6;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"3";s:4:"tag2";s:1:"9";s:4:"tag3";d:4;s:4:"tag4";d:17.200000762939453125;s:5:"@expr";d:18;}}i:4;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"3";s:4:"tag2";s:1:"3";s:4:"tag3";d:5;s:4:"tag4";d:7.599999904632568359375;s:5:"@expr";d:8;}}i:3;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"2";s:4:"tag2";s:1:"4";s:4:"tag3";d:-8;s:4:"tag4";d:5.19999980926513671875;s:5:"@expr";d:6;}}i:2;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"2";s:4:"tag2";s:1:"7";s:4:"tag3";d:12;s:4:"tag4";d:3.1400001049041748046875;s:5:"@expr";d:4;}}i:1;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"1";s:4:"tag2";s:1:"5";s:4:"tag3";d:-4;s:4:"tag4";d:1.5;s:5:"@expr";d:2;}}i:7;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"4";s:4:"tag3";d:6;s:4:"tag4";d:-0.800000011920928955078125;s:5:"@expr";d:-0;}}i:10;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"2";s:4:"tag3";d:12;s:4:"tag4";d:0;s:5:"@expr";d:0;}}i:9;a:2:{s:
6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"3";s:4:"tag3";d:2;s:4:"tag4";d:-16.1000003814697265625;s:5:"@expr";d:-16;}}}s:5:"total";s:2:"10";s:11:"total_found";s:2:"10";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:5:"test*";a:2:{s:4:"docs";s:2:"10";s:4:"hits";s:2:"10";}}s:5:"query";s:5:"test*";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:5:{s:4:"tag1";i:1;s:4:"tag2";i:1;s:4:"tag3";i:5;s:4:"tag4";i:5;s:5:"@expr";i:5;}s:7:"matches";a:10:{i:8;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"1";s:4:"tag3";d:7;s:4:"tag4";d:99;s:5:"@expr";d:99;}}i:5;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"3";s:4:"tag2";s:1:"8";s:4:"tag3";d:1;s:4:"tag4";d:45.200000762939453125;s:5:"@expr";d:45;}}i:6;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"3";s:4:"tag2";s:1:"9";s:4:"tag3";d:4;s:4:"tag4";d:17.200000762939453125;s:5:"@expr";d:17;}}i:4;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"3";s:4:"tag2";s:1:"3";s:4:"tag3";d:5;s:4:"tag4";d:7.599999904632568359375;s:5:"@expr";d:7;}}i:3;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"2";s:4:"tag2";s:1:"4";s:4:"tag3";d:-8;s:4:"tag4";d:5.19999980926513671875;s:5:"@expr";d:5;}}i:2;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"2";s:4:"tag2";s:1:"7";s:4:"tag3";d:12;s:4:"tag4";d:3.1400001049041748046875;s:5:"@expr";d:3;}}i:1;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"1";s:4:"tag2";s:1:"5";s:4:"tag3";d:-4;s:4:"tag4";d:1.5;s:5:"@expr";d:1;}}i:10;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"2";s:4:"tag3";d:12;s:4:"tag4";d:0;s:5:"@expr";d:0;}}i:7;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"4";s:4:"tag3";d:6;s:4:"tag4";d:-0.800000011920928955078125;s:5:"@expr";d:-1;}}i:9;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"3";s:4:"tag3";d:2;s:4:"tag4";d:-16.1000003814697265625;s:5:"@expr";d:-17;}}}s:5:"total";s:2:"10";s:11:"total_found";s:2:"10";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:5:"test*";a:2:{s:4:"docs";s:2:"10";s:4:"hits";s:2:"10";}}s:5:"query";s:5:"test*";}i:6;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:5:{s:4:"tag1";i:1;s:4:"tag2";i:1;s:4:"tag3";i:5;s:4:"tag4";i:5;s:5:"@expr";i:5;}s:7:"matches";a:10:{i:5;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"3";s:4:"tag2";s:1:"8";s:4:"tag3";d:1;s:4:"tag4";d:45.200000762939453125;s:5:"@expr";d:1.2841091156005859375;}}i:4;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"3";s:4:"tag2";s:1:"3";s:4:"tag3";d:5;s:4:"tag4";d:7.599999904632568359375;s:5:"@expr";d:1.219179630279541015625;}}i:1;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"1";s:4:"tag2";s:1:"5";s:4:"tag3";d:-4;s:4:"tag4";d:1.5;s:5:"@expr";d:1.06823217868804931640625;}}i:10;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"2";s:4:"tag3";d:12;s:4:"tag4";d:0;s:5:"@expr";d:1;}}i:7;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"4";s:4:"tag3";d:6;s:4:"tag4";d:-0.800000011920928955078125;s:5:"@expr";d:-0.020649373531341552734375;}}i:3;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"2";s:4:"tag2";s:1:"4";s:4:"tag3";d:-8;s:4:"tag4";d:5.19999980926513671875;s:5:"@expr";d:-0.4149382412433624267578125;}}i:9;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"4";s
:4:"tag2";s:1:"3";s:4:"tag3";d:2;s:4:"tag4";d:-16.1000003814697265625;s:5:"@expr";d:-0.54206085205078125;}}i:8;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"1";s:4:"tag3";d:7;s:4:"tag4";d:99;s:5:"@expr";d:-0.95938599109649658203125;}}i:2;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"2";s:4:"tag2";s:1:"7";s:4:"tag3";d:12;s:4:"tag4";d:3.1400001049041748046875;s:5:"@expr";d:-0.9984061717987060546875;}}i:6;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"3";s:4:"tag2";s:1:"9";s:4:"tag3";d:4;s:4:"tag4";d:17.200000762939453125;s:5:"@expr";d:-1.07557761669158935546875;}}}s:5:"total";s:2:"10";s:11:"total_found";s:2:"10";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:5:"test*";a:2:{s:4:"docs";s:2:"10";s:4:"hits";s:2:"10";}}s:5:"query";s:5:"test*";}i:7;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:5:{s:4:"tag1";i:1;s:4:"tag2";i:1;s:4:"tag3";i:5;s:4:"tag4";i:5;s:5:"@expr";i:5;}s:7:"matches";a:10:{i:7;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"4";s:4:"tag3";d:6;s:4:"tag4";d:-0.800000011920928955078125;s:5:"@expr";d:3.9883544445037841796875;}}i:8;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"1";s:4:"tag3";d:7;s:4:"tag4";d:99;s:5:"@expr";d:3.9883544445037841796875;}}i:9;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"3";s:4:"tag3";d:2;s:4:"tag4";d:-16.1000003814697265625;s:5:"@expr";d:3.9883544445037841796875;}}i:10;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"2";s:4:"tag3";d:12;s:4:"tag4";d:0;s:5:"@expr";d:3.9883544445037841796875;}}i:4;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"3";s:4:"tag2";s:1:"3";s:4:"tag3";d:5;s:4:"tag4";d:7.599999904632568359375;s:5:"@expr";d:3.1606960296630859375;}}i:5;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"3";s:4:"tag2";s:1:"8";s:4:"tag3";d:1;s:4:"tag4";d:45.200000762939453125;s:5:"@expr";d:3.1606960296630859375;}}i:6;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"3";s:4:"tag2";s:1:"9";s:4:"tag3";d:4;s:4:"tag4";d:17.200000762939453125;s:5:"@expr";d:3.1606960296630859375;}}i:2;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"2";s:4:"tag2";s:1:"7";s:4:"tag3";d:12;s:4:"tag4";d:3.1400001049041748046875;s:5:"@expr";d:1.99417722225189208984375;}}i:3;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"2";s:4:"tag2";s:1:"4";s:4:"tag3";d:-8;s:4:"tag4";d:5.19999980926513671875;s:5:"@expr";d:1.99417722225189208984375;}}i:1;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"1";s:4:"tag2";s:1:"5";s:4:"tag3";d:-4;s:4:"tag4";d:1.5;s:5:"@expr";d:0;}}}s:5:"total";s:2:"10";s:11:"total_found";s:2:"10";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:5:"test*";a:2:{s:4:"docs";s:2:"10";s:4:"hits";s:2:"10";}}s:5:"query";s:5:"test*";}i:8;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:5:{s:4:"tag1";i:1;s:4:"tag2";i:1;s:4:"tag3";i:5;s:4:"tag4";i:5;s:5:"@expr";i:5;}s:7:"matches";a:10:{i:7;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"4";s:4:"tag3";d:6;s:4:"tag4";d:-0.800000011920928955078125;s:5:"@expr";d:56.598148345947265625;}}i:9;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"3";s:4:"tag3";d:2;s:4:"tag4";d:-16.1000003814697265625;s:5:"@expr";d:56.3302001953125;}}i:10;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:
"4";s:4:"tag2";s:1:"2";s:4:"tag3";d:12;s:4:"tag4";d:0;s:5:"@expr";d:56.012363433837890625;}}i:8;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"1";s:4:"tag3";d:7;s:4:"tag4";d:99;s:5:"@expr";d:55.598148345947265625;}}i:6;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"3";s:4:"tag2";s:1:"9";s:4:"tag3";d:4;s:4:"tag4";d:17.200000762939453125;s:5:"@expr";d:23.085536956787109375;}}i:5;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"3";s:4:"tag2";s:1:"8";s:4:"tag3";d:1;s:4:"tag4";d:45.200000762939453125;s:5:"@expr";d:22.91396331787109375;}}i:4;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"3";s:4:"tag2";s:1:"3";s:4:"tag3";d:5;s:4:"tag4";d:7.599999904632568359375;s:5:"@expr";d:21.8175868988037109375;}}i:2;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"2";s:4:"tag2";s:1:"7";s:4:"tag3";d:12;s:4:"tag4";d:3.1400001049041748046875;s:5:"@expr";d:10.0348072052001953125;}}i:3;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"2";s:4:"tag2";s:1:"4";s:4:"tag3";d:-8;s:4:"tag4";d:5.19999980926513671875;s:5:"@expr";d:9.38905620574951171875;}}i:1;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"1";s:4:"tag2";s:1:"5";s:4:"tag3";d:-4;s:4:"tag4";d:1.5;s:5:"@expr";d:4.954349517822265625;}}}s:5:"total";s:2:"10";s:11:"total_found";s:2:"10";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:5:"test*";a:2:{s:4:"docs";s:2:"10";s:4:"hits";s:2:"10";}}s:5:"query";s:5:"test*";}i:9;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:5:{s:4:"tag1";i:1;s:4:"tag2";i:1;s:4:"tag3";i:5;s:4:"tag4";i:5;s:5:"@expr";i:5;}s:7:"matches";a:10:{i:4;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"3";s:4:"tag2";s:1:"3";s:4:"tag3";d:5;s:4:"tag4";d:7.599999904632568359375;s:5:"@expr";d:1;}}i:7;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"4";s:4:"tag3";d:6;s:4:"tag4";d:-0.800000011920928955078125;s:5:"@expr";d:1;}}i:1;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"1";s:4:"tag2";s:1:"5";s:4:"tag3";d:-4;s:4:"tag4";d:1.5;s:5:"@expr";d:0;}}i:2;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"2";s:4:"tag2";s:1:"7";s:4:"tag3";d:12;s:4:"tag4";d:3.1400001049041748046875;s:5:"@expr";d:0;}}i:3;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"2";s:4:"tag2";s:1:"4";s:4:"tag3";d:-8;s:4:"tag4";d:5.19999980926513671875;s:5:"@expr";d:0;}}i:5;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"3";s:4:"tag2";s:1:"8";s:4:"tag3";d:1;s:4:"tag4";d:45.200000762939453125;s:5:"@expr";d:0;}}i:6;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"3";s:4:"tag2";s:1:"9";s:4:"tag3";d:4;s:4:"tag4";d:17.200000762939453125;s:5:"@expr";d:0;}}i:8;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"1";s:4:"tag3";d:7;s:4:"tag4";d:99;s:5:"@expr";d:0;}}i:9;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"3";s:4:"tag3";d:2;s:4:"tag4";d:-16.1000003814697265625;s:5:"@expr";d:0;}}i:10;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"2";s:4:"tag3";d:12;s:4:"tag4";d:0;s:5:"@expr";d:0;}}}s:5:"total";s:2:"10";s:11:"total_found";s:2:"10";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:5:"test*";a:2:{s:4:"docs";s:2:"10";s:4:"hits";s:2:"10";}}s:5:"query";s:5:"test*";}i:10;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:5:{s:4:"tag1";i:1;s:4:"tag2";i:1;s:4:"tag3";i:5;s:4:"tag4
";i:5;s:5:"@expr";i:5;}s:7:"matches";a:10:{i:6;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"3";s:4:"tag2";s:1:"9";s:4:"tag3";d:4;s:4:"tag4";d:17.200000762939453125;s:5:"@expr";d:9;}}i:5;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"3";s:4:"tag2";s:1:"8";s:4:"tag3";d:1;s:4:"tag4";d:45.200000762939453125;s:5:"@expr";d:8;}}i:2;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"2";s:4:"tag2";s:1:"7";s:4:"tag3";d:12;s:4:"tag4";d:3.1400001049041748046875;s:5:"@expr";d:7;}}i:1;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"1";s:4:"tag2";s:1:"5";s:4:"tag3";d:-4;s:4:"tag4";d:1.5;s:5:"@expr";d:5;}}i:3;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"2";s:4:"tag2";s:1:"4";s:4:"tag3";d:-8;s:4:"tag4";d:5.19999980926513671875;s:5:"@expr";d:4;}}i:7;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"4";s:4:"tag3";d:6;s:4:"tag4";d:-0.800000011920928955078125;s:5:"@expr";d:4;}}i:4;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"3";s:4:"tag2";s:1:"3";s:4:"tag3";d:5;s:4:"tag4";d:7.599999904632568359375;s:5:"@expr";d:3;}}i:9;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"3";s:4:"tag3";d:2;s:4:"tag4";d:-16.1000003814697265625;s:5:"@expr";d:3;}}i:10;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"2";s:4:"tag3";d:12;s:4:"tag4";d:0;s:5:"@expr";d:2;}}i:8;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"1";s:4:"tag3";d:7;s:4:"tag4";d:99;s:5:"@expr";d:1;}}}s:5:"total";s:2:"10";s:11:"total_found";s:2:"10";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:5:"test*";a:2:{s:4:"docs";s:2:"10";s:4:"hits";s:2:"10";}}s:5:"query";s:5:"test*";}i:11;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:5:{s:4:"tag1";i:1;s:4:"tag2";i:1;s:4:"tag3";i:5;s:4:"tag4";i:5;s:5:"@expr";i:5;}s:7:"matches";a:10:{i:2;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"2";s:4:"tag2";s:1:"7";s:4:"tag3";d:12;s:4:"tag4";d:3.1400001049041748046875;s:5:"@expr";d:12;}}i:10;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"2";s:4:"tag3";d:12;s:4:"tag4";d:0;s:5:"@expr";d:12;}}i:8;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"1";s:4:"tag3";d:7;s:4:"tag4";d:99;s:5:"@expr";d:7;}}i:7;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"4";s:4:"tag3";d:6;s:4:"tag4";d:-0.800000011920928955078125;s:5:"@expr";d:6;}}i:4;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"3";s:4:"tag2";s:1:"3";s:4:"tag3";d:5;s:4:"tag4";d:7.599999904632568359375;s:5:"@expr";d:5;}}i:6;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"3";s:4:"tag2";s:1:"9";s:4:"tag3";d:4;s:4:"tag4";d:17.200000762939453125;s:5:"@expr";d:4;}}i:9;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"3";s:4:"tag3";d:2;s:4:"tag4";d:-16.1000003814697265625;s:5:"@expr";d:2;}}i:5;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"3";s:4:"tag2";s:1:"8";s:4:"tag3";d:1;s:4:"tag4";d:45.200000762939453125;s:5:"@expr";d:1;}}i:1;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"1";s:4:"tag2";s:1:"5";s:4:"tag3";d:-4;s:4:"tag4";d:1.5;s:5:"@expr";d:-4;}}i:3;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:5:{s:4:"tag1";s:1:"2";s:4:"tag2";s:1:"4";s:4:"tag3";d:-8;s:4:"tag4";d:5.19999980926513671875;s:5:"@expr";d:-8;}}}s:5:"total";s:2:"10";s:11:"total_found";s:2:"10";s:4:"time";s:5:"0.002";s
:5:"words";a:1:{s:5:"test*";a:2:{s:4:"docs";s:2:"10";s:4:"hits";s:2:"10";}}s:5:"query";s:5:"test*";}i:12;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:4:{s:4:"tag1";i:1;s:4:"tag2";i:1;s:4:"tag3";i:5;s:4:"tag4";i:5;}s:7:"matches";a:10:{i:6;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:4:{s:4:"tag1";s:1:"3";s:4:"tag2";s:1:"9";s:4:"tag3";d:4;s:4:"tag4";d:17.200000762939453125;}}i:5;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:4:{s:4:"tag1";s:1:"3";s:4:"tag2";s:1:"8";s:4:"tag3";d:1;s:4:"tag4";d:45.200000762939453125;}}i:2;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:4:{s:4:"tag1";s:1:"2";s:4:"tag2";s:1:"7";s:4:"tag3";d:12;s:4:"tag4";d:3.1400001049041748046875;}}i:1;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:4:{s:4:"tag1";s:1:"1";s:4:"tag2";s:1:"5";s:4:"tag3";d:-4;s:4:"tag4";d:1.5;}}i:3;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:4:{s:4:"tag1";s:1:"2";s:4:"tag2";s:1:"4";s:4:"tag3";d:-8;s:4:"tag4";d:5.19999980926513671875;}}i:7;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:4:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"4";s:4:"tag3";d:6;s:4:"tag4";d:-0.800000011920928955078125;}}i:4;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:4:{s:4:"tag1";s:1:"3";s:4:"tag2";s:1:"3";s:4:"tag3";d:5;s:4:"tag4";d:7.599999904632568359375;}}i:9;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:4:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"3";s:4:"tag3";d:2;s:4:"tag4";d:-16.1000003814697265625;}}i:10;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:4:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"2";s:4:"tag3";d:12;s:4:"tag4";d:0;}}i:8;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:4:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"1";s:4:"tag3";d:7;s:4:"tag4";d:99;}}}s:5:"total";s:2:"10";s:11:"total_found";s:2:"10";s:4:"time";s:5:"0.003";s:5:"words";a:1:{s:5:"test*";a:2:{s:4:"docs";s:2:"10";s:4:"hits";s:2:"10";}}s:5:"query";s:5:"test*";}i:13;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:4:{s:4:"tag1";i:1;s:4:"tag2";i:1;s:4:"tag3";i:5;s:4:"tag4";i:5;}s:7:"matches";a:10:{i:2;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:4:{s:4:"tag1";s:1:"2";s:4:"tag2";s:1:"7";s:4:"tag3";d:12;s:4:"tag4";d:3.1400001049041748046875;}}i:10;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:4:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"2";s:4:"tag3";d:12;s:4:"tag4";d:0;}}i:8;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:4:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"1";s:4:"tag3";d:7;s:4:"tag4";d:99;}}i:7;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:4:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"4";s:4:"tag3";d:6;s:4:"tag4";d:-0.800000011920928955078125;}}i:4;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:4:{s:4:"tag1";s:1:"3";s:4:"tag2";s:1:"3";s:4:"tag3";d:5;s:4:"tag4";d:7.599999904632568359375;}}i:6;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:4:{s:4:"tag1";s:1:"3";s:4:"tag2";s:1:"9";s:4:"tag3";d:4;s:4:"tag4";d:17.200000762939453125;}}i:9;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:4:{s:4:"tag1";s:1:"4";s:4:"tag2";s:1:"3";s:4:"tag3";d:2;s:4:"tag4";d:-16.1000003814697265625;}}i:5;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:4:{s:4:"tag1";s:1:"3";s:4:"tag2";s:1:"8";s:4:"tag3";d:1;s:4:"tag4";d:45.200000762939453125;}}i:1;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:4:{s:4:"tag1";s:1:"1";s:4:"tag2";s:1:"5";s:4:"tag3";d:-4;s:4:"tag4";d:1.5;}}i:3;a:2:{s:6:"weight";s:4:"1281";s:5:"attrs";a:4:{s:4:"tag1";s:1:"2";s:4:"tag2";s:1:"4";s:4:"tag3";d:-8;s:4:"tag4";d:5.19999980926513671875;}}}s:5:"total";s:2:"10";s:11:"total_found";s:2:"10";s:4:"time";s:5:"0.003";s:5:"words";a:1:{s:5:"test*";a:2:{s:4:"docs";s:2:"10";s:4:"hits";s:2:"10";}}s:5:"que
ry";s:5:"test*";}}}sphinx-2.0.4-release/test/test_089/0000755000176700017710000000000011724063141016300 5ustar deogardeogarsphinx-2.0.4-release/test/test_089/test.xml0000644000176700017710000000263411607264647020024 0ustar deogardeogar SphinxQL: floats without leading zero indexer { mem_limit = 16M } searchd { workers = threads binlog_path = } index test { type = rt path = /test rt_attr_float = flt rt_field = title } source src_test2 { type = mysql sql_query = SELECT id, text, text as ord, text as wc, 1 as gid FROM table2 sql_attr_uint = gid sql_attr_str2ordinal = ord sql_attr_str2wordcount = wc } index test2 { source = src_test2 path = /test2 docinfo = extern charset_type = utf-8 } insert into test (id,flt,title) values (1,0.3,'zero'),(2,.3,'no zero') select .1 as zz, flt from test where match('zero') select 0.1 as zz, flt from test where match('zero') insert into test (id,flt,title) values (10,-123,'neg') select * from test where id=-10 select * from test where id=10 select * from test2 select * from test2 where match ( 'c' ) CREATE TABLE `table2` ( `id` int(11), `text` varchar(255) NOT NULL ) DROP TABLE IF EXISTS `table2` INSERT INTO `table2` VALUES ( 1, 'a b c' ), ( 2, 'b c d' ), ( 3, 'c d e f' ) sphinx-2.0.4-release/test/test_089/model.bin0000644000176700017710000000370211607264647020112 0ustar deogardeogara:1:{i:0;a:8:{i:0;a:2:{s:8:"sphinxql";s:70:"insert into test (id,flt,title) values (1,0.3,'zero'),(2,.3,'no zero')";s:14:"total_affected";i:2;}i:1;a:3:{s:8:"sphinxql";s:50:"select .1 as zz, flt from test where match('zero')";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:4:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1356";s:3:"flt";s:8:"0.300000";s:2:"zz";s:8:"0.100000";}i:1;a:4:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1356";s:3:"flt";s:8:"0.300000";s:2:"zz";s:8:"0.100000";}}}i:2;a:3:{s:8:"sphinxql";s:51:"select 0.1 as zz, flt from test where match('zero')";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:4:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1356";s:3:"flt";s:8:"0.300000";s:2:"zz";s:8:"0.100000";}i:1;a:4:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1356";s:3:"flt";s:8:"0.300000";s:2:"zz";s:8:"0.100000";}}}i:3;a:2:{s:8:"sphinxql";s:54:"insert into test (id,flt,title) values (10,-123,'neg')";s:14:"total_affected";i:1;}i:4;a:2:{s:8:"sphinxql";s:31:"select * from test where id=-10";s:10:"total_rows";i:0;}i:5;a:3:{s:8:"sphinxql";s:30:"select * from test where id=10";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:2:"10";s:6:"weight";s:1:"1";s:3:"flt";s:11:"-123.000000";}}}i:6;a:3:{s:8:"sphinxql";s:19:"select * from test2";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:5:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"ord";s:1:"0";s:2:"wc";s:1:"3";s:3:"gid";s:1:"1";}i:1;a:5:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:3:"ord";s:1:"1";s:2:"wc";s:1:"3";s:3:"gid";s:1:"1";}i:2;a:5:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"ord";s:1:"2";s:2:"wc";s:1:"4";s:3:"gid";s:1:"1";}}}i:7;a:3:{s:8:"sphinxql";s:39:"select * from test2 where match ( 'c' )";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:5:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1319";s:3:"ord";s:1:"0";s:2:"wc";s:1:"3";s:3:"gid";s:1:"1";}i:1;a:5:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1319";s:3:"ord";s:1:"1";s:2:"wc";s:1:"3";s:3:"gid";s:1:"1";}i:2;a:5:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1319";s:3:"ord";s:1:"2";s:2:"wc";s:1:"4";s:3:"gid";s:1:"1";}}}}}sphinx-2.0.4-release/test/test_061/0000755000176700017710000000000011724063141016266 5ustar deogardeogarsphinx-2.0.4-release/test/test_061/test.xml0000644000176700017710000000220011323636205017763 0ustar deogardeogar merge vs delta 
decoding indexer { mem_limit = 16M } searchd { } source srcmain { type = mysql sql_query = SELECT * FROM test_table WHERE document_id in (2147483632,2147483652) } source srcdelta { type = mysql sql_query = SELECT * FROM test_table WHERE document_id in (2147483632,2147483672) } index main { source = srcmain path = /main charset_type = utf-8 } index delta { source = srcdelta path = /delta charset_type = utf-8 } --merge main delta main CREATE TABLE `test_table` ( `document_id` int(11) UNSIGNED NOT NULL default '0', `body` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 2147483632, 'main' ), ( 2147483652, 'main' ), ( 2147483672, 'main' ) sphinx-2.0.4-release/test/test_061/model.bin0000644000176700017710000000104011162523705020056 0ustar deogardeogara:1:{i:0;a:1:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:2147483632;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}s:10:"2147483652";a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}s:10:"2147483672";a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"main";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"main";}}}sphinx-2.0.4-release/test/test_162/0000755000176700017710000000000011724063141016270 5ustar deogardeogarsphinx-2.0.4-release/test/test_162/test.xml0000644000176700017710000000423411513023135017767 0ustar deogardeogar string vs within group order indexer { mem_limit = 16M } searchd { workers = threads } source src { type = mysql sql_query = SELECT id, idd1, str1, body FROM test_table sql_attr_uint = idd1 sql_attr_string = str1 } index plain { source = src docinfo = extern charset_type = sbcs path = /plain } CREATE TABLE `test_table` ( `id` int(11) NOT NULL default '0', `idd1` int(11) NOT NULL default '0', `str1` varchar(1024) NOT NULL default '', `body` varchar(1024) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 1, 1, 'a aa', 'the dog' ), ( 2, 11, 'c cc', 'the cat' ), ( 3, 11, 'a a', 'the bird' ), ( 4, 11, 'cc c', 'cat eats bird' ), ( 5, 3, 'a a', 'dog eats cat' ), ( 6, 1, 'c cc', 'bird' ), ( 7, 1, 'a a', 'the cat' ), ( 8, 1, 'a a', 'eats' ), ( 9, 3, 'c cc', 'the' ) the | dog | cat the | eats | bird select * from plain group by idd1 within group order by str1 asc select * from plain group by idd1 within group order by str1 desc select * from plain where match('the | dog | cat') group by idd1 within group order by str1 asc select * from plain where match('the | eats | bird') group by idd1 within group order by str1 desc sphinx-2.0.4-release/test/test_162/model.bin0000644000176700017710000001273611513023135020066 0ustar deogardeogara:1:{i:0;a:8:{i:0;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:4:{s:4:"idd1";i:1;s:4:"str1";i:7;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:3:{i:8;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:4:"idd1";s:1:"1";s:4:"str1";s:3:"a a";s:8:"@groupby";s:1:"1";s:6:"@count";s:1:"4";}}i:5;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:4:"idd1";s:1:"3";s:4:"str1";s:3:"a a";s:8:"@groupby";s:1:"3";s:6:"@count";s:1:"2";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:4:"idd1";s:2:"11";s:4:"str1";s:3:"a 
a";s:8:"@groupby";s:2:"11";s:6:"@count";s:1:"3";}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.003";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:1;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:4:{s:4:"idd1";i:1;s:4:"str1";i:7;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:3:{i:6;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:4:"idd1";s:1:"1";s:4:"str1";s:4:"c cc";s:8:"@groupby";s:1:"1";s:6:"@count";s:1:"4";}}i:9;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:4:"idd1";s:1:"3";s:4:"str1";s:4:"c cc";s:8:"@groupby";s:1:"3";s:6:"@count";s:1:"2";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:4:{s:4:"idd1";s:2:"11";s:4:"str1";s:4:"cc c";s:8:"@groupby";s:2:"11";s:6:"@count";s:1:"3";}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.003";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:4:{s:4:"idd1";i:1;s:4:"str1";i:7;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:3:{i:7;a:2:{s:6:"weight";s:4:"1513";s:5:"attrs";a:4:{s:4:"idd1";s:1:"1";s:4:"str1";s:3:"a a";s:8:"@groupby";s:1:"1";s:6:"@count";s:1:"2";}}i:5;a:2:{s:6:"weight";s:4:"1558";s:5:"attrs";a:4:{s:4:"idd1";s:1:"3";s:4:"str1";s:3:"a a";s:8:"@groupby";s:1:"3";s:6:"@count";s:1:"2";}}i:3;a:2:{s:6:"weight";s:4:"1500";s:5:"attrs";a:4:{s:4:"idd1";s:2:"11";s:4:"str1";s:3:"a a";s:8:"@groupby";s:2:"11";s:6:"@count";s:1:"3";}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.005";s:5:"words";a:3:{s:3:"the";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}s:3:"dog";a:2:{s:4:"docs";s:1:"2";s:4:"hits";s:1:"2";}s:3:"cat";a:2:{s:4:"docs";s:1:"4";s:4:"hits";s:1:"4";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:15:"the | dog | cat";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:4:{s:4:"idd1";i:1;s:4:"str1";i:7;s:8:"@groupby";i:1;s:6:"@count";i:1;}s:7:"matches";a:3:{i:6;a:2:{s:6:"weight";s:4:"1527";s:5:"attrs";a:4:{s:4:"idd1";s:1:"1";s:4:"str1";s:4:"c cc";s:8:"@groupby";s:1:"1";s:6:"@count";s:1:"4";}}i:9;a:2:{s:6:"weight";s:4:"1500";s:5:"attrs";a:4:{s:4:"idd1";s:1:"3";s:4:"str1";s:4:"c cc";s:8:"@groupby";s:1:"3";s:6:"@count";s:1:"2";}}i:4;a:2:{s:6:"weight";s:4:"2555";s:5:"attrs";a:4:{s:4:"idd1";s:2:"11";s:4:"str1";s:4:"cc c";s:8:"@groupby";s:2:"11";s:6:"@count";s:1:"3";}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.005";s:5:"words";a:3:{s:3:"the";a:2:{s:4:"docs";s:1:"5";s:4:"hits";s:1:"5";}s:4:"eats";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}s:4:"bird";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:17:"the | eats | bird";}i:4;a:3:{s:8:"sphinxql";s:64:"select * from plain group by idd1 within group order by str1 asc";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:6:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"11";s:4:"str1";s:3:"a a";s:8:"@groupby";s:2:"11";s:6:"@count";s:1:"3";}i:1;a:6:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"3";s:4:"str1";s:3:"a a";s:8:"@groupby";s:1:"3";s:6:"@count";s:1:"2";}i:2;a:6:{s:2:"id";s:1:"8";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"1";s:4:"str1";s:3:"a a";s:8:"@groupby";s:1:"1";s:6:"@count";s:1:"4";}}}i:5;a:3:{s:8:"sphinxql";s:65:"select * from plain group by idd1 within group order by str1 
desc";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:6:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:4:"idd1";s:2:"11";s:4:"str1";s:4:"cc c";s:8:"@groupby";s:2:"11";s:6:"@count";s:1:"3";}i:1;a:6:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"1";s:4:"str1";s:4:"c cc";s:8:"@groupby";s:1:"1";s:6:"@count";s:1:"4";}i:2;a:6:{s:2:"id";s:1:"9";s:6:"weight";s:1:"1";s:4:"idd1";s:1:"3";s:4:"str1";s:4:"c cc";s:8:"@groupby";s:1:"3";s:6:"@count";s:1:"2";}}}i:6;a:3:{s:8:"sphinxql";s:95:"select * from plain where match('the | dog | cat') group by idd1 within group order by str1 asc";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:6:{s:2:"id";s:1:"5";s:6:"weight";s:4:"1558";s:4:"idd1";s:1:"3";s:4:"str1";s:3:"a a";s:8:"@groupby";s:1:"3";s:6:"@count";s:1:"2";}i:1;a:6:{s:2:"id";s:1:"7";s:6:"weight";s:4:"1513";s:4:"idd1";s:1:"1";s:4:"str1";s:3:"a a";s:8:"@groupby";s:1:"1";s:6:"@count";s:1:"2";}i:2;a:6:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1500";s:4:"idd1";s:2:"11";s:4:"str1";s:3:"a a";s:8:"@groupby";s:2:"11";s:6:"@count";s:1:"3";}}}i:7;a:3:{s:8:"sphinxql";s:98:"select * from plain where match('the | eats | bird') group by idd1 within group order by str1 desc";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:6:{s:2:"id";s:1:"4";s:6:"weight";s:4:"2555";s:4:"idd1";s:2:"11";s:4:"str1";s:4:"cc c";s:8:"@groupby";s:2:"11";s:6:"@count";s:1:"3";}i:1;a:6:{s:2:"id";s:1:"6";s:6:"weight";s:4:"1527";s:4:"idd1";s:1:"1";s:4:"str1";s:4:"c cc";s:8:"@groupby";s:1:"1";s:6:"@count";s:1:"4";}i:2;a:6:{s:2:"id";s:1:"9";s:6:"weight";s:4:"1500";s:4:"idd1";s:1:"3";s:4:"str1";s:4:"c cc";s:8:"@groupby";s:1:"3";s:6:"@count";s:1:"2";}}}}}sphinx-2.0.4-release/test/test_183/0000755000176700017710000000000011724063141016273 5ustar deogardeogarsphinx-2.0.4-release/test/test_183/test.xml0000644000176700017710000003554211655626605020022 0ustar deogardeogar support for 256 fields in disk/RT indexes indexer { mem_limit = 16M } searchd { workers = threads } source srctest { type = mysql sql_query = SELECT * FROM test_table } index test { source = srctest path = /test charset_type = utf-8 } source srctests { type = mysql sql_query = SELECT document_id, t21, t141, t241 FROM test_table } index tests { source = srctests path = /tests charset_type = utf-8 } index rt28 { type = rt path = /rt28 rt_field = field1 rt_field = field2 rt_field = field3 rt_field = field4 rt_field = field5 rt_field = field6 rt_field = field7 rt_field = field8 rt_field = field9 rt_field = field10 rt_field = field11 rt_field = field12 rt_field = field13 rt_field = field14 rt_field = field15 rt_field = field16 rt_field = field17 rt_field = field18 rt_field = field19 rt_field = field20 rt_field = field21 rt_field = field22 rt_field = field23 rt_field = field24 rt_field = field25 rt_field = field26 rt_field = field27 rt_field = field28 rt_attr_uint = attr1 } index rt40 { type = rt path = /rt40 rt_field = field1 rt_field = field2 rt_field = field3 rt_field = field4 rt_field = field5 rt_field = field6 rt_field = field7 rt_field = field8 rt_field = field9 rt_field = field10 rt_field = field11 rt_field = field12 rt_field = field13 rt_field = field14 rt_field = field15 rt_field = field16 rt_field = field17 rt_field = field18 rt_field = field19 rt_field = field20 rt_field = field21 rt_field = field22 rt_field = field23 rt_field = field24 rt_field = field25 rt_field = field26 rt_field = field27 rt_field = field28 rt_field = field29 rt_field = field30 rt_field = field31 rt_field = field32 rt_field = field33 rt_field = field34 rt_field = field35 rt_field = field36 rt_field = field37 rt_field = field38 
rt_field = field39 rt_field = field40 rt_attr_uint = attr1 } select * from tests where match ('field_one') select * from test where match ('field_one') select * from tests where match ('field_two') select * from test where match ('field_two') select * from tests where match ('@t21 field_one') select * from tests where match ('@t141 field_one') select * from test where match ('@t21 field_one') select * from tests where match ('@t21 field_two') select * from test where match ('@t21 field_two') select * from tests where match ('@t141 field_three') select * from test where match ('@t141 field_three') select * from tests where match ('@t141 field_two') select * from test where match ('@t141 field_two') select * from tests where match ('@t241 field_two') select * from test where match ('@t241 field_two') select * from tests where match ('field_one field_two') select * from test where match ('field_one field_two') select * from tests where match ('field_one @t141 field_two') select * from test where match ('field_one @t141 field_two') select * from tests where match ('field_one @t21 field_two') select * from test where match ('field_one @t21 field_two') insert into rt40 (id,attr1, field1,field2,field3,field4,field5,field6,field7,field8,field9,field10, field11,field12,field13,field14,field15,field16,field17,field18,field19,field20, field21,field22,field23,field24,field25,field26,field27,field28,field29,field30, field31,field32,field33,field34,field35,field36,field37,field38,field39,field40) values (123,111,'kw1','kw2','kw3','kw4','kw5','kw6','kw7','kw8','kw9','kw10', 'kw11','kw12','kw13','kw14','kw15','kw16','kw17','kw18','kw19','kw20', 'kw21','kw22','kw23','kw24','kw25','kw26','kw27','kw28','kw29','kw30', 'kw31','kw32','kw33','kw34','kw35','kw36','kw37','kw38','kw39','kw40') select *, 1 as testid from rt40 where match('kw37') select *, 2 as testid from rt40 where match('@field5 kw37') select *, 3 as testid from rt40 where match('@field37 kw37') insert into rt28 (id,attr1, field1,field2,field3,field4,field5,field6,field7,field8,field9,field10, field11,field12,field13,field14,field15,field16,field17,field18,field19,field20, field21,field22,field23,field24,field25,field26,field27,field28) values (124,222, 'kw1','kw2','kw3','kw4','kw5','kw6','kw7','kw8','kw9','kw10', 'kw11','kw12','kw13','kw14','kw15','kw16','kw17','kw18','kw19','kw20', 'kw21','kw22','kw23','kw24','kw25','kw26','kw27','kw28') select *, 1 as testid from rt28 where match('kw17') select *, 2 as testid from rt28 where match('@field17 kw17') select *, 3 as testid from rt28 where match('@field3 kw17') insert into rt40 (id,field37) values (125,'kw37 copy2') select * from rt40 where match('kw37') select * from rt40 where match('@field37 kw37') insert into rt40 (id,field37) values (126,'kw37 copy3 ') select * from rt40 where match('kw37') select * from rt40 where match('@field37 kw37') insert into rt40 (id,field39) values (200, 'badger badger badger badger mushroom mushroom') insert into rt40 (id,field39) values (201, 'badger badger badger badger mushroom mushroom') insert into rt40 (id,field39) values (202, 'badger badger badger badger mushroom mushroom') insert into rt40 (id,field39) values (203, 'badger badger badger badger mushroom mushroom') insert into rt40 (id,field39) values (204, 'badger badger badger badger mushroom mushroom') insert into rt40 (id,field39) values (205, 'badger badger badger badger mushroom mushroom') insert into rt40 (id,field39) values (206, 'badger badger badger badger mushroom mushroom') insert into rt40 
(id,field39) values (207, 'badger badger badger badger mushroom mushroom') insert into rt40 (id,field39) values (208, 'badger badger badger badger mushroom mushroom') insert into rt40 (id,field39) values (209, 'badger badger badger badger mushroom mushroom') insert into rt40 (id,field39) values (210, 'badger badger badger badger mushroom mushroom') insert into rt40 (id,field39) values (211, 'badger badger badger badger mushroom mushroom') insert into rt40 (id,field39) values (212, 'badger badger badger badger mushroom mushroom') insert into rt40 (id,field39) values (213, 'badger badger badger badger mushroom mushroom') insert into rt40 (id,field39) values (214, 'badger badger badger badger mushroom mushroom') insert into rt40 (id,field39) values (215, 'badger badger badger badger mushroom mushroom') insert into rt40 (id,field39) values (216, 'badger badger badger badger mushroom mushroom') insert into rt40 (id,field39) values (217, 'badger badger badger badger mushroom mushroom') insert into rt40 (id,field39) values (218, 'badger badger badger badger mushroom mushroom') insert into rt40 (id,field39) values (219, 'badger badger badger badger mushroom mushroom') insert into rt40 (id,field39) values (220, 'badger badger badger badger mushroom mushroom') insert into rt40 (id,field39) values (221, 'badger badger badger badger mushroom mushroom') insert into rt40 (id,field39) values (222, 'badger badger badger badger mushroom mushroom') insert into rt40 (id,field39) values (223, 'badger badger badger badger mushroom mushroom') insert into rt40 (id,field39) values (224, 'badger badger badger badger mushroom mushroom') insert into rt40 (id,field39) values (225, 'badger badger badger badger mushroom mushroom') insert into rt40 (id,field39) values (226, 'badger badger badger badger mushroom mushroom') insert into rt40 (id,field39) values (227, 'badger badger badger badger mushroom mushroom') insert into rt40 (id,field39) values (228, 'badger badger badger badger mushroom mushroom') insert into rt40 (id,field39) values (229, 'badger badger badger badger mushroom mushroom') insert into rt40 (id,field39) values (230, 'badger badger badger badger mushroom mushroom') insert into rt40 (id,field39) values (231, 'badger badger badger badger mushroom mushroom') insert into rt40 (id,field39) values (232, 'badger badger badger badger mushroom mushroom') insert into rt40 (id,field39) values (233, 'badger badger badger badger mushroom mushroom') insert into rt40 (id,field39) values (234, 'badger badger badger badger mushroom mushroom') insert into rt40 (id,field39) values (235, 'badger badger badger badger mushroom mushroom') insert into rt40 (id,field39) values (236, 'badger badger badger badger mushroom mushroom') insert into rt40 (id,field39) values (237, 'badger badger badger badger mushroom mushroom') insert into rt40 (id,field39) values (238, 'badger badger badger badger mushroom mushroom') insert into rt40 (id,field39) values (239, 'badger badger badger badger mushroom mushroom') select * from rt40 where match('kw37') select * from rt40 where match('@field37 kw37') select * from rt40 where match('@field39 badger') limit 40 show meta CREATE TABLE `test_table` ( `document_id` int(11) NOT NULL default '0', `t1` char(5),`t2` char(5),`t3` char(5),`t4` char(5),`t5` char(5),`t6` char(5),`t7` char(5),`t8` char(5),`t9` char(5),`t10` char(5), `t11` char(5),`t12` char(5),`t13` char(5),`t14` char(5),`t15` char(5),`t16` char(5),`t17` char(5),`t18` char(5),`t19` char(5),`t20` char(5), `t21` char(25),`t22` char(5),`t23` 
char(5),`t24` char(5),`t25` char(5),`t26` char(5),`t27` char(5),`t28` char(5),`t29` char(5),`t30` char(5), `t31` char(5),`t32` char(5),`t33` char(5),`t34` char(5),`t35` char(5),`t36` char(5),`t37` char(5),`t38` char(5),`t39` char(5),`t40` char(5), `t41` char(5),`t42` char(5),`t43` char(5),`t44` char(5),`t45` char(5),`t46` char(5),`t47` char(5),`t48` char(5),`t49` char(5),`t50` char(5), `t51` char(5),`t52` char(5),`t53` char(5),`t54` char(5),`t55` char(5),`t56` char(5),`t57` char(5),`t58` char(5),`t59` char(5),`t60` char(5), `t61` char(5),`t62` char(5),`t63` char(5),`t64` char(5),`t65` char(5),`t66` char(5),`t67` char(5),`t68` char(5),`t69` char(5),`t70` char(5), `t71` char(5),`t72` char(5),`t73` char(5),`t74` char(5),`t75` char(5),`t76` char(5),`t77` char(5),`t78` char(5),`t79` char(5),`t80` char(5), `t81` char(5),`t82` char(5),`t83` char(5),`t84` char(5),`t85` char(5),`t86` char(5),`t87` char(5),`t88` char(5),`t89` char(5),`t90` char(5), `t91` char(5),`t92` char(5),`t93` char(5),`t94` char(5),`t95` char(5),`t96` char(5),`t97` char(5),`t98` char(5),`t99` char(5),`t100` char(5), `t101` char(5),`t102` char(5),`t103` char(5),`t104` char(5),`t105` char(5),`t106` char(5),`t107` char(5),`t108` char(5),`t109` char(5),`t110` char(5), `t111` char(5),`t112` char(5),`t113` char(5),`t114` char(5),`t115` char(5),`t116` char(5),`t117` char(5),`t118` char(5),`t119` char(5),`t120` char(5), `t121` char(5),`t122` char(5),`t123` char(5),`t124` char(5),`t125` char(5),`t126` char(5),`t127` char(5),`t128` char(5),`t129` char(5),`t130` char(5), `t131` char(5),`t132` char(5),`t133` char(5),`t134` char(5),`t135` char(5),`t136` char(5),`t137` char(5),`t138` char(5),`t139` char(5),`t140` char(5), `t141` char(25),`t142` char(5),`t143` char(5),`t144` char(5),`t145` char(5),`t146` char(5),`t147` char(5),`t148` char(5),`t149` char(5),`t150` char(5), `t151` char(5),`t152` char(5),`t153` char(5),`t154` char(5),`t155` char(5),`t156` char(5),`t157` char(5),`t158` char(5),`t159` char(5),`t160` char(5), `t161` char(5),`t162` char(5),`t163` char(5),`t164` char(5),`t165` char(5),`t166` char(5),`t167` char(5),`t168` char(5),`t169` char(5),`t170` char(5), `t171` char(5),`t172` char(5),`t173` char(5),`t174` char(5),`t175` char(5),`t176` char(5),`t177` char(5),`t178` char(5),`t179` char(5),`t180` char(5), `t181` char(5),`t182` char(5),`t183` char(5),`t184` char(5),`t185` char(5),`t186` char(5),`t187` char(5),`t188` char(5),`t189` char(5),`t190` char(5), `t191` char(5),`t192` char(5),`t193` char(5),`t194` char(5),`t195` char(5),`t196` char(5),`t197` char(5),`t198` char(5),`t199` char(5),`t200` char(5), `t201` char(5),`t202` char(5),`t203` char(5),`t204` char(5),`t205` char(5),`t206` char(5),`t207` char(5),`t208` char(5),`t209` char(5),`t210` char(5), `t211` char(5),`t212` char(5),`t213` char(5),`t214` char(5),`t215` char(5),`t216` char(5),`t217` char(5),`t218` char(5),`t219` char(5),`t220` char(5), `t221` char(5),`t222` char(5),`t223` char(5),`t224` char(5),`t225` char(5),`t226` char(5),`t227` char(5),`t228` char(5),`t229` char(5),`t230` char(5), `t231` char(5),`t232` char(5),`t233` char(5),`t234` char(5),`t235` char(5),`t236` char(5),`t237` char(5),`t238` char(5),`t239` char(5),`t240` char(5), `t241` char(25),`t242` char(5),`t243` char(5),`t244` char(5),`t245` char(5),`t246` char(5),`t247` char(5),`t248` char(5),`t249` char(5),`t250` char(5), `t251` char(5),`t252` char(5),`t253` char(5),`t254` char(5),`t255` char(5),`t256` char(5) ); DROP TABLE IF EXISTS `test_table`; INSERT INTO `test_table` (document_id, t21, t141, t241) VALUES ( 
1, 'field_one', 'field_one field_one', 'field_one field_two' ), ( 2, 'field_three', 'field_two', 'field_three' ), ( 3, 'field_one', 'field_one', 'field_one'); sphinx-2.0.4-release/test/test_183/model.bin0000644000176700017710000004214111655626605020104 0ustar deogardeogara:1:{i:0;a:79:{i:0;a:3:{s:8:"sphinxql";s:45:"select * from tests where match ('field_one')";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:2:{s:2:"id";s:1:"1";s:6:"weight";s:4:"3500";}i:1;a:2:{s:2:"id";s:1:"3";s:6:"weight";s:4:"3500";}}}i:1;a:3:{s:8:"sphinxql";s:44:"select * from test where match ('field_one')";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:2:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1500";}i:1;a:2:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1500";}}}i:2;a:3:{s:8:"sphinxql";s:45:"select * from tests where match ('field_two')";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:2:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1500";}i:1;a:2:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1500";}}}i:3;a:3:{s:8:"sphinxql";s:44:"select * from test where match ('field_two')";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:2:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1500";}i:1;a:2:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1500";}}}i:4;a:3:{s:8:"sphinxql";s:50:"select * from tests where match ('@t21 field_one')";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:2:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1500";}i:1;a:2:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1500";}}}i:5;a:3:{s:8:"sphinxql";s:51:"select * from tests where match ('@t141 field_one')";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:2:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1500";}i:1;a:2:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1500";}}}i:6;a:3:{s:8:"sphinxql";s:49:"select * from test where match ('@t21 field_one')";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:2:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1500";}i:1;a:2:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1500";}}}i:7;a:2:{s:8:"sphinxql";s:50:"select * from tests where match ('@t21 field_two')";s:10:"total_rows";i:0;}i:8;a:2:{s:8:"sphinxql";s:49:"select * from test where match ('@t21 field_two')";s:10:"total_rows";i:0;}i:9;a:2:{s:8:"sphinxql";s:53:"select * from tests where match ('@t141 field_three')";s:10:"total_rows";i:0;}i:10;a:2:{s:8:"sphinxql";s:52:"select * from test where match ('@t141 field_three')";s:10:"total_rows";i:0;}i:11;a:3:{s:8:"sphinxql";s:51:"select * from tests where match ('@t141 field_two')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:2:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1500";}}}i:12;a:3:{s:8:"sphinxql";s:50:"select * from test where match ('@t141 field_two')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:2:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1500";}}}i:13;a:3:{s:8:"sphinxql";s:51:"select * from tests where match ('@t241 field_two')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:2:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1500";}}}i:14;a:3:{s:8:"sphinxql";s:50:"select * from test where match ('@t241 field_two')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:2:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1500";}}}i:15;a:3:{s:8:"sphinxql";s:55:"select * from tests where match ('field_one field_two')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:2:{s:2:"id";s:1:"1";s:6:"weight";s:4:"4500";}}}i:16;a:3:{s:8:"sphinxql";s:54:"select * from test where match ('field_one field_two')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:2:{s:2:"id";s:1:"1";s:6:"weight";s:4:"4500";}}}i:17;a:2:{s:8:"sphinxql";s:61:"select * from tests where match ('field_one @t141 field_two')";s:10:"total_rows";i:0;}i:18;a:2:{s:8:"sphinxql";s:60:"select * from test where match ('field_one @t141 
field_two')";s:10:"total_rows";i:0;}i:19;a:2:{s:8:"sphinxql";s:60:"select * from tests where match ('field_one @t21 field_two')";s:10:"total_rows";i:0;}i:20;a:2:{s:8:"sphinxql";s:59:"select * from test where match ('field_one @t21 field_two')";s:10:"total_rows";i:0;}i:21;a:2:{s:8:"sphinxql";s:640:"insert into rt40 (id,attr1, field1,field2,field3,field4,field5,field6,field7,field8,field9,field10, field11,field12,field13,field14,field15,field16,field17,field18,field19,field20, field21,field22,field23,field24,field25,field26,field27,field28,field29,field30, field31,field32,field33,field34,field35,field36,field37,field38,field39,field40) values (123,111,'kw1','kw2','kw3','kw4','kw5','kw6','kw7','kw8','kw9','kw10', 'kw11','kw12','kw13','kw14','kw15','kw16','kw17','kw18','kw19','kw20', 'kw21','kw22','kw23','kw24','kw25','kw26','kw27','kw28','kw29','kw30', 'kw31','kw32','kw33','kw34','kw35','kw36','kw37','kw38','kw39','kw40')";s:14:"total_affected";i:1;}i:22;a:3:{s:8:"sphinxql";s:51:"select *, 1 as testid from rt40 where match('kw37')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:4:{s:2:"id";s:3:"123";s:6:"weight";s:4:"1500";s:5:"attr1";s:3:"111";s:6:"testid";s:1:"1";}}}i:23;a:2:{s:8:"sphinxql";s:59:"select *, 2 as testid from rt40 where match('@field5 kw37')";s:10:"total_rows";i:0;}i:24;a:3:{s:8:"sphinxql";s:60:"select *, 3 as testid from rt40 where match('@field37 kw37')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:4:{s:2:"id";s:3:"123";s:6:"weight";s:4:"1500";s:5:"attr1";s:3:"111";s:6:"testid";s:1:"3";}}}i:25;a:2:{s:8:"sphinxql";s:458:"insert into rt28 (id,attr1, field1,field2,field3,field4,field5,field6,field7,field8,field9,field10, field11,field12,field13,field14,field15,field16,field17,field18,field19,field20, field21,field22,field23,field24,field25,field26,field27,field28) values (124,222, 'kw1','kw2','kw3','kw4','kw5','kw6','kw7','kw8','kw9','kw10', 'kw11','kw12','kw13','kw14','kw15','kw16','kw17','kw18','kw19','kw20', 'kw21','kw22','kw23','kw24','kw25','kw26','kw27','kw28')";s:14:"total_affected";i:1;}i:26;a:3:{s:8:"sphinxql";s:51:"select *, 1 as testid from rt28 where match('kw17')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:4:{s:2:"id";s:3:"124";s:6:"weight";s:4:"1500";s:5:"attr1";s:3:"222";s:6:"testid";s:1:"1";}}}i:27;a:3:{s:8:"sphinxql";s:60:"select *, 2 as testid from rt28 where match('@field17 kw17')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:4:{s:2:"id";s:3:"124";s:6:"weight";s:4:"1500";s:5:"attr1";s:3:"222";s:6:"testid";s:1:"2";}}}i:28;a:2:{s:8:"sphinxql";s:59:"select *, 3 as testid from rt28 where match('@field3 kw17')";s:10:"total_rows";i:0;}i:29;a:2:{s:8:"sphinxql";s:55:"insert into rt40 (id,field37) values (125,'kw37 copy2')";s:14:"total_affected";i:1;}i:30;a:3:{s:8:"sphinxql";s:38:"select * from rt40 where match('kw37')";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:3:{s:2:"id";s:3:"123";s:6:"weight";s:4:"1356";s:5:"attr1";s:3:"111";}i:1;a:3:{s:2:"id";s:3:"125";s:6:"weight";s:4:"1356";s:5:"attr1";s:1:"0";}}}i:31;a:3:{s:8:"sphinxql";s:47:"select * from rt40 where match('@field37 kw37')";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:3:{s:2:"id";s:3:"123";s:6:"weight";s:4:"1356";s:5:"attr1";s:3:"111";}i:1;a:3:{s:2:"id";s:3:"125";s:6:"weight";s:4:"1356";s:5:"attr1";s:1:"0";}}}i:32;a:2:{s:8:"sphinxql";s:56:"insert into rt40 (id,field37) values (126,'kw37 copy3 ')";s:14:"total_affected";i:1;}i:33;a:3:{s:8:"sphinxql";s:38:"select * from rt40 where 
match('kw37')";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:3:{s:2:"id";s:3:"123";s:6:"weight";s:4:"1319";s:5:"attr1";s:3:"111";}i:1;a:3:{s:2:"id";s:3:"125";s:6:"weight";s:4:"1319";s:5:"attr1";s:1:"0";}i:2;a:3:{s:2:"id";s:3:"126";s:6:"weight";s:4:"1319";s:5:"attr1";s:1:"0";}}}i:34;a:3:{s:8:"sphinxql";s:47:"select * from rt40 where match('@field37 kw37')";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:3:{s:2:"id";s:3:"123";s:6:"weight";s:4:"1319";s:5:"attr1";s:3:"111";}i:1;a:3:{s:2:"id";s:3:"125";s:6:"weight";s:4:"1319";s:5:"attr1";s:1:"0";}i:2;a:3:{s:2:"id";s:3:"126";s:6:"weight";s:4:"1319";s:5:"attr1";s:1:"0";}}}i:35;a:2:{s:8:"sphinxql";s:91:"insert into rt40 (id,field39) values (200, 'badger badger badger badger mushroom mushroom')";s:14:"total_affected";i:1;}i:36;a:2:{s:8:"sphinxql";s:91:"insert into rt40 (id,field39) values (201, 'badger badger badger badger mushroom mushroom')";s:14:"total_affected";i:1;}i:37;a:2:{s:8:"sphinxql";s:91:"insert into rt40 (id,field39) values (202, 'badger badger badger badger mushroom mushroom')";s:14:"total_affected";i:1;}i:38;a:2:{s:8:"sphinxql";s:91:"insert into rt40 (id,field39) values (203, 'badger badger badger badger mushroom mushroom')";s:14:"total_affected";i:1;}i:39;a:2:{s:8:"sphinxql";s:91:"insert into rt40 (id,field39) values (204, 'badger badger badger badger mushroom mushroom')";s:14:"total_affected";i:1;}i:40;a:2:{s:8:"sphinxql";s:91:"insert into rt40 (id,field39) values (205, 'badger badger badger badger mushroom mushroom')";s:14:"total_affected";i:1;}i:41;a:2:{s:8:"sphinxql";s:91:"insert into rt40 (id,field39) values (206, 'badger badger badger badger mushroom mushroom')";s:14:"total_affected";i:1;}i:42;a:2:{s:8:"sphinxql";s:91:"insert into rt40 (id,field39) values (207, 'badger badger badger badger mushroom mushroom')";s:14:"total_affected";i:1;}i:43;a:2:{s:8:"sphinxql";s:91:"insert into rt40 (id,field39) values (208, 'badger badger badger badger mushroom mushroom')";s:14:"total_affected";i:1;}i:44;a:2:{s:8:"sphinxql";s:91:"insert into rt40 (id,field39) values (209, 'badger badger badger badger mushroom mushroom')";s:14:"total_affected";i:1;}i:45;a:2:{s:8:"sphinxql";s:91:"insert into rt40 (id,field39) values (210, 'badger badger badger badger mushroom mushroom')";s:14:"total_affected";i:1;}i:46;a:2:{s:8:"sphinxql";s:91:"insert into rt40 (id,field39) values (211, 'badger badger badger badger mushroom mushroom')";s:14:"total_affected";i:1;}i:47;a:2:{s:8:"sphinxql";s:91:"insert into rt40 (id,field39) values (212, 'badger badger badger badger mushroom mushroom')";s:14:"total_affected";i:1;}i:48;a:2:{s:8:"sphinxql";s:91:"insert into rt40 (id,field39) values (213, 'badger badger badger badger mushroom mushroom')";s:14:"total_affected";i:1;}i:49;a:2:{s:8:"sphinxql";s:91:"insert into rt40 (id,field39) values (214, 'badger badger badger badger mushroom mushroom')";s:14:"total_affected";i:1;}i:50;a:2:{s:8:"sphinxql";s:91:"insert into rt40 (id,field39) values (215, 'badger badger badger badger mushroom mushroom')";s:14:"total_affected";i:1;}i:51;a:2:{s:8:"sphinxql";s:91:"insert into rt40 (id,field39) values (216, 'badger badger badger badger mushroom mushroom')";s:14:"total_affected";i:1;}i:52;a:2:{s:8:"sphinxql";s:91:"insert into rt40 (id,field39) values (217, 'badger badger badger badger mushroom mushroom')";s:14:"total_affected";i:1;}i:53;a:2:{s:8:"sphinxql";s:91:"insert into rt40 (id,field39) values (218, 'badger badger badger badger mushroom mushroom')";s:14:"total_affected";i:1;}i:54;a:2:{s:8:"sphinxql";s:91:"insert into rt40 (id,field39) values 
(219, 'badger badger badger badger mushroom mushroom')";s:14:"total_affected";i:1;}i:55;a:2:{s:8:"sphinxql";s:91:"insert into rt40 (id,field39) values (220, 'badger badger badger badger mushroom mushroom')";s:14:"total_affected";i:1;}i:56;a:2:{s:8:"sphinxql";s:91:"insert into rt40 (id,field39) values (221, 'badger badger badger badger mushroom mushroom')";s:14:"total_affected";i:1;}i:57;a:2:{s:8:"sphinxql";s:91:"insert into rt40 (id,field39) values (222, 'badger badger badger badger mushroom mushroom')";s:14:"total_affected";i:1;}i:58;a:2:{s:8:"sphinxql";s:91:"insert into rt40 (id,field39) values (223, 'badger badger badger badger mushroom mushroom')";s:14:"total_affected";i:1;}i:59;a:2:{s:8:"sphinxql";s:91:"insert into rt40 (id,field39) values (224, 'badger badger badger badger mushroom mushroom')";s:14:"total_affected";i:1;}i:60;a:2:{s:8:"sphinxql";s:91:"insert into rt40 (id,field39) values (225, 'badger badger badger badger mushroom mushroom')";s:14:"total_affected";i:1;}i:61;a:2:{s:8:"sphinxql";s:91:"insert into rt40 (id,field39) values (226, 'badger badger badger badger mushroom mushroom')";s:14:"total_affected";i:1;}i:62;a:2:{s:8:"sphinxql";s:91:"insert into rt40 (id,field39) values (227, 'badger badger badger badger mushroom mushroom')";s:14:"total_affected";i:1;}i:63;a:2:{s:8:"sphinxql";s:91:"insert into rt40 (id,field39) values (228, 'badger badger badger badger mushroom mushroom')";s:14:"total_affected";i:1;}i:64;a:2:{s:8:"sphinxql";s:91:"insert into rt40 (id,field39) values (229, 'badger badger badger badger mushroom mushroom')";s:14:"total_affected";i:1;}i:65;a:2:{s:8:"sphinxql";s:91:"insert into rt40 (id,field39) values (230, 'badger badger badger badger mushroom mushroom')";s:14:"total_affected";i:1;}i:66;a:2:{s:8:"sphinxql";s:91:"insert into rt40 (id,field39) values (231, 'badger badger badger badger mushroom mushroom')";s:14:"total_affected";i:1;}i:67;a:2:{s:8:"sphinxql";s:91:"insert into rt40 (id,field39) values (232, 'badger badger badger badger mushroom mushroom')";s:14:"total_affected";i:1;}i:68;a:2:{s:8:"sphinxql";s:91:"insert into rt40 (id,field39) values (233, 'badger badger badger badger mushroom mushroom')";s:14:"total_affected";i:1;}i:69;a:2:{s:8:"sphinxql";s:91:"insert into rt40 (id,field39) values (234, 'badger badger badger badger mushroom mushroom')";s:14:"total_affected";i:1;}i:70;a:2:{s:8:"sphinxql";s:91:"insert into rt40 (id,field39) values (235, 'badger badger badger badger mushroom mushroom')";s:14:"total_affected";i:1;}i:71;a:2:{s:8:"sphinxql";s:91:"insert into rt40 (id,field39) values (236, 'badger badger badger badger mushroom mushroom')";s:14:"total_affected";i:1;}i:72;a:2:{s:8:"sphinxql";s:91:"insert into rt40 (id,field39) values (237, 'badger badger badger badger mushroom mushroom')";s:14:"total_affected";i:1;}i:73;a:2:{s:8:"sphinxql";s:91:"insert into rt40 (id,field39) values (238, 'badger badger badger badger mushroom mushroom')";s:14:"total_affected";i:1;}i:74;a:2:{s:8:"sphinxql";s:91:"insert into rt40 (id,field39) values (239, 'badger badger badger badger mushroom mushroom')";s:14:"total_affected";i:1;}i:75;a:3:{s:8:"sphinxql";s:38:"select * from rt40 where match('kw37')";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:3:{s:2:"id";s:3:"123";s:6:"weight";s:4:"1657";s:5:"attr1";s:3:"111";}i:1;a:3:{s:2:"id";s:3:"125";s:6:"weight";s:4:"1657";s:5:"attr1";s:1:"0";}i:2;a:3:{s:2:"id";s:3:"126";s:6:"weight";s:4:"1657";s:5:"attr1";s:1:"0";}}}i:76;a:3:{s:8:"sphinxql";s:47:"select * from rt40 where match('@field37 
kw37')";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:3:{s:2:"id";s:3:"123";s:6:"weight";s:4:"1657";s:5:"attr1";s:3:"111";}i:1;a:3:{s:2:"id";s:3:"125";s:6:"weight";s:4:"1657";s:5:"attr1";s:1:"0";}i:2;a:3:{s:2:"id";s:3:"126";s:6:"weight";s:4:"1657";s:5:"attr1";s:1:"0";}}}i:77;a:3:{s:8:"sphinxql";s:58:"select * from rt40 where match('@field39 badger') limit 40";s:10:"total_rows";i:40;s:4:"rows";a:40:{i:0;a:3:{s:2:"id";s:3:"200";s:6:"weight";s:4:"1265";s:5:"attr1";s:1:"0";}i:1;a:3:{s:2:"id";s:3:"201";s:6:"weight";s:4:"1265";s:5:"attr1";s:1:"0";}i:2;a:3:{s:2:"id";s:3:"202";s:6:"weight";s:4:"1265";s:5:"attr1";s:1:"0";}i:3;a:3:{s:2:"id";s:3:"203";s:6:"weight";s:4:"1265";s:5:"attr1";s:1:"0";}i:4;a:3:{s:2:"id";s:3:"204";s:6:"weight";s:4:"1265";s:5:"attr1";s:1:"0";}i:5;a:3:{s:2:"id";s:3:"205";s:6:"weight";s:4:"1265";s:5:"attr1";s:1:"0";}i:6;a:3:{s:2:"id";s:3:"206";s:6:"weight";s:4:"1265";s:5:"attr1";s:1:"0";}i:7;a:3:{s:2:"id";s:3:"207";s:6:"weight";s:4:"1265";s:5:"attr1";s:1:"0";}i:8;a:3:{s:2:"id";s:3:"208";s:6:"weight";s:4:"1265";s:5:"attr1";s:1:"0";}i:9;a:3:{s:2:"id";s:3:"209";s:6:"weight";s:4:"1265";s:5:"attr1";s:1:"0";}i:10;a:3:{s:2:"id";s:3:"210";s:6:"weight";s:4:"1265";s:5:"attr1";s:1:"0";}i:11;a:3:{s:2:"id";s:3:"211";s:6:"weight";s:4:"1265";s:5:"attr1";s:1:"0";}i:12;a:3:{s:2:"id";s:3:"212";s:6:"weight";s:4:"1265";s:5:"attr1";s:1:"0";}i:13;a:3:{s:2:"id";s:3:"213";s:6:"weight";s:4:"1265";s:5:"attr1";s:1:"0";}i:14;a:3:{s:2:"id";s:3:"214";s:6:"weight";s:4:"1265";s:5:"attr1";s:1:"0";}i:15;a:3:{s:2:"id";s:3:"215";s:6:"weight";s:4:"1265";s:5:"attr1";s:1:"0";}i:16;a:3:{s:2:"id";s:3:"216";s:6:"weight";s:4:"1265";s:5:"attr1";s:1:"0";}i:17;a:3:{s:2:"id";s:3:"217";s:6:"weight";s:4:"1265";s:5:"attr1";s:1:"0";}i:18;a:3:{s:2:"id";s:3:"218";s:6:"weight";s:4:"1265";s:5:"attr1";s:1:"0";}i:19;a:3:{s:2:"id";s:3:"219";s:6:"weight";s:4:"1265";s:5:"attr1";s:1:"0";}i:20;a:3:{s:2:"id";s:3:"220";s:6:"weight";s:4:"1265";s:5:"attr1";s:1:"0";}i:21;a:3:{s:2:"id";s:3:"221";s:6:"weight";s:4:"1265";s:5:"attr1";s:1:"0";}i:22;a:3:{s:2:"id";s:3:"222";s:6:"weight";s:4:"1265";s:5:"attr1";s:1:"0";}i:23;a:3:{s:2:"id";s:3:"223";s:6:"weight";s:4:"1265";s:5:"attr1";s:1:"0";}i:24;a:3:{s:2:"id";s:3:"224";s:6:"weight";s:4:"1265";s:5:"attr1";s:1:"0";}i:25;a:3:{s:2:"id";s:3:"225";s:6:"weight";s:4:"1265";s:5:"attr1";s:1:"0";}i:26;a:3:{s:2:"id";s:3:"226";s:6:"weight";s:4:"1265";s:5:"attr1";s:1:"0";}i:27;a:3:{s:2:"id";s:3:"227";s:6:"weight";s:4:"1265";s:5:"attr1";s:1:"0";}i:28;a:3:{s:2:"id";s:3:"228";s:6:"weight";s:4:"1265";s:5:"attr1";s:1:"0";}i:29;a:3:{s:2:"id";s:3:"229";s:6:"weight";s:4:"1265";s:5:"attr1";s:1:"0";}i:30;a:3:{s:2:"id";s:3:"230";s:6:"weight";s:4:"1265";s:5:"attr1";s:1:"0";}i:31;a:3:{s:2:"id";s:3:"231";s:6:"weight";s:4:"1265";s:5:"attr1";s:1:"0";}i:32;a:3:{s:2:"id";s:3:"232";s:6:"weight";s:4:"1265";s:5:"attr1";s:1:"0";}i:33;a:3:{s:2:"id";s:3:"233";s:6:"weight";s:4:"1265";s:5:"attr1";s:1:"0";}i:34;a:3:{s:2:"id";s:3:"234";s:6:"weight";s:4:"1265";s:5:"attr1";s:1:"0";}i:35;a:3:{s:2:"id";s:3:"235";s:6:"weight";s:4:"1265";s:5:"attr1";s:1:"0";}i:36;a:3:{s:2:"id";s:3:"236";s:6:"weight";s:4:"1265";s:5:"attr1";s:1:"0";}i:37;a:3:{s:2:"id";s:3:"237";s:6:"weight";s:4:"1265";s:5:"attr1";s:1:"0";}i:38;a:3:{s:2:"id";s:3:"238";s:6:"weight";s:4:"1265";s:5:"attr1";s:1:"0";}i:39;a:3:{s:2:"id";s:3:"239";s:6:"weight";s:4:"1265";s:5:"attr1";s:1:"0";}}}i:78;a:3:{s:8:"sphinxql";s:9:"show 
meta";s:10:"total_rows";i:6;s:4:"rows";a:5:{i:0;a:2:{s:13:"Variable_name";s:5:"total";s:5:"Value";s:2:"40";}i:1;a:2:{s:13:"Variable_name";s:11:"total_found";s:5:"Value";s:2:"40";}i:2;a:2:{s:13:"Variable_name";s:10:"keyword[0]";s:5:"Value";s:6:"badger";}i:3;a:2:{s:13:"Variable_name";s:7:"docs[0]";s:5:"Value";s:2:"40";}i:4;a:2:{s:13:"Variable_name";s:7:"hits[0]";s:5:"Value";s:3:"160";}}}}}sphinx-2.0.4-release/test/test_185/0000755000176700017710000000000011724063141016275 5ustar deogardeogarsphinx-2.0.4-release/test/test_185/test.xml0000644000176700017710000000767611665427474020037 0ustar deogardeogar Smart attribute updates indexer { mem_limit = 16M } searchd { workers = threads binlog_path = # } source src { type = mysql sql_query = SELECT id, text, section, mva1 FROM test_table WHERE id<4 sql_attr_uint = section sql_attr_multi = uint mva1 from field mva1 sql_attr_multi = bigint mva1 from field mva1 } source src1 : src { sql_query = SELECT id, text, section, mva1 FROM test_table WHERE id<3 } source src2 : src { sql_query = SELECT id, text, section, mva1 FROM test_table WHERE id=3 } index dist_no { source = src path = /idx docinfo = extern charset_type = utf-8 min_word_len = 1 } index idx1 : dist_no { source = src1 path = /idx1 } index idx2 : dist_no { source = src2 path = /idx2 } index dist0 { type = distributed local = dist_no agent_connect_timeout = 1000 agent_query_timeout = 3000 } index dist1 { type = distributed local = idx1 local = idx2 agent_connect_timeout = 1000 agent_query_timeout = 3000 } index dist2 { type = distributed agent = :idx1 local = idx2 agent_connect_timeout = 1000 agent_query_timeout = 3000 } index dist3 { type = distributed local = idx1 agent = :idx2 agent_connect_timeout = 1000 agent_query_timeout = 3000 } index dist4 { type = distributed agent = :idx1 agent = :idx2 agent_connect_timeout = 1000 agent_query_timeout = 3000 } index dist5 { type = distributed agent = :dist_no agent_connect_timeout = 1000 agent_query_timeout = 3000 } index rt { type = rt path = /rt charset_type = utf-8 docinfo = extern rt_field = body rt_attr_multi = mva1 rt_attr_uint = gid rt_attr_multi = mva2 } insert into rt (id, gid, mva1, mva2, body) values (1, 11, (1, 1), (2), 'dummy'), (3, 33, (3, 3), (3), 'dummy1') update dist_no set mva1=(3,2, 1, 2), mva1=(1, 2), section=111 where match ('test1') select * from dist_no update dist0 set mva1=(4,5, 1, 2), mva1=(8, 7), section=112 where match ('test1') select * from dist0 update dist1 set mva1=(3,2, 1, 2), mva1=(9, 10), section=113 where match ('testt') and id>=2 select * from dist1 update dist2 set mva1=(3,2, 1, 2), mva1=(11, 12), section=114 where match ('testt') and id>=2 select * from dist2 update dist3 set mva1=(3,2, 1, 2), mva1=(13, 14), section=115 where gid match ('testt') and id>=2 select * from dist3 update dist4 set mva1=(3,2, 1, 2), mva1=(15, 16), section=116 where match ('testt') and id>=2 select * from dist4 update dist5 set mva1=(3,2, 1, 2), mva1=(17, 18), section=117 where match ('testt') and id>=2 select * from dist5 update rt set mva1=(3,2, 1, 2), gid=3212, mva2=(1, 2, 3, 4, 5, 6), mva2=(3,4,5) where id=1 select * from rt update rt set mva1=() where id=3 select * from rt CREATE TABLE `test_table` ( `id` int(11) DEFAULT NULL, `text` varchar (255) NOT NULL, `section` int(11) DEFAULT NULL, `mva1` varchar(255) NOT NULL ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` (`id`, `text`, `section`, `mva1`) VALUES (1, 'test test1', 101, '1001'), (2, 'testt test2', 102, '1002 1023 4456'), (3, 'test test3', 103, '1003 1008 1010'), 
(4, 'testt test4', 104, '1004 1005 1006'); sphinx-2.0.4-release/test/test_185/model.bin0000644000176700017710000002321211665427474020110 0ustar deogardeogara:2:{i:0;a:19:{i:0;a:2:{s:8:"sphinxql";s:111:"insert into rt (id, gid, mva1, mva2, body) values (1, 11, (1, 1), (2), 'dummy'), (3, 33, (3, 3), (3), 'dummy1')";s:14:"total_affected";i:2;}i:1;a:2:{s:8:"sphinxql";s:83:"update dist_no set mva1=(3,2, 1, 2), mva1=(1, 2), section=111 where match ('test1')";s:14:"total_affected";i:1;}i:2;a:3:{s:8:"sphinxql";s:21:"select * from dist_no";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:4:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:7:"section";s:3:"111";s:4:"mva1";s:3:"1,2";}i:1;a:4:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:7:"section";s:3:"102";s:4:"mva1";s:14:"1002,1023,4456";}i:2;a:4:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:7:"section";s:3:"103";s:4:"mva1";s:14:"1003,1008,1010";}}}i:3;a:2:{s:8:"sphinxql";s:81:"update dist0 set mva1=(4,5, 1, 2), mva1=(8, 7), section=112 where match ('test1')";s:14:"total_affected";i:1;}i:4;a:3:{s:8:"sphinxql";s:19:"select * from dist0";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:4:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:7:"section";s:3:"112";s:4:"mva1";s:3:"7,8";}i:1;a:4:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:7:"section";s:3:"102";s:4:"mva1";s:14:"1002,1023,4456";}i:2;a:4:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:7:"section";s:3:"103";s:4:"mva1";s:14:"1003,1008,1010";}}}i:5;a:2:{s:8:"sphinxql";s:92:"update dist1 set mva1=(3,2, 1, 2), mva1=(9, 10), section=113 where match ('testt') and id>=2";s:14:"total_affected";i:1;}i:6;a:3:{s:8:"sphinxql";s:19:"select * from dist1";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:4:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:7:"section";s:3:"101";s:4:"mva1";s:4:"1001";}i:1;a:4:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:7:"section";s:3:"113";s:4:"mva1";s:4:"9,10";}i:2;a:4:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:7:"section";s:3:"103";s:4:"mva1";s:14:"1003,1008,1010";}}}i:7;a:2:{s:8:"sphinxql";s:93:"update dist2 set mva1=(3,2, 1, 2), mva1=(11, 12), section=114 where match ('testt') and id>=2";s:14:"total_affected";i:1;}i:8;a:3:{s:8:"sphinxql";s:19:"select * from dist2";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:4:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:7:"section";s:3:"101";s:4:"mva1";s:4:"1001";}i:1;a:4:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:7:"section";s:3:"114";s:4:"mva1";s:5:"11,12";}i:2;a:4:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:7:"section";s:3:"103";s:4:"mva1";s:14:"1003,1008,1010";}}}i:9;a:3:{s:8:"sphinxql";s:97:"update dist3 set mva1=(3,2, 1, 2), mva1=(13, 14), section=115 where gid match ('testt') and id>=2";s:5:"error";s:112:"sphinxql: syntax error, unexpected MATCH, expecting BETWEEN (or 8 other tokens) near 'match ('testt') and id>=2'";s:5:"errno";i:1064;}i:10;a:3:{s:8:"sphinxql";s:19:"select * from dist3";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:4:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:7:"section";s:3:"101";s:4:"mva1";s:4:"1001";}i:1;a:4:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:7:"section";s:3:"114";s:4:"mva1";s:5:"11,12";}i:2;a:4:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:7:"section";s:3:"103";s:4:"mva1";s:14:"1003,1008,1010";}}}i:11;a:2:{s:8:"sphinxql";s:93:"update dist4 set mva1=(3,2, 1, 2), mva1=(15, 16), section=116 where match ('testt') and id>=2";s:14:"total_affected";i:1;}i:12;a:3:{s:8:"sphinxql";s:19:"select * from 
dist4";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:4:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:7:"section";s:3:"101";s:4:"mva1";s:4:"1001";}i:1;a:4:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:7:"section";s:3:"116";s:4:"mva1";s:5:"15,16";}i:2;a:4:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:7:"section";s:3:"103";s:4:"mva1";s:14:"1003,1008,1010";}}}i:13;a:2:{s:8:"sphinxql";s:93:"update dist5 set mva1=(3,2, 1, 2), mva1=(17, 18), section=117 where match ('testt') and id>=2";s:14:"total_affected";i:1;}i:14;a:3:{s:8:"sphinxql";s:19:"select * from dist5";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:4:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:7:"section";s:3:"112";s:4:"mva1";s:3:"7,8";}i:1;a:4:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:7:"section";s:3:"117";s:4:"mva1";s:5:"17,18";}i:2;a:4:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:7:"section";s:3:"103";s:4:"mva1";s:14:"1003,1008,1010";}}}i:15;a:2:{s:8:"sphinxql";s:90:"update rt set mva1=(3,2, 1, 2), gid=3212, mva2=(1, 2, 3, 4, 5, 6), mva2=(3,4,5) where id=1";s:14:"total_affected";i:1;}i:16;a:3:{s:8:"sphinxql";s:16:"select * from rt";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:5:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"gid";s:4:"3212";s:4:"mva1";s:5:"1,2,3";s:4:"mva2";s:5:"3,4,5";}i:1;a:5:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"gid";s:2:"33";s:4:"mva1";s:1:"3";s:4:"mva2";s:1:"3";}}}i:17;a:2:{s:8:"sphinxql";s:32:"update rt set mva1=() where id=3";s:14:"total_affected";i:1;}i:18;a:3:{s:8:"sphinxql";s:16:"select * from rt";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:5:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"gid";s:4:"3212";s:4:"mva1";s:5:"1,2,3";s:4:"mva2";s:5:"3,4,5";}i:1;a:5:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"gid";s:2:"33";s:4:"mva1";s:0:"";s:4:"mva2";s:1:"3";}}}}i:1;a:19:{i:0;a:2:{s:8:"sphinxql";s:111:"insert into rt (id, gid, mva1, mva2, body) values (1, 11, (1, 1), (2), 'dummy'), (3, 33, (3, 3), (3), 'dummy1')";s:14:"total_affected";i:2;}i:1;a:2:{s:8:"sphinxql";s:83:"update dist_no set mva1=(3,2, 1, 2), mva1=(1, 2), section=111 where match ('test1')";s:14:"total_affected";i:1;}i:2;a:3:{s:8:"sphinxql";s:21:"select * from dist_no";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:4:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:7:"section";s:3:"111";s:4:"mva1";s:3:"1,2";}i:1;a:4:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:7:"section";s:3:"102";s:4:"mva1";s:14:"1002,1023,4456";}i:2;a:4:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:7:"section";s:3:"103";s:4:"mva1";s:14:"1003,1008,1010";}}}i:3;a:2:{s:8:"sphinxql";s:81:"update dist0 set mva1=(4,5, 1, 2), mva1=(8, 7), section=112 where match ('test1')";s:14:"total_affected";i:1;}i:4;a:3:{s:8:"sphinxql";s:19:"select * from dist0";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:4:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:7:"section";s:3:"112";s:4:"mva1";s:3:"7,8";}i:1;a:4:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:7:"section";s:3:"102";s:4:"mva1";s:14:"1002,1023,4456";}i:2;a:4:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:7:"section";s:3:"103";s:4:"mva1";s:14:"1003,1008,1010";}}}i:5;a:2:{s:8:"sphinxql";s:92:"update dist1 set mva1=(3,2, 1, 2), mva1=(9, 10), section=113 where match ('testt') and id>=2";s:14:"total_affected";i:1;}i:6;a:3:{s:8:"sphinxql";s:19:"select * from 
dist1";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:4:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:7:"section";s:3:"101";s:4:"mva1";s:4:"1001";}i:1;a:4:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:7:"section";s:3:"113";s:4:"mva1";s:4:"9,10";}i:2;a:4:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:7:"section";s:3:"103";s:4:"mva1";s:14:"1003,1008,1010";}}}i:7;a:2:{s:8:"sphinxql";s:93:"update dist2 set mva1=(3,2, 1, 2), mva1=(11, 12), section=114 where match ('testt') and id>=2";s:14:"total_affected";i:1;}i:8;a:3:{s:8:"sphinxql";s:19:"select * from dist2";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:4:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:7:"section";s:3:"101";s:4:"mva1";s:4:"1001";}i:1;a:4:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:7:"section";s:3:"114";s:4:"mva1";s:5:"11,12";}i:2;a:4:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:7:"section";s:3:"103";s:4:"mva1";s:14:"1003,1008,1010";}}}i:9;a:3:{s:8:"sphinxql";s:97:"update dist3 set mva1=(3,2, 1, 2), mva1=(13, 14), section=115 where gid match ('testt') and id>=2";s:5:"error";s:112:"sphinxql: syntax error, unexpected MATCH, expecting BETWEEN (or 8 other tokens) near 'match ('testt') and id>=2'";s:5:"errno";i:1064;}i:10;a:3:{s:8:"sphinxql";s:19:"select * from dist3";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:4:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:7:"section";s:3:"101";s:4:"mva1";s:4:"1001";}i:1;a:4:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:7:"section";s:3:"114";s:4:"mva1";s:5:"11,12";}i:2;a:4:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:7:"section";s:3:"103";s:4:"mva1";s:14:"1003,1008,1010";}}}i:11;a:2:{s:8:"sphinxql";s:93:"update dist4 set mva1=(3,2, 1, 2), mva1=(15, 16), section=116 where match ('testt') and id>=2";s:14:"total_affected";i:1;}i:12;a:3:{s:8:"sphinxql";s:19:"select * from dist4";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:4:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:7:"section";s:3:"101";s:4:"mva1";s:4:"1001";}i:1;a:4:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:7:"section";s:3:"116";s:4:"mva1";s:5:"15,16";}i:2;a:4:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:7:"section";s:3:"103";s:4:"mva1";s:14:"1003,1008,1010";}}}i:13;a:2:{s:8:"sphinxql";s:93:"update dist5 set mva1=(3,2, 1, 2), mva1=(17, 18), section=117 where match ('testt') and id>=2";s:14:"total_affected";i:1;}i:14;a:3:{s:8:"sphinxql";s:19:"select * from dist5";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:4:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:7:"section";s:3:"112";s:4:"mva1";s:3:"7,8";}i:1;a:4:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:7:"section";s:3:"117";s:4:"mva1";s:5:"17,18";}i:2;a:4:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:7:"section";s:3:"103";s:4:"mva1";s:14:"1003,1008,1010";}}}i:15;a:2:{s:8:"sphinxql";s:90:"update rt set mva1=(3,2, 1, 2), gid=3212, mva2=(1, 2, 3, 4, 5, 6), mva2=(3,4,5) where id=1";s:14:"total_affected";i:1;}i:16;a:3:{s:8:"sphinxql";s:16:"select * from rt";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:5:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"gid";s:4:"3212";s:4:"mva1";s:5:"1,2,3";s:4:"mva2";s:5:"3,4,5";}i:1;a:5:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"gid";s:2:"33";s:4:"mva1";s:1:"3";s:4:"mva2";s:1:"3";}}}i:17;a:2:{s:8:"sphinxql";s:32:"update rt set mva1=() where id=3";s:14:"total_affected";i:1;}i:18;a:3:{s:8:"sphinxql";s:16:"select * from 
rt";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:5:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"gid";s:4:"3212";s:4:"mva1";s:5:"1,2,3";s:4:"mva2";s:5:"3,4,5";}i:1;a:5:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"gid";s:2:"33";s:4:"mva1";s:0:"";s:4:"mva2";s:1:"3";}}}}}sphinx-2.0.4-release/test/test_003/0000755000176700017710000000000011724063141016262 5ustar deogardeogarsphinx-2.0.4-release/test/test_003/test.xml0000644000176700017710000000402710744717627020006 0ustar deogardeogar prefix/infix indexing (part 3) indexer { mem_limit = 16M } searchd { } source srclj { type = mysql sql_query = SELECT id, subject, body, author FROM test_table } index lj { source = srclj path = /lj charset_type = utf-8 min_word_len = 3 min_prefix_len = 0 min_prefix_len = 1 min_prefix_len = 3 min_infix_len = 0 min_infix_len = 1 min_infix_len = 3 enable_star = 0 enable_star = 1 } admin *earc* up* dmin rep pda I I* for CREATE TABLE `test_table` ( `id` int(11) NOT NULL default '0', `document_id` int(5) NOT NULL default '0', `subject` varchar(255) NOT NULL default '', `body` varchar(255) NOT NULL default '', `author` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES (1,1,'Problem with enable_star searches','Having star searches is great! Just what we needed','Maurice Makaay'), (2,2,'Problem with enable_star searches',' But the thing is, that I cannot search for authors anymore','admin'), (3,3,'Problem with enable_star searches','I will try to repro it here and update you','shodan') sphinx-2.0.4-release/test/test_003/model.bin0000644000176700017710000007343711015107150020062 0ustar deogardeogara:18:{i:0;a:9:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"admin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"admin";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"earc";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*earc*";}i:2;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:3:"up*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dmin";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"dmin";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"rep";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"rep";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"pda";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:
"query";s:3:"pda";}i:6;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"I";}i:7;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"I*";}i:8;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"for";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"for";}}i:1;a:9:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"admin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"admin";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"earc";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*earc*";}i:2;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:3:"up*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dmin";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"dmin";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"rep";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"rep";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"pda";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"pda";}i:6;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"I";}i:7;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"I*";}i:8;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1
:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"for";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"for";}}i:2;a:9:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"admin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"admin";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"earc";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*earc*";}i:2;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:3:"up*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dmin";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"dmin";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"rep";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"rep";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"pda";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"pda";}i:6;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"I";}i:7;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:5:"query";s:2:"I*";}i:8;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"for";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"for";}}i:3;a:9:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"admin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"admin";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"statu
s";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"earc";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"5";}}s:5:"query";s:6:"*earc*";}i:2;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:3:"up*";}i:3;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dmin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"dmin";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"rep";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"rep";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"pda";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"pda";}i:6;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"I";}i:7;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"I*";}i:8;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"for";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"for";}}i:4;a:1:{i:0;s:6:"failed";}i:5;a:1:{i:0;s:6:"failed";}i:6;a:9:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"admin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"admin";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s
:5:"0.000";s:5:"words";a:1:{s:4:"earc";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"5";}}s:5:"query";s:6:"*earc*";}i:2;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:3:"up*";}i:3;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dmin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:4:"dmin";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"rep";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"rep";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"pda";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"pda";}i:6;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"I";}i:7;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"I*";}i:8;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"for";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"for";}}i:7;a:1:{i:0;s:6:"failed";}i:8;a:1:{i:0;s:6:"failed";}i:9;a:9:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"admin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"admin";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"earc";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*earc*";}i:2;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:3:"up*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"auth
or";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dmin";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"dmin";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"rep";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"rep";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"pda";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"pda";}i:6;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"I";}i:7;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:2:"I*";}i:8;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"for";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"for";}}i:10;a:9:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"admin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"admin";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"*earc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*earc*";}i:2;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"up*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"up*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dmin";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"dmin";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"rep";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"rep";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"
subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"pda";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"pda";}i:6;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"I";}i:7;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"i*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"I*";}i:8;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"for";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"for";}}i:11;a:9:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"admin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"admin";}i:1;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"*earc*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:6:"*earc*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:62:"Query word length is less than min prefix length. 
word: 'up*' ";s:6:"status";i:3;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"up*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"up*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dmin";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"dmin";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"rep";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"rep";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"pda";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"pda";}i:6;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"I";}i:7;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min prefix length. word: 'i*' ";s:6:"status";i:3;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"i*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"I*";}i:8;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"for";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"for";}}i:12;a:9:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"admin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"admin";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"*earc*";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"5";}}s:5:"query";s:6:"*earc*";}i:2;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"up*";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"up*";}i:3;a:10:{s:5:"erro
r";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dmin";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"dmin";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"rep";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"rep";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"pda";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"pda";}i:6;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"I";}i:7;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"i*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"I*";}i:8;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"for";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"for";}}i:13;a:1:{i:0;s:6:"failed";}i:14;a:1:{i:0;s:6:"failed";}i:15;a:9:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:5:"admin";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:5:"admin";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"*earc*";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"5";}}s:5:"query";s:6:"*earc*";}i:2;a:10:{s:5:"error";s:0:"";s:7:"warning";s:61:"Query word length is less than min infix length. 
word: 'up*' ";s:6:"status";i:3;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"up*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"up*";}i:3;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:4:"dmin";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:4:"dmin";}i:4;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"rep";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"rep";}i:5;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"pda";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:3:"pda";}i:6;a:9:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"query";s:1:"I";}i:7;a:10:{s:5:"error";s:0:"";s:7:"warning";s:60:"Query word length is less than min infix length. word: 'i*' ";s:6:"status";i:3;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:2:"i*";a:2:{s:4:"docs";s:1:"0";s:4:"hits";s:1:"0";}}s:5:"query";s:2:"I*";}i:8;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:7:"subject";i:1;s:4:"body";i:2;s:6:"author";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:3:"for";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:5:"query";s:3:"for";}}i:16;a:1:{i:0;s:6:"failed";}i:17;a:1:{i:0;s:6:"failed";}}sphinx-2.0.4-release/test/test_084/0000755000176700017710000000000011724063141016273 5ustar deogardeogarsphinx-2.0.4-release/test/test_084/test.xml0000644000176700017710000000172111323636205017777 0ustar deogardeogar merge vs phantom killer indexer { mem_limit = 16M } searchd { } source main { type = mysql sql_query = select 1 as id, 'phantom' as body, 2 as attr; sql_attr_uint = attr } source delta { type = mysql sql_query = SELECT * FROM test_table; sql_attr_uint = attr } index main { source = main path = /empty } index delta { source = delta path = /delta } CREATE TABLE test_table ( `id` int(11) NOT NULL default '0', `body` varchar(255) NOT NULL default '', `attr` int(11) NOT NULL default '0' ); DROP TABLE IF EXISTS test_table; INSERT INTO test_table VALUES ( 1, 'nophantom', 1 ), ( 2, 'phantom', 0 ) --merge main delta phantom sphinx-2.0.4-release/test/test_084/model.bin0000644000176700017710000000070111270362457020073 0ustar 
deogardeogara:1:{i:0;a:1:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:1:{s:4:"attr";i:1;}s:7:"matches";a:1:{i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:4:"attr";s:1:"0";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"phantom";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"phantom";}}}sphinx-2.0.4-release/test/clean.sh0000755000176700017710000000053311430744557016356 0ustar deogardeogar#!/bin/sh # must be run from tests dir if test ! -f ubertest.php; then exit 0; fi # clean subdirs for i in test_* do if test ! -f "$i/test.xml"; then continue; fi rm -f "$i/report.txt" rm -fr "$i/Conf/" done # clean files rm -f data/*.sp* rm -f data/*.mvp rm -f data/*.meta data/*.lock data/*.kill data/*.ram rm -f data/binlog.* rm -f *.log sphinx-2.0.4-release/test/test_140/0000755000176700017710000000000011724063141016264 5ustar deogardeogarsphinx-2.0.4-release/test/test_140/test.xml0000644000176700017710000000441511605620330017766 0ustar deogardeogar MVA and string via MySQL indexer { mem_limit = 16M } searchd { } source srcmain { type = mysql sql_query = SELECT * FROM test_table sql_attr_uint = idd sql_attr_string = str1 sql_attr_uint = tag sql_attr_multi = uint mva1 from field sql_attr_string = str2 sql_attr_multi = uint mva2 from field sql_attr_multi = bigint mva1 from field sql_attr_string = str2 sql_attr_multi = bigint mva2 from field } index main { source = srcmain path = /main140 charset_type = utf-8 } select * from main order by idd asc select * from main where match('main') order by idd asc select * from main where match('delta') order by idd asc select * from main where match('main | delta') order by idd asc CREATE TABLE `test_table` ( `document_id` int(11) NOT NULL default '0', `idd` int(11) NOT NULL default '0', `tag` int(11) NOT NULL default '0', `mva1` varchar(255) NOT NULL default '', `mva2` varchar(255) NOT NULL default '', `body` varchar(255) NOT NULL default '', `str1` varchar(255) NOT NULL default '', `str2` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 1, 1, 1, '', '1 2 3','main and delta', 'some attribute', 'more data are here' ), ( 2, 2, 0, '2 22 222', '', 'main and delta' , 'is it sunny', ''), ( 3, 3, 0, '3 33 333', '5 6 7','main and delta' , '', 'cool place' ), ( 4, 4, 1, '', '', 'delta', '', '' ), ( 5, 5, 1, '', '', 'delta', 'good stuff', 'step ahead' ), ( 6, 6, 0, '6', '', 'delta', 'reality real', 'reality augumented' ), ( 7, 7, 1, '7', '', 'main', 'how its going', 'well' ), ( 8, 8, 1, '8', '', 'main', 'its going...', '?!?!?!' 
), ( 9, 9, 0, '1 10 100', '9 0 1','delta', '', '' ), (10, 10, 0, '2 20 200', '', 'delta', '', 'a' ), (11, 11, 0, '', '', 'delta', 'b', '' ) sphinx-2.0.4-release/test/test_140/model.bin0000644000176700017710000003275411605620330020066 0ustar deogardeogara:2:{i:0;a:4:{i:0;a:3:{s:8:"sphinxql";s:35:"select * from main order by idd asc";s:10:"total_rows";i:11;s:4:"rows";a:11:{i:0;a:8:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"idd";s:1:"1";s:3:"tag";s:1:"1";s:4:"mva1";s:0:"";s:4:"mva2";s:5:"1,2,3";s:4:"str1";s:14:"some attribute";s:4:"str2";s:18:"more data are here";}i:1;a:8:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:3:"idd";s:1:"2";s:3:"tag";s:1:"0";s:4:"mva1";s:8:"2,22,222";s:4:"mva2";s:0:"";s:4:"str1";s:11:"is it sunny";s:4:"str2";s:0:"";}i:2;a:8:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"idd";s:1:"3";s:3:"tag";s:1:"0";s:4:"mva1";s:8:"3,33,333";s:4:"mva2";s:5:"5,6,7";s:4:"str1";s:0:"";s:4:"str2";s:10:"cool place";}i:3;a:8:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:3:"idd";s:1:"4";s:3:"tag";s:1:"1";s:4:"mva1";s:0:"";s:4:"mva2";s:0:"";s:4:"str1";s:0:"";s:4:"str2";s:0:"";}i:4;a:8:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:3:"idd";s:1:"5";s:3:"tag";s:1:"1";s:4:"mva1";s:0:"";s:4:"mva2";s:0:"";s:4:"str1";s:10:"good stuff";s:4:"str2";s:10:"step ahead";}i:5;a:8:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:3:"idd";s:1:"6";s:3:"tag";s:1:"0";s:4:"mva1";s:1:"6";s:4:"mva2";s:0:"";s:4:"str1";s:12:"reality real";s:4:"str2";s:18:"reality augumented";}i:6;a:8:{s:2:"id";s:1:"7";s:6:"weight";s:1:"1";s:3:"idd";s:1:"7";s:3:"tag";s:1:"1";s:4:"mva1";s:1:"7";s:4:"mva2";s:0:"";s:4:"str1";s:13:"how its going";s:4:"str2";s:4:"well";}i:7;a:8:{s:2:"id";s:1:"8";s:6:"weight";s:1:"1";s:3:"idd";s:1:"8";s:3:"tag";s:1:"1";s:4:"mva1";s:1:"8";s:4:"mva2";s:0:"";s:4:"str1";s:12:"its going...";s:4:"str2";s:6:"?!?!?!";}i:8;a:8:{s:2:"id";s:1:"9";s:6:"weight";s:1:"1";s:3:"idd";s:1:"9";s:3:"tag";s:1:"0";s:4:"mva1";s:8:"1,10,100";s:4:"mva2";s:5:"0,1,9";s:4:"str1";s:0:"";s:4:"str2";s:0:"";}i:9;a:8:{s:2:"id";s:2:"10";s:6:"weight";s:1:"1";s:3:"idd";s:2:"10";s:3:"tag";s:1:"0";s:4:"mva1";s:8:"2,20,200";s:4:"mva2";s:0:"";s:4:"str1";s:0:"";s:4:"str2";s:1:"a";}i:10;a:8:{s:2:"id";s:2:"11";s:6:"weight";s:1:"1";s:3:"idd";s:2:"11";s:3:"tag";s:1:"0";s:4:"mva1";s:0:"";s:4:"mva2";s:0:"";s:4:"str1";s:1:"b";s:4:"str2";s:0:"";}}}i:1;a:3:{s:8:"sphinxql";s:55:"select * from main where match('main') order by idd asc";s:10:"total_rows";i:5;s:4:"rows";a:5:{i:0;a:8:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1530";s:3:"idd";s:1:"1";s:3:"tag";s:1:"1";s:4:"mva1";s:0:"";s:4:"mva2";s:5:"1,2,3";s:4:"str1";s:14:"some attribute";s:4:"str2";s:18:"more data are here";}i:1;a:8:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1530";s:3:"idd";s:1:"2";s:3:"tag";s:1:"0";s:4:"mva1";s:8:"2,22,222";s:4:"mva2";s:0:"";s:4:"str1";s:11:"is it sunny";s:4:"str2";s:0:"";}i:2;a:8:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1530";s:3:"idd";s:1:"3";s:3:"tag";s:1:"0";s:4:"mva1";s:8:"3,33,333";s:4:"mva2";s:5:"5,6,7";s:4:"str1";s:0:"";s:4:"str2";s:10:"cool place";}i:3;a:8:{s:2:"id";s:1:"7";s:6:"weight";s:4:"1530";s:3:"idd";s:1:"7";s:3:"tag";s:1:"1";s:4:"mva1";s:1:"7";s:4:"mva2";s:0:"";s:4:"str1";s:13:"how its going";s:4:"str2";s:4:"well";}i:4;a:8:{s:2:"id";s:1:"8";s:6:"weight";s:4:"1530";s:3:"idd";s:1:"8";s:3:"tag";s:1:"1";s:4:"mva1";s:1:"8";s:4:"mva2";s:0:"";s:4:"str1";s:12:"its going...";s:4:"str2";s:6:"?!?!?!";}}}i:2;a:3:{s:8:"sphinxql";s:56:"select * from main where match('delta') order by idd 
asc";s:10:"total_rows";i:9;s:4:"rows";a:9:{i:0;a:8:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1399";s:3:"idd";s:1:"1";s:3:"tag";s:1:"1";s:4:"mva1";s:0:"";s:4:"mva2";s:5:"1,2,3";s:4:"str1";s:14:"some attribute";s:4:"str2";s:18:"more data are here";}i:1;a:8:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1399";s:3:"idd";s:1:"2";s:3:"tag";s:1:"0";s:4:"mva1";s:8:"2,22,222";s:4:"mva2";s:0:"";s:4:"str1";s:11:"is it sunny";s:4:"str2";s:0:"";}i:2;a:8:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1399";s:3:"idd";s:1:"3";s:3:"tag";s:1:"0";s:4:"mva1";s:8:"3,33,333";s:4:"mva2";s:5:"5,6,7";s:4:"str1";s:0:"";s:4:"str2";s:10:"cool place";}i:3;a:8:{s:2:"id";s:1:"4";s:6:"weight";s:4:"1399";s:3:"idd";s:1:"4";s:3:"tag";s:1:"1";s:4:"mva1";s:0:"";s:4:"mva2";s:0:"";s:4:"str1";s:0:"";s:4:"str2";s:0:"";}i:4;a:8:{s:2:"id";s:1:"5";s:6:"weight";s:4:"1399";s:3:"idd";s:1:"5";s:3:"tag";s:1:"1";s:4:"mva1";s:0:"";s:4:"mva2";s:0:"";s:4:"str1";s:10:"good stuff";s:4:"str2";s:10:"step ahead";}i:5;a:8:{s:2:"id";s:1:"6";s:6:"weight";s:4:"1399";s:3:"idd";s:1:"6";s:3:"tag";s:1:"0";s:4:"mva1";s:1:"6";s:4:"mva2";s:0:"";s:4:"str1";s:12:"reality real";s:4:"str2";s:18:"reality augumented";}i:6;a:8:{s:2:"id";s:1:"9";s:6:"weight";s:4:"1399";s:3:"idd";s:1:"9";s:3:"tag";s:1:"0";s:4:"mva1";s:8:"1,10,100";s:4:"mva2";s:5:"0,1,9";s:4:"str1";s:0:"";s:4:"str2";s:0:"";}i:7;a:8:{s:2:"id";s:2:"10";s:6:"weight";s:4:"1399";s:3:"idd";s:2:"10";s:3:"tag";s:1:"0";s:4:"mva1";s:8:"2,20,200";s:4:"mva2";s:0:"";s:4:"str1";s:0:"";s:4:"str2";s:1:"a";}i:8;a:8:{s:2:"id";s:2:"11";s:6:"weight";s:4:"1399";s:3:"idd";s:2:"11";s:3:"tag";s:1:"0";s:4:"mva1";s:0:"";s:4:"mva2";s:0:"";s:4:"str1";s:1:"b";s:4:"str2";s:0:"";}}}i:3;a:3:{s:8:"sphinxql";s:63:"select * from main where match('main | delta') order by idd asc";s:10:"total_rows";i:11;s:4:"rows";a:11:{i:0;a:8:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1465";s:3:"idd";s:1:"1";s:3:"tag";s:1:"1";s:4:"mva1";s:0:"";s:4:"mva2";s:5:"1,2,3";s:4:"str1";s:14:"some attribute";s:4:"str2";s:18:"more data are here";}i:1;a:8:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1465";s:3:"idd";s:1:"2";s:3:"tag";s:1:"0";s:4:"mva1";s:8:"2,22,222";s:4:"mva2";s:0:"";s:4:"str1";s:11:"is it sunny";s:4:"str2";s:0:"";}i:2;a:8:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1465";s:3:"idd";s:1:"3";s:3:"tag";s:1:"0";s:4:"mva1";s:8:"3,33,333";s:4:"mva2";s:5:"5,6,7";s:4:"str1";s:0:"";s:4:"str2";s:10:"cool place";}i:3;a:8:{s:2:"id";s:1:"4";s:6:"weight";s:4:"1449";s:3:"idd";s:1:"4";s:3:"tag";s:1:"1";s:4:"mva1";s:0:"";s:4:"mva2";s:0:"";s:4:"str1";s:0:"";s:4:"str2";s:0:"";}i:4;a:8:{s:2:"id";s:1:"5";s:6:"weight";s:4:"1449";s:3:"idd";s:1:"5";s:3:"tag";s:1:"1";s:4:"mva1";s:0:"";s:4:"mva2";s:0:"";s:4:"str1";s:10:"good stuff";s:4:"str2";s:10:"step ahead";}i:5;a:8:{s:2:"id";s:1:"6";s:6:"weight";s:4:"1449";s:3:"idd";s:1:"6";s:3:"tag";s:1:"0";s:4:"mva1";s:1:"6";s:4:"mva2";s:0:"";s:4:"str1";s:12:"reality real";s:4:"str2";s:18:"reality augumented";}i:6;a:8:{s:2:"id";s:1:"7";s:6:"weight";s:4:"1515";s:3:"idd";s:1:"7";s:3:"tag";s:1:"1";s:4:"mva1";s:1:"7";s:4:"mva2";s:0:"";s:4:"str1";s:13:"how its going";s:4:"str2";s:4:"well";}i:7;a:8:{s:2:"id";s:1:"8";s:6:"weight";s:4:"1515";s:3:"idd";s:1:"8";s:3:"tag";s:1:"1";s:4:"mva1";s:1:"8";s:4:"mva2";s:0:"";s:4:"str1";s:12:"its 
going...";s:4:"str2";s:6:"?!?!?!";}i:8;a:8:{s:2:"id";s:1:"9";s:6:"weight";s:4:"1449";s:3:"idd";s:1:"9";s:3:"tag";s:1:"0";s:4:"mva1";s:8:"1,10,100";s:4:"mva2";s:5:"0,1,9";s:4:"str1";s:0:"";s:4:"str2";s:0:"";}i:9;a:8:{s:2:"id";s:2:"10";s:6:"weight";s:4:"1449";s:3:"idd";s:2:"10";s:3:"tag";s:1:"0";s:4:"mva1";s:8:"2,20,200";s:4:"mva2";s:0:"";s:4:"str1";s:0:"";s:4:"str2";s:1:"a";}i:10;a:8:{s:2:"id";s:2:"11";s:6:"weight";s:4:"1449";s:3:"idd";s:2:"11";s:3:"tag";s:1:"0";s:4:"mva1";s:0:"";s:4:"mva2";s:0:"";s:4:"str1";s:1:"b";s:4:"str2";s:0:"";}}}}i:1;a:4:{i:0;a:3:{s:8:"sphinxql";s:35:"select * from main order by idd asc";s:10:"total_rows";i:11;s:4:"rows";a:11:{i:0;a:8:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"idd";s:1:"1";s:3:"tag";s:1:"1";s:4:"mva1";s:0:"";s:4:"mva2";s:5:"1,2,3";s:4:"str1";s:14:"some attribute";s:4:"str2";s:18:"more data are here";}i:1;a:8:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:3:"idd";s:1:"2";s:3:"tag";s:1:"0";s:4:"mva1";s:8:"2,22,222";s:4:"mva2";s:0:"";s:4:"str1";s:11:"is it sunny";s:4:"str2";s:0:"";}i:2;a:8:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"idd";s:1:"3";s:3:"tag";s:1:"0";s:4:"mva1";s:8:"3,33,333";s:4:"mva2";s:5:"5,6,7";s:4:"str1";s:0:"";s:4:"str2";s:10:"cool place";}i:3;a:8:{s:2:"id";s:1:"4";s:6:"weight";s:1:"1";s:3:"idd";s:1:"4";s:3:"tag";s:1:"1";s:4:"mva1";s:0:"";s:4:"mva2";s:0:"";s:4:"str1";s:0:"";s:4:"str2";s:0:"";}i:4;a:8:{s:2:"id";s:1:"5";s:6:"weight";s:1:"1";s:3:"idd";s:1:"5";s:3:"tag";s:1:"1";s:4:"mva1";s:0:"";s:4:"mva2";s:0:"";s:4:"str1";s:10:"good stuff";s:4:"str2";s:10:"step ahead";}i:5;a:8:{s:2:"id";s:1:"6";s:6:"weight";s:1:"1";s:3:"idd";s:1:"6";s:3:"tag";s:1:"0";s:4:"mva1";s:1:"6";s:4:"mva2";s:0:"";s:4:"str1";s:12:"reality real";s:4:"str2";s:18:"reality augumented";}i:6;a:8:{s:2:"id";s:1:"7";s:6:"weight";s:1:"1";s:3:"idd";s:1:"7";s:3:"tag";s:1:"1";s:4:"mva1";s:1:"7";s:4:"mva2";s:0:"";s:4:"str1";s:13:"how its going";s:4:"str2";s:4:"well";}i:7;a:8:{s:2:"id";s:1:"8";s:6:"weight";s:1:"1";s:3:"idd";s:1:"8";s:3:"tag";s:1:"1";s:4:"mva1";s:1:"8";s:4:"mva2";s:0:"";s:4:"str1";s:12:"its going...";s:4:"str2";s:6:"?!?!?!";}i:8;a:8:{s:2:"id";s:1:"9";s:6:"weight";s:1:"1";s:3:"idd";s:1:"9";s:3:"tag";s:1:"0";s:4:"mva1";s:8:"1,10,100";s:4:"mva2";s:5:"0,1,9";s:4:"str1";s:0:"";s:4:"str2";s:0:"";}i:9;a:8:{s:2:"id";s:2:"10";s:6:"weight";s:1:"1";s:3:"idd";s:2:"10";s:3:"tag";s:1:"0";s:4:"mva1";s:8:"2,20,200";s:4:"mva2";s:0:"";s:4:"str1";s:0:"";s:4:"str2";s:1:"a";}i:10;a:8:{s:2:"id";s:2:"11";s:6:"weight";s:1:"1";s:3:"idd";s:2:"11";s:3:"tag";s:1:"0";s:4:"mva1";s:0:"";s:4:"mva2";s:0:"";s:4:"str1";s:1:"b";s:4:"str2";s:0:"";}}}i:1;a:3:{s:8:"sphinxql";s:55:"select * from main where match('main') order by idd asc";s:10:"total_rows";i:5;s:4:"rows";a:5:{i:0;a:8:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1530";s:3:"idd";s:1:"1";s:3:"tag";s:1:"1";s:4:"mva1";s:0:"";s:4:"mva2";s:5:"1,2,3";s:4:"str1";s:14:"some attribute";s:4:"str2";s:18:"more data are here";}i:1;a:8:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1530";s:3:"idd";s:1:"2";s:3:"tag";s:1:"0";s:4:"mva1";s:8:"2,22,222";s:4:"mva2";s:0:"";s:4:"str1";s:11:"is it sunny";s:4:"str2";s:0:"";}i:2;a:8:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1530";s:3:"idd";s:1:"3";s:3:"tag";s:1:"0";s:4:"mva1";s:8:"3,33,333";s:4:"mva2";s:5:"5,6,7";s:4:"str1";s:0:"";s:4:"str2";s:10:"cool place";}i:3;a:8:{s:2:"id";s:1:"7";s:6:"weight";s:4:"1530";s:3:"idd";s:1:"7";s:3:"tag";s:1:"1";s:4:"mva1";s:1:"7";s:4:"mva2";s:0:"";s:4:"str1";s:13:"how its 
going";s:4:"str2";s:4:"well";}i:4;a:8:{s:2:"id";s:1:"8";s:6:"weight";s:4:"1530";s:3:"idd";s:1:"8";s:3:"tag";s:1:"1";s:4:"mva1";s:1:"8";s:4:"mva2";s:0:"";s:4:"str1";s:12:"its going...";s:4:"str2";s:6:"?!?!?!";}}}i:2;a:3:{s:8:"sphinxql";s:56:"select * from main where match('delta') order by idd asc";s:10:"total_rows";i:9;s:4:"rows";a:9:{i:0;a:8:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1399";s:3:"idd";s:1:"1";s:3:"tag";s:1:"1";s:4:"mva1";s:0:"";s:4:"mva2";s:5:"1,2,3";s:4:"str1";s:14:"some attribute";s:4:"str2";s:18:"more data are here";}i:1;a:8:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1399";s:3:"idd";s:1:"2";s:3:"tag";s:1:"0";s:4:"mva1";s:8:"2,22,222";s:4:"mva2";s:0:"";s:4:"str1";s:11:"is it sunny";s:4:"str2";s:0:"";}i:2;a:8:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1399";s:3:"idd";s:1:"3";s:3:"tag";s:1:"0";s:4:"mva1";s:8:"3,33,333";s:4:"mva2";s:5:"5,6,7";s:4:"str1";s:0:"";s:4:"str2";s:10:"cool place";}i:3;a:8:{s:2:"id";s:1:"4";s:6:"weight";s:4:"1399";s:3:"idd";s:1:"4";s:3:"tag";s:1:"1";s:4:"mva1";s:0:"";s:4:"mva2";s:0:"";s:4:"str1";s:0:"";s:4:"str2";s:0:"";}i:4;a:8:{s:2:"id";s:1:"5";s:6:"weight";s:4:"1399";s:3:"idd";s:1:"5";s:3:"tag";s:1:"1";s:4:"mva1";s:0:"";s:4:"mva2";s:0:"";s:4:"str1";s:10:"good stuff";s:4:"str2";s:10:"step ahead";}i:5;a:8:{s:2:"id";s:1:"6";s:6:"weight";s:4:"1399";s:3:"idd";s:1:"6";s:3:"tag";s:1:"0";s:4:"mva1";s:1:"6";s:4:"mva2";s:0:"";s:4:"str1";s:12:"reality real";s:4:"str2";s:18:"reality augumented";}i:6;a:8:{s:2:"id";s:1:"9";s:6:"weight";s:4:"1399";s:3:"idd";s:1:"9";s:3:"tag";s:1:"0";s:4:"mva1";s:8:"1,10,100";s:4:"mva2";s:5:"0,1,9";s:4:"str1";s:0:"";s:4:"str2";s:0:"";}i:7;a:8:{s:2:"id";s:2:"10";s:6:"weight";s:4:"1399";s:3:"idd";s:2:"10";s:3:"tag";s:1:"0";s:4:"mva1";s:8:"2,20,200";s:4:"mva2";s:0:"";s:4:"str1";s:0:"";s:4:"str2";s:1:"a";}i:8;a:8:{s:2:"id";s:2:"11";s:6:"weight";s:4:"1399";s:3:"idd";s:2:"11";s:3:"tag";s:1:"0";s:4:"mva1";s:0:"";s:4:"mva2";s:0:"";s:4:"str1";s:1:"b";s:4:"str2";s:0:"";}}}i:3;a:3:{s:8:"sphinxql";s:63:"select * from main where match('main | delta') order by idd asc";s:10:"total_rows";i:11;s:4:"rows";a:11:{i:0;a:8:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1465";s:3:"idd";s:1:"1";s:3:"tag";s:1:"1";s:4:"mva1";s:0:"";s:4:"mva2";s:5:"1,2,3";s:4:"str1";s:14:"some attribute";s:4:"str2";s:18:"more data are here";}i:1;a:8:{s:2:"id";s:1:"2";s:6:"weight";s:4:"1465";s:3:"idd";s:1:"2";s:3:"tag";s:1:"0";s:4:"mva1";s:8:"2,22,222";s:4:"mva2";s:0:"";s:4:"str1";s:11:"is it sunny";s:4:"str2";s:0:"";}i:2;a:8:{s:2:"id";s:1:"3";s:6:"weight";s:4:"1465";s:3:"idd";s:1:"3";s:3:"tag";s:1:"0";s:4:"mva1";s:8:"3,33,333";s:4:"mva2";s:5:"5,6,7";s:4:"str1";s:0:"";s:4:"str2";s:10:"cool place";}i:3;a:8:{s:2:"id";s:1:"4";s:6:"weight";s:4:"1449";s:3:"idd";s:1:"4";s:3:"tag";s:1:"1";s:4:"mva1";s:0:"";s:4:"mva2";s:0:"";s:4:"str1";s:0:"";s:4:"str2";s:0:"";}i:4;a:8:{s:2:"id";s:1:"5";s:6:"weight";s:4:"1449";s:3:"idd";s:1:"5";s:3:"tag";s:1:"1";s:4:"mva1";s:0:"";s:4:"mva2";s:0:"";s:4:"str1";s:10:"good stuff";s:4:"str2";s:10:"step ahead";}i:5;a:8:{s:2:"id";s:1:"6";s:6:"weight";s:4:"1449";s:3:"idd";s:1:"6";s:3:"tag";s:1:"0";s:4:"mva1";s:1:"6";s:4:"mva2";s:0:"";s:4:"str1";s:12:"reality real";s:4:"str2";s:18:"reality augumented";}i:6;a:8:{s:2:"id";s:1:"7";s:6:"weight";s:4:"1515";s:3:"idd";s:1:"7";s:3:"tag";s:1:"1";s:4:"mva1";s:1:"7";s:4:"mva2";s:0:"";s:4:"str1";s:13:"how its going";s:4:"str2";s:4:"well";}i:7;a:8:{s:2:"id";s:1:"8";s:6:"weight";s:4:"1515";s:3:"idd";s:1:"8";s:3:"tag";s:1:"1";s:4:"mva1";s:1:"8";s:4:"mva2";s:0:"";s:4:"str1";s:12:"its 
going...";s:4:"str2";s:6:"?!?!?!";}i:8;a:8:{s:2:"id";s:1:"9";s:6:"weight";s:4:"1449";s:3:"idd";s:1:"9";s:3:"tag";s:1:"0";s:4:"mva1";s:8:"1,10,100";s:4:"mva2";s:5:"0,1,9";s:4:"str1";s:0:"";s:4:"str2";s:0:"";}i:9;a:8:{s:2:"id";s:2:"10";s:6:"weight";s:4:"1449";s:3:"idd";s:2:"10";s:3:"tag";s:1:"0";s:4:"mva1";s:8:"2,20,200";s:4:"mva2";s:0:"";s:4:"str1";s:0:"";s:4:"str2";s:1:"a";}i:10;a:8:{s:2:"id";s:2:"11";s:6:"weight";s:4:"1449";s:3:"idd";s:2:"11";s:3:"tag";s:1:"0";s:4:"mva1";s:0:"";s:4:"mva2";s:0:"";s:4:"str1";s:1:"b";s:4:"str2";s:0:"";}}}}}sphinx-2.0.4-release/test/test_127/0000755000176700017710000000000011724063141016271 5ustar deogardeogarsphinx-2.0.4-release/test/test_127/test.xml0000644000176700017710000001410111421075337017773 0ustar deogardeogar RT: repeated delete indexer { mem_limit = 16M } searchd { workers = threads binlog_path = } index rt { type = rt path = data/rt rt_attr_uint = id1 rt_field = title } insert into rt (id, id1, title) values (1, 1, 'test text') delete from rt where id=1 delete from rt where id=1 insert into rt (id, id1, title) values (2, 2, 'test text') delete from rt where id=2 delete from rt where id=2 set autocommit=0 insert into rt (id, id1, title) values (10, 10, 'test text all') insert into rt (id, id1, title) values (110, 110, 'test text all') commit insert into rt (id, id1, title) values (11, 11, 'test text all') insert into rt (id, id1, title) values (111, 111, 'test text all') commit insert into rt (id, id1, title) values (12, 12, 'test text all') insert into rt (id, id1, title) values (112, 112, 'test text all') commit insert into rt (id, id1, title) values (13, 13, 'test text all') insert into rt (id, id1, title) values (113, 113, 'test text all') commit insert into rt (id, id1, title) values (14, 14, 'test text all') insert into rt (id, id1, title) values (114, 114, 'test text all') commit insert into rt (id, id1, title) values (15, 15, 'test text all') insert into rt (id, id1, title) values (115, 115, 'test text all') commit insert into rt (id, id1, title) values (16, 16, 'test text all') insert into rt (id, id1, title) values (116, 116, 'test text all') commit insert into rt (id, id1, title) values (17, 17, 'test text all') insert into rt (id, id1, title) values (117, 117, 'test text all') commit insert into rt (id, id1, title) values (18, 18, 'test text all') insert into rt (id, id1, title) values (118, 118, 'test text all') commit insert into rt (id, id1, title) values (19, 19, 'test text all') insert into rt (id, id1, title) values (119, 119, 'test text all') commit insert into rt (id, id1, title) values (20, 20, 'test text all') insert into rt (id, id1, title) values (120, 120, 'test text all') commit insert into rt (id, id1, title) values (21, 21, 'test text all') insert into rt (id, id1, title) values (121, 121, 'test text all') commit insert into rt (id, id1, title) values (22, 22, 'test text all') insert into rt (id, id1, title) values (122, 122, 'test text all') commit insert into rt (id, id1, title) values (23, 23, 'test text all') insert into rt (id, id1, title) values (123, 123, 'test text all') commit insert into rt (id, id1, title) values (24, 24, 'test text all') insert into rt (id, id1, title) values (124, 124, 'test text all') commit insert into rt (id, id1, title) values (25, 25, 'test text all') insert into rt (id, id1, title) values (125, 125, 'test text all') commit insert into rt (id, id1, title) values (26, 26, 'test text all') insert into rt (id, id1, title) values (126, 126, 'test text all') commit insert into rt (id, id1, 
title) values (27, 27, 'test text all') insert into rt (id, id1, title) values (127, 127, 'test text all') commit insert into rt (id, id1, title) values (28, 28, 'test text all') insert into rt (id, id1, title) values (128, 128, 'test text all') commit insert into rt (id, id1, title) values (29, 29, 'test text all') insert into rt (id, id1, title) values (129, 129, 'test text all') commit insert into rt (id, id1, title) values (30, 30, 'test text all') insert into rt (id, id1, title) values (130, 130, 'test text all') commit insert into rt (id, id1, title) values (31, 31, 'test text all') insert into rt (id, id1, title) values (131, 131, 'test text all') commit insert into rt (id, id1, title) values (32, 32, 'test text all') insert into rt (id, id1, title) values (132, 132, 'test text all') commit insert into rt (id, id1, title) values (33, 33, 'test text all') insert into rt (id, id1, title) values (133, 133, 'test text all') commit insert into rt (id, id1, title) values (34, 34, 'test text all') insert into rt (id, id1, title) values (134, 134, 'test text all') commit insert into rt (id, id1, title) values (35, 35, 'test text all') insert into rt (id, id1, title) values (135, 135, 'test text all') commit insert into rt (id, id1, title) values (36, 36, 'test text all') insert into rt (id, id1, title) values (136, 136, 'test text all') commit select * from rt sphinx-2.0.4-release/test/test_127/model.bin0000644000176700017710000002460111455516446020102 0ustar deogardeogara:1:{i:0;a:89:{i:0;a:2:{s:8:"sphinxql";s:58:"insert into rt (id, id1, title) values (1, 1, 'test text')";s:14:"total_affected";i:1;}i:1;a:2:{s:8:"sphinxql";s:25:"delete from rt where id=1";s:14:"total_affected";i:0;}i:2;a:2:{s:8:"sphinxql";s:25:"delete from rt where id=1";s:14:"total_affected";i:0;}i:3;a:2:{s:8:"sphinxql";s:58:"insert into rt (id, id1, title) values (2, 2, 'test text')";s:14:"total_affected";i:1;}i:4;a:2:{s:8:"sphinxql";s:25:"delete from rt where id=2";s:14:"total_affected";i:0;}i:5;a:2:{s:8:"sphinxql";s:25:"delete from rt where id=2";s:14:"total_affected";i:0;}i:6;a:2:{s:8:"sphinxql";s:16:"set autocommit=0";s:14:"total_affected";i:0;}i:7;a:2:{s:8:"sphinxql";s:64:"insert into rt (id, id1, title) values (10, 10, 'test text all')";s:14:"total_affected";i:1;}i:8;a:2:{s:8:"sphinxql";s:66:"insert into rt (id, id1, title) values (110, 110, 'test text all')";s:14:"total_affected";i:1;}i:9;a:2:{s:8:"sphinxql";s:6:"commit";s:14:"total_affected";i:0;}i:10;a:2:{s:8:"sphinxql";s:64:"insert into rt (id, id1, title) values (11, 11, 'test text all')";s:14:"total_affected";i:1;}i:11;a:2:{s:8:"sphinxql";s:66:"insert into rt (id, id1, title) values (111, 111, 'test text all')";s:14:"total_affected";i:1;}i:12;a:2:{s:8:"sphinxql";s:6:"commit";s:14:"total_affected";i:0;}i:13;a:2:{s:8:"sphinxql";s:64:"insert into rt (id, id1, title) values (12, 12, 'test text all')";s:14:"total_affected";i:1;}i:14;a:2:{s:8:"sphinxql";s:66:"insert into rt (id, id1, title) values (112, 112, 'test text all')";s:14:"total_affected";i:1;}i:15;a:2:{s:8:"sphinxql";s:6:"commit";s:14:"total_affected";i:0;}i:16;a:2:{s:8:"sphinxql";s:64:"insert into rt (id, id1, title) values (13, 13, 'test text all')";s:14:"total_affected";i:1;}i:17;a:2:{s:8:"sphinxql";s:66:"insert into rt (id, id1, title) values (113, 113, 'test text all')";s:14:"total_affected";i:1;}i:18;a:2:{s:8:"sphinxql";s:6:"commit";s:14:"total_affected";i:0;}i:19;a:2:{s:8:"sphinxql";s:64:"insert into rt (id, id1, title) values (14, 14, 'test text 
all')";s:14:"total_affected";i:1;}i:20;a:2:{s:8:"sphinxql";s:66:"insert into rt (id, id1, title) values (114, 114, 'test text all')";s:14:"total_affected";i:1;}i:21;a:2:{s:8:"sphinxql";s:6:"commit";s:14:"total_affected";i:0;}i:22;a:2:{s:8:"sphinxql";s:64:"insert into rt (id, id1, title) values (15, 15, 'test text all')";s:14:"total_affected";i:1;}i:23;a:2:{s:8:"sphinxql";s:66:"insert into rt (id, id1, title) values (115, 115, 'test text all')";s:14:"total_affected";i:1;}i:24;a:2:{s:8:"sphinxql";s:6:"commit";s:14:"total_affected";i:0;}i:25;a:2:{s:8:"sphinxql";s:64:"insert into rt (id, id1, title) values (16, 16, 'test text all')";s:14:"total_affected";i:1;}i:26;a:2:{s:8:"sphinxql";s:66:"insert into rt (id, id1, title) values (116, 116, 'test text all')";s:14:"total_affected";i:1;}i:27;a:2:{s:8:"sphinxql";s:6:"commit";s:14:"total_affected";i:0;}i:28;a:2:{s:8:"sphinxql";s:64:"insert into rt (id, id1, title) values (17, 17, 'test text all')";s:14:"total_affected";i:1;}i:29;a:2:{s:8:"sphinxql";s:66:"insert into rt (id, id1, title) values (117, 117, 'test text all')";s:14:"total_affected";i:1;}i:30;a:2:{s:8:"sphinxql";s:6:"commit";s:14:"total_affected";i:0;}i:31;a:2:{s:8:"sphinxql";s:64:"insert into rt (id, id1, title) values (18, 18, 'test text all')";s:14:"total_affected";i:1;}i:32;a:2:{s:8:"sphinxql";s:66:"insert into rt (id, id1, title) values (118, 118, 'test text all')";s:14:"total_affected";i:1;}i:33;a:2:{s:8:"sphinxql";s:6:"commit";s:14:"total_affected";i:0;}i:34;a:2:{s:8:"sphinxql";s:64:"insert into rt (id, id1, title) values (19, 19, 'test text all')";s:14:"total_affected";i:1;}i:35;a:2:{s:8:"sphinxql";s:66:"insert into rt (id, id1, title) values (119, 119, 'test text all')";s:14:"total_affected";i:1;}i:36;a:2:{s:8:"sphinxql";s:6:"commit";s:14:"total_affected";i:0;}i:37;a:2:{s:8:"sphinxql";s:64:"insert into rt (id, id1, title) values (20, 20, 'test text all')";s:14:"total_affected";i:1;}i:38;a:2:{s:8:"sphinxql";s:66:"insert into rt (id, id1, title) values (120, 120, 'test text all')";s:14:"total_affected";i:1;}i:39;a:2:{s:8:"sphinxql";s:6:"commit";s:14:"total_affected";i:0;}i:40;a:2:{s:8:"sphinxql";s:64:"insert into rt (id, id1, title) values (21, 21, 'test text all')";s:14:"total_affected";i:1;}i:41;a:2:{s:8:"sphinxql";s:66:"insert into rt (id, id1, title) values (121, 121, 'test text all')";s:14:"total_affected";i:1;}i:42;a:2:{s:8:"sphinxql";s:6:"commit";s:14:"total_affected";i:0;}i:43;a:2:{s:8:"sphinxql";s:64:"insert into rt (id, id1, title) values (22, 22, 'test text all')";s:14:"total_affected";i:1;}i:44;a:2:{s:8:"sphinxql";s:66:"insert into rt (id, id1, title) values (122, 122, 'test text all')";s:14:"total_affected";i:1;}i:45;a:2:{s:8:"sphinxql";s:6:"commit";s:14:"total_affected";i:0;}i:46;a:2:{s:8:"sphinxql";s:64:"insert into rt (id, id1, title) values (23, 23, 'test text all')";s:14:"total_affected";i:1;}i:47;a:2:{s:8:"sphinxql";s:66:"insert into rt (id, id1, title) values (123, 123, 'test text all')";s:14:"total_affected";i:1;}i:48;a:2:{s:8:"sphinxql";s:6:"commit";s:14:"total_affected";i:0;}i:49;a:2:{s:8:"sphinxql";s:64:"insert into rt (id, id1, title) values (24, 24, 'test text all')";s:14:"total_affected";i:1;}i:50;a:2:{s:8:"sphinxql";s:66:"insert into rt (id, id1, title) values (124, 124, 'test text all')";s:14:"total_affected";i:1;}i:51;a:2:{s:8:"sphinxql";s:6:"commit";s:14:"total_affected";i:0;}i:52;a:2:{s:8:"sphinxql";s:64:"insert into rt (id, id1, title) values (25, 25, 'test text all')";s:14:"total_affected";i:1;}i:53;a:2:{s:8:"sphinxql";s:66:"insert into rt (id, id1, 
title) values (125, 125, 'test text all')";s:14:"total_affected";i:1;}i:54;a:2:{s:8:"sphinxql";s:6:"commit";s:14:"total_affected";i:0;}i:55;a:2:{s:8:"sphinxql";s:64:"insert into rt (id, id1, title) values (26, 26, 'test text all')";s:14:"total_affected";i:1;}i:56;a:2:{s:8:"sphinxql";s:66:"insert into rt (id, id1, title) values (126, 126, 'test text all')";s:14:"total_affected";i:1;}i:57;a:2:{s:8:"sphinxql";s:6:"commit";s:14:"total_affected";i:0;}i:58;a:2:{s:8:"sphinxql";s:64:"insert into rt (id, id1, title) values (27, 27, 'test text all')";s:14:"total_affected";i:1;}i:59;a:2:{s:8:"sphinxql";s:66:"insert into rt (id, id1, title) values (127, 127, 'test text all')";s:14:"total_affected";i:1;}i:60;a:2:{s:8:"sphinxql";s:6:"commit";s:14:"total_affected";i:0;}i:61;a:2:{s:8:"sphinxql";s:64:"insert into rt (id, id1, title) values (28, 28, 'test text all')";s:14:"total_affected";i:1;}i:62;a:2:{s:8:"sphinxql";s:66:"insert into rt (id, id1, title) values (128, 128, 'test text all')";s:14:"total_affected";i:1;}i:63;a:2:{s:8:"sphinxql";s:6:"commit";s:14:"total_affected";i:0;}i:64;a:2:{s:8:"sphinxql";s:64:"insert into rt (id, id1, title) values (29, 29, 'test text all')";s:14:"total_affected";i:1;}i:65;a:2:{s:8:"sphinxql";s:66:"insert into rt (id, id1, title) values (129, 129, 'test text all')";s:14:"total_affected";i:1;}i:66;a:2:{s:8:"sphinxql";s:6:"commit";s:14:"total_affected";i:0;}i:67;a:2:{s:8:"sphinxql";s:64:"insert into rt (id, id1, title) values (30, 30, 'test text all')";s:14:"total_affected";i:1;}i:68;a:2:{s:8:"sphinxql";s:66:"insert into rt (id, id1, title) values (130, 130, 'test text all')";s:14:"total_affected";i:1;}i:69;a:2:{s:8:"sphinxql";s:6:"commit";s:14:"total_affected";i:0;}i:70;a:2:{s:8:"sphinxql";s:64:"insert into rt (id, id1, title) values (31, 31, 'test text all')";s:14:"total_affected";i:1;}i:71;a:2:{s:8:"sphinxql";s:66:"insert into rt (id, id1, title) values (131, 131, 'test text all')";s:14:"total_affected";i:1;}i:72;a:2:{s:8:"sphinxql";s:6:"commit";s:14:"total_affected";i:0;}i:73;a:2:{s:8:"sphinxql";s:64:"insert into rt (id, id1, title) values (32, 32, 'test text all')";s:14:"total_affected";i:1;}i:74;a:2:{s:8:"sphinxql";s:66:"insert into rt (id, id1, title) values (132, 132, 'test text all')";s:14:"total_affected";i:1;}i:75;a:2:{s:8:"sphinxql";s:6:"commit";s:14:"total_affected";i:0;}i:76;a:2:{s:8:"sphinxql";s:64:"insert into rt (id, id1, title) values (33, 33, 'test text all')";s:14:"total_affected";i:1;}i:77;a:2:{s:8:"sphinxql";s:66:"insert into rt (id, id1, title) values (133, 133, 'test text all')";s:14:"total_affected";i:1;}i:78;a:2:{s:8:"sphinxql";s:6:"commit";s:14:"total_affected";i:0;}i:79;a:2:{s:8:"sphinxql";s:64:"insert into rt (id, id1, title) values (34, 34, 'test text all')";s:14:"total_affected";i:1;}i:80;a:2:{s:8:"sphinxql";s:66:"insert into rt (id, id1, title) values (134, 134, 'test text all')";s:14:"total_affected";i:1;}i:81;a:2:{s:8:"sphinxql";s:6:"commit";s:14:"total_affected";i:0;}i:82;a:2:{s:8:"sphinxql";s:64:"insert into rt (id, id1, title) values (35, 35, 'test text all')";s:14:"total_affected";i:1;}i:83;a:2:{s:8:"sphinxql";s:66:"insert into rt (id, id1, title) values (135, 135, 'test text all')";s:14:"total_affected";i:1;}i:84;a:2:{s:8:"sphinxql";s:6:"commit";s:14:"total_affected";i:0;}i:85;a:2:{s:8:"sphinxql";s:64:"insert into rt (id, id1, title) values (36, 36, 'test text all')";s:14:"total_affected";i:1;}i:86;a:2:{s:8:"sphinxql";s:66:"insert into rt (id, id1, title) values (136, 136, 'test text 
all')";s:14:"total_affected";i:1;}i:87;a:2:{s:8:"sphinxql";s:6:"commit";s:14:"total_affected";i:0;}i:88;a:3:{s:8:"sphinxql";s:16:"select * from rt";s:10:"total_rows";i:20;s:4:"rows";a:20:{i:0;a:3:{s:2:"id";s:2:"10";s:6:"weight";s:1:"1";s:3:"id1";s:2:"10";}i:1;a:3:{s:2:"id";s:2:"11";s:6:"weight";s:1:"1";s:3:"id1";s:2:"11";}i:2;a:3:{s:2:"id";s:2:"12";s:6:"weight";s:1:"1";s:3:"id1";s:2:"12";}i:3;a:3:{s:2:"id";s:2:"13";s:6:"weight";s:1:"1";s:3:"id1";s:2:"13";}i:4;a:3:{s:2:"id";s:2:"14";s:6:"weight";s:1:"1";s:3:"id1";s:2:"14";}i:5;a:3:{s:2:"id";s:2:"15";s:6:"weight";s:1:"1";s:3:"id1";s:2:"15";}i:6;a:3:{s:2:"id";s:2:"16";s:6:"weight";s:1:"1";s:3:"id1";s:2:"16";}i:7;a:3:{s:2:"id";s:2:"17";s:6:"weight";s:1:"1";s:3:"id1";s:2:"17";}i:8;a:3:{s:2:"id";s:2:"18";s:6:"weight";s:1:"1";s:3:"id1";s:2:"18";}i:9;a:3:{s:2:"id";s:2:"19";s:6:"weight";s:1:"1";s:3:"id1";s:2:"19";}i:10;a:3:{s:2:"id";s:2:"20";s:6:"weight";s:1:"1";s:3:"id1";s:2:"20";}i:11;a:3:{s:2:"id";s:2:"21";s:6:"weight";s:1:"1";s:3:"id1";s:2:"21";}i:12;a:3:{s:2:"id";s:2:"22";s:6:"weight";s:1:"1";s:3:"id1";s:2:"22";}i:13;a:3:{s:2:"id";s:2:"23";s:6:"weight";s:1:"1";s:3:"id1";s:2:"23";}i:14;a:3:{s:2:"id";s:2:"24";s:6:"weight";s:1:"1";s:3:"id1";s:2:"24";}i:15;a:3:{s:2:"id";s:2:"25";s:6:"weight";s:1:"1";s:3:"id1";s:2:"25";}i:16;a:3:{s:2:"id";s:2:"26";s:6:"weight";s:1:"1";s:3:"id1";s:2:"26";}i:17;a:3:{s:2:"id";s:2:"27";s:6:"weight";s:1:"1";s:3:"id1";s:2:"27";}i:18;a:3:{s:2:"id";s:2:"28";s:6:"weight";s:1:"1";s:3:"id1";s:2:"28";}i:19;a:3:{s:2:"id";s:2:"29";s:6:"weight";s:1:"1";s:3:"id1";s:2:"29";}}}}}sphinx-2.0.4-release/test/test_176/0000755000176700017710000000000011724063141016275 5ustar deogardeogarsphinx-2.0.4-release/test/test_176/test.xml0000644000176700017710000000224211557050344020003 0ustar deogardeogar ZONE pushdown vs min_prefix_len indexer { mem_limit = 20M } searchd { compat_sphinxql_magics = 0 preopen_indexes = 0 } source test { type = mysql sql_query = select * from test_table sql_attr_uint = gid } index test { source = test path = /test html_strip = 1 index_zones = zone* dict = keywords enable_star = 1 min_prefix_len = 0 min_prefix_len = 1 } create table test_table ( id int not null, gid int not null, title varchar(255) not null ); drop table if exists test_table; J\'entends le loup et le renard chanter.' 
);]]> "la belette" ZONE:zoneB "la belette" ZONE:nonZone "la belette" sphinx-2.0.4-release/test/test_176/model.bin0000644000176700017710000000517211557050344020101 0ustar deogardeogara:2:{i:0;a:3:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:100;a:2:{s:6:"weight";s:4:"2500";s:5:"attrs";a:1:{s:3:"gid";s:1:"1";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:2:"la";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:7:"belette";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:12:""la belette"";}i:1;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:2:"la";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:7:"belette";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:23:"ZONE:zoneB "la belette"";}i:2;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:2:"la";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:7:"belette";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:25:"ZONE:nonZone "la belette"";}}i:1;a:3:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:7:"matches";a:1:{i:100;a:2:{s:6:"weight";s:4:"2500";s:5:"attrs";a:1:{s:3:"gid";s:1:"1";}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:2:"la";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:7:"belette";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:12:""la belette"";}i:1;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:2:"la";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:7:"belette";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:23:"ZONE:zoneB "la belette"";}i:2;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:5:"title";}s:5:"attrs";a:1:{s:3:"gid";i:1;}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.001";s:5:"words";a:2:{s:2:"la";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:7:"belette";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:25:"ZONE:nonZone "la belette"";}}}sphinx-2.0.4-release/test/test_147/0000755000176700017710000000000011724063141016273 5ustar deogardeogarsphinx-2.0.4-release/test/test_147/test.xml0000644000176700017710000000715511605620330020001 0ustar deogardeogar multi-index queries vs mva and string attributes indexer { mem_limit = 16M } searchd { dist_threads = 0 dist_threads = 2 } source src_base { type = mysql } source src_1 : src_base { sql_query = SELECT id, attr1, mva1, mva2, str1, str2, text FROM test_table WHERE idx = 'i1'; sql_attr_multi = uint mva1 from field sql_attr_multi = uint mva2 from field sql_attr_string = str1 sql_attr_string = str2 sql_attr_uint = attr1 } source src_2 : src_base { sql_query = SELECT id, 
attr1, mva2, str1, str2, text FROM test_table WHERE idx = 'i2'; sql_attr_multi = uint mva2 from field sql_attr_string = str1 sql_attr_string = str2 sql_attr_uint = attr1 } source src_3 : src_base { sql_query = SELECT id, attr1, mva1, mva2, str2, text FROM test_table WHERE idx = 'i3'; sql_attr_multi = uint mva1 from field sql_attr_multi = uint mva2 from field sql_attr_string = str2 sql_attr_uint = attr1 } source src_1 : src_base { sql_query = SELECT id, attr1, mva1, mva2, str1, str2, text FROM test_table WHERE idx = 'i1'; sql_attr_multi = bigint mva1 from field sql_attr_multi = bigint mva2 from field sql_attr_string = str1 sql_attr_string = str2 sql_attr_uint = attr1 } source src_2 : src_base { sql_query = SELECT id, attr1, mva2, str1, str2, text FROM test_table WHERE idx = 'i2'; sql_attr_multi = bigint mva2 from field sql_attr_string = str1 sql_attr_string = str2 sql_attr_uint = attr1 } source src_3 : src_base { sql_query = SELECT id, attr1, mva1, mva2, str2, text FROM test_table WHERE idx = 'i3'; sql_attr_multi = bigint mva1 from field sql_attr_multi = bigint mva2 from field sql_attr_string = str2 sql_attr_uint = attr1 } index i1 { source = src_1 path = /i1 docinfo = extern charset_type = utf-8 } index i2 { source = src_2 path = /i2 docinfo = extern charset_type = utf-8 } index i3 { source = src_3 path = /i3 docinfo = extern charset_type = utf-8 } CREATE TABLE test_table ( id INT NOT NULL, attr1 INT NOT NULL, mva1 VARCHAR(255) NOT NULL, mva2 VARCHAR(255) NOT NULL, str1 VARCHAR(255) NOT NULL, str2 VARCHAR(255) NOT NULL, text VARCHAR(255) NOT NULL DEFAULT 'text', idx VARCHAR(255) NOT NULL ); DROP TABLE IF EXISTS test_table; INSERT INTO test_table (id, attr1, mva1, mva2, str1, str2, idx) VALUES ( 1, 10, '101, 101, 103', '110, 111, 112', 'some data 100', 'another data 110', 'i1' ), ( 2, 20, '201, 202, 203', '21, 211, 212', 'some data 200', 'another data 210', 'i1' ), ( 3, 30, '12, 302, 303', '310, 312, 313', 'some data 300', 'another data 310', 'i1' ), ( 1, 1000, '', '1000, 10000, 100000', 'aaa bbb', 'ccc ddd', 'i2' ), ( 2, 2000, '', '2000, 20000, 21', 'eee fff', 'ggg hhh', 'i2' ), ( 3, 3000, '', '3000, 30000, 300000', 'iii jjj', 'lll mmm', 'i2' ), ( 1, 1, '2, 3, 4', '5, 6, 7', '', 'second string attribute in the 1st row', 'i3' ), ( 2, 8, '9, 10, 11', '12, 13, 14', '', 'second string attribute in the 2nd row', 'i3' ), ( 3, 15, '12, 17, 18', '19, 20, 21', '', 'second string attribute in the 3d row', 'i3' ) SetMatchMode (SPH_MATCH_EXTENDED2); $results = array(); $q = "text"; $i = "*"; $client->ResetFilters(); $client->AddQuery ($q, $i); $client->AddQuery ($q, $i); $results = $client->RunQueries(); ]]> sphinx-2.0.4-release/test/test_147/model.bin0000644000176700017710000001622611605620330020071 0ustar deogardeogara:4:{i:0;a:2:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:3:{s:5:"attr1";i:1;s:4:"mva2";i:1073741825;s:4:"str2";i:7;}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:3:{s:5:"attr1";s:1:"1";s:4:"mva2";a:3:{i:0;s:1:"5";i:1;s:1:"6";i:2;s:1:"7";}s:4:"str2";s:38:"second string attribute in the 1st row";}}i:2;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:3:{s:5:"attr1";s:1:"8";s:4:"mva2";a:3:{i:0;s:2:"12";i:1;s:2:"13";i:2;s:2:"14";}s:4:"str2";s:38:"second string attribute in the 2nd row";}}i:3;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:3:{s:5:"attr1";s:2:"15";s:4:"mva2";a:3:{i:0;s:2:"19";i:1;s:2:"20";i:2;s:2:"21";}s:4:"str2";s:37:"second string attribute in the 3d 
row";}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"text";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:0:"";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:3:{s:5:"attr1";i:1;s:4:"mva2";i:1073741825;s:4:"str2";i:7;}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:3:{s:5:"attr1";s:1:"1";s:4:"mva2";a:3:{i:0;s:1:"5";i:1;s:1:"6";i:2;s:1:"7";}s:4:"str2";s:38:"second string attribute in the 1st row";}}i:2;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:3:{s:5:"attr1";s:1:"8";s:4:"mva2";a:3:{i:0;s:2:"12";i:1;s:2:"13";i:2;s:2:"14";}s:4:"str2";s:38:"second string attribute in the 2nd row";}}i:3;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:3:{s:5:"attr1";s:2:"15";s:4:"mva2";a:3:{i:0;s:2:"19";i:1;s:2:"20";i:2;s:2:"21";}s:4:"str2";s:37:"second string attribute in the 3d row";}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"text";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:0:"";}}i:1;a:2:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:3:{s:5:"attr1";i:1;s:4:"mva2";i:1073741825;s:4:"str2";i:7;}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:3:{s:5:"attr1";s:1:"1";s:4:"mva2";a:3:{i:0;s:1:"5";i:1;s:1:"6";i:2;s:1:"7";}s:4:"str2";s:38:"second string attribute in the 1st row";}}i:2;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:3:{s:5:"attr1";s:1:"8";s:4:"mva2";a:3:{i:0;s:2:"12";i:1;s:2:"13";i:2;s:2:"14";}s:4:"str2";s:38:"second string attribute in the 2nd row";}}i:3;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:3:{s:5:"attr1";s:2:"15";s:4:"mva2";a:3:{i:0;s:2:"19";i:1;s:2:"20";i:2;s:2:"21";}s:4:"str2";s:37:"second string attribute in the 3d row";}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"text";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:0:"";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:3:{s:5:"attr1";i:1;s:4:"mva2";i:1073741825;s:4:"str2";i:7;}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:3:{s:5:"attr1";s:1:"1";s:4:"mva2";a:3:{i:0;s:1:"5";i:1;s:1:"6";i:2;s:1:"7";}s:4:"str2";s:38:"second string attribute in the 1st row";}}i:2;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:3:{s:5:"attr1";s:1:"8";s:4:"mva2";a:3:{i:0;s:2:"12";i:1;s:2:"13";i:2;s:2:"14";}s:4:"str2";s:38:"second string attribute in the 2nd row";}}i:3;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:3:{s:5:"attr1";s:2:"15";s:4:"mva2";a:3:{i:0;s:2:"19";i:1;s:2:"20";i:2;s:2:"21";}s:4:"str2";s:37:"second string attribute in the 3d row";}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"text";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:0:"";}}i:2;a:2:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:3:{s:5:"attr1";i:1;s:4:"mva2";i:1073741825;s:4:"str2";i:7;}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:3:{s:5:"attr1";s:1:"1";s:4:"mva2";a:3:{i:0;s:1:"5";i:1;s:1:"6";i:2;s:1:"7";}s:4:"str2";s:38:"second string attribute in the 1st row";}}i:2;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:3:{s:5:"attr1";s:1:"8";s:4:"mva2";a:3:{i:0;s:2:"12";i:1;s:2:"13";i:2;s:2:"14";}s:4:"str2";s:38:"second string attribute in the 2nd 
row";}}i:3;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:3:{s:5:"attr1";s:2:"15";s:4:"mva2";a:3:{i:0;s:2:"19";i:1;s:2:"20";i:2;s:2:"21";}s:4:"str2";s:37:"second string attribute in the 3d row";}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"text";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:0:"";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:3:{s:5:"attr1";i:1;s:4:"mva2";i:1073741825;s:4:"str2";i:7;}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:3:{s:5:"attr1";s:1:"1";s:4:"mva2";a:3:{i:0;s:1:"5";i:1;s:1:"6";i:2;s:1:"7";}s:4:"str2";s:38:"second string attribute in the 1st row";}}i:2;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:3:{s:5:"attr1";s:1:"8";s:4:"mva2";a:3:{i:0;s:2:"12";i:1;s:2:"13";i:2;s:2:"14";}s:4:"str2";s:38:"second string attribute in the 2nd row";}}i:3;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:3:{s:5:"attr1";s:2:"15";s:4:"mva2";a:3:{i:0;s:2:"19";i:1;s:2:"20";i:2;s:2:"21";}s:4:"str2";s:37:"second string attribute in the 3d row";}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"text";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:0:"";}}i:3;a:2:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:3:{s:5:"attr1";i:1;s:4:"mva2";i:1073741825;s:4:"str2";i:7;}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:3:{s:5:"attr1";s:1:"1";s:4:"mva2";a:3:{i:0;s:1:"5";i:1;s:1:"6";i:2;s:1:"7";}s:4:"str2";s:38:"second string attribute in the 1st row";}}i:2;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:3:{s:5:"attr1";s:1:"8";s:4:"mva2";a:3:{i:0;s:2:"12";i:1;s:2:"13";i:2;s:2:"14";}s:4:"str2";s:38:"second string attribute in the 2nd row";}}i:3;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:3:{s:5:"attr1";s:2:"15";s:4:"mva2";a:3:{i:0;s:2:"19";i:1;s:2:"20";i:2;s:2:"21";}s:4:"str2";s:37:"second string attribute in the 3d row";}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"text";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:0:"";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:3:{s:5:"attr1";i:1;s:4:"mva2";i:1073741825;s:4:"str2";i:7;}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:3:{s:5:"attr1";s:1:"1";s:4:"mva2";a:3:{i:0;s:1:"5";i:1;s:1:"6";i:2;s:1:"7";}s:4:"str2";s:38:"second string attribute in the 1st row";}}i:2;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:3:{s:5:"attr1";s:1:"8";s:4:"mva2";a:3:{i:0;s:2:"12";i:1;s:2:"13";i:2;s:2:"14";}s:4:"str2";s:38:"second string attribute in the 2nd row";}}i:3;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:3:{s:5:"attr1";s:2:"15";s:4:"mva2";a:3:{i:0;s:2:"19";i:1;s:2:"20";i:2;s:2:"21";}s:4:"str2";s:37:"second string attribute in the 3d row";}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"text";a:2:{s:4:"docs";s:1:"9";s:4:"hits";s:1:"9";}}s:5:"query";s:0:"";}}}sphinx-2.0.4-release/test/test_029/0000755000176700017710000000000011724063141016272 5ustar deogardeogarsphinx-2.0.4-release/test/test_029/test.xml0000644000176700017710000000451311323636205020000 0ustar deogardeogar multi-index queries indexer { mem_limit = 16M } searchd { } source srctest1 { type = mysql sql_query = SELECT document_id, attr1, attr2, body FROM test_table sql_attr_uint = attr1 sql_attr_uint = attr2 } source srctest2 { type = mysql 
sql_query = SELECT document_id, attr1, attr2, body FROM test_table sql_attr_uint = attr1:5 sql_attr_uint = attr2:5 } source srctest3 { type = mysql sql_query = SELECT document_id, attr2, attr3, body FROM test_table sql_attr_uint = attr2 sql_attr_uint = attr3 } source srctest4 { type = mysql sql_query = SELECT document_id, attr1, attr2, body FROM test_table sql_attr_uint = attr1 sql_attr_uint = attr2 } index test1 { source = srctest1 path = /test1 charset_type = utf-8 min_prefix_len = 1 } index test2 { source = srctest2 path = /test2 charset_type = utf-8 min_prefix_len = 1 } index test3 { source = srctest3 path = /test3 charset_type = utf-8 min_prefix_len = 1 } index test4 { source = srctest4 path = /test4 charset_type = utf-8 min_prefix_len = 1 } word word word word word word word word word CREATE TABLE `test_table` ( `document_id` int(11) NOT NULL default '0', `attr1` int(11) NOT NULL default '0', `attr2` int(11) NOT NULL default '0', `attr3` int(11) NOT NULL default '0', `attr4` int(11) NOT NULL default '0', `body` varchar(255) NOT NULL default '' ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 1, 1, 5, 9, 13, 'wordforms' ), ( 2, 2, 6, 10, 14, 'wordies' ), ( 3, 3, 7, 11, 15, 'words' ), ( 4, 4, 8, 12, 16, 'word' ) sphinx-2.0.4-release/test/test_029/model.bin0000644000176700017710000001307411023610403020060 0ustar deogardeogara:1:{i:0;a:9:{i:0;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:2:{s:5:"attr1";i:1;s:5:"attr2";i:1;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:5:"attr1";s:1:"1";s:5:"attr2";s:1:"5";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:5:"attr1";s:1:"2";s:5:"attr2";s:1:"6";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:5:"attr1";s:1:"3";s:5:"attr2";s:1:"7";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:5:"attr1";s:1:"4";s:5:"attr2";s:1:"8";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"word";a:2:{s:4:"docs";s:1:"8";s:4:"hits";s:1:"8";}}s:5:"query";s:4:"word";}i:1;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:1:{s:5:"attr2";i:1;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:5:"attr2";s:1:"5";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:5:"attr2";s:1:"6";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:5:"attr2";s:1:"7";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:5:"attr2";s:1:"8";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:4:"word";a:2:{s:4:"docs";s:2:"12";s:4:"hits";s:2:"12";}}s:5:"query";s:4:"word";}i:2;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:1:{s:5:"attr2";i:1;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:5:"attr2";s:1:"5";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:5:"attr2";s:1:"6";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:5:"attr2";s:1:"7";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:5:"attr2";s:1:"8";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.003";s:5:"words";a:1:{s:4:"word";a:2:{s:4:"docs";s:2:"16";s:4:"hits";s:2:"16";}}s:5:"query";s:4:"word";}i:3;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:1:{s:5:"attr2";i:1;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:5:"attr2";s:1:"5";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:5:"at
tr2";s:1:"6";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:5:"attr2";s:1:"7";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:5:"attr2";s:1:"8";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:4:"word";a:2:{s:4:"docs";s:1:"8";s:4:"hits";s:1:"8";}}s:5:"query";s:4:"word";}i:4;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:1:{s:5:"attr2";i:1;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:5:"attr2";s:1:"5";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:5:"attr2";s:1:"6";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:5:"attr2";s:1:"7";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:5:"attr2";s:1:"8";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.002";s:5:"words";a:1:{s:4:"word";a:2:{s:4:"docs";s:2:"12";s:4:"hits";s:2:"12";}}s:5:"query";s:4:"word";}i:5;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:1:{s:5:"attr2";i:1;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:5:"attr2";s:1:"5";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:5:"attr2";s:1:"6";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:5:"attr2";s:1:"7";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:5:"attr2";s:1:"8";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"word";a:2:{s:4:"docs";s:1:"8";s:4:"hits";s:1:"8";}}s:5:"query";s:4:"word";}i:6;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:1:{s:5:"attr2";i:1;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:5:"attr2";s:1:"5";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:5:"attr2";s:1:"6";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:5:"attr2";s:1:"7";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:1:{s:5:"attr2";s:1:"8";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"word";a:2:{s:4:"docs";s:1:"8";s:4:"hits";s:1:"8";}}s:5:"query";s:4:"word";}i:7;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:2:{s:5:"attr1";i:1;s:5:"attr2";i:1;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:5:"attr1";s:1:"1";s:5:"attr2";s:1:"5";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:5:"attr1";s:1:"2";s:5:"attr2";s:1:"6";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:5:"attr1";s:1:"3";s:5:"attr2";s:1:"7";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:5:"attr1";s:1:"4";s:5:"attr2";s:1:"8";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"word";a:2:{s:4:"docs";s:1:"8";s:4:"hits";s:1:"8";}}s:5:"query";s:4:"word";}i:8;a:11:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"body";}s:5:"attrs";a:2:{s:5:"attr1";i:1;s:5:"attr2";i:1;}s:7:"matches";a:4:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:5:"attr1";s:1:"1";s:5:"attr2";s:1:"5";}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:5:"attr1";s:1:"2";s:5:"attr2";s:1:"6";}}i:3;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:5:"attr1";s:1:"3";s:5:"attr2";s:1:"7";}}i:4;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:5:"attr1";s:1:"4";s:5:"attr2";s:1:"8";}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.001";s:5:"words";a:1:{s:4:"word";a:2:{s:4:"docs";s:1:"8";s:4:"hits";s:1:"8";}}s:5:"query";s:4:"word";}}}sphinx-2.0.4-release/test/test_081/0000755000176
700017710000000000011724063141016270 5ustar deogardeogarsphinx-2.0.4-release/test/test_081/words.txt0000644000176700017710000000003111243776120020165 0ustar deogardeogara the and of tin woodman sphinx-2.0.4-release/test/test_081/test.xml0000644000176700017710000000711011323414741017771 0ustar deogardeogar hitless indexes indexer { mem_limit = 16M } searchd { } source test { type = mysql sql_query = select * from sph_test; } index none { source = test path = /none } index some { source = test path = /some hitless_words = /words.txt } index all { source = test path = /all hitless_words = all } create table sph_test ( id int not null, text varchar(255) not null ); drop table if exists sph_test; insert into sph_test values ( 1, 'The Tin Woodman gave a sigh of satisfaction and lowered his axe, which he leaned against the tree.' ), ( 2, 'The Tin Woodman appeared to think deeply for a moment.' ), ( 3, 'The Tin Woodman had asked Dorothy to put the oil-can in her basket.' ); tin woodman tin woodman tin woodman tin woodman tin woodman tin woodman lowered lowered lowered "tin woodman" "tin woodman" "tin woodman" "and lowered" "and lowered" "and lowered" "tin woodman"~2 "tin woodman"~2 "tin woodman"~2 "lowered his axe" "lowered his axe" "lowered his axe" "and lowered his axe" "and lowered his axe" "and lowered his axe" "tin woodman"/1 "tin woodman"/1 "tin woodman"/1 ^the ^the ^the ^tin ^tin ^tin basket basket basket basket$ basket$ basket$ sphinx-2.0.4-release/test/test_081/model.bin0000644000176700017710000004345211243776120020076 0ustar deogardeogara:1:{i:0;a:39:{i:0;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:4:"2319";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"2319";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"2319";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:2:{s:3:"tin";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}s:7:"woodman";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:11:"tin woodman";}i:1;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:2:{s:3:"tin";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}s:7:"woodman";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:11:"tin woodman";}i:2;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:2:{s:3:"tin";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}s:7:"woodman";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:11:"tin 
woodman";}i:3;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:2:{s:3:"tin";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}s:7:"woodman";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:11:"tin woodman";}i:4;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:2:{s:3:"tin";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}s:7:"woodman";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:11:"tin woodman";}i:5;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:1:"2";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:2:{s:3:"tin";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}s:7:"woodman";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:11:"tin woodman";}i:6;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1680";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"lowered";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"lowered";}i:7;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1680";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"lowered";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"lowered";}i:8;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"1680";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:7:"lowered";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"lowered";}i:9;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:4:"2319";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"2319";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"2319";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:2:{s:3:"tin";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}s:7:"woodman";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:13:""tin woodman"";}i:10;a:11:{s:5:"error";s:0:"";s:7:"warning";s:68:"can't create phrase node, hitlists unavailable 
(hitlists=0, nodes=2)";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:13:""tin woodman"";}i:11;a:11:{s:5:"error";s:0:"";s:7:"warning";s:68:"can't create phrase node, hitlists unavailable (hitlists=0, nodes=2)";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:13:""tin woodman"";}i:12;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"2680";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:2:{s:3:"and";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:7:"lowered";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:13:""and lowered"";}i:13;a:11:{s:5:"error";s:0:"";s:7:"warning";s:68:"can't create phrase node, hitlists unavailable (hitlists=1, nodes=2)";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:13:""and lowered"";}i:14;a:11:{s:5:"error";s:0:"";s:7:"warning";s:68:"can't create phrase node, hitlists unavailable (hitlists=0, nodes=2)";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:13:""and lowered"";}i:15;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:4:"2319";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"2319";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"2319";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:2:{s:3:"tin";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}s:7:"woodman";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:15:""tin woodman"~2";}i:16;a:11:{s:5:"error";s:0:"";s:7:"warning";s:68:"can't create phrase node, hitlists unavailable (hitlists=0, nodes=2)";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:15:""tin woodman"~2";}i:17;a:11:{s:5:"error";s:0:"";s:7:"warning";s:68:"can't create phrase node, hitlists unavailable (hitlists=0, nodes=2)";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:15:""tin woodman"~2";}i:18;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"3680";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:7:"lowered";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"his";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"axe";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:17:""lowered his 
axe"";}i:19;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"3680";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:3:{s:7:"lowered";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"his";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"axe";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:17:""lowered his axe"";}i:20;a:11:{s:5:"error";s:0:"";s:7:"warning";s:68:"can't create phrase node, hitlists unavailable (hitlists=0, nodes=3)";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:17:""lowered his axe"";}i:21;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"4680";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:4:{s:3:"and";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:7:"lowered";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"his";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"axe";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:21:""and lowered his axe"";}i:22;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:1;a:2:{s:6:"weight";s:4:"3680";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.001";s:5:"words";a:4:{s:7:"lowered";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"his";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"axe";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}s:3:"and";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:21:""and lowered his axe"";}i:23;a:11:{s:5:"error";s:0:"";s:7:"warning";s:68:"can't create phrase node, hitlists unavailable (hitlists=0, nodes=4)";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:21:""and lowered his axe"";}i:24;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:4:"2319";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"2319";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"2319";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:2:{s:3:"tin";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}s:7:"woodman";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:15:""tin woodman"/1";}i:25;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:2:{s:3:"tin";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}s:7:"woodman";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:15:""tin 
woodman"/1";}i:26;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:2:{s:3:"tin";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}s:7:"woodman";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:15:""tin woodman"/1";}i:27;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:2;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:0:{}}i:1;a:2:{s:6:"weight";s:4:"1252";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"1252";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"the";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"5";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"^the";}i:28;a:13:{s:5:"error";s:0:"";s:7:"warning";s:43:"hitlist unavailable, position limit ignored";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:2;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:0:{}}i:1;a:2:{s:6:"weight";s:4:"1252";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"1252";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"the";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"5";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"^the";}i:29;a:13:{s:5:"error";s:0:"";s:7:"warning";s:43:"hitlist unavailable, position limit ignored";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:2;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:0:{}}i:1;a:2:{s:6:"weight";s:4:"1252";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"1252";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"the";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"5";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"^the";}i:30;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:5:"total";s:1:"0";s:11:"total_found";s:1:"0";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"tin";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"^tin";}i:31;a:13:{s:5:"error";s:0:"";s:7:"warning";s:43:"hitlist unavailable, position limit ignored";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"tin";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"^tin";}i:32;a:13:{s:5:"error";s:0:"";s:7:"warning";s:43:"hitlist unavailable, position limit 
ignored";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:0:{}}i:2;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:0:{}}i:3;a:2:{s:6:"weight";s:4:"1319";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:3:"tin";a:2:{s:4:"docs";s:1:"3";s:4:"hits";s:1:"3";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:4:"^tin";}i:33;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:4:"1680";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"basket";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:6:"basket";}i:34;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:4:"1680";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"basket";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:6:"basket";}i:35;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:4:"1680";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"basket";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:6:"basket";}i:36;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:4:"1680";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"basket";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"basket$";}i:37;a:13:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:4:"1680";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"basket";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"basket$";}i:38;a:13:{s:5:"error";s:0:"";s:7:"warning";s:43:"hitlist unavailable, position limit ignored";s:6:"status";i:3;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:0:{}s:7:"matches";a:1:{i:3;a:2:{s:6:"weight";s:4:"1680";s:5:"attrs";a:0:{}}}s:5:"total";s:1:"1";s:11:"total_found";s:1:"1";s:4:"time";s:5:"0.000";s:5:"words";a:1:{s:6:"basket";a:2:{s:4:"docs";s:1:"1";s:4:"hits";s:1:"1";}}s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:7:"basket$";}}}sphinx-2.0.4-release/test/test_137/0000755000176700017710000000000011724063141016272 5ustar deogardeogarsphinx-2.0.4-release/test/test_137/test.xml0000644000176700017710000000140011706326425017775 0ustar deogardeogar snippets vs NEAR searchd { } source test { type = mysql sql_query = SELECT 1, 'text'; } index test { source = test path = /test charset_type = utf-8 } select 1; true, "around"=> 2 ); $results[] = $client->BuildExcerpts($docs, 'test', '(one) NEAR/35 ("five")', $opts ); // regression crash on 0 docs snippet $results[] = $client->BuildExcerpts(array(), 'test', '(one) NEAR/35 ("five")', $opts ); $results[] = 
$client->GetLastError(); ]]> sphinx-2.0.4-release/test/test_137/model.bin0000644000176700017710000000024311706326425020072 0ustar deogardeogara:1:{i:0;a:1:{i:0;a:3:{i:0;a:1:{i:0;s:57:"Was one rabiit. Now there are five rabbits.";}i:1;b:0;i:2;s:38:"searchd error: invalid entries count 0";}}}sphinx-2.0.4-release/test/test_010/0000755000176700017710000000000011724063141016260 5ustar deogardeogarsphinx-2.0.4-release/test/test_010/test.xml0000644000176700017710000000565211605620330017766 0ustar deogardeogar bitfields + MVA indexer { mem_limit = 16M } searchd { } source srclj { type = mysql sql_query = SELECT id, adtext, heading, postcode, lng, lat, section, transmission_id FROM test_table sql_attr_float = lng sql_attr_float = lat sql_attr_uint = section:8 sql_attr_uint = transmission_id:5 sql_attr_multi = uint make_id from query; SELECT id, make_id FROM test_table sql_attr_multi = bigint make_id from query; SELECT id, make_id FROM test_table } index lj { source = srclj path = /lj docinfo = extern charset_type = utf-8 min_word_len = 1 charset_type = sbcs } section make_id CREATE TABLE `test_table` ( `id` int(11) NOT NULL auto_increment, `section` int(11) NOT NULL, `system_id` tinyint(4) NOT NULL, `adtext` varchar(255) NOT NULL, `heading` varchar(500) NOT NULL, `price` int(11) NOT NULL default '0', `postcode` varchar(10) NOT NULL, `gre` int(11) NOT NULL, `grn` int(11) NOT NULL, `str_at1` varchar(255) NOT NULL, `str_at2` varchar(255) NOT NULL, `str_at3` varchar(255) NOT NULL, `str_at4` varchar(255) NOT NULL, `str_at5` varchar(255) NOT NULL, `int_at1` int(11) default NULL, `int_at2` int(11) default NULL, `int_at3` int(11) default NULL, `int_at4` int(11) default NULL, `int_at5` int(11) default NULL, `float_at1` float default NULL, `float_at2` float default NULL, `float_at3` float default NULL, `lng` float NOT NULL default '0', `lat` float NOT NULL default '0', `make_id` int(11) NOT NULL, `transmission_id` tinyint(4) NOT NULL, PRIMARY KEY (`id`) ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` (`id`, `section`, `system_id`, `adtext`, `heading`, `price`, `postcode`, `gre`, `grn`, `str_at1`, `str_at2`, `str_at3`, `str_at4`, `str_at5`, `int_at1`, `int_at2`, `int_at3`, `int_at4`, `int_at5`, `float_at1`, `float_at2`, `float_at3`, `lng`, `lat`, `make_id`, `transmission_id`) VALUES (1, 1, 2, 'FORD', 'Ford KA', 2790, 'EN3 5BT', 535000, 197400, 'Ford', 'KA', 'Grey', 'Diesel', '', 18662, NULL, NULL, NULL, NULL, NULL, NULL, NULL, -0.0798578, 0.937717, 8, 1), (2, 1, 0, 'until', 'Vauxhall Corsa', 5800, 'BN42 4RN', 524000, 106100, 'Vauxhall', 'Corsa', 'Red', 'Petrol', '', 19296, NULL, NULL, NULL, NULL, NULL, NULL, NULL, -0.0799989, 0.891975, 5, 0), (211250, 0, 1, 'Quattro Roadster', 'Audi TT', 13995, 'E9 7DG', 535600, 184200, '', '', '', '', '', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, -0.0721455, 0.926761, 29, 1); sphinx-2.0.4-release/test/test_010/model.bin0000644000176700017710000000413611605620330020053 0ustar 
deogardeogara:2:{i:0;a:1:{i:0;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:6:"adtext";i:1;s:7:"heading";i:2;s:8:"postcode";}s:5:"attrs";a:5:{s:3:"lng";i:5;s:3:"lat";i:5;s:7:"section";i:1;s:15:"transmission_id";i:1;s:7:"make_id";i:1073741825;}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"lng";d:-0.0798577964305877685546875;s:3:"lat";d:0.937717020511627197265625;s:7:"section";s:1:"1";s:15:"transmission_id";s:1:"1";s:7:"make_id";a:1:{i:0;s:1:"8";}}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"lng";d:-0.079998902976512908935546875;s:3:"lat";d:0.891974985599517822265625;s:7:"section";s:1:"1";s:15:"transmission_id";s:1:"0";s:7:"make_id";a:1:{i:0;s:1:"5";}}}i:211250;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"lng";d:-0.072145499289035797119140625;s:3:"lat";d:0.926760971546173095703125;s:7:"section";s:1:"0";s:15:"transmission_id";s:1:"1";s:7:"make_id";a:1:{i:0;s:2:"29";}}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}}i:1;a:1:{i:0;a:12:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:3:{i:0;s:6:"adtext";i:1;s:7:"heading";i:2;s:8:"postcode";}s:5:"attrs";a:5:{s:3:"lng";i:5;s:3:"lat";i:5;s:7:"section";i:1;s:15:"transmission_id";i:1;s:7:"make_id";i:1073741825;}s:7:"matches";a:3:{i:1;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"lng";d:-0.0798577964305877685546875;s:3:"lat";d:0.937717020511627197265625;s:7:"section";s:1:"1";s:15:"transmission_id";s:1:"1";s:7:"make_id";a:1:{i:0;s:1:"8";}}}i:2;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"lng";d:-0.079998902976512908935546875;s:3:"lat";d:0.891974985599517822265625;s:7:"section";s:1:"1";s:15:"transmission_id";s:1:"0";s:7:"make_id";a:1:{i:0;s:1:"5";}}}i:211250;a:2:{s:6:"weight";s:1:"1";s:5:"attrs";a:5:{s:3:"lng";d:-0.072145499289035797119140625;s:3:"lat";d:0.926760971546173095703125;s:7:"section";s:1:"0";s:15:"transmission_id";s:1:"1";s:7:"make_id";a:1:{i:0;s:2:"29";}}}}s:5:"total";s:1:"3";s:11:"total_found";s:1:"3";s:4:"time";s:5:"0.000";s:8:"resarray";i:0;s:8:"roundoff";i:0;s:5:"query";s:0:"";}}}sphinx-2.0.4-release/test/synonyms.txt0000644000176700017710000000010510762120665017363 0ustar deogardeogarMS Windows => Windows Microsoft Windows => Windows Windows => Windowssphinx-2.0.4-release/test/test_152/0000755000176700017710000000000011724063141016267 5ustar deogardeogarsphinx-2.0.4-release/test/test_152/test.xml0000644000176700017710000000701211660574606020004 0ustar deogardeogar tail hits vs UNIT node indexer { mem_limit = 16M } searchd { } source src1 { type = mysql sql_query = SELECT * FROM test_table where gid=1 sql_attr_uint = gid } index test1 { source = src1 path = /test1 charset_type = utf-8 html_strip = 1 index_sp = 1 } source src2 { type = mysql sql_query = SELECT * FROM test_table2 sql_attr_uint = gid } index test2 { source = src2 path = /test2 docinfo = extern charset_type = utf-8 html_strip = 1 index_sp = 1 index_zones = z_* } source src3 { type = mysql sql_query = SELECT * FROM test_table3 sql_attr_uint = gid } index test3 { source = src3 path = /test3 docinfo = extern charset_type = utf-8 } CREATE TABLE test_table ( id INTEGER NOT NULL, gid INTEGER NOT NULL, title VARCHAR(16384) NOT NULL ) DROP TABLE IF EXISTS test_table insert into test_table values ( 1, 1, 'aa bb cc' ) insert into test_table values ( 2, 1, 'aa bb' ) insert into test_table values ( 3, 1, 'aa bb' ) insert into test_table values ( 4, 1, CONCAT(REPEAT('aa bb. ', 512), 'aa bb. 
')) insert into test_table values ( 5, 1, 'cc' ) insert into test_table values ( 6, 1, 'cc' ) CREATE TABLE test_table2 ( id INTEGER NOT NULL, gid INTEGER NOT NULL, title VARCHAR(6048) NOT NULL ) DROP TABLE IF EXISTS test_table2 insert into test_table2 values ( 1, 1, 'aa' ) insert into test_table2 values ( 2, 1, 'aa' ) insert into test_table2 values ( 3, 1, 'aa' ) insert into test_table2 values ( 4, 1, 'aa' ) insert into test_table2 values ( 5, 1, 'aa' ) aa' )]]> aa' )]]> aa' )]]> ', CONCAT(REPEAT('aa ', 512), ' ')) )]]> aa ' )]]> aa ' )]]> CREATE TABLE test_table3 ( id BIGINT NOT NULL, gid INTEGER NOT NULL, title VARCHAR(6048) NOT NULL ) DROP TABLE IF EXISTS test_table3 insert into test_table3 values ( 3452816845, 3, 'match1 and match2 vs match3' ) select * from test1 where match('aa SENTENCE bb') select * from test1 where match('(aa SENTENCE bb) cc') CALL SNIPPETS ('dummy1 match1 dummy2 match2', 'test1', ' ( match1 NEAR/3 match2 ) | ^missed1 | missed2 ', 1 as query_mode) select * from test2 where match('(ZONE:z_1 aa )') order by id asc sphinx-2.0.4-release/test/test_152/model.bin0000644000176700017710000000316111602663261020066 0ustar deogardeogara:1:{i:0;a:5:{i:0;a:3:{s:8:"sphinxql";s:49:"select * from test1 where match('aa SENTENCE bb')";s:10:"total_rows";i:4;s:4:"rows";a:4:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"2466";s:3:"gid";s:1:"1";}i:1;a:3:{s:2:"id";s:1:"2";s:6:"weight";s:4:"2466";s:3:"gid";s:1:"1";}i:2;a:3:{s:2:"id";s:1:"3";s:6:"weight";s:4:"2466";s:3:"gid";s:1:"1";}i:3;a:3:{s:2:"id";s:1:"4";s:6:"weight";s:4:"2426";s:3:"gid";s:1:"1";}}}i:1;a:3:{s:8:"sphinxql";s:54:"select * from test1 where match('(aa SENTENCE bb) cc')";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"3488";s:3:"gid";s:1:"1";}}}i:2;a:3:{s:8:"sphinxql";s:122:"CALL SNIPPETS ('dummy1 match1 dummy2 match2', 'test1', ' ( match1 NEAR/3 match2 ) | ^missed1 | missed2 ', 1 as query_mode)";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:1:{s:7:"snippet";s:41:"dummy1 match1 dummy2 match2";}}}i:3;a:3:{s:8:"sphinxql";s:65:"select * from test2 where match('(ZONE:z_1 aa )') order by id asc";s:10:"total_rows";i:6;s:4:"rows";a:6:{i:0;a:3:{s:2:"id";s:1:"6";s:6:"weight";s:4:"1280";s:3:"gid";s:1:"1";}i:1;a:3:{s:2:"id";s:1:"7";s:6:"weight";s:4:"1280";s:3:"gid";s:1:"1";}i:2;a:3:{s:2:"id";s:1:"8";s:6:"weight";s:4:"1280";s:3:"gid";s:1:"1";}i:3;a:3:{s:2:"id";s:1:"9";s:6:"weight";s:4:"1018";s:3:"gid";s:1:"1";}i:4;a:3:{s:2:"id";s:2:"10";s:6:"weight";s:4:"1280";s:3:"gid";s:1:"1";}i:5;a:3:{s:2:"id";s:2:"11";s:6:"weight";s:4:"1280";s:3:"gid";s:1:"1";}}}i:4;a:3:{s:8:"sphinxql";s:69:"select * from test3 where match ( ' match1 | ( match5 << match2) ' ) ";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:10:"3452816845";s:6:"weight";s:4:"1500";s:3:"gid";s:1:"3";}}}}}sphinx-2.0.4-release/test/test_057/0000755000176700017710000000000011724063141016273 5ustar deogardeogarsphinx-2.0.4-release/test/test_057/test.xml0000644000176700017710000000426611323636205020006 0ustar deogardeogar snippets vs boundaries, windows-1251 searchd { } source test { type = mysql sql_query = SELECT 1, 'text'; } index index_sbcs { source = test path = /index_sbcs morphology = stem_enru min_word_len = 3 min_prefix_len = 0 min_infix_len = 0 phrase_boundary = . phrase_boundary_step = 100 charset_type = sbcs } select 1; BuildExcerpts($docs, 'index_sbcs', 'øåë', $opts); // 2 $docs = array(); $docs[0] = 'Ñ äðóãîì â ñàäó ÿ ñèäåë. Ìîêðûé ñêëîíèëñÿ áàìáóê. ß øåë ïî ñêëîíó Ôóäçè. Ñòàðóþ æåíùèíó ÿ. Øåë íå çíàÿ êóäà. 
'; $opts = array(); $opts['use_boundaries'] = true; $opts['single_passage'] = false; $opts['chunk_separator'] = '###'; $opts['weight_order'] = false; $opts['exact_phrase'] = true; $opts['limit'] = 25; $results[] = $client->BuildExcerpts($docs, 'index_sbcs', 'øåë ïî ñêëîíó', $opts); // 3 $opts = array(); $opts['use_boundaries'] = true; $opts['single_passage'] = false; $opts['chunk_separator'] = '###'; $opts['weight_order'] = true; $opts['exact_phrase'] = true; $opts['limit'] = 25; $results[] = $client->BuildExcerpts($docs, 'index_sbcs', 'øåë ïî ñêëîíó', $opts); // 4 $opts = array(); $opts['use_boundaries'] = true; $opts['single_passage'] = false; $opts['chunk_separator'] = '###'; $opts['weight_order'] = true; $opts['exact_phrase'] = false; $opts['limit'] = 75; $results[] = $client->BuildExcerpts($docs, 'index_sbcs', 'øåë ïî ñêëîíó', $opts); ]]> sphinx-2.0.4-release/test/test_057/model.bin0000644000176700017710000000056611153676616020111 0ustar deogardeogara:1:{i:0;a:1:{i:0;a:4:{i:0;a:2:{i:0;s:36:"### ß øåë ïî ñêëîíó Ôóäçè.###";i:1;s:32:"ß øåë ïî ñêëîíó Ôóäçè.###";}i:1;a:1:{i:0;s:36:"### ß øåë ïî ñêëîíó Ôóäçè.###";}i:2;a:1:{i:0;s:36:"### ß øåë ïî ñêëîíó Ôóäçè.###";}i:3;a:1:{i:0;s:107:"### ß øåë ïî ñêëîíó Ôóäçè.### Ìîêðûé ñêëîíèëñÿ áàìáóê.### Øåë íå çíàÿ êóäà.###";}}}}sphinx-2.0.4-release/test/test_113/0000755000176700017710000000000011724063141016264 5ustar deogardeogarsphinx-2.0.4-release/test/test_113/test.xml0000644000176700017710000000300211421075337017764 0ustar deogardeogar catching errors in multiqueries indexer { mem_limit = 16M } searchd { workers = threads binlog_path = } source srctest { type = mysql sql_query = SELECT id, best_seller, attributes_id, text FROM test_table sql_attr_uint = best_seller sql_attr_uint = attributes_id } index products { source = srctest path = /test } CREATE TABLE test_table ( id integer primary key not null auto_increment, best_seller int not null default 0, attributes_id int not null default 0, text varchar(256) ); DROP TABLE IF EXISTS test_table; INSERT INTO test_table (best_seller, attributes_id, text) VALUES ( 1, 1, 'text1' ), ( 2, 1, 'text2' ), ( 3, 2, 'text3' ), ( 4, 2, 'text4' ); SetArrayResult (true); $client->SetMatchMode (SPH_MATCH_EXTENDED2); $client->SetSortMode (SPH_SORT_EXTENDED, 'best_seller ASC'); $index = 'products'; $query = ""; // First Query $client->AddQuery ($query, $index); //Second query - note that manufacturers_id is absent in the schema, so it must fail the query, but *NOT* crash the searchd. 
$client->SetSelect('manufacturers_id'); $client->SetGroupBy ('manufacturers_id', SPH_GROUPBY_ATTR, 'manufacturers_id desc'); $client->AddQuery ($query, $index); //Run queries $results = $client->RunQueries(); ]]> sphinx-2.0.4-release/test/test_113/model.bin0000644000176700017710000000163211571611211020055 0ustar deogardeogara:1:{i:0;a:2:{i:0;a:10:{s:5:"error";s:0:"";s:7:"warning";s:0:"";s:6:"status";i:0;s:6:"fields";a:1:{i:0;s:4:"text";}s:5:"attrs";a:2:{s:11:"best_seller";i:1;s:13:"attributes_id";i:1;}s:7:"matches";a:4:{i:0;a:3:{s:2:"id";i:1;s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:11:"best_seller";i:1;s:13:"attributes_id";i:1;}}i:1;a:3:{s:2:"id";i:2;s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:11:"best_seller";i:2;s:13:"attributes_id";i:1;}}i:2;a:3:{s:2:"id";i:3;s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:11:"best_seller";i:3;s:13:"attributes_id";i:2;}}i:3;a:3:{s:2:"id";i:4;s:6:"weight";s:1:"1";s:5:"attrs";a:2:{s:11:"best_seller";i:4;s:13:"attributes_id";i:2;}}}s:5:"total";s:1:"4";s:11:"total_found";s:1:"4";s:4:"time";s:5:"0.000";s:5:"query";s:0:"";}i:1;a:4:{s:5:"error";s:101:"index products: parse error: unknown identifier 'manufacturers_id' (not an attribute, not a function)";s:7:"warning";s:0:"";s:6:"status";i:1;s:5:"query";s:0:"";}}}sphinx-2.0.4-release/test/test_122/0000755000176700017710000000000011724063141016264 5ustar deogardeogarsphinx-2.0.4-release/test/test_122/test.xml0000644000176700017710000000245011421075337017772 0ustar deogardeogar RT: string attrs indexer { mem_limit = 16M } searchd { workers = threads binlog_path = } index rt { type = rt path = data/rt rt_attr_uint = gid rt_attr_string = author rt_field = title } index rt_multy_str { type = rt path = data/rt_multy_str rt_field = title rt_attr_uint = gid rt_attr_string = place rt_attr_string = some rt_attr_string = time } insert into rt (id, gid, author, title) values (2, 123, 'Lewis Carrol', 'Through the looking glass') insert into rt (id, gid, author, title) values (3, 234, 'O.Henry', 'Cabbages and Kings') select * from rt insert into rt_multy_str values (1, 'lewis carrol', 1001, 'rabit hole', 'looking glass', 'at 1890') insert into rt_multy_str values (10, 'mad max', 1010, 'post future', 'fuel less world', 'at 2100') insert into rt_multy_str values (11, 'n. perumov', 1011, 'realm of meln', 'war of magican', 'at 650') select * from rt_multy_str sphinx-2.0.4-release/test/test_122/model.bin0000644000176700017710000000317111455516446020074 0ustar deogardeogara:1:{i:0;a:7:{i:0;a:2:{s:8:"sphinxql";s:100:"insert into rt (id, gid, author, title) values (2, 123, 'Lewis Carrol', 'Through the looking glass')";s:14:"total_affected";i:1;}i:1;a:2:{s:8:"sphinxql";s:88:"insert into rt (id, gid, author, title) values (3, 234, 'O.Henry', 'Cabbages and Kings')";s:14:"total_affected";i:1;}i:2;a:3:{s:8:"sphinxql";s:16:"select * from rt";s:10:"total_rows";i:2;s:4:"rows";a:2:{i:0;a:4:{s:2:"id";s:1:"2";s:6:"weight";s:1:"1";s:3:"gid";s:3:"123";s:6:"author";s:12:"Lewis Carrol";}i:1;a:4:{s:2:"id";s:1:"3";s:6:"weight";s:1:"1";s:3:"gid";s:3:"234";s:6:"author";s:7:"O.Henry";}}}i:3;a:2:{s:8:"sphinxql";s:99:"insert into rt_multy_str values (1, 'lewis carrol', 1001, 'rabit hole', 'looking glass', 'at 1890')";s:14:"total_affected";i:1;}i:4;a:2:{s:8:"sphinxql";s:98:"insert into rt_multy_str values (10, 'mad max', 1010, 'post future', 'fuel less world', 'at 2100')";s:14:"total_affected";i:1;}i:5;a:2:{s:8:"sphinxql";s:101:"insert into rt_multy_str values (11, 'n. 
perumov', 1011, 'realm of meln', 'war of magican', 'at 650')";s:14:"total_affected";i:1;}i:6;a:3:{s:8:"sphinxql";s:26:"select * from rt_multy_str";s:10:"total_rows";i:3;s:4:"rows";a:3:{i:0;a:6:{s:2:"id";s:1:"1";s:6:"weight";s:1:"1";s:3:"gid";s:4:"1001";s:5:"place";s:10:"rabit hole";s:4:"some";s:13:"looking glass";s:4:"time";s:7:"at 1890";}i:1;a:6:{s:2:"id";s:2:"10";s:6:"weight";s:1:"1";s:3:"gid";s:4:"1010";s:5:"place";s:11:"post future";s:4:"some";s:15:"fuel less world";s:4:"time";s:7:"at 2100";}i:2;a:6:{s:2:"id";s:2:"11";s:6:"weight";s:1:"1";s:3:"gid";s:4:"1011";s:5:"place";s:13:"realm of meln";s:4:"some";s:14:"war of magican";s:4:"time";s:6:"at 650";}}}}}sphinx-2.0.4-release/test/test_120/0000755000176700017710000000000011724063141016262 5ustar deogardeogarsphinx-2.0.4-release/test/test_120/test.xml0000644000176700017710000000171311403153121017755 0ustar deogardeogar html_stripper vs sql_file_field indexer { mem_limit = 16M } searchd { } source srctest { type = mysql sql_query = SELECT * FROM test_table sql_attr_uint = idd sql_file_field = title } index test_idx { source = srctest path = /test charset_type = utf-8 html_strip = 1 } CREATE TABLE test_table ( id INTEGER NOT NULL, idd INTEGER NOT NULL, title VARCHAR(255) NOT NULL ) DROP TABLE IF EXISTS `test_table` INSERT INTO `test_table` VALUES ( 1, 1, 'html_120.txt' ), ( 2, 2, '' ) select * from test_idx where match( 'div' ) select * from test_idx where match( 'span' ) select * from test_idx where match( 'end' ) sphinx-2.0.4-release/test/test_120/model.bin0000644000176700017710000000061511455516446020072 0ustar deogardeogara:1:{i:0;a:3:{i:0;a:2:{s:8:"sphinxql";s:43:"select * from test_idx where match( 'div' )";s:10:"total_rows";i:0;}i:1;a:2:{s:8:"sphinxql";s:44:"select * from test_idx where match( 'span' )";s:10:"total_rows";i:0;}i:2;a:3:{s:8:"sphinxql";s:43:"select * from test_idx where match( 'end' )";s:10:"total_rows";i:1;s:4:"rows";a:1:{i:0;a:3:{s:2:"id";s:1:"1";s:6:"weight";s:4:"1643";s:3:"idd";s:1:"1";}}}}}sphinx-2.0.4-release/sphinx05.sln0000644000176700017710000001173511320433132016136 0ustar deogardeogarMicrosoft Visual Studio Solution File, Format Version 9.00 # Visual Studio 2005 Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "indexer", "win\indexer05.vcproj", "{405619C7-CC22-4FB8-9237-B196CB897355}" ProjectSection(ProjectDependencies) = postProject {6A1685DE-0265-4243-965F-96CB53EBBCA6} = {6A1685DE-0265-4243-965F-96CB53EBBCA6} EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "libsphinx", "win\libsphinx05.vcproj", "{6A1685DE-0265-4243-965F-96CB53EBBCA6}" EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "search", "win\search05.vcproj", "{571DC41A-2665-476C-ABED-3899324E19AB}" ProjectSection(ProjectDependencies) = postProject {6A1685DE-0265-4243-965F-96CB53EBBCA6} = {6A1685DE-0265-4243-965F-96CB53EBBCA6} EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "searchd", "win\searchd05.vcproj", "{0BBD34CB-5891-477F-B665-3D7C9FC22A02}" ProjectSection(ProjectDependencies) = postProject {6A1685DE-0265-4243-965F-96CB53EBBCA6} = {6A1685DE-0265-4243-965F-96CB53EBBCA6} EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "tests", "win\tests05.vcproj", "{B47166A1-4827-4D80-97E3-743BDE61146F}" ProjectSection(ProjectDependencies) = postProject {6A1685DE-0265-4243-965F-96CB53EBBCA6} = {6A1685DE-0265-4243-965F-96CB53EBBCA6} EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "spelldump", 
"win\spelldump05.vcproj", "{AE5236EB-62AC-4AD9-81A3-F4BDC85D3876}" ProjectSection(ProjectDependencies) = postProject {6A1685DE-0265-4243-965F-96CB53EBBCA6} = {6A1685DE-0265-4243-965F-96CB53EBBCA6} EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "indextool", "win\indextool05.vcproj", "{6A78A67D-A743-4594-858A-A4F1C536A8C1}" ProjectSection(ProjectDependencies) = postProject {6A1685DE-0265-4243-965F-96CB53EBBCA6} = {6A1685DE-0265-4243-965F-96CB53EBBCA6} EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "testrt", "win\testrt05.vcproj", "{651CEFE5-9BDB-4003-9D35-8D05FA2C06D7}" ProjectSection(ProjectDependencies) = postProject {6A1685DE-0265-4243-965F-96CB53EBBCA6} = {6A1685DE-0265-4243-965F-96CB53EBBCA6} EndProjectSection EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Win32 = Debug|Win32 Release|Win32 = Release|Win32 EndGlobalSection GlobalSection(ProjectConfigurationPlatforms) = postSolution {405619C7-CC22-4FB8-9237-B196CB897355}.Debug|Win32.ActiveCfg = Debug|Win32 {405619C7-CC22-4FB8-9237-B196CB897355}.Debug|Win32.Build.0 = Debug|Win32 {405619C7-CC22-4FB8-9237-B196CB897355}.Release|Win32.ActiveCfg = Release|Win32 {405619C7-CC22-4FB8-9237-B196CB897355}.Release|Win32.Build.0 = Release|Win32 {6A1685DE-0265-4243-965F-96CB53EBBCA6}.Debug|Win32.ActiveCfg = Debug|Win32 {6A1685DE-0265-4243-965F-96CB53EBBCA6}.Debug|Win32.Build.0 = Debug|Win32 {6A1685DE-0265-4243-965F-96CB53EBBCA6}.Release|Win32.ActiveCfg = Release|Win32 {6A1685DE-0265-4243-965F-96CB53EBBCA6}.Release|Win32.Build.0 = Release|Win32 {571DC41A-2665-476C-ABED-3899324E19AB}.Debug|Win32.ActiveCfg = Debug|Win32 {571DC41A-2665-476C-ABED-3899324E19AB}.Debug|Win32.Build.0 = Debug|Win32 {571DC41A-2665-476C-ABED-3899324E19AB}.Release|Win32.ActiveCfg = Release|Win32 {571DC41A-2665-476C-ABED-3899324E19AB}.Release|Win32.Build.0 = Release|Win32 {0BBD34CB-5891-477F-B665-3D7C9FC22A02}.Debug|Win32.ActiveCfg = Debug|Win32 {0BBD34CB-5891-477F-B665-3D7C9FC22A02}.Debug|Win32.Build.0 = Debug|Win32 {0BBD34CB-5891-477F-B665-3D7C9FC22A02}.Release|Win32.ActiveCfg = Release|Win32 {0BBD34CB-5891-477F-B665-3D7C9FC22A02}.Release|Win32.Build.0 = Release|Win32 {B47166A1-4827-4D80-97E3-743BDE61146F}.Debug|Win32.ActiveCfg = Debug|Win32 {B47166A1-4827-4D80-97E3-743BDE61146F}.Debug|Win32.Build.0 = Debug|Win32 {B47166A1-4827-4D80-97E3-743BDE61146F}.Release|Win32.ActiveCfg = Release|Win32 {B47166A1-4827-4D80-97E3-743BDE61146F}.Release|Win32.Build.0 = Release|Win32 {AE5236EB-62AC-4AD9-81A3-F4BDC85D3876}.Debug|Win32.ActiveCfg = Debug|Win32 {AE5236EB-62AC-4AD9-81A3-F4BDC85D3876}.Debug|Win32.Build.0 = Debug|Win32 {AE5236EB-62AC-4AD9-81A3-F4BDC85D3876}.Release|Win32.ActiveCfg = Release|Win32 {AE5236EB-62AC-4AD9-81A3-F4BDC85D3876}.Release|Win32.Build.0 = Release|Win32 {6A78A67D-A743-4594-858A-A4F1C536A8C1}.Debug|Win32.ActiveCfg = Debug|Win32 {6A78A67D-A743-4594-858A-A4F1C536A8C1}.Debug|Win32.Build.0 = Debug|Win32 {6A78A67D-A743-4594-858A-A4F1C536A8C1}.Release|Win32.ActiveCfg = Release|Win32 {6A78A67D-A743-4594-858A-A4F1C536A8C1}.Release|Win32.Build.0 = Release|Win32 {651CEFE5-9BDB-4003-9D35-8D05FA2C06D7}.Debug|Win32.ActiveCfg = Debug|Win32 {651CEFE5-9BDB-4003-9D35-8D05FA2C06D7}.Debug|Win32.Build.0 = Debug|Win32 {651CEFE5-9BDB-4003-9D35-8D05FA2C06D7}.Release|Win32.ActiveCfg = Release|Win32 {651CEFE5-9BDB-4003-9D35-8D05FA2C06D7}.Release|Win32.Build.0 = Release|Win32 EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE EndGlobalSection EndGlobal 
sphinx-2.0.4-release/misc/0000755000176700017710000000000011724063141014675 5ustar deogardeogarsphinx-2.0.4-release/misc/build.cmd0000644000176700017710000001024611657217047016477 0ustar deogardeogar@echo off set URL=http://sphinxsearch.googlecode.com/svn/trunk set REL=1.11 set PATH=C:\Program Files\Microsoft Visual Studio 8\Common7\IDE;%PATH%; set ICONVROOT=C:\Bin\Iconv set EXPATROOT=C:\Bin\Expat set MYSQLROOT=C:\Program Files\MySQL\MySQL Server 5.0 set PGSQLROOT=C:\Program Files\PostgreSQL\8.3 if "%1" EQU "" ( echo *** FATAL: specify build tag as 1st argument (eg. build.cmd rc2 or build.cmd r2345^). exit ) else ( set TAG=-%1 ) rmdir /s /q sphinxbuild 2>nul mkdir sphinxbuild cd sphinxbuild if %ERRORLEVEL% NEQ 0 ( echo *** FATAL: failed to create build directory. exit ) svn co %URL% checkout if %ERRORLEVEL% NEQ 0 ( echo *** FATAL: checkout error. exit ) call checkout\src\svnxrev.cmd checkout echo #define SPHINX_TAG "%TAG%" >> checkout\src\sphinxversion.h perl -i.bak -p -e "s/(_TAGREV \").*(r\d+\")/\1\2/g;" checkout\src\sphinxversion.h rmdir /s /q checkout\.svn @rem strip "release" tag from zip names if "%1" EQU "release" ( set TAG= ) @rem ========================================== @rem === regular build and common packaging === @rem ========================================== cd checkout devenv sphinx05.sln /Rebuild Release if %ERRORLEVEL% NEQ 0 ( echo *** FATAL: build error. exit ) del /q bin\release\test*.* cd .. mkdir common for %%i in (api doc contrib) do ( svn export checkout\%%i common\%%i ) for %%i in (COPYING INSTALL sphinx.conf.in sphinx-min.conf.in example.sql) do ( copy checkout\%%i common\%%i ) set BASE=sphinx-%REL%%TAG%-win32 mkdir %BASE% mkdir %BASE%\bin copy checkout\bin\release\*.exe %BASE%\bin copy "%ICONVROOT%\bin\iconv.dll" %BASE%\bin copy "%EXPATROOT%\libs\libexpat.dll" %BASE%\bin copy "%MYSQLROOT%\bin\libmysql.dll" %BASE%\bin xcopy /q /s common\* %BASE% pkzip25 -add %BASE%.zip -dir %BASE%\* move %BASE%.zip .. set PDBS=sphinx-%REL%%TAG%-win32-debug mkdir %PDBS% mkdir %PDBS%\regular copy checkout\bin\release\*.pdb %PDBS%\regular @rem =================== @rem === pgsql build === @rem =================== cd checkout perl -i.bak -p -e "s/USE_PGSQL\s+\d/USE_PGSQL 1/g;" src\sphinx.h devenv sphinx05.sln /Rebuild Release if %ERRORLEVEL% NEQ 0 ( echo *** FATAL: build error. exit ) del /q bin\release\test*.* cd .. set BASE=sphinx-%REL%%TAG%-win32-pgsql mkdir %BASE% mkdir %BASE%\bin copy checkout\bin\release\*.exe %BASE%\bin for %%i in (comerr32.dll gssapi32.dll iconv.dll k5sprt32.dll krb5_32.dll libeay32.dll libiconv2.dll libintl3.dll libpq.dll ssleay32.dll msvcr71.dll) do ( copy "%PGSQLROOT%\bin\%%i" %BASE%\bin ) copy "%EXPATROOT%\libs\libexpat.dll" %BASE%\bin copy "%MYSQLROOT%\bin\libmysql.dll" %BASE%\bin xcopy /q /s common\* %BASE% pkzip25 -add %BASE%.zip -dir %BASE%\* move %BASE%.zip .. mkdir %PDBS%\pgsql copy checkout\bin\release\*.pdb %PDBS%\pgsql @rem ======================= @rem === id64-full build === @rem ======================= cd checkout perl -i.bak -p -e "s/USE_LIBSTEMMER\s+\d/USE_LIBSTEMMER 1/g;" src\sphinx.h perl -i.bak -p -e "s/USE_64BIT\s+\d/USE_64BIT 1/g;" src\sphinx.h wget http://snowball.tartarus.org/dist/libstemmer_c.tgz gunzip libstemmer_c.tgz tar xvf libstemmer_c.tar cd libstemmer_c devenv libstemmer_c.sln /Rebuild Release if %ERRORLEVEL% NEQ 0 ( echo *** FATAL: build error. exit ) cd .. devenv sphinx05.sln /Rebuild Release if %ERRORLEVEL% NEQ 0 ( echo *** FATAL: build error. exit ) del /q bin\release\test*.* cd .. 
set BASE=sphinx-%REL%%TAG%-win32-id64-full mkdir %BASE% mkdir %BASE%\bin copy checkout\bin\release\*.exe %BASE%\bin for %%i in (comerr32.dll gssapi32.dll iconv.dll k5sprt32.dll krb5_32.dll libeay32.dll libiconv2.dll libintl3.dll libpq.dll ssleay32.dll msvcr71.dll) do ( copy "%PGSQLROOT%\bin\%%i" %BASE%\bin ) copy "%EXPATROOT%\libs\libexpat.dll" %BASE%\bin copy "%MYSQLROOT%\bin\libmysql.dll" %BASE%\bin xcopy /q /s common\* %BASE% pkzip25 -add %BASE%.zip -dir %BASE%\* move %BASE%.zip .. mkdir %PDBS%\id64full copy checkout\bin\release\*.pdb %PDBS%\id64full @rem ============================= @rem === debug symbols archive === @rem ============================== pkzip25 -add %PDBS%.zip -dir %PDBS%\* move %PDBS%.zip .. sphinx-2.0.4-release/misc/resolve.py0000644000176700017710000000203711572144754016743 0ustar deogardeogar# # $Id$ # import sys, re if len(sys.argv)!=3: print 'Usage: python resolve.py BACKTRACE SYMBOLS' sys.exit(0) def myopen(name): if name == '-': return sys.stdin fh = open(name, 'r') if not fh: print 'FATAL: failed to open %s' % name sys.exit(1) return fh syms = [] fp = myopen(sys.argv[2]) for line in fp.readlines(): line = line.rstrip() match = re.match('([0-9a-fA-F]+) \w ', line) if match: addr = int(match.group(1), 16) name = line[len(match.group(0)):] syms.append([addr, name]) fp.close() fp = myopen(sys.argv[1]) for line in fp.readlines(): line = line.rstrip() # skip plain boring log entries if re.search('^\[\w+\s+\w+\s+\d+\s+\d+:\d+:\d+\.\d+ \d+\] \[\d+\] \S', line): continue # resolve symbols, if any match = re.search('\[0x([0-9a-fA-F]+)\]', line) if match: addr = int(match.group(1), 16) resolved = '???' for i in range(len(syms)-1): if syms[i][0]<=addr and addr dict.sql mysql -u root test < dict.sql indexer --config suggest.conf --all searchd --config suggest.conf php suggest.php --query sphynx --eof-- sphinx-2.0.4-release/misc/suggest/suggest.php0000644000176700017710000000670611617056150020564 0ustar deogardeogarSetMatchMode ( SPH_MATCH_EXTENDED2 ); $cl->SetRankingMode ( SPH_RANK_WORDCOUNT ); $cl->SetFilterRange ( "len", $len-$delta, $len+$delta ); $cl->SetSelect ( "*, @weight+$delta-abs(len-$len) AS myrank" ); $cl->SetSortMode ( SPH_SORT_EXTENDED, "myrank DESC, freq DESC" ); $cl->SetArrayResult ( true ); // pull top-N best trigram matches and run them through Levenshtein $res = $cl->Query ( $query, "suggest", 0, TOP_COUNT ); if ( !$res || !$res["matches"] ) return false; if ( SUGGEST_DEBUG ) { print "--- DEBUG START ---\n"; foreach ( $res["matches"] as $match ) { $w = $match["keyword"]; $myrank = @$match["attrs"]["myrank"]; if ( $myrank ) $myrank = ", myrank=$myrank"; $levdist = levenshtein ( $keyword, $w ); print "id=$match[id], weight=$match[weight], freq={$match[attrs][freq]}{$myrank}, word=$w, levdist=$levdist\n"; } print "--- DEBUG END ---\n"; } // further restrict trigram matches with a sane Levenshtein distance limit foreach ( $res["matches"] as $match ) { $suggested = $match["attrs"]["keyword"]; if ( levenshtein ( $keyword, $suggested )<=LEVENSHTEIN_THRESHOLD ) return $suggested; } return $keyword; } /// main if ( $_SERVER["argc"]<2 ) { die ( "usage:\n" . "php suggest.php --builddict\treads stopwords from stdin, prints SQL dump of the dictionary to stdout\n" . 
"php suggest.php --query WORD\tqueries Sphinx, prints suggestion\n" ); } if ( $_SERVER["argv"][1]=="--builddict" ) { $in = fopen ( "php://stdin", "r" ); $out = fopen ( "php://stdout", "w+" ); BuildDictionarySQL ( $out, $in ); } if ( $_SERVER["argv"][1]=="--query" ) { mysql_connect ( "localhost", "root", "" ) or die ( "mysql_connect() failed: ".mysql_error() ); mysql_select_db ( "test" ) or die ( "mysql_select_db() failed: ".mysql_error() ); $keyword = $_SERVER["argv"][2]; printf ( "keyword: %s\nsuggestion: %s\n", $keyword, MakeSuggestion($keyword) ); } sphinx-2.0.4-release/config/0000755000176700017710000000000011724063141015207 5ustar deogardeogarsphinx-2.0.4-release/config/config.h.in0000644000176700017710000001712411621022603017231 0ustar deogardeogar/* config/config.h.in. Generated from configure.ac by autoheader. */ /* Define to be the name of the compiler. */ #undef COMPILER /* Define to 1 if you have the `backtrace' function. */ #undef HAVE_BACKTRACE /* Define to 1 if you have the `backtrace_symbols' function. */ #undef HAVE_BACKTRACE_SYMBOLS /* Define to 1 if you have the `clock_gettime' function. */ #undef HAVE_CLOCK_GETTIME /* Define to 1 if you have the `dlerror' function. */ #undef HAVE_DLERROR /* Define to 1 if you have the `dlopen' function. */ #undef HAVE_DLOPEN /* Define to 1 if you don't have `vprintf' but do have `_doprnt.' */ #undef HAVE_DOPRNT /* Define to 1 if you have the `dup2' function. */ #undef HAVE_DUP2 /* Define to 1 if you have the header file. */ #undef HAVE_EXECINFO_H /* Define to 1 if you have the header file. */ #undef HAVE_FCNTL_H /* Define to 1 if you have the `fork' function. */ #undef HAVE_FORK /* Define if F_SETLKW is defined in fcntl.h */ #undef HAVE_F_SETLKW /* Define to 1 if you have the `gethostbyname' function. */ #undef HAVE_GETHOSTBYNAME /* Define to 1 if you have the `gettimeofday' function. */ #undef HAVE_GETTIMEOFDAY /* Define to 1 if you have the header file. */ #undef HAVE_INTTYPES_H /* Define to 1 if you have the `dl' library (-ldl). */ #undef HAVE_LIBDL /* Define to 1 if you have the `rt' library (-lrt). */ #undef HAVE_LIBRT /* Define to 1 if you have the header file. */ #undef HAVE_LIMITS_H /* Define if LOCK_EX is defined in sys/file.h */ #undef HAVE_LOCK_EX /* Define to 1 if you have the `logf' function. */ #undef HAVE_LOGF /* Define to 1 if your system has a GNU libc compatible `malloc' function, and to 0 otherwise. */ #undef HAVE_MALLOC /* Define to 1 if you have the `memmove' function. */ #undef HAVE_MEMMOVE /* Define to 1 if you have the header file. */ #undef HAVE_MEMORY_H /* Define to 1 if you have the `memset' function. */ #undef HAVE_MEMSET /* Define to 1 if you have the header file. */ #undef HAVE_NETDB_H /* Define to 1 if you have the header file. */ #undef HAVE_NETINET_IN_H /* Define to 1 if you have the `pread' function. */ #undef HAVE_PREAD /* Define to 1 if you have the header file. */ #undef HAVE_PTHREAD_H /* Define to 1 if you have the `pthread_mutex_timedlock' function. */ #undef HAVE_PTHREAD_MUTEX_TIMEDLOCK /* Define to 1 if your system has a GNU libc compatible `realloc' function, and to 0 otherwise. */ #undef HAVE_REALLOC /* Define to 1 if you have the `select' function. */ #undef HAVE_SELECT /* Define to 1 if you have the `socket' function. */ #undef HAVE_SOCKET /* Define to 1 if `stat' has the bug that it succeeds when given the zero-length file name argument. */ #undef HAVE_STAT_EMPTY_STRING_BUG /* Define to 1 if stdbool.h conforms to C99. */ #undef HAVE_STDBOOL_H /* Define to 1 if you have the header file. 
*/ #undef HAVE_STDINT_H /* Define to 1 if you have the header file. */ #undef HAVE_STDLIB_H /* Define to 1 if you have the `strcasecmp' function. */ #undef HAVE_STRCASECMP /* Define to 1 if you have the `strchr' function. */ #undef HAVE_STRCHR /* Define to 1 if you have the `strerror' function. */ #undef HAVE_STRERROR /* Define to 1 if you have the header file. */ #undef HAVE_STRINGS_H /* Define to 1 if you have the header file. */ #undef HAVE_STRING_H /* Define to 1 if you have the `strncasecmp' function. */ #undef HAVE_STRNCASECMP /* Define to 1 if you have the `strstr' function. */ #undef HAVE_STRSTR /* Define to 1 if you have the `strtol' function. */ #undef HAVE_STRTOL /* Define to 1 if you have the header file. */ #undef HAVE_SYS_FILE_H /* Define to 1 if you have the header file. */ #undef HAVE_SYS_SELECT_H /* Define to 1 if you have the header file. */ #undef HAVE_SYS_SOCKET_H /* Define to 1 if you have the header file. */ #undef HAVE_SYS_STAT_H /* Define to 1 if you have the header file. */ #undef HAVE_SYS_TIME_H /* Define to 1 if you have the header file. */ #undef HAVE_SYS_TYPES_H /* Define to 1 if you have that is POSIX.1 compatible. */ #undef HAVE_SYS_WAIT_H /* Define to 1 if you have the header file. */ #undef HAVE_UNISTD_H /* Define to 1 if you have the `vfork' function. */ #undef HAVE_VFORK /* Define to 1 if you have the header file. */ #undef HAVE_VFORK_H /* Define to 1 if you have the `vprintf' function. */ #undef HAVE_VPRINTF /* Define to 1 if `fork' works. */ #undef HAVE_WORKING_FORK /* Define to 1 if `vfork' works. */ #undef HAVE_WORKING_VFORK /* Define to 1 if the system has the type `_Bool'. */ #undef HAVE__BOOL /* whether 2nd arg to iconv() is const ptr */ #undef ICONV_INBUF_CONST /* Define to 1 if `lstat' dereferences a symlink specified with a trailing slash. */ #undef LSTAT_FOLLOWS_SLASHED_SYMLINK /* Full name OS */ #undef OS_UNAME /* Name of package */ #undef PACKAGE /* Define to the address where bug reports for this package should be sent. */ #undef PACKAGE_BUGREPORT /* Define to the full name of this package. */ #undef PACKAGE_NAME /* Define to the full name and version of this package. */ #undef PACKAGE_STRING /* Define to the one symbol short name of this package. */ #undef PACKAGE_TARNAME /* Define to the home page for this package. */ #undef PACKAGE_URL /* Define to the version of this package. */ #undef PACKAGE_VERSION /* Define as the return type of signal handlers (`int' or `void'). */ #undef RETSIGTYPE /* Define to the type of arg 1 for `select'. */ #undef SELECT_TYPE_ARG1 /* Define to the type of args 2, 3 and 4 for `select'. */ #undef SELECT_TYPE_ARG234 /* Define to the type of arg 5 for `select'. */ #undef SELECT_TYPE_ARG5 /* Define to 1 if you have the ANSI C header files. */ #undef STDC_HEADERS /* Define to 1 if you can safely include both and . 
*/ #undef TIME_WITH_SYS_TIME /* whether unaligned RAM access is possible */ #undef UNALIGNED_RAM_ACCESS /* 64-bit document and word IDs */ #undef USE_64BIT /* define to use expat XML library */ #undef USE_LIBEXPAT /* define to use iconv library */ #undef USE_LIBICONV /* libstemmer support */ #undef USE_LIBSTEMMER /* little-endian */ #undef USE_LITTLE_ENDIAN /* Define to 1 if you want to compile with MySQL support */ #undef USE_MYSQL /* define to use ODBC library */ #undef USE_ODBC /* Define to 1 if you want to compile with PostgreSQL support */ #undef USE_PGSQL /* define to use POSIX Syslog for logging */ #undef USE_SYSLOG /* define to use Zlib */ #undef USE_ZLIB /* Version number of package */ #undef VERSION /* Define WORDS_BIGENDIAN to 1 if your processor stores words with the most significant byte first (like Motorola and SPARC, unlike Intel). */ #if defined AC_APPLE_UNIVERSAL_BUILD # if defined __BIG_ENDIAN__ # define WORDS_BIGENDIAN 1 # endif #else # ifndef WORDS_BIGENDIAN # undef WORDS_BIGENDIAN # endif #endif /* Define to empty if `const' does not conform to ANSI C. */ #undef const /* Define to `__inline__' or `__inline' if that's what the C compiler calls it, or to nothing if 'inline' is not supported under any name. */ #ifndef __cplusplus #undef inline #endif /* Define to rpl_malloc if the replacement function should be used. */ #undef malloc /* Define to `long int' if does not define. */ #undef off_t /* Define to `int' if does not define. */ #undef pid_t /* Define to rpl_realloc if the replacement function should be used. */ #undef realloc /* Define to `unsigned int' if does not define. */ #undef size_t /* Define as `fork' if `vfork' does not work. */ #undef vfork sphinx-2.0.4-release/config/install-sh0000755000176700017710000002202110441541645017215 0ustar deogardeogar#!/bin/sh # install - install a program, script, or datafile scriptversion=2005-05-14.22 # This originates from X11R5 (mit/util/scripts/install.sh), which was # later released in X11R6 (xc/config/util/install.sh) with the # following copyright and license. # # Copyright (C) 1994 X Consortium # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to # deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or # sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # X CONSORTIUM BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN # AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNEC- # TION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. # # Except as contained in this notice, the name of the X Consortium shall not # be used in advertising or otherwise to promote the sale, use or other deal- # ings in this Software without prior written authorization from the X Consor- # tium. # # # FSF changes to this file are in the public domain. 
# # Calling this script install-sh is preferred over install.sh, to prevent # `make' implicit rules from creating a file called install from it # when there is no Makefile. # # This script is compatible with the BSD install script, but was written # from scratch. It can only install one file at a time, a restriction # shared with many OS's install programs. # set DOITPROG to echo to test this script # Don't use :- since 4.3BSD and earlier shells don't like it. doit="${DOITPROG-}" # put in absolute paths if you don't have them in your path; or use env. vars. mvprog="${MVPROG-mv}" cpprog="${CPPROG-cp}" chmodprog="${CHMODPROG-chmod}" chownprog="${CHOWNPROG-chown}" chgrpprog="${CHGRPPROG-chgrp}" stripprog="${STRIPPROG-strip}" rmprog="${RMPROG-rm}" mkdirprog="${MKDIRPROG-mkdir}" chmodcmd="$chmodprog 0755" chowncmd= chgrpcmd= stripcmd= rmcmd="$rmprog -f" mvcmd="$mvprog" src= dst= dir_arg= dstarg= no_target_directory= usage="Usage: $0 [OPTION]... [-T] SRCFILE DSTFILE or: $0 [OPTION]... SRCFILES... DIRECTORY or: $0 [OPTION]... -t DIRECTORY SRCFILES... or: $0 [OPTION]... -d DIRECTORIES... In the 1st form, copy SRCFILE to DSTFILE. In the 2nd and 3rd, copy all SRCFILES to DIRECTORY. In the 4th, create DIRECTORIES. Options: -c (ignored) -d create directories instead of installing files. -g GROUP $chgrpprog installed files to GROUP. -m MODE $chmodprog installed files to MODE. -o USER $chownprog installed files to USER. -s $stripprog installed files. -t DIRECTORY install into DIRECTORY. -T report an error if DSTFILE is a directory. --help display this help and exit. --version display version info and exit. Environment variables override the default commands: CHGRPPROG CHMODPROG CHOWNPROG CPPROG MKDIRPROG MVPROG RMPROG STRIPPROG " while test -n "$1"; do case $1 in -c) shift continue;; -d) dir_arg=true shift continue;; -g) chgrpcmd="$chgrpprog $2" shift shift continue;; --help) echo "$usage"; exit $?;; -m) chmodcmd="$chmodprog $2" shift shift continue;; -o) chowncmd="$chownprog $2" shift shift continue;; -s) stripcmd=$stripprog shift continue;; -t) dstarg=$2 shift shift continue;; -T) no_target_directory=true shift continue;; --version) echo "$0 $scriptversion"; exit $?;; *) # When -d is used, all remaining arguments are directories to create. # When -t is used, the destination is already specified. test -n "$dir_arg$dstarg" && break # Otherwise, the last argument is the destination. Remove it from $@. for arg do if test -n "$dstarg"; then # $@ is not empty: it contains at least $arg. set fnord "$@" "$dstarg" shift # fnord fi shift # arg dstarg=$arg done break;; esac done if test -z "$1"; then if test -z "$dir_arg"; then echo "$0: no input file specified." >&2 exit 1 fi # It's OK to call `install-sh -d' without argument. # This can happen when creating conditional directories. exit 0 fi for src do # Protect names starting with `-'. case $src in -*) src=./$src ;; esac if test -n "$dir_arg"; then dst=$src src= if test -d "$dst"; then mkdircmd=: chmodcmd= else mkdircmd=$mkdirprog fi else # Waiting for this to be detected by the "$cpprog $src $dsttmp" command # might cause directories to be created, which would be especially bad # if $src (and thus $dsttmp) contains '*'. if test ! -f "$src" && test ! -d "$src"; then echo "$0: $src does not exist." >&2 exit 1 fi if test -z "$dstarg"; then echo "$0: no destination specified." >&2 exit 1 fi dst=$dstarg # Protect names starting with `-'. 
case $dst in -*) dst=./$dst ;; esac # If destination is a directory, append the input filename; won't work # if double slashes aren't ignored. if test -d "$dst"; then if test -n "$no_target_directory"; then echo "$0: $dstarg: Is a directory" >&2 exit 1 fi dst=$dst/`basename "$src"` fi fi # This sed command emulates the dirname command. dstdir=`echo "$dst" | sed -e 's,/*$,,;s,[^/]*$,,;s,/*$,,;s,^$,.,'` # Make sure that the destination directory exists. # Skip lots of stat calls in the usual case. if test ! -d "$dstdir"; then defaultIFS=' ' IFS="${IFS-$defaultIFS}" oIFS=$IFS # Some sh's can't handle IFS=/ for some reason. IFS='%' set x `echo "$dstdir" | sed -e 's@/@%@g' -e 's@^%@/@'` shift IFS=$oIFS pathcomp= while test $# -ne 0 ; do pathcomp=$pathcomp$1 shift if test ! -d "$pathcomp"; then $mkdirprog "$pathcomp" # mkdir can fail with a `File exist' error in case several # install-sh are creating the directory concurrently. This # is OK. test -d "$pathcomp" || exit fi pathcomp=$pathcomp/ done fi if test -n "$dir_arg"; then $doit $mkdircmd "$dst" \ && { test -z "$chowncmd" || $doit $chowncmd "$dst"; } \ && { test -z "$chgrpcmd" || $doit $chgrpcmd "$dst"; } \ && { test -z "$stripcmd" || $doit $stripcmd "$dst"; } \ && { test -z "$chmodcmd" || $doit $chmodcmd "$dst"; } else dstfile=`basename "$dst"` # Make a couple of temp file names in the proper directory. dsttmp=$dstdir/_inst.$$_ rmtmp=$dstdir/_rm.$$_ # Trap to clean up those temp files at exit. trap 'ret=$?; rm -f "$dsttmp" "$rmtmp" && exit $ret' 0 trap '(exit $?); exit' 1 2 13 15 # Copy the file name to the temp name. $doit $cpprog "$src" "$dsttmp" && # and set any options; do chmod last to preserve setuid bits. # # If any of these fail, we abort the whole thing. If we want to # ignore errors from any of these, just make sure not to ignore # errors from the above "$doit $cpprog $src $dsttmp" command. # { test -z "$chowncmd" || $doit $chowncmd "$dsttmp"; } \ && { test -z "$chgrpcmd" || $doit $chgrpcmd "$dsttmp"; } \ && { test -z "$stripcmd" || $doit $stripcmd "$dsttmp"; } \ && { test -z "$chmodcmd" || $doit $chmodcmd "$dsttmp"; } && # Now rename the file to the real destination. { $doit $mvcmd -f "$dsttmp" "$dstdir/$dstfile" 2>/dev/null \ || { # The rename failed, perhaps because mv can't rename something else # to itself, or perhaps because mv is so ancient that it does not # support -f. # Now remove or move aside any old file at destination location. # We try this two ways since rm can't unlink itself on some # systems and the destination file might be busy for other # reasons. In this case, the final cleanup might fail but the new # file should still install successfully. { if test -f "$dstdir/$dstfile"; then $doit $rmcmd -f "$dstdir/$dstfile" 2>/dev/null \ || $doit $mvcmd -f "$dstdir/$dstfile" "$rmtmp" 2>/dev/null \ || { echo "$0: cannot unlink or rename $dstdir/$dstfile" >&2 (exit 1); exit 1 } else : fi } && # Now rename the file to the real destination. $doit $mvcmd "$dsttmp" "$dstdir/$dstfile" } } fi || { (exit 1); exit 1; } done # The final little trick to "correctly" pass the exit status to the exit trap. { (exit 0); exit 0 } # Local variables: # eval: (add-hook 'write-file-hooks 'time-stamp) # time-stamp-start: "scriptversion=" # time-stamp-format: "%:y-%02m-%02d.%02H" # time-stamp-end: "$" # End: sphinx-2.0.4-release/config/missing0000755000176700017710000002540610441541645016622 0ustar deogardeogar#! /bin/sh # Common stub for a few missing GNU programs while installing. 
scriptversion=2005-06-08.21 # Copyright (C) 1996, 1997, 1999, 2000, 2002, 2003, 2004, 2005 # Free Software Foundation, Inc. # Originally by Fran,cois Pinard , 1996. # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2, or (at your option) # any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. # As a special exception to the GNU General Public License, if you # distribute this file as part of a program that contains a # configuration script generated by Autoconf, you may include it under # the same distribution terms that you use for the rest of that program. if test $# -eq 0; then echo 1>&2 "Try \`$0 --help' for more information" exit 1 fi run=: # In the cases where this matters, `missing' is being run in the # srcdir already. if test -f configure.ac; then configure_ac=configure.ac else configure_ac=configure.in fi msg="missing on your system" case "$1" in --run) # Try to run requested program, and just exit if it succeeds. run= shift "$@" && exit 0 # Exit code 63 means version mismatch. This often happens # when the user try to use an ancient version of a tool on # a file that requires a minimum version. In this case we # we should proceed has if the program had been absent, or # if --run hadn't been passed. if test $? = 63; then run=: msg="probably too old" fi ;; -h|--h|--he|--hel|--help) echo "\ $0 [OPTION]... PROGRAM [ARGUMENT]... Handle \`PROGRAM [ARGUMENT]...' for when PROGRAM is missing, or return an error status if there is no known handling for PROGRAM. Options: -h, --help display this help and exit -v, --version output version information and exit --run try to run the given command, and emulate it if it fails Supported PROGRAM values: aclocal touch file \`aclocal.m4' autoconf touch file \`configure' autoheader touch file \`config.h.in' automake touch all \`Makefile.in' files bison create \`y.tab.[ch]', if possible, from existing .[ch] flex create \`lex.yy.c', if possible, from existing .c help2man touch the output file lex create \`lex.yy.c', if possible, from existing .c makeinfo touch the output file tar try tar, gnutar, gtar, then tar without non-portable flags yacc create \`y.tab.[ch]', if possible, from existing .[ch] Send bug reports to ." exit $? ;; -v|--v|--ve|--ver|--vers|--versi|--versio|--version) echo "missing $scriptversion (GNU Automake)" exit $? ;; -*) echo 1>&2 "$0: Unknown \`$1' option" echo 1>&2 "Try \`$0 --help' for more information" exit 1 ;; esac # Now exit if we have it, but it failed. Also exit now if we # don't have it and --version was passed (most likely to detect # the program). case "$1" in lex|yacc) # Not GNU programs, they don't have --version. ;; tar) if test -n "$run"; then echo 1>&2 "ERROR: \`tar' requires --run" exit 1 elif test "x$2" = "x--version" || test "x$2" = "x--help"; then exit 1 fi ;; *) if test -z "$run" && ($1 --version) > /dev/null 2>&1; then # We have it, but it failed. exit 1 elif test "x$2" = "x--version" || test "x$2" = "x--help"; then # Could not run --version or --help. 
This is probably someone # running `$TOOL --version' or `$TOOL --help' to check whether # $TOOL exists and not knowing $TOOL uses missing. exit 1 fi ;; esac # If it does not exist, or fails to run (possibly an outdated version), # try to emulate it. case "$1" in aclocal*) echo 1>&2 "\ WARNING: \`$1' is $msg. You should only need it if you modified \`acinclude.m4' or \`${configure_ac}'. You might want to install the \`Automake' and \`Perl' packages. Grab them from any GNU archive site." touch aclocal.m4 ;; autoconf) echo 1>&2 "\ WARNING: \`$1' is $msg. You should only need it if you modified \`${configure_ac}'. You might want to install the \`Autoconf' and \`GNU m4' packages. Grab them from any GNU archive site." touch configure ;; autoheader) echo 1>&2 "\ WARNING: \`$1' is $msg. You should only need it if you modified \`acconfig.h' or \`${configure_ac}'. You might want to install the \`Autoconf' and \`GNU m4' packages. Grab them from any GNU archive site." files=`sed -n 's/^[ ]*A[CM]_CONFIG_HEADER(\([^)]*\)).*/\1/p' ${configure_ac}` test -z "$files" && files="config.h" touch_files= for f in $files; do case "$f" in *:*) touch_files="$touch_files "`echo "$f" | sed -e 's/^[^:]*://' -e 's/:.*//'`;; *) touch_files="$touch_files $f.in";; esac done touch $touch_files ;; automake*) echo 1>&2 "\ WARNING: \`$1' is $msg. You should only need it if you modified \`Makefile.am', \`acinclude.m4' or \`${configure_ac}'. You might want to install the \`Automake' and \`Perl' packages. Grab them from any GNU archive site." find . -type f -name Makefile.am -print | sed 's/\.am$/.in/' | while read f; do touch "$f"; done ;; autom4te) echo 1>&2 "\ WARNING: \`$1' is needed, but is $msg. You might have modified some files without having the proper tools for further handling them. You can get \`$1' as part of \`Autoconf' from any GNU archive site." file=`echo "$*" | sed -n 's/.*--output[ =]*\([^ ]*\).*/\1/p'` test -z "$file" && file=`echo "$*" | sed -n 's/.*-o[ ]*\([^ ]*\).*/\1/p'` if test -f "$file"; then touch $file else test -z "$file" || exec >$file echo "#! /bin/sh" echo "# Created by GNU Automake missing as a replacement of" echo "# $ $@" echo "exit 0" chmod +x $file exit 1 fi ;; bison|yacc) echo 1>&2 "\ WARNING: \`$1' $msg. You should only need it if you modified a \`.y' file. You may need the \`Bison' package in order for those modifications to take effect. You can get \`Bison' from any GNU archive site." rm -f y.tab.c y.tab.h if [ $# -ne 1 ]; then eval LASTARG="\${$#}" case "$LASTARG" in *.y) SRCFILE=`echo "$LASTARG" | sed 's/y$/c/'` if [ -f "$SRCFILE" ]; then cp "$SRCFILE" y.tab.c fi SRCFILE=`echo "$LASTARG" | sed 's/y$/h/'` if [ -f "$SRCFILE" ]; then cp "$SRCFILE" y.tab.h fi ;; esac fi if [ ! -f y.tab.h ]; then echo >y.tab.h fi if [ ! -f y.tab.c ]; then echo 'main() { return 0; }' >y.tab.c fi ;; lex|flex) echo 1>&2 "\ WARNING: \`$1' is $msg. You should only need it if you modified a \`.l' file. You may need the \`Flex' package in order for those modifications to take effect. You can get \`Flex' from any GNU archive site." rm -f lex.yy.c if [ $# -ne 1 ]; then eval LASTARG="\${$#}" case "$LASTARG" in *.l) SRCFILE=`echo "$LASTARG" | sed 's/l$/c/'` if [ -f "$SRCFILE" ]; then cp "$SRCFILE" lex.yy.c fi ;; esac fi if [ ! -f lex.yy.c ]; then echo 'main() { return 0; }' >lex.yy.c fi ;; help2man) echo 1>&2 "\ WARNING: \`$1' is $msg. You should only need it if you modified a dependency of a manual page. You may need the \`Help2man' package in order for those modifications to take effect. 
You can get \`Help2man' from any GNU archive site." file=`echo "$*" | sed -n 's/.*-o \([^ ]*\).*/\1/p'` if test -z "$file"; then file=`echo "$*" | sed -n 's/.*--output=\([^ ]*\).*/\1/p'` fi if [ -f "$file" ]; then touch $file else test -z "$file" || exec >$file echo ".ab help2man is required to generate this page" exit 1 fi ;; makeinfo) echo 1>&2 "\ WARNING: \`$1' is $msg. You should only need it if you modified a \`.texi' or \`.texinfo' file, or any other file indirectly affecting the aspect of the manual. The spurious call might also be the consequence of using a buggy \`make' (AIX, DU, IRIX). You might want to install the \`Texinfo' package or the \`GNU make' package. Grab either from any GNU archive site." # The file to touch is that specified with -o ... file=`echo "$*" | sed -n 's/.*-o \([^ ]*\).*/\1/p'` if test -z "$file"; then # ... or it is the one specified with @setfilename ... infile=`echo "$*" | sed 's/.* \([^ ]*\) *$/\1/'` file=`sed -n '/^@setfilename/ { s/.* \([^ ]*\) *$/\1/; p; q; }' $infile` # ... or it is derived from the source name (dir/f.texi becomes f.info) test -z "$file" && file=`echo "$infile" | sed 's,.*/,,;s,.[^.]*$,,'`.info fi # If the file does not exist, the user really needs makeinfo; # let's fail without touching anything. test -f $file || exit 1 touch $file ;; tar) shift # We have already tried tar in the generic part. # Look for gnutar/gtar before invocation to avoid ugly error # messages. if (gnutar --version > /dev/null 2>&1); then gnutar "$@" && exit 0 fi if (gtar --version > /dev/null 2>&1); then gtar "$@" && exit 0 fi firstarg="$1" if shift; then case "$firstarg" in *o*) firstarg=`echo "$firstarg" | sed s/o//` tar "$firstarg" "$@" && exit 0 ;; esac case "$firstarg" in *h*) firstarg=`echo "$firstarg" | sed s/h//` tar "$firstarg" "$@" && exit 0 ;; esac fi echo 1>&2 "\ WARNING: I can't seem to be able to run \`tar' with the given arguments. You may want to install GNU tar or Free paxutils, or check the command line arguments." exit 1 ;; *) echo 1>&2 "\ WARNING: \`$1' is needed, and is $msg. You might have modified some files without having the proper tools for further handling them. Check the \`README' file, it often tells you about the needed prerequisites for installing this package. You may also peek at any GNU archive site, in case some other package would contain this missing \`$1' program." exit 1 ;; esac exit 0 # Local variables: # eval: (add-hook 'write-file-hooks 'time-stamp) # time-stamp-start: "scriptversion=" # time-stamp-format: "%:y-%02m-%02d.%02H" # time-stamp-end: "$" # End: sphinx-2.0.4-release/config/depcomp0000755000176700017710000003710010441541645016572 0ustar deogardeogar#! /bin/sh # depcomp - compile a program generating dependencies as side-effects scriptversion=2005-07-09.11 # Copyright (C) 1999, 2000, 2003, 2004, 2005 Free Software Foundation, Inc. # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2, or (at your option) # any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. 
# As a special exception to the GNU General Public License, if you # distribute this file as part of a program that contains a # configuration script generated by Autoconf, you may include it under # the same distribution terms that you use for the rest of that program. # Originally written by Alexandre Oliva . case $1 in '') echo "$0: No command. Try \`$0 --help' for more information." 1>&2 exit 1; ;; -h | --h*) cat <<\EOF Usage: depcomp [--help] [--version] PROGRAM [ARGS] Run PROGRAMS ARGS to compile a file, generating dependencies as side-effects. Environment variables: depmode Dependency tracking mode. source Source file read by `PROGRAMS ARGS'. object Object file output by `PROGRAMS ARGS'. DEPDIR directory where to store dependencies. depfile Dependency file to output. tmpdepfile Temporary file to use when outputing dependencies. libtool Whether libtool is used (yes/no). Report bugs to . EOF exit $? ;; -v | --v*) echo "depcomp $scriptversion" exit $? ;; esac if test -z "$depmode" || test -z "$source" || test -z "$object"; then echo "depcomp: Variables source, object and depmode must be set" 1>&2 exit 1 fi # Dependencies for sub/bar.o or sub/bar.obj go into sub/.deps/bar.Po. depfile=${depfile-`echo "$object" | sed 's|[^\\/]*$|'${DEPDIR-.deps}'/&|;s|\.\([^.]*\)$|.P\1|;s|Pobj$|Po|'`} tmpdepfile=${tmpdepfile-`echo "$depfile" | sed 's/\.\([^.]*\)$/.T\1/'`} rm -f "$tmpdepfile" # Some modes work just like other modes, but use different flags. We # parameterize here, but still list the modes in the big case below, # to make depend.m4 easier to write. Note that we *cannot* use a case # here, because this file can only contain one case statement. if test "$depmode" = hp; then # HP compiler uses -M and no extra arg. gccflag=-M depmode=gcc fi if test "$depmode" = dashXmstdout; then # This is just like dashmstdout with a different argument. dashmflag=-xM depmode=dashmstdout fi case "$depmode" in gcc3) ## gcc 3 implements dependency tracking that does exactly what ## we want. Yay! Note: for some reason libtool 1.4 doesn't like ## it if -MD -MP comes after the -MF stuff. Hmm. "$@" -MT "$object" -MD -MP -MF "$tmpdepfile" stat=$? if test $stat -eq 0; then : else rm -f "$tmpdepfile" exit $stat fi mv "$tmpdepfile" "$depfile" ;; gcc) ## There are various ways to get dependency output from gcc. Here's ## why we pick this rather obscure method: ## - Don't want to use -MD because we'd like the dependencies to end ## up in a subdir. Having to rename by hand is ugly. ## (We might end up doing this anyway to support other compilers.) ## - The DEPENDENCIES_OUTPUT environment variable makes gcc act like ## -MM, not -M (despite what the docs say). ## - Using -M directly means running the compiler twice (even worse ## than renaming). if test -z "$gccflag"; then gccflag=-MD, fi "$@" -Wp,"$gccflag$tmpdepfile" stat=$? if test $stat -eq 0; then : else rm -f "$tmpdepfile" exit $stat fi rm -f "$depfile" echo "$object : \\" > "$depfile" alpha=ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz ## The second -e expression handles DOS-style file names with drive letters. sed -e 's/^[^:]*: / /' \ -e 's/^['$alpha']:\/[^:]*: / /' < "$tmpdepfile" >> "$depfile" ## This next piece of magic avoids the `deleted header file' problem. ## The problem is that when a header file which appears in a .P file ## is deleted, the dependency causes make to die (because there is ## typically no way to rebuild the header). We avoid this by adding ## dummy dependencies for each header file. Too bad gcc doesn't do ## this for us directly. 
tr ' ' ' ' < "$tmpdepfile" | ## Some versions of gcc put a space before the `:'. On the theory ## that the space means something, we add a space to the output as ## well. ## Some versions of the HPUX 10.20 sed can't process this invocation ## correctly. Breaking it into two sed invocations is a workaround. sed -e 's/^\\$//' -e '/^$/d' -e '/:$/d' | sed -e 's/$/ :/' >> "$depfile" rm -f "$tmpdepfile" ;; hp) # This case exists only to let depend.m4 do its work. It works by # looking at the text of this script. This case will never be run, # since it is checked for above. exit 1 ;; sgi) if test "$libtool" = yes; then "$@" "-Wp,-MDupdate,$tmpdepfile" else "$@" -MDupdate "$tmpdepfile" fi stat=$? if test $stat -eq 0; then : else rm -f "$tmpdepfile" exit $stat fi rm -f "$depfile" if test -f "$tmpdepfile"; then # yes, the sourcefile depend on other files echo "$object : \\" > "$depfile" # Clip off the initial element (the dependent). Don't try to be # clever and replace this with sed code, as IRIX sed won't handle # lines with more than a fixed number of characters (4096 in # IRIX 6.2 sed, 8192 in IRIX 6.5). We also remove comment lines; # the IRIX cc adds comments like `#:fec' to the end of the # dependency line. tr ' ' ' ' < "$tmpdepfile" \ | sed -e 's/^.*\.o://' -e 's/#.*$//' -e '/^$/ d' | \ tr ' ' ' ' >> $depfile echo >> $depfile # The second pass generates a dummy entry for each header file. tr ' ' ' ' < "$tmpdepfile" \ | sed -e 's/^.*\.o://' -e 's/#.*$//' -e '/^$/ d' -e 's/$/:/' \ >> $depfile else # The sourcefile does not contain any dependencies, so just # store a dummy comment line, to avoid errors with the Makefile # "include basename.Plo" scheme. echo "#dummy" > "$depfile" fi rm -f "$tmpdepfile" ;; aix) # The C for AIX Compiler uses -M and outputs the dependencies # in a .u file. In older versions, this file always lives in the # current directory. Also, the AIX compiler puts `$object:' at the # start of each line; $object doesn't have directory information. # Version 6 uses the directory in both cases. stripped=`echo "$object" | sed 's/\(.*\)\..*$/\1/'` tmpdepfile="$stripped.u" if test "$libtool" = yes; then "$@" -Wc,-M else "$@" -M fi stat=$? if test -f "$tmpdepfile"; then : else stripped=`echo "$stripped" | sed 's,^.*/,,'` tmpdepfile="$stripped.u" fi if test $stat -eq 0; then : else rm -f "$tmpdepfile" exit $stat fi if test -f "$tmpdepfile"; then outname="$stripped.o" # Each line is of the form `foo.o: dependent.h'. # Do two passes, one to just change these to # `$object: dependent.h' and one to simply `dependent.h:'. sed -e "s,^$outname:,$object :," < "$tmpdepfile" > "$depfile" sed -e "s,^$outname: \(.*\)$,\1:," < "$tmpdepfile" >> "$depfile" else # The sourcefile does not contain any dependencies, so just # store a dummy comment line, to avoid errors with the Makefile # "include basename.Plo" scheme. echo "#dummy" > "$depfile" fi rm -f "$tmpdepfile" ;; icc) # Intel's C compiler understands `-MD -MF file'. However on # icc -MD -MF foo.d -c -o sub/foo.o sub/foo.c # ICC 7.0 will fill foo.d with something like # foo.o: sub/foo.c # foo.o: sub/foo.h # which is wrong. We want: # sub/foo.o: sub/foo.c # sub/foo.o: sub/foo.h # sub/foo.c: # sub/foo.h: # ICC 7.1 will output # foo.o: sub/foo.c sub/foo.h # and will wrap long lines using \ : # foo.o: sub/foo.c ... \ # sub/foo.h ... \ # ... "$@" -MD -MF "$tmpdepfile" stat=$? 
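  # Illustrative note (not from upstream depcomp; the exact flags are
  # assumptions): at this point "$@" has expanded to something along the
  # lines of
  #
  #   icc $(CFLAGS) -c -o sub/foo.o sub/foo.c -MD -MF sub/.deps/foo.TPo
  #
  # so the object file and its dependency dump come out of a single
  # compiler invocation, much as in the gcc3 mode earlier in this script.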
if test $stat -eq 0; then : else rm -f "$tmpdepfile" exit $stat fi rm -f "$depfile" # Each line is of the form `foo.o: dependent.h', # or `foo.o: dep1.h dep2.h \', or ` dep3.h dep4.h \'. # Do two passes, one to just change these to # `$object: dependent.h' and one to simply `dependent.h:'. sed "s,^[^:]*:,$object :," < "$tmpdepfile" > "$depfile" # Some versions of the HPUX 10.20 sed can't process this invocation # correctly. Breaking it into two sed invocations is a workaround. sed 's,^[^:]*: \(.*\)$,\1,;s/^\\$//;/^$/d;/:$/d' < "$tmpdepfile" | sed -e 's/$/ :/' >> "$depfile" rm -f "$tmpdepfile" ;; tru64) # The Tru64 compiler uses -MD to generate dependencies as a side # effect. `cc -MD -o foo.o ...' puts the dependencies into `foo.o.d'. # At least on Alpha/Redhat 6.1, Compaq CCC V6.2-504 seems to put # dependencies in `foo.d' instead, so we check for that too. # Subdirectories are respected. dir=`echo "$object" | sed -e 's|/[^/]*$|/|'` test "x$dir" = "x$object" && dir= base=`echo "$object" | sed -e 's|^.*/||' -e 's/\.o$//' -e 's/\.lo$//'` if test "$libtool" = yes; then # With Tru64 cc, shared objects can also be used to make a # static library. This mecanism is used in libtool 1.4 series to # handle both shared and static libraries in a single compilation. # With libtool 1.4, dependencies were output in $dir.libs/$base.lo.d. # # With libtool 1.5 this exception was removed, and libtool now # generates 2 separate objects for the 2 libraries. These two # compilations output dependencies in in $dir.libs/$base.o.d and # in $dir$base.o.d. We have to check for both files, because # one of the two compilations can be disabled. We should prefer # $dir$base.o.d over $dir.libs/$base.o.d because the latter is # automatically cleaned when .libs/ is deleted, while ignoring # the former would cause a distcleancheck panic. tmpdepfile1=$dir.libs/$base.lo.d # libtool 1.4 tmpdepfile2=$dir$base.o.d # libtool 1.5 tmpdepfile3=$dir.libs/$base.o.d # libtool 1.5 tmpdepfile4=$dir.libs/$base.d # Compaq CCC V6.2-504 "$@" -Wc,-MD else tmpdepfile1=$dir$base.o.d tmpdepfile2=$dir$base.d tmpdepfile3=$dir$base.d tmpdepfile4=$dir$base.d "$@" -MD fi stat=$? if test $stat -eq 0; then : else rm -f "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3" "$tmpdepfile4" exit $stat fi for tmpdepfile in "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3" "$tmpdepfile4" do test -f "$tmpdepfile" && break done if test -f "$tmpdepfile"; then sed -e "s,^.*\.[a-z]*:,$object:," < "$tmpdepfile" > "$depfile" # That's a tab and a space in the []. sed -e 's,^.*\.[a-z]*:[ ]*,,' -e 's,$,:,' < "$tmpdepfile" >> "$depfile" else echo "#dummy" > "$depfile" fi rm -f "$tmpdepfile" ;; #nosideeffect) # This comment above is used by automake to tell side-effect # dependency tracking mechanisms from slower ones. dashmstdout) # Important note: in order to support this mode, a compiler *must* # always write the preprocessed file to stdout, regardless of -o. "$@" || exit $? # Remove the call to Libtool. if test "$libtool" = yes; then while test $1 != '--mode=compile'; do shift done shift fi # Remove `-o $object'. IFS=" " for arg do case $arg in -o) shift ;; $object) shift ;; *) set fnord "$@" "$arg" shift # fnord shift # $arg ;; esac done test -z "$dashmflag" && dashmflag=-M # Require at least two characters before searching for `:' # in the target name. This is to cope with DOS-style filenames: # a dependency such as `c:/foo/bar' could be seen as target `c' otherwise. 
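  # Illustrative note (not from upstream depcomp; the file names are made
  # up): in this mode the compiler is run a second time with -M so that it
  # prints something like
  #
  #   foo.o: foo.c foo.h /usr/include/stdio.h
  #
  # on stdout; the sed command below swaps that leading `foo.o:' target for
  # the real "$object :", and the usual dummy-target pass then appends
  # "foo.h :" style rules to the depfile.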
"$@" $dashmflag | sed 's:^[ ]*[^: ][^:][^:]*\:[ ]*:'"$object"'\: :' > "$tmpdepfile" rm -f "$depfile" cat < "$tmpdepfile" > "$depfile" tr ' ' ' ' < "$tmpdepfile" | \ ## Some versions of the HPUX 10.20 sed can't process this invocation ## correctly. Breaking it into two sed invocations is a workaround. sed -e 's/^\\$//' -e '/^$/d' -e '/:$/d' | sed -e 's/$/ :/' >> "$depfile" rm -f "$tmpdepfile" ;; dashXmstdout) # This case only exists to satisfy depend.m4. It is never actually # run, as this mode is specially recognized in the preamble. exit 1 ;; makedepend) "$@" || exit $? # Remove any Libtool call if test "$libtool" = yes; then while test $1 != '--mode=compile'; do shift done shift fi # X makedepend shift cleared=no for arg in "$@"; do case $cleared in no) set ""; shift cleared=yes ;; esac case "$arg" in -D*|-I*) set fnord "$@" "$arg"; shift ;; # Strip any option that makedepend may not understand. Remove # the object too, otherwise makedepend will parse it as a source file. -*|$object) ;; *) set fnord "$@" "$arg"; shift ;; esac done obj_suffix="`echo $object | sed 's/^.*\././'`" touch "$tmpdepfile" ${MAKEDEPEND-makedepend} -o"$obj_suffix" -f"$tmpdepfile" "$@" rm -f "$depfile" cat < "$tmpdepfile" > "$depfile" sed '1,2d' "$tmpdepfile" | tr ' ' ' ' | \ ## Some versions of the HPUX 10.20 sed can't process this invocation ## correctly. Breaking it into two sed invocations is a workaround. sed -e 's/^\\$//' -e '/^$/d' -e '/:$/d' | sed -e 's/$/ :/' >> "$depfile" rm -f "$tmpdepfile" "$tmpdepfile".bak ;; cpp) # Important note: in order to support this mode, a compiler *must* # always write the preprocessed file to stdout. "$@" || exit $? # Remove the call to Libtool. if test "$libtool" = yes; then while test $1 != '--mode=compile'; do shift done shift fi # Remove `-o $object'. IFS=" " for arg do case $arg in -o) shift ;; $object) shift ;; *) set fnord "$@" "$arg" shift # fnord shift # $arg ;; esac done "$@" -E | sed -n -e '/^# [0-9][0-9]* "\([^"]*\)".*/ s:: \1 \\:p' \ -e '/^#line [0-9][0-9]* "\([^"]*\)".*/ s:: \1 \\:p' | sed '$ s: \\$::' > "$tmpdepfile" rm -f "$depfile" echo "$object : \\" > "$depfile" cat < "$tmpdepfile" >> "$depfile" sed < "$tmpdepfile" '/^$/d;s/^ //;s/ \\$//;s/$/ :/' >> "$depfile" rm -f "$tmpdepfile" ;; msvisualcpp) # Important note: in order to support this mode, a compiler *must* # always write the preprocessed file to stdout, regardless of -o, # because we must use -o when running libtool. "$@" || exit $? IFS=" " for arg do case "$arg" in "-Gm"|"/Gm"|"-Gi"|"/Gi"|"-ZI"|"/ZI") set fnord "$@" shift shift ;; *) set fnord "$@" "$arg" shift shift ;; esac done "$@" -E | sed -n '/^#line [0-9][0-9]* "\([^"]*\)"/ s::echo "`cygpath -u \\"\1\\"`":p' | sort | uniq > "$tmpdepfile" rm -f "$depfile" echo "$object : \\" > "$depfile" . "$tmpdepfile" | sed 's% %\\ %g' | sed -n '/^\(.*\)$/ s:: \1 \\:p' >> "$depfile" echo " " >> "$depfile" . "$tmpdepfile" | sed 's% %\\ %g' | sed -n '/^\(.*\)$/ s::\1\::p' >> "$depfile" rm -f "$tmpdepfile" ;; none) exec "$@" ;; *) echo "Unknown depmode $depmode" 1>&2 exit 1 ;; esac exit 0 # Local Variables: # mode: shell-script # sh-indentation: 2 # eval: (add-hook 'write-file-hooks 'time-stamp) # time-stamp-start: "scriptversion=" # time-stamp-format: "%:y-%02m-%02d.%02H" # time-stamp-end: "$" # End: sphinx-2.0.4-release/acinclude.m40000644000176700017710000002300411162036141016126 0ustar deogardeogardnl --------------------------------------------------------------------------- dnl Macro: AC_CHECK_MYSQL dnl Check for custom MySQL paths in --with-mysql-* options. 
dnl If some paths are missing, check if mysql_config exists. dnl --------------------------------------------------------------------------- AC_DEFUN([AC_CHECK_MYSQL],[ mysqlconfig_locations="mysql_config /usr/bin/mysql_config /usr/local/bin/mysql_config /usr/local/mysql/bin/mysql_config /opt/mysql/bin/mysql_config /usr/pkg/bin/mysql_config" user_mysql_includes= user_mysql_libs= # check explicit MySQL root for mysql_config, include, lib if test [ x$1 != xyes -a x$1 != xno ] then mysqlroot=`echo $1 | sed -e 's+/$++'` if test [ -x "$mysqlroot/bin/mysql_config" ] then # if there's mysql_config, that's the best route mysqlconfig_locations="$mysqlroot/bin/mysql_config" elif test [ -d "$mysqlroot/include" -a -d "$mysqlroot/lib" ] then # explicit root; do not check well-known paths mysqlconfig_locations= # includes if test [ -d "$mysqlroot/include/mysql" ] then user_mysql_includes="$mysqlroot/include/mysql" else user_mysql_includes="$mysqlroot/include" fi # libs if test [ -d "$mysqlroot/lib/mysql" ] then user_mysql_libs="$mysqlroot/lib/mysql" else user_mysql_libs="$mysqlroot/lib" fi else AC_MSG_ERROR([invalid MySQL root directory '$mysqlroot'; neither bin/mysql_config, nor include/ and lib/ were found there]) fi fi # try running mysql_config AC_MSG_CHECKING([for mysql_config]) for mysqlconfig in $mysqlconfig_locations do if test [ -n "$mysqlconfig" ] then MYSQL_CFLAGS=`${mysqlconfig} --cflags 2>/dev/null` MYSQL_LIBS=`${mysqlconfig} --libs 2>/dev/null` if test [ $? -eq 0 ] then AC_MSG_RESULT([$mysqlconfig]) mysqlconfig= break else MYSQL_CFLAGS= MYSQL_LIBS= fi fi done if test [ -n "$mysqlconfig" ] then mysqlconfig_used= AC_MSG_RESULT([not found]) else mysqlconfig_used=yes fi # if there's nothing from mysql_config, check well-known include paths # explicit overrides will be applied later if test [ -z "$MYSQL_CFLAGS" ] then for CANDIDATE in "$user_mysql_includes" "/usr/local/mysql/include" "/usr/local/mysql/include/mysql" \ "/usr/include/mysql" do if test [ -n "$CANDIDATE" -a -r "$CANDIDATE/mysql.h" ] then MYSQL_CFLAGS="-I$CANDIDATE" break fi done fi # if there's nothing from mysql_config, check well-known library paths # explicit overrides will be applied later if test [ -z "$MYSQL_LIBS" ] then for CANDIDATE in "$user_mysql_libs" "/usr/lib64/mysql" \ "/usr/local/mysql/lib/mysql" "/usr/local/mysql/lib" \ "/usr/local/lib/mysql" "/usr/lib/mysql" \ "/opt/mysql/lib/mysql" "/usr/pkg/lib/mysql" do if test [ -n "$CANDIDATE" -a -d "$CANDIDATE" ] then MYSQL_LIBS="-L$CANDIDATE -lmysqlclient -lz" break fi done fi # apply explicit include path overrides AC_ARG_WITH([mysql-includes], AC_HELP_STRING([--with-mysql-includes], [path to MySQL header files]), [ac_cv_mysql_includes=$withval]) if test [ -n "$ac_cv_mysql_includes" ] then MYSQL_CFLAGS="-I$ac_cv_mysql_includes" fi # apply explicit lib path overrides AC_ARG_WITH([mysql-libs], AC_HELP_STRING([--with-mysql-libs], [path to MySQL libraries]), [ac_cv_mysql_libs=$withval]) if test [ -n "$ac_cv_mysql_libs" ] then # Trim trailing '.libs' if user passed it in --with-mysql-libs option ac_cv_mysql_libs=`echo ${ac_cv_mysql_libs} | sed -e 's/.libs$//' \ -e 's+.libs/$++'` MYSQL_LIBS="-L$ac_cv_mysql_libs -lmysqlclient -lz" fi # if we got options from mysqlconfig try to actually use them if test [ -n "$mysqlconfig_used" -a -n "$MYSQL_CFLAGS" -a -n "$MYSQL_LIBS" ] then _CFLAGS=$CFLAGS _LIBS=$LIBS CFLAGS="$CFLAGS $MYSQL_CFLAGS" LIBS="$LIBS $MYSQL_LIBS" AC_CHECK_FUNC(mysql_real_connect,[], [ # if mysql binary was built using a different compiler and we # got options from 
mysql_config some of them might not work # with compiler we will be using # throw away everything that isn't one of -D -L -I -l and retry MYSQL_CFLAGS=`echo $MYSQL_CFLAGS | sed -e 's/-[[^DLIl]][[^ ]]*//g'` MYSQL_LIBS=`echo $MYSQL_LIBS | sed -e 's/-[[^DLIl]][[^ ]]*//g'` CFLAGS="$_CFLAGS $MYSQL_CFLAGS" LIBS="$_LIBS $MYSQL_LIBS" unset ac_cv_func_mysql_real_connect AC_CHECK_FUNC(mysql_real_connect,[], [ # ... that didn't help # clear flags, the code below will complain MYSQL_CFLAGS= MYSQL_LIBS= ]) ]) CFLAGS=$_CFLAGS LIBS=$_LIBS fi # now that we did all we could, perform final checks AC_MSG_CHECKING([MySQL include files]) if test [ -z "$MYSQL_CFLAGS" ] then AC_MSG_ERROR([missing include files. ****************************************************************************** ERROR: cannot find MySQL include files. Check that you do have MySQL include files installed. The package name is typically 'mysql-devel'. If include files are installed on your system, but you are still getting this message, you should do one of the following: 1) either specify includes location explicitly, using --with-mysql-includes; 2) or specify MySQL installation root location explicitly, using --with-mysql; 3) or make sure that the path to 'mysql_config' program is listed in your PATH environment variable. To disable MySQL support, use --without-mysql option. ****************************************************************************** ]) else AC_MSG_RESULT([$MYSQL_CFLAGS]) fi AC_MSG_CHECKING([MySQL libraries]) if test [ -z "$MYSQL_LIBS" ] then AC_MSG_ERROR([missing libraries. ****************************************************************************** ERROR: cannot find MySQL libraries. Check that you do have MySQL libraries installed. The package name is typically 'mysql-devel'. If libraries are installed on your system, but you are still getting this message, you should do one of the following: 1) either specify libraries location explicitly, using --with-mysql-libs; 2) or specify MySQL installation root location explicitly, using --with-mysql; 3) or make sure that the path to 'mysql_config' program is listed in your PATH environment variable. To disable MySQL support, use --without-mysql option. ****************************************************************************** ]) else AC_MSG_RESULT([$MYSQL_LIBS]) fi ]) dnl --------------------------------------------------------------------------- dnl Macro: AC_CHECK_PGSQL dnl First check for custom PostgreSQL paths in --with-pgsql-* options. dnl If some paths are missing, check if pg_config exists. 
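dnl Illustrative note (the install prefixes below are assumptions for the
dnl sake of example, not paths shipped with this release): the macros in
dnl this file are what back configure invocations such as
dnl
dnl     ./configure --with-mysql=/usr/local/mysql
dnl     ./configure --with-mysql-includes=/opt/mysql/include/mysql \
dnl                 --with-mysql-libs=/opt/mysql/lib/mysql
dnl     ./configure --with-pgsql-includes=/usr/local/pgsql/include \
dnl                 --with-pgsql-libs=/usr/local/pgsql/lib
dnl
dnl When the explicit paths are omitted, the macros fall back to probing
dnl mysql_config and pg_config respectively, as described above and below.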
dnl --------------------------------------------------------------------------- AC_DEFUN([AC_CHECK_PGSQL],[ # Check for custom includes path if test [ -z "$ac_cv_pgsql_includes" ] then AC_ARG_WITH([pgsql-includes], AC_HELP_STRING([--with-pgsql-includes], [path to PostgreSQL header files]), [ac_cv_pgsql_includes=$withval]) fi if test [ -n "$ac_cv_pgsql_includes" ] then AC_CACHE_CHECK([PostgreSQL includes], [ac_cv_pgsql_includes], [ac_cv_pgsql_includes=""]) PGSQL_CFLAGS="-I$ac_cv_pgsql_includes" fi # Check for custom library path if test [ -z "$ac_cv_pgsql_libs" ] then AC_ARG_WITH([pgsql-libs], AC_HELP_STRING([--with-pgsql-libs], [path to PostgreSQL libraries]), [ac_cv_pgsql_libs=$withval]) fi if test [ -n "$ac_cv_pgsql_libs" ] then AC_CACHE_CHECK([PostgreSQL libraries], [ac_cv_pgsql_libs], [ac_cv_pgsql_libs=""]) PGSQL_LIBS="-L$ac_cv_pgsql_libs -lpq" fi # If some path is missing, try to autodetermine with pgsql_config if test [ -z "$ac_cv_pgsql_includes" -o -z "$ac_cv_pgsql_libs" ] then if test [ -z "$pgconfig" ] then AC_PATH_PROG(pgconfig,pg_config) fi if test [ -z "$pgconfig" ] then AC_MSG_ERROR([pg_config executable not found ******************************************************************************** ERROR: cannot find PostgreSQL libraries. If you want to compile with PosgregSQL support, you must either specify file locations explicitly using --with-pgsql-includes and --with-pgsql-libs options, or make sure path to pg_config is listed in your PATH environment variable. If you want to disable PostgreSQL support, use --without-pgsql option. ******************************************************************************** ]) else if test [ -z "$ac_cv_pgsql_includes" ] then AC_MSG_CHECKING(PostgreSQL C flags) PGSQL_CFLAGS="-I`${pgconfig} --includedir`" AC_MSG_RESULT($PGSQL_CFLAGS) fi if test [ -z "$ac_cv_pgsql_libs" ] then AC_MSG_CHECKING(PostgreSQL linker flags) PGSQL_LIBS="-L`${pgconfig} --libdir` -lpq" AC_MSG_RESULT($PGSQL_LIBS) fi fi fi ]) dnl --------------------------------------------------------------------------- dnl Macro: SPHINX_CONFIGURE_PART dnl dnl Tells what stage is ./configure running now, nicely formatted dnl --------------------------------------------------------------------------- dnl SPHINX_CONFIGURE_PART(MESSAGE) AC_DEFUN([SPHINX_CONFIGURE_PART],[ AC_MSG_RESULT() AC_MSG_RESULT([$1]) TMP=`echo $1 | sed -e sX.X-Xg` AC_MSG_RESULT([$TMP]) AC_MSG_RESULT() ]) dnl --------------------------------------------------------------------------- dnl Macro: SPHINX_CHECK_DEFINE dnl dnl Checks if this symbol is defined in that header file dnl --------------------------------------------------------------------------- AC_DEFUN([SPHINX_CHECK_DEFINE],[ AC_CACHE_CHECK([for $1 in $2],ac_cv_define_$1,[ AC_EGREP_CPP(YES_IS_DEFINED, [ #include <$2> #ifdef $1 YES_IS_DEFINED #endif ], ac_cv_define_$1=yes, ac_cv_define_$1=no) ]) if test "$ac_cv_define_$1" = "yes"; then AC_DEFINE(HAVE_$1, 1, [Define if $1 is defined in $2]) fi ]) sphinx-2.0.4-release/codeblocks/0000755000176700017710000000000011724063141016052 5ustar deogardeogarsphinx-2.0.4-release/codeblocks/search.cbp0000644000176700017710000000374011424561043020012 0ustar deogardeogar sphinx-2.0.4-release/codeblocks/spelldump.cbp0000644000176700017710000000375411424561043020557 0ustar deogardeogar sphinx-2.0.4-release/codeblocks/libsphinx.cbp0000644000176700017710000000574511320433132020544 0ustar deogardeogar sphinx-2.0.4-release/codeblocks/searchd.cbp0000644000176700017710000000674311424561043020164 0ustar deogardeogar 
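The helper macros defined in acinclude.m4 above (SPHINX_CONFIGURE_PART, SPHINX_CHECK_DEFINE, AC_CHECK_MYSQL, AC_CHECK_PGSQL) are meant to be called from configure.ac. A minimal sketch of such a call site follows; the particular arguments are chosen for illustration only and are not copied from this release:

    SPHINX_CONFIGURE_PART([checking for optional libraries])
    SPHINX_CHECK_DEFINE([LOCK_EX], [sys/file.h])
    AC_CHECK_MYSQL([yes])

With these arguments, SPHINX_CHECK_DEFINE would define HAVE_LOCK_EX in config.h if <sys/file.h> provides LOCK_EX, and AC_CHECK_MYSQL, given [yes] rather than an explicit install root such as [/usr/local/mysql], would probe mysql_config and the well-known paths before filling in MYSQL_CFLAGS and MYSQL_LIBS as shown above.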
sphinx-2.0.4-release/codeblocks/README.txt0000644000176700017710000000011111220101253017525 0ustar deogardeogarYou need to run ../configure script in order to use Code::Blocks projectssphinx-2.0.4-release/codeblocks/indexer.cbp0000644000176700017710000000411211424561043020175 0ustar deogardeogar sphinx-2.0.4-release/codeblocks/tests.cbp0000644000176700017710000000373411424561043017712 0ustar deogardeogar sphinx-2.0.4-release/codeblocks/testrt.cbp0000644000176700017710000000373611424561043020077 0ustar deogardeogar sphinx-2.0.4-release/codeblocks/indextool.cbp0000644000176700017710000000375411424561043020557 0ustar deogardeogar
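The codeblocks/README.txt above amounts to one preparatory step: the Code::Blocks projects rely on the outputs of ./configure (presumably config/config.h and friends) being present before a build. A minimal sketch, assuming a tree unpacked into ./sphinx-2.0.4-release and a MySQL-enabled build (the configure option is an example, not a requirement):

    cd sphinx-2.0.4-release
    ./configure --with-mysql       # generates the headers the projects expect
    codeblocks codeblocks/searchd.cbp codeblocks/indexer.cbp &

Any of the other project files listed above (libsphinx.cbp, search.cbp, spelldump.cbp, indextool.cbp, tests.cbp, testrt.cbp) can be opened the same way once configure has run.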